diff --git a/__pycache__/stats.cpython-36.pyc b/__pycache__/stats.cpython-36.pyc
new file mode 100644
index 0000000..8b5e8a7
Binary files /dev/null and b/__pycache__/stats.cpython-36.pyc differ
diff --git a/__pycache__/stats.cpython-38.pyc b/__pycache__/stats.cpython-38.pyc
new file mode 100644
index 0000000..bdf4607
Binary files /dev/null and b/__pycache__/stats.cpython-38.pyc differ
diff --git a/corpus.csv b/corpus.csv
new file mode 100644
index 0000000..9c123ab
--- /dev/null
+++ b/corpus.csv
@@ -0,0 +1,5513 @@
+"apple","positive","126415614616154112"
+"apple","positive","126404574230740992"
+"apple","positive","126402758403305474"
+"apple","positive","126397179614068736"
+"apple","positive","126395626979196928"
+"apple","positive","126394830791254016"
+"apple","positive","126379685453119488"
+"apple","positive","126377656416612353"
+"apple","positive","126373779483004928"
+"apple","positive","126366353757179904"
+"apple","positive","126366123368267776"
+"apple","positive","126365858481188864"
+"apple","positive","126360935509135362"
+"apple","positive","126360398885687296"
+"apple","positive","126358340220616704"
+"apple","positive","126357982685569024"
+"apple","positive","126354605130002432"
+"apple","positive","126352268705538048"
+"apple","positive","126350948548354048"
+"apple","positive","126350302113824769"
+"apple","positive","126349695676203009"
+"apple","positive","126344048637259776"
+"apple","positive","126342268603998208"
+"apple","positive","126325800080392193"
+"apple","positive","126324177501302784"
+"apple","positive","126323785145126912"
+"apple","positive","126322063332999169"
+"apple","positive","126319186141130752"
+"apple","positive","126318009647235072"
+"apple","positive","126315223060709376"
+"apple","positive","126315011600678913"
+"apple","positive","126314687116750849"
+"apple","positive","126312877916307458"
+"apple","positive","126311981564178432"
+"apple","positive","126307801046847488"
+"apple","positive","126302673820594176"
+"apple","positive","126301956951117826"
+"apple","positive","126287654093471745"
+"apple","positive","126284506360578049"
+"apple","positive","126267185025916928"
+"apple","positive","126263834968211456"
+"apple","positive","126256230397259776"
+"apple","positive","126213333123743744"
+"apple","positive","126195522691280896"
+"apple","positive","126183339945234432"
+"apple","positive","126180209501286400"
+"apple","positive","126164430546403328"
+"apple","positive","126148685737361408"
+"apple","positive","126140794078892033"
+"apple","positive","126134400466419712"
+"apple","positive","126130991365500928"
+"apple","positive","126116898051076096"
+"apple","positive","126116614495154176"
+"apple","positive","126112836219973632"
+"apple","positive","126107965991297024"
+"apple","positive","126104732426186752"
+"apple","positive","126097426493878272"
+"apple","positive","126095744531832832"
+"apple","positive","126093298619252737"
+"apple","positive","126084907343691776"
+"apple","positive","126079414986485761"
+"apple","positive","126076743613284354"
+"apple","positive","126076238375817216"
+"apple","positive","126075534894571520"
+"apple","positive","126064519943426048"
+"apple","positive","126063569660936193"
+"apple","positive","126063358037340161"
+"apple","positive","126059405941809152"
+"apple","positive","126059399319003136"
+"apple","positive","126057030996852737"
+"apple","positive","126049183865114624"
+"apple","positive","126040352237961217"
+"apple","positive","126040074595999746"
+"apple","positive","126039929523404801"
+"apple","positive","126034495991328768" +"apple","positive","126026756623831041" +"apple","positive","126019393460244481" +"apple","positive","126015087386431488" +"apple","positive","126009748020658177" +"apple","positive","126008369562652672" +"apple","positive","126002597063696384" +"apple","positive","125999676972470272" +"apple","positive","125995158679461888" +"apple","positive","125979338846900224" +"apple","positive","125978568726560768" +"apple","positive","125978473712979969" +"apple","positive","125974505385500672" +"apple","positive","125960325437722624" +"apple","positive","125959059957485569" +"apple","positive","125954443643588608" +"apple","positive","125947912306954240" +"apple","positive","125947232359948288" +"apple","positive","125943290288803841" +"apple","positive","125940394566483968" +"apple","positive","125932869389524992" +"apple","positive","125930171562852353" +"apple","positive","125925618486489088" +"apple","positive","125924446430183425" +"apple","positive","125922999651139584" +"apple","positive","125910633731461120" +"apple","positive","125909565031198720" +"apple","positive","125907732388790272" +"apple","positive","125902301931126785" +"apple","positive","125901202591461376" +"apple","positive","125900497327636480" +"apple","positive","125898611572740097" +"apple","positive","125850288488841217" +"apple","positive","125840039031738368" +"apple","positive","125816853867151360" +"apple","positive","125794931439702016" +"apple","positive","125728717942161408" +"apple","positive","125727629012770816" +"apple","positive","125722746100531200" +"apple","positive","125717622728818688" +"apple","positive","125714253452812288" +"apple","positive","125713935344214016" +"apple","positive","125712433087123456" +"apple","positive","125708639607599104" +"apple","positive","125706813583798274" +"apple","positive","125701161926930433" +"apple","positive","125699573799845888" +"apple","positive","125688922410975232" +"apple","positive","125685656415510528" +"apple","positive","125681742760771584" +"apple","positive","125680049478316032" +"apple","positive","125677424565424128" +"apple","positive","125673004511412224" +"apple","positive","125667241978114048" +"apple","positive","125665606853861376" +"apple","positive","125664375364255744" +"apple","positive","125662399217930240" +"apple","positive","125652668080336896" +"apple","positive","125648027045199873" +"apple","positive","125645258003464192" +"apple","positive","125643107260829697" +"apple","positive","125633677597229056" +"apple","positive","125633065757310976" +"apple","positive","125628199269961729" +"apple","positive","125623745284018176" +"apple","positive","125618466353983488" +"apple","positive","125616280215617537" +"apple","positive","125610372727193601" +"apple","positive","125608381431025664" +"apple","positive","125596541028282369" +"apple","positive","125595292304281601" +"apple","positive","125585606100267008" +"apple","positive","125562428200202240" +"apple","positive","125561950376701952" +"apple","positive","125550135911518209" +"apple","positive","125547297072357376" +"apple","positive","125539788546781185" +"apple","positive","125537993942515712" +"apple","positive","125537578974851072" +"apple","positive","125536884813336576" +"apple","positive","125533599737978882" +"apple","positive","125524107386302465" +"apple","positive","125523414298533888" +"apple","positive","125501576952553472" +"apple","positive","125501281753251840" +"apple","positive","125495491701125120" 
+"apple","positive","125459338524499969" +"apple","positive","125458901192810496" +"apple","positive","125455260801179648" +"apple","positive","125445056218923008" +"apple","positive","125423290767507456" +"apple","positive","125422502284505088" +"apple","positive","125416879035658240" +"apple","positive","125407532893224962" +"apple","positive","125402636764712960" +"apple","positive","125402412147146752" +"apple","positive","125393816470568961" +"apple","positive","125374540107886593" +"apple","positive","125356807626559488" +"apple","positive","125343429289984000" +"apple","positive","125338216411828224" +"apple","positive","125333598197911552" +"apple","positive","125330595302744064" +"apple","positive","125319163366473728" +"apple","positive","125313088160411649" +"apple","positive","125309946723188736" +"apple","positive","125281706327552001" +"apple","positive","125279987254300672" +"apple","positive","125279447669669888" +"apple","positive","125264731035537409" +"apple","positive","125256305647693825" +"apple","positive","125252188065902592" +"apple","positive","125251672896323584" +"apple","positive","125243911538098176" +"apple","positive","125238977417580544" +"apple","positive","125238017299451905" +"apple","positive","125165176772247552" +"apple","negative","126418790706712576" +"apple","negative","126417285559762944" +"apple","negative","126416915664084992" +"apple","negative","126416109212680192" +"apple","negative","126411162622496768" +"apple","negative","126410591949697024" +"apple","negative","126409696553861121" +"apple","negative","126408864387182593" +"apple","negative","126408052525105153" +"apple","negative","126407767132078082" +"apple","negative","126405405667627008" +"apple","negative","126405185630253056" +"apple","negative","126405040809312256" +"apple","negative","126400637930979329" +"apple","negative","126394680903614465" +"apple","negative","126393717421645825" +"apple","negative","126393204550537216" +"apple","negative","126392402083708928" +"apple","negative","126391082308206593" +"apple","negative","126389218284015616" +"apple","negative","126388194194362369" +"apple","negative","126388023725268992" +"apple","negative","126385036441296896" +"apple","negative","126382959711358976" +"apple","negative","126382051661328385" +"apple","negative","126381519513194497" +"apple","negative","126380588822298625" +"apple","negative","126380553464315904" +"apple","negative","126377298650861568" +"apple","negative","126375381249966080" +"apple","negative","126372694118768640" +"apple","negative","126372040696541184" +"apple","negative","126363154837020672" +"apple","negative","126361483432038400" +"apple","negative","126358781633368064" +"apple","negative","126358301393956866" +"apple","negative","126358272084152320" +"apple","negative","126357580741226496" +"apple","negative","126357227727626240" +"apple","negative","126354628999778305" +"apple","negative","126351972948393984" +"apple","negative","126348169826148352" +"apple","negative","126344426854416385" +"apple","negative","126343931117047808" +"apple","negative","126343679785959424" +"apple","negative","126343214805426176" +"apple","negative","126334597431697408" +"apple","negative","126334188583530496" +"apple","negative","126331480233353216" +"apple","negative","126331327271284736" +"apple","negative","126328782700285952" +"apple","negative","126327808803880960" +"apple","negative","126325125749542913" +"apple","negative","126324573384871936" +"apple","negative","126324389741473792" 
+"apple","negative","126324077513293824" +"apple","negative","126321169468100609" +"apple","negative","126320033369563138" +"apple","negative","126312535203921920" +"apple","negative","126311879218966529" +"apple","negative","126310645443461121" +"apple","negative","126309616391950336" +"apple","negative","126308005779210241" +"apple","negative","126307071984545793" +"apple","negative","126304942049853441" +"apple","negative","126302386644975616" +"apple","negative","126299379832336384" +"apple","negative","126297326565330944" +"apple","negative","126297241190281216" +"apple","negative","126295434862936064" +"apple","negative","126292335540699136" +"apple","negative","126286814578348032" +"apple","negative","126283602571964416" +"apple","negative","126282994821509120" +"apple","negative","126281019476291585" +"apple","negative","126280555980529664" +"apple","negative","126279811151831042" +"apple","negative","126270073420791810" +"apple","negative","126264313563459585" +"apple","negative","126258214412091392" +"apple","negative","126257645282799616" +"apple","negative","126251052667375616" +"apple","negative","126247557339947008" +"apple","negative","126243680129523712" +"apple","negative","126240605419487232" +"apple","negative","126239832895795200" +"apple","negative","126238223537152001" +"apple","negative","126221894126022656" +"apple","negative","126189036644728832" +"apple","negative","126188946974720000" +"apple","negative","126188717902802944" +"apple","negative","126188686453907457" +"apple","negative","126182880123695104" +"apple","negative","126167083334643713" +"apple","negative","126163315499081728" +"apple","negative","126163250172801024" +"apple","negative","126158846375903233" +"apple","negative","126156590662422528" +"apple","negative","126155291288023040" +"apple","negative","126153311521996800" +"apple","negative","126150581558591488" +"apple","negative","126148955217203200" +"apple","negative","126148565302128640" +"apple","negative","126143926523539457" +"apple","negative","126141380409036800" +"apple","negative","126141077131497472" +"apple","negative","126140389873827841" +"apple","negative","126132919117938689" +"apple","negative","126131535211536384" +"apple","negative","126129938247061504" +"apple","negative","126129582326816769" +"apple","negative","126128599030956032" +"apple","negative","126127465155403777" +"apple","negative","126126605344047105" +"apple","negative","126121175926571009" +"apple","negative","126118222746497025" +"apple","negative","126106964420857857" +"apple","negative","126099775417364480" +"apple","negative","126096173198082048" +"apple","negative","126094194312876032" +"apple","negative","126089287660863488" +"apple","negative","126088404084588546" +"apple","negative","126085893353250816" +"apple","negative","126084068298334208" +"apple","negative","126082198720888833" +"apple","negative","126082123743502336" +"apple","negative","126079672386723840" +"apple","negative","126075115686465536" +"apple","negative","126073788323479552" +"apple","negative","126073520504569858" +"apple","negative","126072901144281088" +"apple","negative","126070647125327872" +"apple","negative","126069614181486593" +"apple","negative","126068917012668416" +"apple","negative","126063215842037760" +"apple","negative","126060639268507649" +"apple","negative","126054048972537856" +"apple","negative","126053722966069248" +"apple","negative","126050114518261760" +"apple","negative","126044756320075776" +"apple","negative","126044425964109824" 
+"apple","negative","126042137740574720" +"apple","negative","126042022900547584" +"apple","negative","126037831301869568" +"apple","negative","126036793970786304" +"apple","negative","126034507475337216" +"apple","negative","126033747991736320" +"apple","negative","126031969166434304" +"apple","negative","126030936084189184" +"apple","negative","126029733325582336" +"apple","negative","126021108641181696" +"apple","negative","126018120983904256" +"apple","negative","126016585348558848" +"apple","negative","126014999444467712" +"apple","negative","126014540721827840" +"apple","negative","126012822936231936" +"apple","negative","126012404332113920" +"apple","negative","126012089415380992" +"apple","negative","126008913400303616" +"apple","negative","126006966312108032" +"apple","negative","126006116168642560" +"apple","negative","126006088725303296" +"apple","negative","126004661248471040" +"apple","negative","126003967552524288" +"apple","negative","126001775626031105" +"apple","negative","126001635162992640" +"apple","negative","125999655011098624" +"apple","negative","125996379913986048" +"apple","negative","125995264325599233" +"apple","negative","125994965183635456" +"apple","negative","125994596336533504" +"apple","negative","125989051101741056" +"apple","negative","125988395787882497" +"apple","negative","125987979784224770" +"apple","negative","125987439692099584" +"apple","negative","125982320917364736" +"apple","negative","125980659415138304" +"apple","negative","125978454146551808" +"apple","negative","125976113657823232" +"apple","negative","125974886006005760" +"apple","negative","125974810021998595" +"apple","negative","125974351035117568" +"apple","negative","125973390283653120" +"apple","negative","125969932285513728" +"apple","negative","125969502587465729" +"apple","negative","125966385259098112" +"apple","negative","125965988146585601" +"apple","negative","125965853769478144" +"apple","negative","125964314220830722" +"apple","negative","125963262733991936" +"apple","negative","125961999791308800" +"apple","negative","125961793926475776" +"apple","negative","125960026891362304" +"apple","negative","125958961269702656" +"apple","negative","125958368773943296" +"apple","negative","125956505768960000" +"apple","negative","125956403574747137" +"apple","negative","125956319344721920" +"apple","negative","125954651152592896" +"apple","negative","125953600861126656" +"apple","negative","125950557310562305" +"apple","negative","125945821240885248" +"apple","negative","125943204943114240" +"apple","negative","125937228328341504" +"apple","negative","125934808592433153" +"apple","negative","125930406125117440" +"apple","negative","125929899071516676" +"apple","negative","125929395264299009" +"apple","negative","125920912171216896" +"apple","negative","125876542600519681" +"apple","negative","125863232249405440" +"apple","negative","125846659182764032" +"apple","negative","125845538926112768" +"apple","negative","125836461936361472" +"apple","negative","125826259048607744" +"apple","negative","125824148579692544" +"apple","negative","125824054958637056" +"apple","negative","125822115155947520" +"apple","negative","125819194049699840" +"apple","negative","125815370513793024" +"apple","negative","125814380871946240" +"apple","negative","125812985301172224" +"apple","negative","125811345064067072" +"apple","negative","125807830363156480" +"apple","negative","125806568389361664" +"apple","negative","125806240138928128" +"apple","negative","125799384976863232" 
+"apple","negative","125794882257305600" +"apple","negative","125794703819030528" +"apple","negative","125731810733867011" +"apple","negative","125729727653756928" +"apple","negative","125728250579259392" +"apple","negative","125722610179907584" +"apple","negative","125722107710672896" +"apple","negative","125717447276904448" +"apple","negative","125717161531551744" +"apple","negative","125713100782575616" +"apple","negative","125712104253702146" +"apple","negative","125711996074209280" +"apple","negative","125710089716899840" +"apple","negative","125708348237680640" +"apple","negative","125706246056706049" +"apple","negative","125701785540235264" +"apple","negative","125694815743651840" +"apple","negative","125692685033021441" +"apple","negative","125692532750430209" +"apple","negative","125691072398639104" +"apple","negative","125689691927351296" +"apple","negative","125681375058735104" +"apple","negative","125681125376000000" +"apple","negative","125679166015283203" +"apple","negative","125675806977556480" +"apple","negative","125673358418391041" +"apple","negative","125665094561574913" +"apple","negative","125664507757461504" +"apple","negative","125663477573500930" +"apple","negative","125661140939321344" +"apple","negative","125659125886623744" +"apple","negative","125657359841361920" +"apple","negative","125656618326175745" +"apple","negative","125656559190683651" +"apple","negative","125654540455378945" +"apple","negative","125651769261965312" +"apple","negative","125649285667749889" +"apple","negative","125645811903250432" +"apple","negative","125643054190305280" +"apple","negative","125642742977138689" +"apple","negative","125641051531784192" +"apple","negative","125639217090011136" +"apple","negative","125633065878958080" +"apple","negative","125631239364427776" +"apple","negative","125626166492147713" +"apple","negative","125621144148639744" +"apple","negative","125619303356710912" +"apple","negative","125607492356018176" +"apple","negative","125603435440644098" +"apple","negative","125601235985367041" +"apple","negative","125599423131697154" +"apple","negative","125598450090917888" +"apple","negative","125588697872728065" +"apple","negative","125588202286366721" +"apple","negative","125586348064247808" +"apple","negative","125583385895768064" +"apple","negative","125578269197217792" +"apple","negative","125561930416013312" +"apple","negative","125556679571025920" +"apple","negative","125547255947198465" +"apple","negative","125544764203466752" +"apple","negative","125533730222784512" +"apple","negative","125521682894041088" +"apple","negative","125510333078048768" +"apple","negative","125476730067615744" +"apple","negative","125475953509015552" +"apple","negative","125471372485992448" +"apple","negative","125464229577891840" +"apple","negative","125458395800154112" +"apple","negative","125442137302110208" +"apple","negative","125441732941840385" +"apple","negative","125441478951575552" +"apple","negative","125435218017525760" +"apple","negative","125420263687995392" +"apple","negative","125408962215555072" +"apple","negative","125408737296003072" +"apple","negative","125408701166256128" +"apple","negative","125407447383937025" +"apple","negative","125406743923671040" +"apple","negative","125405260650000384" +"apple","negative","125405005493706752" +"apple","negative","125404317669785600" +"apple","negative","125400161886277632" +"apple","negative","125399780527570944" +"apple","negative","125395636219678720" +"apple","negative","125394863255588864" 
+"apple","negative","125394746452619265" +"apple","negative","125394663573172224" +"apple","negative","125380163302199296" +"apple","negative","125371779039502336" +"apple","negative","125369698840887297" +"apple","negative","125368089159286784" +"apple","negative","125365852487942145" +"apple","negative","125365814579826688" +"apple","negative","125365581170999296" +"apple","negative","125355869859876864" +"apple","negative","125355139409252352" +"apple","negative","125347619072512000" +"apple","negative","125346783390990337" +"apple","negative","125341902739484672" +"apple","negative","125341804857008128" +"apple","negative","125338210158125056" +"apple","negative","125336335656558592" +"apple","negative","125334948017213441" +"apple","negative","125334519254482944" +"apple","negative","125333948556521472" +"apple","negative","125330337847975937" +"apple","negative","125330038248849408" +"apple","negative","125329867674886144" +"apple","negative","125327896066785280" +"apple","negative","125313086465904641" +"apple","negative","125311989751877632" +"apple","negative","125309975881977857" +"apple","negative","125309448108519424" +"apple","negative","125309427422203904" +"apple","negative","125307394640199680" +"apple","negative","125305396842856448" +"apple","negative","125304159581900800" +"apple","negative","125303217214062592" +"apple","negative","125302079752384512" +"apple","negative","125301860256063488" +"apple","negative","125301393560047616" +"apple","negative","125301265700892672" +"apple","negative","125295729139908608" +"apple","negative","125294978623746048" +"apple","negative","125277260872822786" +"apple","negative","125276004817190914" +"apple","negative","125271422431014914" +"apple","negative","125269239207706624" +"apple","negative","125269161327865856" +"apple","negative","125268117680160768" +"apple","negative","125267178336419840" +"apple","negative","125265721281351680" +"apple","negative","125261285083447296" +"apple","negative","125247130762883072" +"apple","negative","125245780192792576" +"apple","negative","125245246136258561" +"apple","negative","125245104859529216" +"apple","negative","125236708403970048" +"apple","negative","125236166151774208" +"apple","negative","125232266849947648" +"apple","negative","125230743990444032" +"apple","negative","125230107580317696" +"apple","negative","125227837438435328" +"apple","negative","125224588253741056" +"apple","negative","125223685194915840" +"apple","negative","125212404299735040" +"apple","negative","125204228967903232" +"apple","negative","125202037293064192" +"apple","negative","125129328446017536" +"apple","neutral","126417484017451009" +"apple","neutral","126415742177513472" +"apple","neutral","126415618625912832" +"apple","neutral","126414657836687362" +"apple","neutral","126410146703351808" +"apple","neutral","126409984836763648" +"apple","neutral","126407959495442432" +"apple","neutral","126407672521162753" +"apple","neutral","126407511531192320" +"apple","neutral","126405911697817600" +"apple","neutral","126405821482532864" +"apple","neutral","126405160934178816" +"apple","neutral","126403530838913024" +"apple","neutral","126401882766839811" +"apple","neutral","126400491067416576" +"apple","neutral","126394795802370049" +"apple","neutral","126393452324855808" +"apple","neutral","126389413054910464" +"apple","neutral","126387460463788032" +"apple","neutral","126386085164101634" +"apple","neutral","126384526925639681" +"apple","neutral","126383125059211265" +"apple","neutral","126382776072146944" 
+"apple","neutral","126381578975842304" +"apple","neutral","126380323733909504" +"apple","neutral","126379730827083776" +"apple","neutral","126377120023842816" +"apple","neutral","126375024595705856" +"apple","neutral","126374630377275392" +"apple","neutral","126370776013213697" +"apple","neutral","126368680459251712" +"apple","neutral","126368285259350017" +"apple","neutral","126368127524159488" +"apple","neutral","126364189097865216" +"apple","neutral","126362867778859008" +"apple","neutral","126360821419884544" +"apple","neutral","126360182308618240" +"apple","neutral","126358012343492608" +"apple","neutral","126357527196741632" +"apple","neutral","126353359962775552" +"apple","neutral","126351669029126144" +"apple","neutral","126348857071239168" +"apple","neutral","126347890196103168" +"apple","neutral","126346633721032705" +"apple","neutral","126346584068861952" +"apple","neutral","126343124174901248" +"apple","neutral","126339328434651136" +"apple","neutral","126336867477094400" +"apple","neutral","126336687382081536" +"apple","neutral","126331354718801921" +"apple","neutral","126330974270271488" +"apple","neutral","126330155441467392" +"apple","neutral","126329388320043008" +"apple","neutral","126329109759524865" +"apple","neutral","126328424624160768" +"apple","neutral","126326886354784256" +"apple","neutral","126325069281624064" +"apple","neutral","126324621279641601" +"apple","neutral","126324256236765185" +"apple","neutral","126323574989520896" +"apple","neutral","126323533696614402" +"apple","neutral","126321197062426624" +"apple","neutral","126320247379730432" +"apple","neutral","126320076063379456" +"apple","neutral","126318553031917569" +"apple","neutral","126317201962700800" +"apple","neutral","126316594971422720" +"apple","neutral","126316179429134336" +"apple","neutral","126315088641658881" +"apple","neutral","126314701721309184" +"apple","neutral","126312423132102657" +"apple","neutral","126311681126187008" +"apple","neutral","126311223343058946" +"apple","neutral","126309939961536513" +"apple","neutral","126308556294205441" +"apple","neutral","126307117274644480" +"apple","neutral","126302918797312000" +"apple","neutral","126302719882444801" +"apple","neutral","126301989486342145" +"apple","neutral","126301301511426049" +"apple","neutral","126300596633481216" +"apple","neutral","126300304600866816" +"apple","neutral","126298592364331008" +"apple","neutral","126294550535872512" +"apple","neutral","126294304628019201" +"apple","neutral","126293879166205952" +"apple","neutral","126293725155569664" +"apple","neutral","126292279009882113" +"apple","neutral","126292233963053056" +"apple","neutral","126292109727768576" +"apple","neutral","126291860305100801" +"apple","neutral","126290154737504256" +"apple","neutral","126289716097196032" +"apple","neutral","126287512296632320" +"apple","neutral","126283761754185728" +"apple","neutral","126283639959990274" +"apple","neutral","126283441657495552" +"apple","neutral","126283440457912320" +"apple","neutral","126283024278110208" +"apple","neutral","126281432644595713" +"apple","neutral","126280507729260544" +"apple","neutral","126279672433614848" +"apple","neutral","126277810431074304" +"apple","neutral","126272713332506624" +"apple","neutral","126267746739699713" +"apple","neutral","126257394622808064" +"apple","neutral","126243528832593920" +"apple","neutral","126232037492404224" +"apple","neutral","126229089651654656" +"apple","neutral","126225922159427584" +"apple","neutral","126219340214304768" 
+"apple","neutral","126218596786511873" +"apple","neutral","126217194173501441" +"apple","neutral","126213965817708544" +"apple","neutral","126211975595311104" +"apple","neutral","126209902241787904" +"apple","neutral","126205800359280640" +"apple","neutral","126201991125929984" +"apple","neutral","126197405015220225" +"apple","neutral","126192452297170945" +"apple","neutral","126186955296878592" +"apple","neutral","126186795808456704" +"apple","neutral","126185114173583360" +"apple","neutral","126177221571395584" +"apple","neutral","126171911523794944" +"apple","neutral","126165547154018304" +"apple","neutral","126163403063570432" +"apple","neutral","126157019072835584" +"apple","neutral","126149567036137473" +"apple","neutral","126148184358653954" +"apple","neutral","126147867478982656" +"apple","neutral","126146495396319232" +"apple","neutral","126141631291326464" +"apple","neutral","126141628372090880" +"apple","neutral","126141580682854400" +"apple","neutral","126141157146238976" +"apple","neutral","126134865887363072" +"apple","neutral","126130171404230656" +"apple","neutral","126125830094061568" +"apple","neutral","126125799744094208" +"apple","neutral","126125182405447680" +"apple","neutral","126124917992341504" +"apple","neutral","126122372775415808" +"apple","neutral","126118389591711744" +"apple","neutral","126113944891949056" +"apple","neutral","126110863550717952" +"apple","neutral","126110770864979968" +"apple","neutral","126110374549405696" +"apple","neutral","126109969912311810" +"apple","neutral","126107127231152129" +"apple","neutral","126106914684796928" +"apple","neutral","126105236229193728" +"apple","neutral","126105175294357505" +"apple","neutral","126104490511319041" +"apple","neutral","126104322999197696" +"apple","neutral","126104244402126848" +"apple","neutral","126102213956337664" +"apple","neutral","126102037057388544" +"apple","neutral","126098699196698624" +"apple","neutral","126095965391298560" +"apple","neutral","126094392183357443" +"apple","neutral","126094029015355392" +"apple","neutral","126094027140513792" +"apple","neutral","126091878469869568" +"apple","neutral","126089815136538624" +"apple","neutral","126089347639427072" +"apple","neutral","126087892580827137" +"apple","neutral","126086553415057408" +"apple","neutral","126082898783780864" +"apple","neutral","126081812236738560" +"apple","neutral","126079649959772160" +"apple","neutral","126078565346312192" +"apple","neutral","126073142107045888" +"apple","neutral","126072051118260225" +"apple","neutral","126071173640499200" +"apple","neutral","126068964685135872" +"apple","neutral","126066994008162305" +"apple","neutral","126066452787773443" +"apple","neutral","126065983138955265" +"apple","neutral","126065529684369408" +"apple","neutral","126061182720278528" +"apple","neutral","126057389333020672" +"apple","neutral","126056940060155904" +"apple","neutral","126055880394420224" +"apple","neutral","126054998080622593" +"apple","neutral","126054725727698944" +"apple","neutral","126054145617694720" +"apple","neutral","126052649475915776" +"apple","neutral","126051375422504961" +"apple","neutral","126049560878526464" +"apple","neutral","126044185815040000" +"apple","neutral","126044055644807169" +"apple","neutral","126043954641780736" +"apple","neutral","126042506717704192" +"apple","neutral","126041773356232704" +"apple","neutral","126041570876203009" +"apple","neutral","126040050441011200" +"apple","neutral","126039521853845504" +"apple","neutral","126039090578735104" 
+"apple","neutral","126031463647944704" +"apple","neutral","126030091892432896" +"apple","neutral","126029114850295809" +"apple","neutral","126022527578406912" +"apple","neutral","126021436149211136" +"apple","neutral","126019432194650113" +"apple","neutral","126018538531061760" +"apple","neutral","126017685246050304" +"apple","neutral","126017643747606528" +"apple","neutral","126016494701256704" +"apple","neutral","126016405085757440" +"apple","neutral","126014643826208768" +"apple","neutral","126014277814468608" +"apple","neutral","126014214467895297" +"apple","neutral","126014102379302912" +"apple","neutral","126013626426466304" +"apple","neutral","126012833128390656" +"apple","neutral","126012515019784192" +"apple","neutral","126012034545496065" +"apple","neutral","126012004312956928" +"apple","neutral","126011120694726656" +"apple","neutral","126010471202566144" +"apple","neutral","126009386022879232" +"apple","neutral","126008776322064384" +"apple","neutral","126007705600135168" +"apple","neutral","126006669535744000" +"apple","neutral","126006572420833282" +"apple","neutral","126006349959135232" +"apple","neutral","126004552557273088" +"apple","neutral","126003746135224320" +"apple","neutral","126003567315255296" +"apple","neutral","126001989309054976" +"apple","neutral","126001758853009409" +"apple","neutral","126000843798491136" +"apple","neutral","125999022908510209" +"apple","neutral","125998732046123009" +"apple","neutral","125996412252078080" +"apple","neutral","125996330500890624" +"apple","neutral","125994997609803776" +"apple","neutral","125994518989385729" +"apple","neutral","125993702782025729" +"apple","neutral","125993105722839040" +"apple","neutral","125992838910586880" +"apple","neutral","125992594395250688" +"apple","neutral","125991449455104000" +"apple","neutral","125990236743405568" +"apple","neutral","125990217801940992" +"apple","neutral","125989605634879488" +"apple","neutral","125989196132388864" +"apple","neutral","125989009091592192" +"apple","neutral","125988775548559360" +"apple","neutral","125988651426512899" +"apple","neutral","125984350989860864" +"apple","neutral","125983179877253120" +"apple","neutral","125981074114359297" +"apple","neutral","125980918220464128" +"apple","neutral","125980676653723648" +"apple","neutral","125980615664336896" +"apple","neutral","125979228452818944" +"apple","neutral","125978290367381504" +"apple","neutral","125975779447291904" +"apple","neutral","125974955983769603" +"apple","neutral","125974497546338304" +"apple","neutral","125972882240188416" +"apple","neutral","125971256335024128" +"apple","neutral","125969677997453312" +"apple","neutral","125969128514260992" +"apple","neutral","125967560171720704" +"apple","neutral","125967126912712705" +"apple","neutral","125965569659895808" +"apple","neutral","125963773176582144" +"apple","neutral","125962667541270528" +"apple","neutral","125962608519036928" +"apple","neutral","125961033348153345" +"apple","neutral","125960438981734400" +"apple","neutral","125958702455988225" +"apple","neutral","125958525708021760" +"apple","neutral","125958117086347264" +"apple","neutral","125957972466737152" +"apple","neutral","125957965109932032" +"apple","neutral","125951303770845185" +"apple","neutral","125950941349421057" +"apple","neutral","125950505389273090" +"apple","neutral","125950026181648385" +"apple","neutral","125949784677810176" +"apple","neutral","125948450620702720" +"apple","neutral","125947460592996352" +"apple","neutral","125944856504827904" 
+"apple","neutral","125944293671182336" +"apple","neutral","125943115449253888" +"apple","neutral","125943078837161984" +"apple","neutral","125943020767019008" +"apple","neutral","125940398915977217" +"apple","neutral","125940300987371521" +"apple","neutral","125939862078619648" +"apple","neutral","125939833775460352" +"apple","neutral","125938918540574720" +"apple","neutral","125938325151432706" +"apple","neutral","125936985796919296" +"apple","neutral","125936323273048065" +"apple","neutral","125935636300570624" +"apple","neutral","125935627056324609" +"apple","neutral","125935503752171520" +"apple","neutral","125935314878476289" +"apple","neutral","125933630613766144" +"apple","neutral","125932876721168384" +"apple","neutral","125930342891790337" +"apple","neutral","125930143066759169" +"apple","neutral","125930002607906816" +"apple","neutral","125927540249473024" +"apple","neutral","125927536847880192" +"apple","neutral","125927533614084097" +"apple","neutral","125927530761953281" +"apple","neutral","125927399010467840" +"apple","neutral","125927174514540544" +"apple","neutral","125927032185044992" +"apple","neutral","125926624930693121" +"apple","neutral","125922989844856833" +"apple","neutral","125922500839342080" +"apple","neutral","125922174648324096" +"apple","neutral","125921393350160384" +"apple","neutral","125920729194704896" +"apple","neutral","125920725595983874" +"apple","neutral","125920721200361472" +"apple","neutral","125920717966544896" +"apple","neutral","125920716297211904" +"apple","neutral","125919221845721090" +"apple","neutral","125918906215968771" +"apple","neutral","125918450920062977" +"apple","neutral","125918447979872258" +"apple","neutral","125918444762828800" +"apple","neutral","125918441013133312" +"apple","neutral","125917264267579393" +"apple","neutral","125917174618525696" +"apple","neutral","125917170571026432" +"apple","neutral","125917164535418880" +"apple","neutral","125917160982855680" +"apple","neutral","125915210337890304" +"apple","neutral","125908946702696448" +"apple","neutral","125907633466130432" +"apple","neutral","125892140940267522" +"apple","neutral","125891898517889024" +"apple","neutral","125887065861787648" +"apple","neutral","125882473312817152" +"apple","neutral","125878880916611072" +"apple","neutral","125866627337162752" +"apple","neutral","125866368758333440" +"apple","neutral","125859792802693120" +"apple","neutral","125859488728236032" +"apple","neutral","125854430171111424" +"apple","neutral","125840474132066304" +"apple","neutral","125830917578162176" +"apple","neutral","125829040740368384" +"apple","neutral","125828984293425152" +"apple","neutral","125826820057731074" +"apple","neutral","125824709421039616" +"apple","neutral","125823389804929024" +"apple","neutral","125821979797364736" +"apple","neutral","125821218258550784" +"apple","neutral","125817967240949760" +"apple","neutral","125815990620659713" +"apple","neutral","125811943054393344" +"apple","neutral","125807897568481280" +"apple","neutral","125804983185719297" +"apple","neutral","125803571601080320" +"apple","neutral","125803457155301376" +"apple","neutral","125801811817922561" +"apple","neutral","125793487479259136" +"apple","neutral","125792596114161665" +"apple","neutral","125792107930714113" +"apple","neutral","125727869363163137" +"apple","neutral","125727349034598401" +"apple","neutral","125727044263874560" +"apple","neutral","125726769297891330" +"apple","neutral","125725274317914112" +"apple","neutral","125725019178409984" 
+"apple","neutral","125724524732882944" +"apple","neutral","125721197437648896" +"apple","neutral","125714971261812736" +"apple","neutral","125708425752612864" +"apple","neutral","125708240225959936" +"apple","neutral","125706125764083712" +"apple","neutral","125703536632807424" +"apple","neutral","125698733768843264" +"apple","neutral","125695449423286272" +"apple","neutral","125695107734319104" +"apple","neutral","125694587313467393" +"apple","neutral","125692890474233856" +"apple","neutral","125692845054115842" +"apple","neutral","125691975474229248" +"apple","neutral","125690764331196416" +"apple","neutral","125689905954299904" +"apple","neutral","125687710705926144" +"apple","neutral","125686643960193024" +"apple","neutral","125685016389894144" +"apple","neutral","125679996420374530" +"apple","neutral","125677838295764992" +"apple","neutral","125674121722998785" +"apple","neutral","125669834922008576" +"apple","neutral","125667332931596290" +"apple","neutral","125667159547461633" +"apple","neutral","125666909080387584" +"apple","neutral","125665930339565568" +"apple","neutral","125664999036301312" +"apple","neutral","125664891691474944" +"apple","neutral","125663967296229376" +"apple","neutral","125663914552868864" +"apple","neutral","125661036891226113" +"apple","neutral","125660067482697729" +"apple","neutral","125657950185463808" +"apple","neutral","125647236418912256" +"apple","neutral","125645376790331392" +"apple","neutral","125643523792969728" +"apple","neutral","125642256114909184" +"apple","neutral","125642161659199488" +"apple","neutral","125642041140060160" +"apple","neutral","125641351848136704" +"apple","neutral","125640758966484992" +"apple","neutral","125640679325052929" +"apple","neutral","125640515021578240" +"apple","neutral","125638955952640000" +"apple","neutral","125633549847117824" +"apple","neutral","125633468708302848" +"apple","neutral","125633233982459904" +"apple","neutral","125632687879884800" +"apple","neutral","125632582795804672" +"apple","neutral","125631556051140608" +"apple","neutral","125630955154190336" +"apple","neutral","125630836245676033" +"apple","neutral","125630016485732352" +"apple","neutral","125629788563050496" +"apple","neutral","125627732032888833" +"apple","neutral","125626286939979776" +"apple","neutral","125625630254567424" +"apple","neutral","125625566203346944" +"apple","neutral","125620113582993408" +"apple","neutral","125617451705712640" +"apple","neutral","125617133123153921" +"apple","neutral","125616747863736320" +"apple","neutral","125614951787266048" +"apple","neutral","125614910804738049" +"apple","neutral","125607526967410689" +"apple","neutral","125598495137726464" +"apple","neutral","125596991290998784" +"apple","neutral","125595669145722880" +"apple","neutral","125595441562783744" +"apple","neutral","125591434056318977" +"apple","neutral","125589884445536257" +"apple","neutral","125589258898644992" +"apple","neutral","125588749454278656" +"apple","neutral","125588587180863489" +"apple","neutral","125587186723725312" +"apple","neutral","125586682790674434" +"apple","neutral","125585063327956992" +"apple","neutral","125583717354831872" +"apple","neutral","125581507355086848" +"apple","neutral","125581280430669824" +"apple","neutral","125564573167263746" +"apple","neutral","125559846870323200" +"apple","neutral","125559232157327360" +"apple","neutral","125551370676862976" +"apple","neutral","125546017205665792" +"apple","neutral","125545914864640000" +"apple","neutral","125544363945230336" 
+"apple","neutral","125541112491425792" +"apple","neutral","125538769632886784" +"apple","neutral","125537487455137793" +"apple","neutral","125532364406398977" +"apple","neutral","125532202057482240" +"apple","neutral","125528344480587776" +"apple","neutral","125527718203887616" +"apple","neutral","125512197135806464" +"apple","neutral","125498684401135616" +"apple","neutral","125496516000485376" +"apple","neutral","125493419522002944" +"apple","neutral","125493125098635265" +"apple","neutral","125489264157917184" +"apple","neutral","125486845768368128" +"apple","neutral","125460019859820544" +"apple","neutral","125459423870197760" +"apple","neutral","125459375245635584" +"apple","neutral","125457535951060993" +"apple","neutral","125448837404954624" +"apple","neutral","125433354488254464" +"apple","neutral","125432626482917376" +"apple","neutral","125432518324400128" +"apple","neutral","125425087800291328" +"apple","neutral","125424738662223872" +"apple","neutral","125419216227667968" +"apple","neutral","125417324621737985" +"apple","neutral","125416866243018753" +"apple","neutral","125416811490578435" +"apple","neutral","125410153196560384" +"apple","neutral","125409201907437569" +"apple","neutral","125406528487424003" +"apple","neutral","125406003863883776" +"apple","neutral","125405939015757824" +"apple","neutral","125402652610797569" +"apple","neutral","125398913313284096" +"apple","neutral","125398813543374848" +"apple","neutral","125394805449699329" +"apple","neutral","125379023307153408" +"apple","neutral","125374041556127744" +"apple","neutral","125373658389692416" +"apple","neutral","125373427661029376" +"apple","neutral","125372628746768384" +"apple","neutral","125369026351349760" +"apple","neutral","125368381728763904" +"apple","neutral","125367492976717824" +"apple","neutral","125366519680086016" +"apple","neutral","125365963972542464" +"apple","neutral","125365264656236544" +"apple","neutral","125364154629492737" +"apple","neutral","125364122853453824" +"apple","neutral","125361209137565696" +"apple","neutral","125360952878182400" +"apple","neutral","125360877359742976" +"apple","neutral","125359697770450944" +"apple","neutral","125359622193295360" +"apple","neutral","125357901580746752" +"apple","neutral","125354296752619520" +"apple","neutral","125351067620880385" +"apple","neutral","125350537821569024" +"apple","neutral","125347828754169856" +"apple","neutral","125347618862792705" +"apple","neutral","125345723020607488" +"apple","neutral","125344722196766720" +"apple","neutral","125344351218974720" +"apple","neutral","125338231280644096" +"apple","neutral","125336929825849344" +"apple","neutral","125336798690942977" +"apple","neutral","125335012961828866" +"apple","neutral","125332871174037504" +"apple","neutral","125332609428496384" +"apple","neutral","125327281181835264" +"apple","neutral","125326760769372160" +"apple","neutral","125325397712846848" +"apple","neutral","125324916009615360" +"apple","neutral","125321084525490176" +"apple","neutral","125318029390249984" +"apple","neutral","125317541504626688" +"apple","neutral","125317300860620801" +"apple","neutral","125315460030922752" +"apple","neutral","125315263183851521" +"apple","neutral","125315080081518592" +"apple","neutral","125312789345599489" +"apple","neutral","125312357797863425" +"apple","neutral","125309663913840640" +"apple","neutral","125305567148388352" +"apple","neutral","125300705836793856" +"apple","neutral","125300603059576833" +"apple","neutral","125287442407362560" 
+"apple","neutral","125283873331494913" +"apple","neutral","125281502866059264" +"apple","neutral","125278676949544960" +"apple","neutral","125276525472911360" +"apple","neutral","125275795252977664" +"apple","neutral","125275280678993920" +"apple","neutral","125273317673414656" +"apple","neutral","125270965268643840" +"apple","neutral","125267017942052866" +"apple","neutral","125266503657472000" +"apple","neutral","125261029834899456" +"apple","neutral","125260105154437121" +"apple","neutral","125257803790159873" +"apple","neutral","125252442836320256" +"apple","neutral","125250721280040961" +"apple","neutral","125250617911418881" +"apple","neutral","125250078108684288" +"apple","neutral","125246898830458880" +"apple","neutral","125245892814045184" +"apple","neutral","125244798671142912" +"apple","neutral","125231250247135233" +"apple","neutral","125227689895407616" +"apple","neutral","125222749034659840" +"apple","neutral","125218106778992640" +"apple","neutral","125211793655218178" +"apple","neutral","125206271560384512" +"apple","neutral","125196751387889665" +"apple","neutral","125193298624258049" +"apple","neutral","125184976579862530" +"apple","neutral","125085987431923713" +"apple","irrelevant","126405660308021248" +"apple","irrelevant","126403953058529280" +"apple","irrelevant","126402391259103232" +"apple","irrelevant","126399172495679488" +"apple","irrelevant","126394266145665025" +"apple","irrelevant","126391727408947200" +"apple","irrelevant","126387209824776192" +"apple","irrelevant","126385587740610563" +"apple","irrelevant","126381904621600768" +"apple","irrelevant","126379095004160001" +"apple","irrelevant","126373281099026432" +"apple","irrelevant","126367728754884609" +"apple","irrelevant","126362562865528832" +"apple","irrelevant","126360606042374144" +"apple","irrelevant","126355839274594304" +"apple","irrelevant","126355573586399232" +"apple","irrelevant","126352049070809089" +"apple","irrelevant","126346705292640257" +"apple","irrelevant","126346563147673600" +"apple","irrelevant","126346004688674816" +"apple","irrelevant","126342441057001472" +"apple","irrelevant","126340074777489408" +"apple","irrelevant","126332817134190592" +"apple","irrelevant","126331879883415552" +"apple","irrelevant","126329876935479296" +"apple","irrelevant","126319126913363968" +"apple","irrelevant","126313259572793345" +"apple","irrelevant","126312509983559681" +"apple","irrelevant","126310736577298432" +"apple","irrelevant","126307114959372289" +"apple","irrelevant","126304243144597505" +"apple","irrelevant","126303928039116800" +"apple","irrelevant","126303310054559744" +"apple","irrelevant","126298834203713536" +"apple","irrelevant","126298134212120577" +"apple","irrelevant","126297754799587328" +"apple","irrelevant","126290039138291712" +"apple","irrelevant","126271901340401665" +"apple","irrelevant","126270990459219968" +"apple","irrelevant","126264647652343808" +"apple","irrelevant","126264035007143936" +"apple","irrelevant","126263600548556800" +"apple","irrelevant","126260304819662849" +"apple","irrelevant","126252530819809280" +"apple","irrelevant","126236984644612096" +"apple","irrelevant","126232767821381632" +"apple","irrelevant","126228762596618240" +"apple","irrelevant","126215978341236736" +"apple","irrelevant","126195701704163328" +"apple","irrelevant","126186608113356800" +"apple","irrelevant","126175729024122880" +"apple","irrelevant","126173465253384193" +"apple","irrelevant","126164921485492224" +"apple","irrelevant","126149195957673984" 
+"apple","irrelevant","126138637652992001" +"apple","irrelevant","126131070050639874" +"apple","irrelevant","126111632773480448" +"apple","irrelevant","126097345124368385" +"apple","irrelevant","126093841232166912" +"apple","irrelevant","126061579233017856" +"apple","irrelevant","126054622564589569" +"apple","irrelevant","126054568273518592" +"apple","irrelevant","126052637014630400" +"apple","irrelevant","126042611709521921" +"apple","irrelevant","126024290201124864" +"apple","irrelevant","126022958710915072" +"apple","irrelevant","126022708524888064" +"apple","irrelevant","126007147199868928" +"apple","irrelevant","126005063595466753" +"apple","irrelevant","126001383869644800" +"apple","irrelevant","126000885485678592" +"apple","irrelevant","125998496535937024" +"apple","irrelevant","125996653990772737" +"apple","irrelevant","125993886249267200" +"apple","irrelevant","125993438205321218" +"apple","irrelevant","125993305325576193" +"apple","irrelevant","125992545552576512" +"apple","irrelevant","125991634855923712" +"apple","irrelevant","125990804488601600" +"apple","irrelevant","125990754769309696" +"apple","irrelevant","125982640263274496" +"apple","irrelevant","125973789526863872" +"apple","irrelevant","125970459404673026" +"apple","irrelevant","125968277083136000" +"apple","irrelevant","125967413299773440" +"apple","irrelevant","125967315488608257" +"apple","irrelevant","125965369532878849" +"apple","irrelevant","125965364667486209" +"apple","irrelevant","125959699089719297" +"apple","irrelevant","125959482588143616" +"apple","irrelevant","125957826500771840" +"apple","irrelevant","125957742698561537" +"apple","irrelevant","125948329694724097" +"apple","irrelevant","125930962545672192" +"apple","irrelevant","125928640394432513" +"apple","irrelevant","125910538550124545" +"apple","irrelevant","125909170074562561" +"apple","irrelevant","125877369796968448" +"apple","irrelevant","125873952953352192" +"apple","irrelevant","125862601677737985" +"apple","irrelevant","125857117407166464" +"apple","irrelevant","125827656238379008" +"apple","irrelevant","125826633713201152" +"apple","irrelevant","125825293473685505" +"apple","irrelevant","125815316596002816" +"apple","irrelevant","125797001337122817" +"apple","irrelevant","125724424774221826" +"apple","irrelevant","125707107495452673" +"apple","irrelevant","125705666592641024" +"apple","irrelevant","125705646942330880" +"apple","irrelevant","125699684693065728" +"apple","irrelevant","125695680135172096" +"apple","irrelevant","125695094836826112" +"apple","irrelevant","125653144993660928" +"apple","irrelevant","125650076759638016" +"apple","irrelevant","125647972087242754" +"apple","irrelevant","125641150186000384" +"apple","irrelevant","125622089502830592" +"apple","irrelevant","125621628917915648" +"apple","irrelevant","125602732278169601" +"apple","irrelevant","125595437938905088" +"apple","irrelevant","125590191502131200" +"apple","irrelevant","125580342244548608" +"apple","irrelevant","125568051277086721" +"apple","irrelevant","125542968844226560" +"apple","irrelevant","125542941287649280" +"apple","irrelevant","125534067495141376" +"apple","irrelevant","125526544377577472" +"apple","irrelevant","125521498055254016" +"apple","irrelevant","125521344342392832" +"apple","irrelevant","125516954407677952" +"apple","irrelevant","125495075504537600" +"apple","irrelevant","125490577130258432" +"apple","irrelevant","125485687339352064" +"apple","irrelevant","125447473786392576" +"apple","irrelevant","125445752083329025" 
+"apple","irrelevant","125445407592546304" +"apple","irrelevant","125421514928558080" +"apple","irrelevant","125411589905068033" +"apple","irrelevant","125409044222586880" +"apple","irrelevant","125406418777018368" +"apple","irrelevant","125405429583970305" +"apple","irrelevant","125375333162684416" +"apple","irrelevant","125361267555835905" +"apple","irrelevant","125353260520443904" +"apple","irrelevant","125352405482217473" +"apple","irrelevant","125346522618535937" +"apple","irrelevant","125339193802100736" +"apple","irrelevant","125336882862231552" +"apple","irrelevant","125333140414808065" +"apple","irrelevant","125325092841467904" +"apple","irrelevant","125321769203666944" +"apple","irrelevant","125319263027343360" +"apple","irrelevant","125305753903964161" +"apple","irrelevant","125301831286013952" +"apple","irrelevant","125287180561166336" +"apple","irrelevant","125275735815491584" +"apple","irrelevant","125232405517844481" +"apple","irrelevant","125228207002759168" +"apple","irrelevant","125219664488960000" +"apple","irrelevant","125209676416679936" +"apple","irrelevant","125206785584922624" +"apple","irrelevant","125184213342367744" +"apple","irrelevant","125082707389718529" +"google","positive","126534770095169536" +"google","positive","126534201880219648" +"google","positive","126534020367519744" +"google","positive","126533948925952000" +"google","positive","126533885109600256" +"google","positive","126533562781544448" +"google","positive","126533349727666176" +"google","positive","126533268119109632" +"google","positive","126533166352699392" +"google","positive","126532897715929088" +"google","positive","126531552367751169" +"google","positive","126531180907601920" +"google","positive","126530945976238080" +"google","positive","126530924576907264" +"google","positive","126530807891374082" +"google","positive","126530189579649024" +"google","positive","126530027939569665" +"google","positive","126529770778411008" +"google","positive","126528982807089152" +"google","positive","126528978239496194" +"google","positive","126528804192653312" +"google","positive","126528264117293056" +"google","positive","126526219587039233" +"google","positive","126525469897146368" +"google","positive","126525368860540928" +"google","positive","126525172969766912" +"google","positive","126524301259194368" +"google","positive","126523731710443521" +"google","positive","126523530903953408" +"google","positive","126523525598162944" +"google","positive","126522990585315328" +"google","positive","126522810821644288" +"google","positive","126522714713370624" +"google","positive","126522621251682304" +"google","positive","126522262768726016" +"google","positive","126521613259771904" +"google","positive","126521382220738560" +"google","positive","126521286053724160" +"google","positive","126520518609350656" +"google","positive","126520029410885632" +"google","positive","126519483752914944" +"google","positive","126519329025040384" +"google","positive","126519123772588032" +"google","positive","126519017405030400" +"google","positive","126518882939838464" +"google","positive","126516914678808578" +"google","positive","126516779886456832" +"google","positive","126516304336257025" +"google","positive","126515760855134208" +"google","positive","126514474378203136" +"google","positive","126513620686352384" +"google","positive","126513526385819648" +"google","positive","126513425043030016" +"google","positive","126512728297844736" +"google","positive","126512631937904640" 
+"google","positive","126512208451600384" +"google","positive","126511837796765696" +"google","positive","126511545160171520" +"google","positive","126511426926944256" +"google","positive","126511000907288576" +"google","positive","126510977335300096" +"google","positive","126510551789604864" +"google","positive","126509929619132417" +"google","positive","126509528287166464" +"google","positive","126508433582211072" +"google","positive","126508393203642368" +"google","positive","126507456019968000" +"google","positive","126507105023819776" +"google","positive","126506850781888512" +"google","positive","126506410195431424" +"google","positive","126506064387637249" +"google","positive","126505384428052481" +"google","positive","126505187752943616" +"google","positive","126505144878772224" +"google","positive","126504782465732608" +"google","positive","126504452680187905" +"google","positive","126504346639802368" +"google","positive","126504216004005888" +"google","positive","126504013939216384" +"google","positive","126503946092158976" +"google","positive","126503805369069568" +"google","positive","126503790412181504" +"google","positive","126503029548654593" +"google","positive","126502415322193920" +"google","positive","126502014560649216" +"google","positive","126501732443361280" +"google","positive","126501468902658048" +"google","positive","126500614552289282" +"google","positive","126499965869625345" +"google","positive","126499581520384001" +"google","positive","126499428965158912" +"google","positive","126499145014980608" +"google","positive","126499143282737152" +"google","positive","126498734409396224" +"google","positive","126498608815149056" +"google","positive","126498587499696128" +"google","positive","126497976314109952" +"google","positive","126497860752646146" +"google","positive","126497655785402368" +"google","positive","126496772586610688" +"google","positive","126496739531304960" +"google","positive","126496342901133313" +"google","positive","126496262668292096" +"google","positive","126496155856142336" +"google","positive","126496005809127424" +"google","positive","126495843116265475" +"google","positive","126495812724338688" +"google","positive","126495283176685569" +"google","positive","126495221373607936" +"google","positive","126494976396898305" +"google","positive","126494883367235585" +"google","positive","126494838689513473" +"google","positive","126494834449063936" +"google","positive","126494645281755136" +"google","positive","126494573966016512" +"google","positive","126494442290020352" +"google","positive","126494344566943744" +"google","positive","126494339248562176" +"google","positive","126494280318582784" +"google","positive","126494221879357440" +"google","positive","126494100252925954" +"google","positive","126494070385283072" +"google","positive","126493889761787904" +"google","positive","126493850914131968" +"google","positive","126493722916560896" +"google","positive","126493683515260928" +"google","positive","126493648757071873" +"google","positive","126493639605092352" +"google","positive","126493624270725120" +"google","positive","126493538945994752" +"google","positive","126493495933403136" +"google","positive","126493472931844096" +"google","positive","126493405760069632" +"google","positive","126493371790397440" +"google","positive","126493365775777792" +"google","positive","126493265661919232" +"google","positive","126493192110612480" +"google","positive","126493141271449600" +"google","positive","126493101807239168" 
+"google","positive","126493037110099968" +"google","positive","126493034014720000" +"google","positive","126492983968268289" +"google","positive","126492981460082688" +"google","positive","126492970793971712" +"google","positive","126492852615262208" +"google","positive","126492838870515713" +"google","positive","126492656124702721" +"google","positive","126492628152889344" +"google","positive","126492595500236800" +"google","positive","126492495394775043" +"google","positive","126492415749128192" +"google","positive","126492266016673794" +"google","positive","126492259993657345" +"google","positive","126492242373386240" +"google","positive","126492101549621249" +"google","positive","126492040954527744" +"google","positive","126492009748889600" +"google","positive","126491707247300608" +"google","positive","126491689396338688" +"google","positive","126491589852930048" +"google","positive","126491556831182848" +"google","positive","126491420260442112" +"google","positive","126491304036278272" +"google","positive","126491177754165252" +"google","positive","126491109982613505" +"google","positive","126491084854530049" +"google","positive","126491009558392832" +"google","positive","126490970803027969" +"google","positive","126490696214511616" +"google","positive","126490548306579457" +"google","positive","126490282878443520" +"google","positive","126490137944268800" +"google","positive","126489924596793345" +"google","positive","126489848004608000" +"google","positive","126489841096597504" +"google","positive","126489713782685696" +"google","positive","126489671730597888" +"google","positive","126489564415131649" +"google","positive","126489424807735296" +"google","positive","126489397507014656" +"google","positive","126489393526616064" +"google","positive","126489263025033216" +"google","positive","126489200567664640" +"google","positive","126489182263721984" +"google","positive","126489085102661632" +"google","positive","126489066077302784" +"google","positive","126488933772169216" +"google","positive","126488863374983168" +"google","positive","126488589046517760" +"google","positive","126488572772622336" +"google","positive","126488519265894400" +"google","positive","126488454921068544" +"google","positive","126488447098695680" +"google","positive","126488384410619906" +"google","positive","126488304341360640" +"google","positive","126488294325370880" +"google","positive","126488282862338049" +"google","positive","126488249739915264" +"google","positive","126488202935664640" +"google","positive","126488018616987648" +"google","positive","126487901084196864" +"google","positive","126487844830191617" +"google","positive","126487738684948480" +"google","positive","126487523366150144" +"google","positive","126487508589621248" +"google","positive","126487332865056768" +"google","positive","126487323071365120" +"google","positive","126487172487462912" +"google","positive","126487153655029760" +"google","positive","126487090723700737" +"google","positive","126487082905518081" +"google","positive","126486849706401792" +"google","positive","126486332104130561" +"google","positive","126486322675318784" +"google","positive","126485171280166913" +"google","positive","126484977176158208" +"google","positive","126484769285480448" +"google","positive","126484631389356032" +"google","negative","126534476875567104" +"google","negative","126533446293127168" +"google","negative","126532210210783232" +"google","negative","126527166195314688" +"google","negative","126526648928579584" 
+"google","negative","126524840126582784" +"google","negative","126520550876127232" +"google","negative","126520337289580544" +"google","negative","126518920122335233" +"google","negative","126517970179600385" +"google","negative","126517608697708545" +"google","negative","126516572343910400" +"google","negative","126516408317251585" +"google","negative","126515822750478337" +"google","negative","126515697181409280" +"google","negative","126514511862706176" +"google","negative","126507644742672384" +"google","negative","126506609823334400" +"google","negative","126505594290057216" +"google","negative","126505113362776064" +"google","negative","126503098071007232" +"google","negative","126502732369629184" +"google","negative","126502274204831744" +"google","negative","126501869920075776" +"google","negative","126499555742203904" +"google","negative","126497996350304257" +"google","negative","126497929593761792" +"google","negative","126497514168922112" +"google","negative","126497333209858049" +"google","negative","126496987192373248" +"google","negative","126495219058348032" +"google","negative","126494895501348864" +"google","negative","126494402381225984" +"google","negative","126494319749238784" +"google","negative","126494303173361664" +"google","negative","126494261922381824" +"google","negative","126493910037037058" +"google","negative","126493616519647232" +"google","negative","126493030185316352" +"google","negative","126492741764005890" +"google","negative","126492244151771137" +"google","negative","126491870900666368" +"google","negative","126491825660887040" +"google","negative","126491776482689024" +"google","negative","126491509527805952" +"google","negative","126491502493962240" +"google","negative","126491480087986176" +"google","negative","126490767345725441" +"google","negative","126490750958567424" +"google","negative","126490477288636416" +"google","negative","126489936944832512" +"google","negative","126489813468721152" +"google","negative","126489698314104832" +"google","negative","126488700975726593" +"google","negative","126487669533442048" +"google","negative","126487604035198976" +"google","negative","126487206570373120" +"google","negative","126487152338026496" +"google","negative","126486838549557248" +"google","negative","126486654830645249" +"google","negative","126486634458914816" +"google","neutral","126535080557551616" +"google","neutral","126535043588964352" +"google","neutral","126535016049160192" +"google","neutral","126534917290070016" +"google","neutral","126534362148782080" +"google","neutral","126534144170790912" +"google","neutral","126534083797987328" +"google","neutral","126534080627093504" +"google","neutral","126533944832311298" +"google","neutral","126533740628422656" +"google","neutral","126533730671149056" +"google","neutral","126533567311392769" +"google","neutral","126533547803680770" +"google","neutral","126533491495157760" +"google","neutral","126533487590248448" +"google","neutral","126533485837037568" +"google","neutral","126532820507176960" +"google","neutral","126532798994583552" +"google","neutral","126532569259970560" +"google","neutral","126532361587396610" +"google","neutral","126532316859338752" +"google","neutral","126532178082410496" +"google","neutral","126532141302558720" +"google","neutral","126532121773883392" +"google","neutral","126532064823619584" +"google","neutral","126532054136524800" +"google","neutral","126532019999096832" +"google","neutral","126531893649874945" +"google","neutral","126531814071336960" 
+"google","neutral","126531693522857984" +"google","neutral","126531628087513088" +"google","neutral","126531561691693056" +"google","neutral","126531560194326530" +"google","neutral","126531556255870976" +"google","neutral","126531542733430784" +"google","neutral","126531524945395713" +"google","neutral","126531522487533568" +"google","neutral","126531297249202176" +"google","neutral","126531285475799040" +"google","neutral","126531278517448705" +"google","neutral","126531161215344640" +"google","neutral","126530795987939328" +"google","neutral","126530766829129728" +"google","neutral","126530553477468160" +"google","neutral","126530551384506368" +"google","neutral","126530550394650625" +"google","neutral","126530532640169986" +"google","neutral","126530487840813056" +"google","neutral","126530423026233346" +"google","neutral","126529904731881472" +"google","neutral","126529833487446016" +"google","neutral","126529492582797313" +"google","neutral","126529490737303552" +"google","neutral","126529470814363648" +"google","neutral","126529468981452800" +"google","neutral","126529403067969537" +"google","neutral","126529287858819072" +"google","neutral","126529224642281472" +"google","neutral","126529168715431936" +"google","neutral","126529111819681792" +"google","neutral","126529099018674176" +"google","neutral","126529019284946944" +"google","neutral","126528767228248064" +"google","neutral","126528551766867968" +"google","neutral","126528448050118656" +"google","neutral","126528342613700608" +"google","neutral","126528318542589952" +"google","neutral","126528316978102272" +"google","neutral","126528307004051458" +"google","neutral","126528290352660480" +"google","neutral","126528288729473024" +"google","neutral","126528287831891968" +"google","neutral","126528286871396352" +"google","neutral","126528248329940992" +"google","neutral","126528157573578754" +"google","neutral","126528078057963520" +"google","neutral","126527897065373696" +"google","neutral","126527760662413312" +"google","neutral","126527757151776768" +"google","neutral","126527756036091904" +"google","neutral","126527743528673280" +"google","neutral","126527523529043968" +"google","neutral","126527262190346240" +"google","neutral","126527122218041345" +"google","neutral","126527077905207296" +"google","neutral","126526945008689152" +"google","neutral","126526850452299776" +"google","neutral","126526838251065344" +"google","neutral","126526793380409344" +"google","neutral","126526782001262592" +"google","neutral","126526594973052928" +"google","neutral","126526457974489088" +"google","neutral","126526371785752576" +"google","neutral","126525872915226624" +"google","neutral","126525730526998528" +"google","neutral","126525539442892800" +"google","neutral","126525506018492416" +"google","neutral","126525491954987008" +"google","neutral","126525475723022336" +"google","neutral","126525465715425282" +"google","neutral","126525210483638272" +"google","neutral","126525169756946432" +"google","neutral","126525031135195136" +"google","neutral","126525007525462016" +"google","neutral","126524889296412672" +"google","neutral","126524843339431936" +"google","neutral","126524841544257536" +"google","neutral","126524784120037376" +"google","neutral","126524764071264257" +"google","neutral","126524648874717184" +"google","neutral","126524290945384449" +"google","neutral","126524242635403264" +"google","neutral","126524229997953024" +"google","neutral","126524074179567617" +"google","neutral","126524050037145601" 
+"google","neutral","126524003266473984" +"google","neutral","126523791735144448" +"google","neutral","126523776195231744" +"google","neutral","126523549493112832" +"google","neutral","126523519357030400" +"google","neutral","126523435772948480" +"google","neutral","126523421231300608" +"google","neutral","126523245649342464" +"google","neutral","126522997522702336" +"google","neutral","126522979080339456" +"google","neutral","126522978971299840" +"google","neutral","126522853779705856" +"google","neutral","126522802466598912" +"google","neutral","126522667703599104" +"google","neutral","126522664939565057" +"google","neutral","126522423771267072" +"google","neutral","126522225674301440" +"google","neutral","126521973672120320" +"google","neutral","126521718272573440" +"google","neutral","126521582809128961" +"google","neutral","126521573330010112" +"google","neutral","126521564979150848" +"google","neutral","126521489116766208" +"google","neutral","126521384657616896" +"google","neutral","126521228524658688" +"google","neutral","126521130197585920" +"google","neutral","126521102783619072" +"google","neutral","126520984122572801" +"google","neutral","126520920352358401" +"google","neutral","126520914413236224" +"google","neutral","126520707969581056" +"google","neutral","126520695218913282" +"google","neutral","126520611190218752" +"google","neutral","126520531934654465" +"google","neutral","126520511483219968" +"google","neutral","126520508903718912" +"google","neutral","126520504822661120" +"google","neutral","126520501442056193" +"google","neutral","126520500246687745" +"google","neutral","126520116795015169" +"google","neutral","126520107089395712" +"google","neutral","126520074717765632" +"google","neutral","126520020871294977" +"google","neutral","126519912821825538" +"google","neutral","126519469500669952" +"google","neutral","126519331956862976" +"google","neutral","126519306832982016" +"google","neutral","126519290722648064" +"google","neutral","126519251631747072" +"google","neutral","126519229515169792" +"google","neutral","126519227006988288" +"google","neutral","126519072195223552" +"google","neutral","126519044798029824" +"google","neutral","126519034488426496" +"google","neutral","126519019699314688" +"google","neutral","126518955526455296" +"google","neutral","126518773401391104" +"google","neutral","126518709358575616" +"google","neutral","126518669554626560" +"google","neutral","126518471893848064" +"google","neutral","126518469507289090" +"google","neutral","126518466315431936" +"google","neutral","126518461055774721" +"google","neutral","126518313160409088" +"google","neutral","126518280633593856" +"google","neutral","126518252355584001" +"google","neutral","126518222487961600" +"google","neutral","126518218343972864" +"google","neutral","126518146063532032" +"google","neutral","126518121946292224" +"google","neutral","126518025863168000" +"google","neutral","126517966589267968" +"google","neutral","126517947329036288" +"google","neutral","126517930027515904" +"google","neutral","126517793792344064" +"google","neutral","126517583347326976" +"google","neutral","126517558152134656" +"google","neutral","126517526363504641" +"google","neutral","126517510915899392" +"google","neutral","126517487503294464" +"google","neutral","126517179272278016" +"google","neutral","126517076889321472" +"google","neutral","126516982936895488" +"google","neutral","126516936862474241" +"google","neutral","126516898845310977" +"google","neutral","126516776652640256" 
+"google","neutral","126516749242871809" +"google","neutral","126516744742387712" +"google","neutral","126516481679818752" +"google","neutral","126516342319890432" +"google","neutral","126516212749443072" +"google","neutral","126516054452211712" +"google","neutral","126515992737218560" +"google","neutral","126515637978796032" +"google","neutral","126515624984846336" +"google","neutral","126515576507084800" +"google","neutral","126515415592607744" +"google","neutral","126515341567344641" +"google","neutral","126515215255871488" +"google","neutral","126515104723374080" +"google","neutral","126514999375048705" +"google","neutral","126514945373376512" +"google","neutral","126514719828885504" +"google","neutral","126514719770161152" +"google","neutral","126514718188900352" +"google","neutral","126514697875894272" +"google","neutral","126514441314500608" +"google","neutral","126514438680489985" +"google","neutral","126514435949993984" +"google","neutral","126513983338450944" +"google","neutral","126513869467299840" +"google","neutral","126513499890397185" +"google","neutral","126513430508212224" +"google","neutral","126513429103128577" +"google","neutral","126513426968215552" +"google","neutral","126513317320736768" +"google","neutral","126513312857985024" +"google","neutral","126513307589935104" +"google","neutral","126513117944496128" +"google","neutral","126513115830554625" +"google","neutral","126513110264717312" +"google","neutral","126512968958607360" +"google","neutral","126512932933734402" +"google","neutral","126512842194161664" +"google","neutral","126512471845515264" +"google","neutral","126512460629946370" +"google","neutral","126512277720535040" +"google","neutral","126512269847826432" +"google","neutral","126512265036959744" +"google","neutral","126512261543116800" +"google","neutral","126512199857475585" +"google","neutral","126512130752126976" +"google","neutral","126511907569020928" +"google","neutral","126511799947366401" +"google","neutral","126511624256372736" +"google","neutral","126511578777526273" +"google","neutral","126511507088478208" +"google","neutral","126511425760935936" +"google","neutral","126511400825790464" +"google","neutral","126511380365983744" +"google","neutral","126511358861778944" +"google","neutral","126511243371610113" +"google","neutral","126510976358035456" +"google","neutral","126510915771301888" +"google","neutral","126510815246422016" +"google","neutral","126510801803673600" +"google","neutral","126510781801046016" +"google","neutral","126510731549085697" +"google","neutral","126510402359140352" +"google","neutral","126510393114898432" +"google","neutral","126510377558216704" +"google","neutral","126510284536942592" +"google","neutral","126510143541231616" +"google","neutral","126510049039368192" +"google","neutral","126509936518762496" +"google","neutral","126509922505596928" +"google","neutral","126509783665745920" +"google","neutral","126509619819462657" +"google","neutral","126509495907135489" +"google","neutral","126509474935611392" +"google","neutral","126509381327134720" +"google","neutral","126509283260108800" +"google","neutral","126509226720903168" +"google","neutral","126509135842914304" +"google","neutral","126508885992415232" +"google","neutral","126508842367463424" +"google","neutral","126508789254979584" +"google","neutral","126508753997668352" +"google","neutral","126508642060083200" +"google","neutral","126508495255257088" +"google","neutral","126508398522019840" +"google","neutral","126508044187217920" 
+"google","neutral","126508037400825857" +"google","neutral","126508035416928256" +"google","neutral","126507982543532034" +"google","neutral","126507925337411584" +"google","neutral","126507911844339712" +"google","neutral","126507657472393216" +"google","neutral","126507418539671552" +"google","neutral","126506926069645312" +"google","neutral","126506902145347584" +"google","neutral","126506895124086784" +"google","neutral","126506846499520512" +"google","neutral","126506795958153216" +"google","neutral","126506701259157504" +"google","neutral","126506651271430144" +"google","neutral","126506310228377600" +"google","neutral","126506306201858049" +"google","neutral","126506273662447616" +"google","neutral","126506272152502272" +"google","neutral","126506232432439296" +"google","neutral","126506073099218945" +"google","neutral","126505781553147904" +"google","neutral","126505768299147264" +"google","neutral","126505670446022656" +"google","neutral","126505612690456576" +"google","neutral","126505607116238848" +"google","neutral","126505469601775616" +"google","neutral","126505424823402496" +"google","neutral","126505412068511745" +"google","neutral","126505392191705088" +"google","neutral","126505364307984384" +"google","neutral","126505359161573377" +"google","neutral","126505133587709953" +"google","neutral","126505125853401088" +"google","neutral","126505094664568833" +"google","neutral","126505086041067520" +"google","neutral","126504998799552513" +"google","neutral","126504923687960576" +"google","neutral","126504910844989440" +"google","neutral","126504893354741760" +"google","neutral","126504842469457922" +"google","neutral","126504774047772672" +"google","neutral","126504709036056576" +"google","neutral","126504479595044864" +"google","neutral","126504427275300864" +"google","neutral","126504377862205441" +"google","neutral","126504319859175424" +"google","neutral","126504126665334784" +"google","neutral","126504115508494337" +"google","neutral","126504105530236928" +"google","neutral","126504077831045122" +"google","neutral","126503993768804352" +"google","neutral","126503947560165376" +"google","neutral","126503755331022849" +"google","neutral","126503706760974337" +"google","neutral","126503703514578944" +"google","neutral","126503693355982849" +"google","neutral","126503688595447810" +"google","neutral","126503683142852608" +"google","neutral","126503627706732544" +"google","neutral","126503364103118848" +"google","neutral","126503349657935872" +"google","neutral","126503348613558272" +"google","neutral","126503090487705601" +"google","neutral","126503088461848576" +"google","neutral","126502770130960384" +"google","neutral","126502761608122368" +"google","neutral","126502730264088576" +"google","neutral","126502630578069504" +"google","neutral","126502626085969920" +"google","neutral","126502616128684032" +"google","neutral","126502614086070273" +"google","neutral","126502326356815872" +"google","neutral","126502296916987904" +"google","neutral","126502291799945217" +"google","neutral","126502250041454593" +"google","neutral","126502128737976321" +"google","neutral","126502101944778752" +"google","neutral","126502100308996097" +"google","neutral","126501988132327425" +"google","neutral","126501888756682752" +"google","neutral","126501882331017216" +"google","neutral","126501647378690048" +"google","neutral","126501582274707457" +"google","neutral","126501535160074240" +"google","neutral","126501463752060928" +"google","neutral","126501463726899200" 
+"google","neutral","126501463672369152" +"google","neutral","126501463663976448" +"google","neutral","126501463659790337" +"google","neutral","126501463626223616" +"google","neutral","126501463554924544" +"google","neutral","126501463529754624" +"google","neutral","126501463487815680" +"google","neutral","126501428897382400" +"google","neutral","126501392163684353" +"google","neutral","126501360089825280" +"google","neutral","126501209887621121" +"google","neutral","126501176559677441" +"google","neutral","126501155999203328" +"google","neutral","126500867280093184" +"google","neutral","126500856026771456" +"google","neutral","126500659833995265" +"google","neutral","126500532478148610" +"google","neutral","126500276994711553" +"google","neutral","126500259974234112" +"google","neutral","126500105762250752" +"google","neutral","126499963176886272" +"google","neutral","126499722805522433" +"google","neutral","126499712164560896" +"google","neutral","126499577443532801" +"google","neutral","126499521822867458" +"google","neutral","126499521344712704" +"google","neutral","126499346022805504" +"google","neutral","126499314393546752" +"google","neutral","126499307284217856" +"google","neutral","126499293895987201" +"google","neutral","126499217807122433" +"google","neutral","126498840831475712" +"google","neutral","126498825992019969" +"google","neutral","126498759143211008" +"google","neutral","126498660811935744" +"google","neutral","126498593770184704" +"google","neutral","126498481534808064" +"google","neutral","126498259228303360" +"google","neutral","126498218501607426" +"google","neutral","126498101124005889" +"google","neutral","126497969972330497" +"google","neutral","126497822009860096" +"google","neutral","126497642493648897" +"google","neutral","126497423815213056" +"google","neutral","126497346870718464" +"google","neutral","126497339933327360" +"google","neutral","126497339073495040" +"google","neutral","126497335831306240" +"google","neutral","126497288301457408" +"google","neutral","126497256382803968" +"google","neutral","126497160345829376" +"google","neutral","126497100866387969" +"google","neutral","126497008197435392" +"google","neutral","126496951746301952" +"google","neutral","126496930204352512" +"google","neutral","126496891797118976" +"google","neutral","126496853742198784" +"google","neutral","126496835933179904" +"google","neutral","126496835920609280" +"google","neutral","126496802634612736" +"google","neutral","126496726143086592" +"google","neutral","126496688163655680" +"google","neutral","126496634120056832" +"google","neutral","126496571541045248" +"google","neutral","126496553811709952" +"google","neutral","126496553748803586" +"google","neutral","126496493648609280" +"google","neutral","126496395132796929" +"google","neutral","126496303738912769" +"google","neutral","126496230984519680" +"google","neutral","126496228572790784" +"google","neutral","126496131554344960" +"google","neutral","126496131545960449" +"google","neutral","126496131529183232" +"google","neutral","126496131441102848" +"google","neutral","126496131411742721" +"google","neutral","126496131382382592" +"google","neutral","126496131378188289" +"google","neutral","126496131374006272" +"google","neutral","126496131353018368" +"google","neutral","126496068467822593" +"google","neutral","126495741014315008" +"google","neutral","126495706356789248" +"google","neutral","126495690636529664" +"google","neutral","126495653747634176" +"google","neutral","126495340026273792" 
+"google","neutral","126495319792959488" +"google","neutral","126495268538560512" +"google","neutral","126495169536208896" +"google","neutral","126495125303083009" +"google","neutral","126495072102522880" +"google","neutral","126495001868906496" +"google","neutral","126494909548077056" +"google","neutral","126494729474015232" +"google","neutral","126494715024637952" +"google","neutral","126494709605609472" +"google","neutral","126494691016441857" +"google","neutral","126494645403398145" +"google","neutral","126494569184505856" +"google","neutral","126494532857643008" +"google","neutral","126494358508806144" +"google","neutral","126494314057568256" +"google","neutral","126494270650724353" +"google","neutral","126494239143116801" +"google","neutral","126494171065364480" +"google","neutral","126494094276042752" +"google","neutral","126493962499391488" +"google","neutral","126493954429550593" +"google","neutral","126493895625424896" +"google","neutral","126493811638677504" +"google","neutral","126493785533321216" +"google","neutral","126493625285750784" +"google","neutral","126493567182061569" +"google","neutral","126493558365630465" +"google","neutral","126493553055633408" +"google","neutral","126493550618742784" +"google","neutral","126493370028797953" +"google","neutral","126493352370765824" +"google","neutral","126493335518068736" +"google","neutral","126493327540502528" +"google","neutral","126493322268250114" +"google","neutral","126493312650719232" +"google","neutral","126493280832724992" +"google","neutral","126493225811841024" +"google","neutral","126493189254291457" +"google","neutral","126493174477758464" +"google","neutral","126493155855052800" +"google","neutral","126493015907897344" +"google","neutral","126492985834733568" +"google","neutral","126492945057718272" +"google","neutral","126492886517809152" +"google","neutral","126492864174755841" +"google","neutral","126492839998783488" +"google","neutral","126492795027456000" +"google","neutral","126492775930798080" +"google","neutral","126492775842713600" +"google","neutral","126492759262633984" +"google","neutral","126492737766834177" +"google","neutral","126492735598366720" +"google","neutral","126492726601584640" +"google","neutral","126492719987179520" +"google","neutral","126492692984238080" +"google","neutral","126492604799004672" +"google","neutral","126492471759867904" +"google","neutral","126492440914956290" +"google","neutral","126492364138221569" +"google","neutral","126492325219278848" +"google","neutral","126492310866362368" +"google","neutral","126492274766004226" +"google","neutral","126492105026703360" +"google","neutral","126492088270454785" +"google","neutral","126492059308785664" +"google","neutral","126491876860755969" +"google","neutral","126491752260575232" +"google","neutral","126491726276853760" +"google","neutral","126491658341715969" +"google","neutral","126491610199490560" +"google","neutral","126491517924810752" +"google","neutral","126491513038442496" +"google","neutral","126491459078729728" +"google","neutral","126491267411619840" +"google","neutral","126491255436881920" +"google","neutral","126491202810953728" +"google","neutral","126491025643552768" +"google","neutral","126490806315008000" +"google","neutral","126490801982275584" +"google","neutral","126490797225934850" +"google","neutral","126490763151421440" +"google","neutral","126490760114733056" +"google","neutral","126490712299675649" +"google","neutral","126490709921497088" +"google","neutral","126490662416826369" 
+"google","neutral","126490566837026817" +"google","neutral","126490278143082496" +"google","neutral","126490265463701504" +"google","neutral","126490235877081088" +"google","neutral","126490166897541120" +"google","neutral","126490158815125504" +"google","neutral","126490134869852161" +"google","neutral","126490118050684928" +"google","neutral","126490034865045504" +"google","neutral","126490011901231104" +"google","neutral","126489950614073345" +"google","neutral","126489908998176769" +"google","neutral","126489751325908992" +"google","neutral","126489719012990976" +"google","neutral","126489665116192768" +"google","neutral","126489609889783808" +"google","neutral","126489523776536576" +"google","neutral","126489506672160768" +"google","neutral","126489489328705536" +"google","neutral","126489300828307456" +"google","neutral","126489263490596864" +"google","neutral","126489146029129729" +"google","neutral","126489064319881216" +"google","neutral","126489048717074432" +"google","neutral","126488983164289026" +"google","neutral","126488935026266112" +"google","neutral","126488912037289984" +"google","neutral","126488727315943425" +"google","neutral","126488649905864705" +"google","neutral","126488582218203136" +"google","neutral","126488561888399360" +"google","neutral","126488352135450625" +"google","neutral","126487924043821057" +"google","neutral","126487912433975297" +"google","neutral","126487846038147073" +"google","neutral","126487807156944899" +"google","neutral","126487788433584129" +"google","neutral","126487744787660800" +"google","neutral","126487541569433600" +"google","neutral","126487465203736576" +"google","neutral","126487372039847937" +"google","neutral","126487167823388673" +"google","neutral","126487043462266880" +"google","neutral","126486964408033280" +"google","neutral","126486941364527104" +"google","neutral","126486926827065344" +"google","neutral","126486798640754688" +"google","neutral","126486616343724032" +"google","neutral","126486384902017024" +"google","neutral","126486348713570304" +"google","neutral","126486111689256960" +"google","neutral","126486051530354689" +"google","neutral","126485712836112384" +"google","neutral","126485684113522689" +"google","neutral","126485085942845440" +"google","neutral","126484200269426688" +"google","neutral","126484162302586880" +"google","neutral","126484021369778177" +"google","neutral","126484018211454976" +"google","irrelevant","126535062148759552" +"google","irrelevant","126534927637417984" +"google","irrelevant","126534908670783489" +"google","irrelevant","126534871299538944" +"google","irrelevant","126534769105305600" +"google","irrelevant","126534678156029953" +"google","irrelevant","126534649995464704" +"google","irrelevant","126534648800096256" +"google","irrelevant","126534648611340288" +"google","irrelevant","126534647264972800" +"google","irrelevant","126534525089091584" +"google","irrelevant","126534223950651392" +"google","irrelevant","126534127435530240" +"google","irrelevant","126534054739836929" +"google","irrelevant","126534037929074688" +"google","irrelevant","126533966156148736" +"google","irrelevant","126533775411781632" +"google","irrelevant","126533688321253376" +"google","irrelevant","126533686282825728" +"google","irrelevant","126533684252774401" +"google","irrelevant","126533682273071105" +"google","irrelevant","126533680301752320" +"google","irrelevant","126533636576129024" +"google","irrelevant","126533618997792768" +"google","irrelevant","126533452618137600" 
+"google","irrelevant","126533364789424129" +"google","irrelevant","126533325702701056" +"google","irrelevant","126533318928896001" +"google","irrelevant","126533134819921920" +"google","irrelevant","126533119573630976" +"google","irrelevant","126533008760111104" +"google","irrelevant","126532931236794369" +"google","irrelevant","126532704287199232" +"google","irrelevant","126532561315958784" +"google","irrelevant","126532543158820864" +"google","irrelevant","126532472258301952" +"google","irrelevant","126532467954950144" +"google","irrelevant","126532295665532928" +"google","irrelevant","126532294122024960" +"google","irrelevant","126532119278264320" +"google","irrelevant","126532046360289280" +"google","irrelevant","126531989846241280" +"google","irrelevant","126531827073679360" +"google","irrelevant","126531797419950080" +"google","irrelevant","126531615181651968" +"google","irrelevant","126531519824142337" +"google","irrelevant","126531381152059392" +"google","irrelevant","126531281650589696" +"google","irrelevant","126531280253882368" +"google","irrelevant","126531278685216768" +"google","irrelevant","126531098279804928" +"google","irrelevant","126530329820397568" +"google","irrelevant","126530251684720641" +"google","irrelevant","126530242612432898" +"google","irrelevant","126530163029704705" +"google","irrelevant","126530054023946240" +"google","irrelevant","126530000303292416" +"google","irrelevant","126529908850700289" +"google","irrelevant","126529643829399553" +"google","irrelevant","126529610614718464" +"google","irrelevant","126529609771659264" +"google","irrelevant","126529491819433985" +"google","irrelevant","126529490582118400" +"google","irrelevant","126529267503861760" +"google","irrelevant","126529265142472704" +"google","irrelevant","126529171773067265" +"google","irrelevant","126529003879272448" +"google","irrelevant","126529001866002432" +"google","irrelevant","126528999554949120" +"google","irrelevant","126528997763989504" +"google","irrelevant","126528993187999744" +"google","irrelevant","126528938326495232" +"google","irrelevant","126528826279866369" +"google","irrelevant","126528658834853888" +"google","irrelevant","126528444476555264" +"google","irrelevant","126528018469494784" +"google","irrelevant","126527955475251202" +"google","irrelevant","126527760746295296" +"google","irrelevant","126527053133656066" +"google","irrelevant","126527051292356609" +"google","irrelevant","126526946946457601" +"google","irrelevant","126526850477469696" +"google","irrelevant","126526815010426880" +"google","irrelevant","126526781019787264" +"google","irrelevant","126526765995802624" +"google","irrelevant","126526602686369792" +"google","irrelevant","126526465280970752" +"google","irrelevant","126526113131413504" +"google","irrelevant","126526082953379840" +"google","irrelevant","126526068835368961" +"google","irrelevant","126526020923834368" +"google","irrelevant","126526019602628608" +"google","irrelevant","126526019208351744" +"google","irrelevant","126526017660653568" +"google","irrelevant","126525994348711937" +"google","irrelevant","126525853738860544" +"google","irrelevant","126525817713991680" +"google","irrelevant","126525815084158976" +"google","irrelevant","126525130426957824" +"google","irrelevant","126524896665796608" +"google","irrelevant","126524657875697664" +"google","irrelevant","126524361065758720" +"google","irrelevant","126524217889001472" +"google","irrelevant","126524216412618752" +"google","irrelevant","126524213401096192" 
+"google","irrelevant","126523942314840064" +"google","irrelevant","126523941035577344" +"google","irrelevant","126523916817674240" +"google","irrelevant","126523574470189057" +"google","irrelevant","126523560096313344" +"google","irrelevant","126523556958961664" +"google","irrelevant","126523548914290688" +"google","irrelevant","126523356773232641" +"google","irrelevant","126523270420905984" +"google","irrelevant","126523229400600576" +"google","irrelevant","126523147091578880" +"google","irrelevant","126523008524365825" +"google","irrelevant","126522989633212417" +"google","irrelevant","126522783650955264" +"google","irrelevant","126522732195233792" +"google","irrelevant","126522730001600512" +"google","irrelevant","126522728101584897" +"google","irrelevant","126522726239313920" +"google","irrelevant","126522723441721345" +"google","irrelevant","126522646149087233" +"google","irrelevant","126522540834304000" +"google","irrelevant","126522492700471297" +"google","irrelevant","126522478280450048" +"google","irrelevant","126522273963319298" +"google","irrelevant","126522127775047680" +"google","irrelevant","126521734076698625" +"google","irrelevant","126521694583132160" +"google","irrelevant","126521635464425472" +"google","irrelevant","126521523220652032" +"google","irrelevant","126521505097068544" +"google","irrelevant","126521233603960832" +"google","irrelevant","126520774025678848" +"google","irrelevant","126520080543649792" +"google","irrelevant","126519943234732032" +"google","irrelevant","126519837173358592" +"google","irrelevant","126519715085549568" +"google","irrelevant","126519472172445696" +"google","irrelevant","126519390245109760" +"google","irrelevant","126519359714766848" +"google","irrelevant","126518917635125248" +"google","irrelevant","126518845983830016" +"google","irrelevant","126518328373153792" +"google","irrelevant","126517747894075392" +"google","irrelevant","126517575336214529" +"google","irrelevant","126517570806358018" +"google","irrelevant","126517567694180352" +"google","irrelevant","126517492049915904" +"google","irrelevant","126517413788401664" +"google","irrelevant","126517183139430400" +"google","irrelevant","126516892029558784" +"google","irrelevant","126516806360899584" +"google","irrelevant","126516804108562433" +"google","irrelevant","126516802011402240" +"google","irrelevant","126516602316406784" +"google","irrelevant","126516523408949248" +"google","irrelevant","126516376566366208" +"google","irrelevant","126516048961863681" +"google","irrelevant","126516048626335744" +"google","irrelevant","126515979059609603" +"google","irrelevant","126515953516298241" +"google","irrelevant","126515696497725440" +"google","irrelevant","126515695352688640" +"google","irrelevant","126515693993738240" +"google","irrelevant","126515640889655296" +"google","irrelevant","126515639362920449" +"google","irrelevant","126515633314729984" +"google","irrelevant","126515631980937216" +"google","irrelevant","126515608887115776" +"google","irrelevant","126515498467860480" +"google","irrelevant","126515353110065152" +"google","irrelevant","126515159995912193" +"google","irrelevant","126515137212456960" +"google","irrelevant","126514888951595008" +"google","irrelevant","126514866059100160" +"google","irrelevant","126514704347693056" +"google","irrelevant","126514555743518720" +"google","irrelevant","126514131544178688" +"google","irrelevant","126513979706183682" +"google","irrelevant","126513961553244160" +"google","irrelevant","126513949163257856" 
+"google","irrelevant","126513706912841729" +"google","irrelevant","126513429409312768" +"google","irrelevant","126513410128089088" +"google","irrelevant","126513333191979008" +"google","irrelevant","126512924385738752" +"google","irrelevant","126512627110252544" +"google","irrelevant","126512625713545216" +"google","irrelevant","126512053660827648" +"google","irrelevant","126511257170886656" +"google","irrelevant","126510558764736513" +"google","irrelevant","126510399884496897" +"google","irrelevant","126509931657564160" +"google","irrelevant","126509472834269184" +"google","irrelevant","126509298279907328" +"google","irrelevant","126509273994891264" +"google","irrelevant","126509203148902400" +"google","irrelevant","126508947980029952" +"google","irrelevant","126508734884220929" +"google","irrelevant","126508667007803392" +"google","irrelevant","126508103368835072" +"google","irrelevant","126507946707398656" +"google","irrelevant","126507876469583872" +"google","irrelevant","126507802251370497" +"google","irrelevant","126507794777128964" +"google","irrelevant","126507720546328576" +"google","irrelevant","126507456649101314" +"google","irrelevant","126507441985826816" +"google","irrelevant","126507086522748929" +"google","irrelevant","126507016368816129" +"google","irrelevant","126506951063511041" +"google","irrelevant","126506683131379713" +"google","irrelevant","126506375886016513" +"google","irrelevant","126506159938088961" +"google","irrelevant","126506135082631168" +"google","irrelevant","126505911584948225" +"google","irrelevant","126505668181098496" +"google","irrelevant","126505497888161792" +"google","irrelevant","126505070098518016" +"google","irrelevant","126504896307539968" +"google","irrelevant","126504863369670657" +"google","irrelevant","126504793039572992" +"google","irrelevant","126504734864576512" +"google","irrelevant","126504701142376448" +"google","irrelevant","126504580145094656" +"google","irrelevant","126504516479754241" +"google","irrelevant","126504512960724992" +"google","irrelevant","126504398925996032" +"google","irrelevant","126504335734607873" +"google","irrelevant","126504297763581952" +"google","irrelevant","126504285436514304" +"google","irrelevant","126504090053259265" +"google","irrelevant","126504049393672192" +"google","irrelevant","126503517149069313" +"google","irrelevant","126503460836343810" +"google","irrelevant","126503277117452288" +"google","irrelevant","126502927337660416" +"google","irrelevant","126502611657564160" +"google","irrelevant","126502546486464512" +"google","irrelevant","126502150472871937" +"google","irrelevant","126502129060950016" +"google","irrelevant","126501952317165568" +"google","irrelevant","126501402397782016" +"google","irrelevant","126501224148242432" +"google","irrelevant","126501185321566208" +"google","irrelevant","126501161502126080" +"google","irrelevant","126501085794930688" +"google","irrelevant","126500586681143296" +"google","irrelevant","126500532671086592" +"google","irrelevant","126500515872915457" +"google","irrelevant","126500195570683905" +"google","irrelevant","126500127354535937" +"google","irrelevant","126500080659341315" +"google","irrelevant","126500021863579648" +"google","irrelevant","126500018000633858" +"google","irrelevant","126499879638925312" +"google","irrelevant","126499873821437952" +"google","irrelevant","126499835481292801" +"google","irrelevant","126499741176561664" +"google","irrelevant","126499498133426176" +"google","irrelevant","126499369905160192" 
+"google","irrelevant","126499359528468480" +"google","irrelevant","126499266851119104" +"google","irrelevant","126499256503762944" +"google","irrelevant","126499194247725056" +"google","irrelevant","126499065423859712" +"google","irrelevant","126498881142927360" +"google","irrelevant","126498880048214016" +"google","irrelevant","126498875405115392" +"google","irrelevant","126498743993376768" +"google","irrelevant","126498725471334401" +"google","irrelevant","126498584110710784" +"google","irrelevant","126498451289673728" +"google","irrelevant","126498295219621888" +"google","irrelevant","126498266799013888" +"google","irrelevant","126497733262585856" +"google","irrelevant","126497725633150977" +"google","irrelevant","126497618258964480" +"google","irrelevant","126497611296407552" +"google","irrelevant","126497446955188224" +"google","irrelevant","126497409340686336" +"google","irrelevant","126497063742603264" +"google","irrelevant","126497027273129984" +"google","irrelevant","126496716789792768" +"google","irrelevant","126496706257895424" +"google","irrelevant","126496681717014528" +"google","irrelevant","126496237879959553" +"google","irrelevant","126496216052801536" +"google","irrelevant","126496203822211072" +"google","irrelevant","126496202693939200" +"google","irrelevant","126496202509389824" +"google","irrelevant","126496200668098561" +"google","irrelevant","126495762568851456" +"google","irrelevant","126495739663757313" +"google","irrelevant","126495620025417729" +"google","irrelevant","126495448436453377" +"google","irrelevant","126495346569392129" +"google","irrelevant","126495269201264640" +"google","irrelevant","126495253535531008" +"google","irrelevant","126495253321613312" +"google","irrelevant","126495251757150208" +"google","irrelevant","126495232417218560" +"google","irrelevant","126495208505479168" +"google","irrelevant","126495101131309056" +"google","irrelevant","126495097180262400" +"google","irrelevant","126495028548874241" +"google","irrelevant","126495020156063744" +"google","irrelevant","126494978561146881" +"google","irrelevant","126494884726190080" +"google","irrelevant","126494808847040513" +"google","irrelevant","126494774428565504" +"google","irrelevant","126494752517537792" +"google","irrelevant","126494681617010689" +"google","irrelevant","126494434887090177" +"google","irrelevant","126494286316445696" +"google","irrelevant","126494260269821952" +"google","irrelevant","126494247082934272" +"google","irrelevant","126494176551514112" +"google","irrelevant","126494166145437696" +"google","irrelevant","126494156368523267" +"google","irrelevant","126494152375533568" +"google","irrelevant","126494104187183104" +"google","irrelevant","126494033882259458" +"google","irrelevant","126493930794659840" +"google","irrelevant","126493860804308992" +"google","irrelevant","126493833608441856" +"google","irrelevant","126493751215525889" +"google","irrelevant","126493741354713088" +"google","irrelevant","126493715933052928" +"google","irrelevant","126493543249358848" +"google","irrelevant","126493540053303296" +"google","irrelevant","126493537540907008" +"google","irrelevant","126493534336458752" +"google","irrelevant","126493525578747905" +"google","irrelevant","126493517622149121" +"google","irrelevant","126493505047629824" +"google","irrelevant","126493282355261440" +"google","irrelevant","126493154219266048" +"google","irrelevant","126493144824020993" +"google","irrelevant","126493116554424320" +"google","irrelevant","126493078717603840" 
+"google","irrelevant","126493008244912128" +"google","irrelevant","126492972735922177" +"google","irrelevant","126492948740313088" +"google","irrelevant","126492905476067328" +"google","irrelevant","126492820130373632" +"google","irrelevant","126492770348171264" +"google","irrelevant","126492723673960448" +"google","irrelevant","126492704707321856" +"google","irrelevant","126492543146926080" +"google","irrelevant","126492542610051072" +"google","irrelevant","126492533860728832" +"google","irrelevant","126492487111028736" +"google","irrelevant","126492457276940288" +"google","irrelevant","126492452990369792" +"google","irrelevant","126492366776446976" +"google","irrelevant","126492339559608320" +"google","irrelevant","126492333519802368" +"google","irrelevant","126492258504683520" +"google","irrelevant","126492256868900865" +"google","irrelevant","126492248547405825" +"google","irrelevant","126492186060656640" +"google","irrelevant","126492179525931009" +"google","irrelevant","126492130154774528" +"google","irrelevant","126492053248020481" +"google","irrelevant","126492052371406848" +"google","irrelevant","126492019009916928" +"google","irrelevant","126492011896373249" +"google","irrelevant","126492010864574464" +"google","irrelevant","126491986927685632" +"google","irrelevant","126491961271136256" +"google","irrelevant","126491942077992961" +"google","irrelevant","126491928320688128" +"google","irrelevant","126491895512838144" +"google","irrelevant","126491807205965825" +"google","irrelevant","126491801723994114" +"google","irrelevant","126491743301537792" +"google","irrelevant","126491727673569280" +"google","irrelevant","126491723353427968" +"google","irrelevant","126491662879965184" +"google","irrelevant","126491659008610304" +"google","irrelevant","126491616641953792" +"google","irrelevant","126491544558632960" +"google","irrelevant","126491450035814400" +"google","irrelevant","126491409871155200" +"google","irrelevant","126491356481859585" +"google","irrelevant","126491323774672896" +"google","irrelevant","126491290627080194" +"google","irrelevant","126491272088260609" +"google","irrelevant","126491237720141825" +"google","irrelevant","126491078797950976" +"google","irrelevant","126491075870343168" +"google","irrelevant","126490998816768001" +"google","irrelevant","126490976565985281" +"google","irrelevant","126490918885920768" +"google","irrelevant","126490858735407104" +"google","irrelevant","126490790150144000" +"google","irrelevant","126490759766618112" +"google","irrelevant","126490644616200192" +"google","irrelevant","126490589125550080" +"google","irrelevant","126490558230302721" +"google","irrelevant","126490549367738368" +"google","irrelevant","126490540794576896" +"google","irrelevant","126490516186595328" +"google","irrelevant","126490479859736576" +"google","irrelevant","126490448436015104" +"google","irrelevant","126490435836325888" +"google","irrelevant","126490420808126464" +"google","irrelevant","126490021493616640" +"google","irrelevant","126489969677176832" +"google","irrelevant","126489957538873344" +"google","irrelevant","126489935313256449" +"google","irrelevant","126489934088511490" +"google","irrelevant","126489915042168833" +"google","irrelevant","126489907349831680" +"google","irrelevant","126489892678144000" +"google","irrelevant","126489830677942272" +"google","irrelevant","126489823103041537" +"google","irrelevant","126489808913694720" +"google","irrelevant","126489703418568705" +"google","irrelevant","126489580835848193" 
+"google","irrelevant","126489542436995072" +"google","irrelevant","126489540906070018" +"google","irrelevant","126489462648745984" +"google","irrelevant","126489460551585792" +"google","irrelevant","126489430830743552" +"google","irrelevant","126489415248920576" +"google","irrelevant","126489326786850816" +"google","irrelevant","126489254535774208" +"google","irrelevant","126489247581609986" +"google","irrelevant","126489192866910208" +"google","irrelevant","126489107143737344" +"google","irrelevant","126489099858214914" +"google","irrelevant","126489084003762176" +"google","irrelevant","126489023928741890" +"google","irrelevant","126489012423770113" +"google","irrelevant","126488974595334144" +"google","irrelevant","126488887026655232" +"google","irrelevant","126488858815774721" +"google","irrelevant","126488807922085889" +"google","irrelevant","126488559795453954" +"google","irrelevant","126488553352994817" +"google","irrelevant","126488503428190208" +"google","irrelevant","126488427779727360" +"google","irrelevant","126488387531194369" +"google","irrelevant","126488315041030144" +"google","irrelevant","126488313187143681" +"google","irrelevant","126488305859698688" +"google","irrelevant","126488293717192704" +"google","irrelevant","126488289988452352" +"google","irrelevant","126488289157976064" +"google","irrelevant","126488234359406593" +"google","irrelevant","126488066977308673" +"google","irrelevant","126488061369532417" +"google","irrelevant","126488048962772992" +"google","irrelevant","126487986425696259" +"google","irrelevant","126487974975242240" +"google","irrelevant","126487943509581824" +"google","irrelevant","126487924698128384" +"google","irrelevant","126487924249329664" +"google","irrelevant","126487911465095169" +"google","irrelevant","126487879139590144" +"google","irrelevant","126487855362088961" +"google","irrelevant","126487852824526848" +"google","irrelevant","126487831572004864" +"google","irrelevant","126487783077449728" +"google","irrelevant","126487738294861824" +"google","irrelevant","126487624444674048" +"google","irrelevant","126487457381359617" +"google","irrelevant","126487454797668353" +"google","irrelevant","126487422249861120" +"google","irrelevant","126487408500940800" +"google","irrelevant","126487406131150848" +"google","irrelevant","126487403220320258" +"google","irrelevant","126487385461633024" +"google","irrelevant","126487318650568704" +"google","irrelevant","126487165969510400" +"google","irrelevant","126487155886407680" +"google","irrelevant","126487143555153920" +"google","irrelevant","126487140317147136" +"google","irrelevant","126486968682037248" +"google","irrelevant","126486956149448704" +"google","irrelevant","126486814520381440" +"google","irrelevant","126486790818373632" +"google","irrelevant","126486753598119936" +"google","irrelevant","126486526631743488" +"google","irrelevant","126486211824058368" +"google","irrelevant","126486125874384896" +"google","irrelevant","126485882265018368" +"google","irrelevant","126485702056751105" +"google","irrelevant","126485474016628736" +"google","irrelevant","126484568239906817" +"google","irrelevant","126484213737340928" +"google","irrelevant","126484000075292672" +"microsoft","positive","126803641486163969" +"microsoft","positive","126792129832951808" +"microsoft","positive","126788430679113728" +"microsoft","positive","126780006964805632" +"microsoft","positive","126779403605770241" +"microsoft","positive","126779217911349248" +"microsoft","positive","126774092274741248" 
+"microsoft","positive","126768366345138176" +"microsoft","positive","126767508253454336" +"microsoft","positive","126764085609111552" +"microsoft","positive","126762337087655936" +"microsoft","positive","126755785391869954" +"microsoft","positive","126755232393867264" +"microsoft","positive","126754593714606085" +"microsoft","positive","126754500278104064" +"microsoft","positive","126750973329817601" +"microsoft","positive","126749591956762624" +"microsoft","positive","126748497096622080" +"microsoft","positive","126748156003221504" +"microsoft","positive","126748155021762561" +"microsoft","positive","126748153952206849" +"microsoft","positive","126747965393084416" +"microsoft","positive","126747962817781760" +"microsoft","positive","126747960900984832" +"microsoft","positive","126744770713362432" +"microsoft","positive","126743288320491521" +"microsoft","positive","126742063961214976" +"microsoft","positive","126741671965769728" +"microsoft","positive","126738939594813440" +"microsoft","positive","126736431929507840" +"microsoft","positive","126735438948995072" +"microsoft","positive","126734527551913984" +"microsoft","positive","126732577301217280" +"microsoft","positive","126732240368570369" +"microsoft","positive","126732148144209920" +"microsoft","positive","126731873517965313" +"microsoft","positive","126730153454870529" +"microsoft","positive","126729713568849920" +"microsoft","positive","126725535677157376" +"microsoft","positive","126725332031127552" +"microsoft","positive","126721324042305536" +"microsoft","positive","126716103123673088" +"microsoft","positive","126715314007314434" +"microsoft","positive","126714241427312641" +"microsoft","positive","126709513947594753" +"microsoft","positive","126705831126384640" +"microsoft","positive","126695555685560320" +"microsoft","positive","126693834846515200" +"microsoft","positive","126692533869871106" +"microsoft","positive","126691809417113600" +"microsoft","positive","126689578886246400" +"microsoft","positive","126689007512993792" +"microsoft","positive","126688284343672832" +"microsoft","positive","126688181688078338" +"microsoft","positive","126685198531297281" +"microsoft","positive","126677986522054657" +"microsoft","positive","126674853230149632" +"microsoft","positive","126670032951443456" +"microsoft","positive","126669652469350401" +"microsoft","positive","126668529046007808" +"microsoft","positive","126665091381854208" +"microsoft","positive","126663720075141121" +"microsoft","positive","126659125751971840" +"microsoft","positive","126658961263951873" +"microsoft","positive","126658937155108866" +"microsoft","positive","126658528965439488" +"microsoft","positive","126650473322262529" +"microsoft","positive","126638821948403712" +"microsoft","positive","126637126010929152" +"microsoft","positive","126633708315873280" +"microsoft","positive","126629320948060161" +"microsoft","positive","126611718376919041" +"microsoft","positive","126610651916410881" +"microsoft","positive","126608712407322624" +"microsoft","positive","126605340270788608" +"microsoft","positive","126603756971360256" +"microsoft","positive","126599751402668032" +"microsoft","positive","126596256138137600" +"microsoft","positive","126583935139454976" +"microsoft","positive","126581165636333568" +"microsoft","positive","126579574921371648" +"microsoft","positive","126578340902617088" +"microsoft","positive","126570919513686018" +"microsoft","positive","126559269603647488" +"microsoft","positive","126555956975910912" +"microsoft","positive","126553559847288832" 
+"microsoft","positive","126552645497405440" +"microsoft","positive","126532025552347136" +"microsoft","positive","126529895923843072" +"microsoft","positive","126499160995282944" +"microsoft","positive","126494986983325696" +"microsoft","positive","126492789939765248" +"microsoft","positive","126479912407273472" +"microsoft","negative","126802977813037057" +"microsoft","negative","126795554079510528" +"microsoft","negative","126793207613894659" +"microsoft","negative","126790861831938048" +"microsoft","negative","126789739180326913" +"microsoft","negative","126788223409197058" +"microsoft","negative","126786271921184768" +"microsoft","negative","126784810755690496" +"microsoft","negative","126782820709441536" +"microsoft","negative","126781543879421952" +"microsoft","negative","126778114385772548" +"microsoft","negative","126771575772020736" +"microsoft","negative","126771575654596608" +"microsoft","negative","126771575591665665" +"microsoft","negative","126766935634485249" +"microsoft","negative","126763744557662209" +"microsoft","negative","126763108080427009" +"microsoft","negative","126761988608098304" +"microsoft","negative","126759080860725248" +"microsoft","negative","126758070176059392" +"microsoft","negative","126754989409452032" +"microsoft","negative","126754753697943552" +"microsoft","negative","126753683630333952" +"microsoft","negative","126750481069510656" +"microsoft","negative","126748219261726721" +"microsoft","negative","126748091083788288" +"microsoft","negative","126747859960856576" +"microsoft","negative","126747247563112448" +"microsoft","negative","126746946093330434" +"microsoft","negative","126744670465306624" +"microsoft","negative","126744386137624576" +"microsoft","negative","126744132763910144" +"microsoft","negative","126744130784198656" +"microsoft","negative","126744129135845377" +"microsoft","negative","126743725677346816" +"microsoft","negative","126740373883191296" +"microsoft","negative","126738867087884288" +"microsoft","negative","126730748882460672" +"microsoft","negative","126730580934135809" +"microsoft","negative","126730265551843329" +"microsoft","negative","126727758108823553" +"microsoft","negative","126726935572262912" +"microsoft","negative","126726458776358913" +"microsoft","negative","126724991495569408" +"microsoft","negative","126722961381134336" +"microsoft","negative","126722095743893504" +"microsoft","negative","126720736848117760" +"microsoft","negative","126719328035942400" +"microsoft","negative","126718833493938176" +"microsoft","negative","126716806688804865" +"microsoft","negative","126714471543619584" +"microsoft","negative","126712110095925250" +"microsoft","negative","126705293055889408" +"microsoft","negative","126704033779023872" +"microsoft","negative","126700315507572736" +"microsoft","negative","126700014385897472" +"microsoft","negative","126692062757269505" +"microsoft","negative","126691815394000896" +"microsoft","negative","126690175656001538" +"microsoft","negative","126689051960033280" +"microsoft","negative","126686283769712640" +"microsoft","negative","126686075287642112" +"microsoft","negative","126682137972834305" +"microsoft","negative","126681641388216320" +"microsoft","negative","126681389570596865" +"microsoft","negative","126680630087008257" +"microsoft","negative","126679134817624066" +"microsoft","negative","126677032837971968" +"microsoft","negative","126675041353076738" +"microsoft","negative","126672651459633152" +"microsoft","negative","126671856404144128" +"microsoft","negative","126668133405696000" 
+"microsoft","negative","126666768541421571" +"microsoft","negative","126666110237032448" +"microsoft","negative","126662601139695616" +"microsoft","negative","126647829656641536" +"microsoft","negative","126647697800310784" +"microsoft","negative","126647264943943682" +"microsoft","negative","126646630702260224" +"microsoft","negative","126645939208327168" +"microsoft","negative","126644987474608128" +"microsoft","negative","126644615280463872" +"microsoft","negative","126643061567668224" +"microsoft","negative","126642517310570497" +"microsoft","negative","126642328466243584" +"microsoft","negative","126642036572041216" +"microsoft","negative","126640632478445568" +"microsoft","negative","126637450008346624" +"microsoft","negative","126636886977556480" +"microsoft","negative","126636535813636096" +"microsoft","negative","126635317108289536" +"microsoft","negative","126628406258450432" +"microsoft","negative","126622538557177856" +"microsoft","negative","126618374972248064" +"microsoft","negative","126615502259879938" +"microsoft","negative","126614764339212288" +"microsoft","negative","126606967736238080" +"microsoft","negative","126606323625361408" +"microsoft","negative","126603861933817856" +"microsoft","negative","126602276872794112" +"microsoft","negative","126602276805685248" +"microsoft","negative","126599337361932288" +"microsoft","negative","126595347198902272" +"microsoft","negative","126594799062102016" +"microsoft","negative","126586083470360576" +"microsoft","negative","126583473929588736" +"microsoft","negative","126579470424473600" +"microsoft","negative","126579121303207936" +"microsoft","negative","126577595553824770" +"microsoft","negative","126574432159408129" +"microsoft","negative","126573688941318144" +"microsoft","negative","126573680665964544" +"microsoft","negative","126572762411171840" +"microsoft","negative","126572362316513280" +"microsoft","negative","126570851389800448" +"microsoft","negative","126570732430966785" +"microsoft","negative","126570180070481920" +"microsoft","negative","126567507350913024" +"microsoft","negative","126566106252062720" +"microsoft","negative","126565636703924225" +"microsoft","negative","126564844211154944" +"microsoft","negative","126560723794010112" +"microsoft","negative","126558437017530368" +"microsoft","negative","126546665342640128" +"microsoft","negative","126546043193147394" +"microsoft","negative","126543225501650944" +"microsoft","negative","126525932625334272" +"microsoft","negative","126519630868119552" +"microsoft","negative","126518620095393792" +"microsoft","negative","126517164416049152" +"microsoft","negative","126515911321591808" +"microsoft","negative","126513363529383937" +"microsoft","negative","126510880761458688" +"microsoft","negative","126508179281547264" +"microsoft","negative","126504435227701248" +"microsoft","negative","126488915996721153" +"microsoft","negative","126485347935862784" +"microsoft","negative","126484955047006209" +"microsoft","neutral","126809228194217984" +"microsoft","neutral","126808500356644864" +"microsoft","neutral","126808393892634625" +"microsoft","neutral","126805369619087360" +"microsoft","neutral","126805239209803777" +"microsoft","neutral","126804966491955201" +"microsoft","neutral","126804937438003200" +"microsoft","neutral","126804497493266433" +"microsoft","neutral","126803763603312640" +"microsoft","neutral","126803594216341506" +"microsoft","neutral","126803513190789121" +"microsoft","neutral","126803044426985472" +"microsoft","neutral","126802586178293760" 
+"microsoft","neutral","126802366996561920" +"microsoft","neutral","126800882477174784" +"microsoft","neutral","126800407585501184" +"microsoft","neutral","126800313029099520" +"microsoft","neutral","126799514270040064" +"microsoft","neutral","126798908335734785" +"microsoft","neutral","126798906129526784" +"microsoft","neutral","126797598425223169" +"microsoft","neutral","126797364823457792" +"microsoft","neutral","126797364433387520" +"microsoft","neutral","126797363418370048" +"microsoft","neutral","126797339758305280" +"microsoft","neutral","126797209244151808" +"microsoft","neutral","126797207159582720" +"microsoft","neutral","126797206232645635" +"microsoft","neutral","126796929366638592" +"microsoft","neutral","126795614649466880" +"microsoft","neutral","126795256225210368" +"microsoft","neutral","126795026771607553" +"microsoft","neutral","126795013752504320" +"microsoft","neutral","126795009986002946" +"microsoft","neutral","126794825998663680" +"microsoft","neutral","126793516398546945" +"microsoft","neutral","126792687138508801" +"microsoft","neutral","126792112313352192" +"microsoft","neutral","126791726559010816" +"microsoft","neutral","126791479925555200" +"microsoft","neutral","126790637243740160" +"microsoft","neutral","126789918826565632" +"microsoft","neutral","126789806905769984" +"microsoft","neutral","126789806356303872" +"microsoft","neutral","126789710705213440" +"microsoft","neutral","126789040015020033" +"microsoft","neutral","126788856476471296" +"microsoft","neutral","126788488183037953" +"microsoft","neutral","126786825913245698" +"microsoft","neutral","126786517531242496" +"microsoft","neutral","126786021340884992" +"microsoft","neutral","126785035599749121" +"microsoft","neutral","126784430185521154" +"microsoft","neutral","126782184748097536" +"microsoft","neutral","126781699811061760" +"microsoft","neutral","126781384600727552" +"microsoft","neutral","126781241423962112" +"microsoft","neutral","126780592036646913" +"microsoft","neutral","126779798986047488" +"microsoft","neutral","126779783760723968" +"microsoft","neutral","126779778090008576" +"microsoft","neutral","126779040282587138" +"microsoft","neutral","126778210783473665" +"microsoft","neutral","126776771159916546" +"microsoft","neutral","126775752610942976" +"microsoft","neutral","126774185577021445" +"microsoft","neutral","126773807875751936" +"microsoft","neutral","126773769262997504" +"microsoft","neutral","126773680196947968" +"microsoft","neutral","126773504644349954" +"microsoft","neutral","126773458519605248" +"microsoft","neutral","126772660024774656" +"microsoft","neutral","126772118879879169" +"microsoft","neutral","126771594470232064" +"microsoft","neutral","126769762452770819" +"microsoft","neutral","126768953027608576" +"microsoft","neutral","126768749243146240" +"microsoft","neutral","126768739692720128" +"microsoft","neutral","126768259734315008" +"microsoft","neutral","126768125386571776" +"microsoft","neutral","126768055769513984" +"microsoft","neutral","126768000949948416" +"microsoft","neutral","126767824386531329" +"microsoft","neutral","126767467077976065" +"microsoft","neutral","126767049312714752" +"microsoft","neutral","126766670109872128" +"microsoft","neutral","126766092629712896" +"microsoft","neutral","126765064299614209" +"microsoft","neutral","126764662510452736" +"microsoft","neutral","126764605623111680" +"microsoft","neutral","126764604218028033" +"microsoft","neutral","126762652402860033" +"microsoft","neutral","126762576557248512" 
+"microsoft","neutral","126761994098442241" +"microsoft","neutral","126761880801910784" +"microsoft","neutral","126761759041265664" +"microsoft","neutral","126761312016547840" +"microsoft","neutral","126761103677071362" +"microsoft","neutral","126761034341040128" +"microsoft","neutral","126758826337771520" +"microsoft","neutral","126758715981434881" +"microsoft","neutral","126756834257285120" +"microsoft","neutral","126755580604981249" +"microsoft","neutral","126755433108082688" +"microsoft","neutral","126755073664618496" +"microsoft","neutral","126754216160145409" +"microsoft","neutral","126753468160540672" +"microsoft","neutral","126752950654746624" +"microsoft","neutral","126752714385391616" +"microsoft","neutral","126752712300826624" +"microsoft","neutral","126752262281371649" +"microsoft","neutral","126751213692792832" +"microsoft","neutral","126751180557778945" +"microsoft","neutral","126750292212584449" +"microsoft","neutral","126749618229878784" +"microsoft","neutral","126749567428468736" +"microsoft","neutral","126749486545506304" +"microsoft","neutral","126749360745758722" +"microsoft","neutral","126749318198730752" +"microsoft","neutral","126748764869378048" +"microsoft","neutral","126748146582818816" +"microsoft","neutral","126747712082292736" +"microsoft","neutral","126747117120274432" +"microsoft","neutral","126746786806247424" +"microsoft","neutral","126746562062848001" +"microsoft","neutral","126746140791144448" +"microsoft","neutral","126745920967680000" +"microsoft","neutral","126745707414691840" +"microsoft","neutral","126745457232846849" +"microsoft","neutral","126745293378166784" +"microsoft","neutral","126745275686600705" +"microsoft","neutral","126745248121618434" +"microsoft","neutral","126744229912379392" +"microsoft","neutral","126743926903283713" +"microsoft","neutral","126743901968150528" +"microsoft","neutral","126743843348561920" +"microsoft","neutral","126743100809945090" +"microsoft","neutral","126743096896655362" +"microsoft","neutral","126742966890004481" +"microsoft","neutral","126742062635810817" +"microsoft","neutral","126741965915168771" +"microsoft","neutral","126741817365504000" +"microsoft","neutral","126741679024783360" +"microsoft","neutral","126741473726177280" +"microsoft","neutral","126740991087611904" +"microsoft","neutral","126740408926609409" +"microsoft","neutral","126740066138722305" +"microsoft","neutral","126739377232687105" +"microsoft","neutral","126737802657402880" +"microsoft","neutral","126737400658538496" +"microsoft","neutral","126737368001683457" +"microsoft","neutral","126737116049833984" +"microsoft","neutral","126736393987817472" +"microsoft","neutral","126735639508037632" +"microsoft","neutral","126735624299487232" +"microsoft","neutral","126735411811848192" +"microsoft","neutral","126735013667545088" +"microsoft","neutral","126734902514286593" +"microsoft","neutral","126734632120102912" +"microsoft","neutral","126734120058490881" +"microsoft","neutral","126733687944515584" +"microsoft","neutral","126733589328048128" +"microsoft","neutral","126733404870942720" +"microsoft","neutral","126732739083898881" +"microsoft","neutral","126732384602296320" +"microsoft","neutral","126732343254851584" +"microsoft","neutral","126732316000264192" +"microsoft","neutral","126731883659804672" +"microsoft","neutral","126731759646810113" +"microsoft","neutral","126729624217600001" +"microsoft","neutral","126729179994656769" +"microsoft","neutral","126728328358014980" +"microsoft","neutral","126728277896347649" 
+"microsoft","neutral","126727087770963968" +"microsoft","neutral","126726863811903488" +"microsoft","neutral","126726826923012096" +"microsoft","neutral","126726199123771392" +"microsoft","neutral","126725918629695489" +"microsoft","neutral","126725691059351552" +"microsoft","neutral","126725006729281536" +"microsoft","neutral","126724433791549440" +"microsoft","neutral","126724174378045440" +"microsoft","neutral","126723741651714048" +"microsoft","neutral","126723269326938112" +"microsoft","neutral","126723018989907968" +"microsoft","neutral","126722729444515840" +"microsoft","neutral","126722549584379905" +"microsoft","neutral","126722463265599488" +"microsoft","neutral","126722284776980480" +"microsoft","neutral","126722253579763712" +"microsoft","neutral","126722158394228736" +"microsoft","neutral","126722000549974016" +"microsoft","neutral","126721061797629953" +"microsoft","neutral","126720971515248641" +"microsoft","neutral","126719738293391360" +"microsoft","neutral","126719724376702976" +"microsoft","neutral","126718237252648960" +"microsoft","neutral","126717789829472256" +"microsoft","neutral","126717208557649920" +"microsoft","neutral","126716851706277889" +"microsoft","neutral","126716781510410240" +"microsoft","neutral","126716221575004160" +"microsoft","neutral","126715906096238592" +"microsoft","neutral","126715414716747777" +"microsoft","neutral","126714453105446912" +"microsoft","neutral","126713704099221504" +"microsoft","neutral","126713393599086592" +"microsoft","neutral","126713363689517056" +"microsoft","neutral","126713356739547136" +"microsoft","neutral","126713203240607744" +"microsoft","neutral","126712975309537280" +"microsoft","neutral","126711721376235520" +"microsoft","neutral","126710782615494657" +"microsoft","neutral","126710485604237313" +"microsoft","neutral","126709729924874241" +"microsoft","neutral","126709714565349376" +"microsoft","neutral","126708211108683776" +"microsoft","neutral","126708014081257472" +"microsoft","neutral","126707947744145409" +"microsoft","neutral","126707946955616256" +"microsoft","neutral","126707601504346112" +"microsoft","neutral","126707418536218624" +"microsoft","neutral","126707389142544384" +"microsoft","neutral","126706701968736256" +"microsoft","neutral","126706701306052609" +"microsoft","neutral","126706255413772288" +"microsoft","neutral","126705857206562816" +"microsoft","neutral","126705491685556225" +"microsoft","neutral","126705440640860161" +"microsoft","neutral","126705187942449152" +"microsoft","neutral","126705130681794560" +"microsoft","neutral","126704776506380288" +"microsoft","neutral","126704323542523904" +"microsoft","neutral","126704037738459136" +"microsoft","neutral","126703842732683265" +"microsoft","neutral","126703823837335552" +"microsoft","neutral","126703372207271936" +"microsoft","neutral","126702051001516032" +"microsoft","neutral","126701413907701760" +"microsoft","neutral","126701318978019328" +"microsoft","neutral","126701312447496192" +"microsoft","neutral","126701230331412480" +"microsoft","neutral","126700724389281792" +"microsoft","neutral","126700451163942912" +"microsoft","neutral","126699934694129664" +"microsoft","neutral","126699730028855296" +"microsoft","neutral","126699437711040512" +"microsoft","neutral","126699406765465601" +"microsoft","neutral","126699405356171264" +"microsoft","neutral","126699162879275008" +"microsoft","neutral","126698863447916544" +"microsoft","neutral","126698757503991808" +"microsoft","neutral","126698756845481984" 
+"microsoft","neutral","126698755884978177" +"microsoft","neutral","126698717930717184" +"microsoft","neutral","126698707252027392" +"microsoft","neutral","126698704290852865" +"microsoft","neutral","126698703410040834" +"microsoft","neutral","126698504910413827" +"microsoft","neutral","126698182141947904" +"microsoft","neutral","126697870689710080" +"microsoft","neutral","126697419953012737" +"microsoft","neutral","126697038187474944" +"microsoft","neutral","126696856959983616" +"microsoft","neutral","126696843299135488" +"microsoft","neutral","126696759077511168" +"microsoft","neutral","126696757294927874" +"microsoft","neutral","126696688046968832" +"microsoft","neutral","126696671894704129" +"microsoft","neutral","126696627955171328" +"microsoft","neutral","126696501849227264" +"microsoft","neutral","126696344881594368" +"microsoft","neutral","126695550975356928" +"microsoft","neutral","126695264487604224" +"microsoft","neutral","126694893841158145" +"microsoft","neutral","126694267560271872" +"microsoft","neutral","126693945123147776" +"microsoft","neutral","126693187346632704" +"microsoft","neutral","126693021365452800" +"microsoft","neutral","126692888741548032" +"microsoft","neutral","126692854981595136" +"microsoft","neutral","126692811859963904" +"microsoft","neutral","126692357277102080" +"microsoft","neutral","126691842736668673" +"microsoft","neutral","126691830774505472" +"microsoft","neutral","126691604839927808" +"microsoft","neutral","126691578361298945" +"microsoft","neutral","126691206976643072" +"microsoft","neutral","126691146209558528" +"microsoft","neutral","126691006207893505" +"microsoft","neutral","126690823306870784" +"microsoft","neutral","126690446872297473" +"microsoft","neutral","126690445119070210" +"microsoft","neutral","126690443588153345" +"microsoft","neutral","126690396720988160" +"microsoft","neutral","126690392568639488" +"microsoft","neutral","126690357042884608" +"microsoft","neutral","126690074946568192" +"microsoft","neutral","126689592832294913" +"microsoft","neutral","126689002114924544" +"microsoft","neutral","126688185228079104" +"microsoft","neutral","126688055691190274" +"microsoft","neutral","126687847762771970" +"microsoft","neutral","126686806585520128" +"microsoft","neutral","126686450019344384" +"microsoft","neutral","126686367479631872" +"microsoft","neutral","126686111656448000" +"microsoft","neutral","126686012884787200" +"microsoft","neutral","126685318899449856" +"microsoft","neutral","126685194278289408" +"microsoft","neutral","126685077823422466" +"microsoft","neutral","126684141306650625" +"microsoft","neutral","126683940764401665" +"microsoft","neutral","126683909722357760" +"microsoft","neutral","126683288239751169" +"microsoft","neutral","126683225123852288" +"microsoft","neutral","126681783222808578" +"microsoft","neutral","126680710403719168" +"microsoft","neutral","126680178196877312" +"microsoft","neutral","126680085959942144" +"microsoft","neutral","126679050742800385" +"microsoft","neutral","126678934237626368" +"microsoft","neutral","126678725491294209" +"microsoft","neutral","126678690334638080" +"microsoft","neutral","126678683393081344" +"microsoft","neutral","126678622449831937" +"microsoft","neutral","126678492464152576" +"microsoft","neutral","126678454639927297" +"microsoft","neutral","126677952644648960" +"microsoft","neutral","126677913058820097" +"microsoft","neutral","126677721647554561" +"microsoft","neutral","126677403480231937" +"microsoft","neutral","126677050265305088" 
+"microsoft","neutral","126676455936622593" +"microsoft","neutral","126676302873899008" +"microsoft","neutral","126675994378637312" +"microsoft","neutral","126675231896117248" +"microsoft","neutral","126674948671545344" +"microsoft","neutral","126674919969923072" +"microsoft","neutral","126674748674547712" +"microsoft","neutral","126674460131606529" +"microsoft","neutral","126673949907107840" +"microsoft","neutral","126673753680773122" +"microsoft","neutral","126673364231270400" +"microsoft","neutral","126672797731790848" +"microsoft","neutral","126671949320556544" +"microsoft","neutral","126671882211692545" +"microsoft","neutral","126671664011419648" +"microsoft","neutral","126671654540677122" +"microsoft","neutral","126671006302617600" +"microsoft","neutral","126669870090829824" +"microsoft","neutral","126669451369263106" +"microsoft","neutral","126668947503325184" +"microsoft","neutral","126668764199665664" +"microsoft","neutral","126668119354781696" +"microsoft","neutral","126667815116750848" +"microsoft","neutral","126667274080894976" +"microsoft","neutral","126666679785766914" +"microsoft","neutral","126666425715798016" +"microsoft","neutral","126665951172243456" +"microsoft","neutral","126665581519835136" +"microsoft","neutral","126664658651332608" +"microsoft","neutral","126664524156764160" +"microsoft","neutral","126664410029764608" +"microsoft","neutral","126664404891746304" +"microsoft","neutral","126663746767699969" +"microsoft","neutral","126662968434900992" +"microsoft","neutral","126662905084129280" +"microsoft","neutral","126661971566264320" +"microsoft","neutral","126660704496402433" +"microsoft","neutral","126659682076082176" +"microsoft","neutral","126659577033928704" +"microsoft","neutral","126659465985536000" +"microsoft","neutral","126659266315698177" +"microsoft","neutral","126658871363239936" +"microsoft","neutral","126658110084489216" +"microsoft","neutral","126657343965507584" +"microsoft","neutral","126656580715429888" +"microsoft","neutral","126656236157538304" +"microsoft","neutral","126656052971323393" +"microsoft","neutral","126655887740903424" +"microsoft","neutral","126655887178870784" +"microsoft","neutral","126655373015924736" +"microsoft","neutral","126654466735878144" +"microsoft","neutral","126653861371973632" +"microsoft","neutral","126653619327082496" +"microsoft","neutral","126653015754149888" +"microsoft","neutral","126652714804461569" +"microsoft","neutral","126652661251571712" +"microsoft","neutral","126652256404779008" +"microsoft","neutral","126651931287498752" +"microsoft","neutral","126651699380232193" +"microsoft","neutral","126651497718095873" +"microsoft","neutral","126651446589526016" +"microsoft","neutral","126651412443693057" +"microsoft","neutral","126651402549346304" +"microsoft","neutral","126651401010028544" +"microsoft","neutral","126651399730769922" +"microsoft","neutral","126651363718467585" +"microsoft","neutral","126651360602112000" +"microsoft","neutral","126650859290501121" +"microsoft","neutral","126650839279476736" +"microsoft","neutral","126650651370467328" +"microsoft","neutral","126649108202143744" +"microsoft","neutral","126647567877541889" +"microsoft","neutral","126647558469722112" +"microsoft","neutral","126646015863427072" +"microsoft","neutral","126645991947513857" +"microsoft","neutral","126645459568705536" +"microsoft","neutral","126644332177530880" +"microsoft","neutral","126644191299252224" +"microsoft","neutral","126643947190759425" +"microsoft","neutral","126642779064504320" 
+"microsoft","neutral","126642612412227585" +"microsoft","neutral","126642611598540801" +"microsoft","neutral","126642532863049728" +"microsoft","neutral","126642137424076800" +"microsoft","neutral","126641727254695937" +"microsoft","neutral","126641223229386752" +"microsoft","neutral","126640775730700288" +"microsoft","neutral","126640203828969472" +"microsoft","neutral","126638865971806209" +"microsoft","neutral","126638751307931648" +"microsoft","neutral","126638261421613056" +"microsoft","neutral","126637865550618624" +"microsoft","neutral","126637677335429121" +"microsoft","neutral","126637516148318208" +"microsoft","neutral","126637437953900546" +"microsoft","neutral","126637411114553344" +"microsoft","neutral","126636379479343106" +"microsoft","neutral","126636379324170240" +"microsoft","neutral","126636194653147136" +"microsoft","neutral","126635954520854528" +"microsoft","neutral","126635633245564928" +"microsoft","neutral","126635573921316864" +"microsoft","neutral","126635080100757504" +"microsoft","neutral","126635053139763200" +"microsoft","neutral","126634784326828032" +"microsoft","neutral","126634780954595328" +"microsoft","neutral","126634396773130240" +"microsoft","neutral","126632917643427841" +"microsoft","neutral","126632860386988033" +"microsoft","neutral","126632280985845760" +"microsoft","neutral","126631882149474305" +"microsoft","neutral","126631805288849408" +"microsoft","neutral","126631188394811392" +"microsoft","neutral","126631077468049408" +"microsoft","neutral","126630800413310976" +"microsoft","neutral","126630728690704384" +"microsoft","neutral","126628957566156800" +"microsoft","neutral","126628527155052544" +"microsoft","neutral","126626502937812992" +"microsoft","neutral","126626166261022720" +"microsoft","neutral","126625578567729152" +"microsoft","neutral","126625418118832128" +"microsoft","neutral","126625193786490881" +"microsoft","neutral","126622411587190784" +"microsoft","neutral","126622356817981440" +"microsoft","neutral","126622297917374464" +"microsoft","neutral","126621573271650304" +"microsoft","neutral","126621144177577984" +"microsoft","neutral","126621142743138305" +"microsoft","neutral","126619580096462848" +"microsoft","neutral","126619293977812992" +"microsoft","neutral","126619258099744768" +"microsoft","neutral","126619256015167488" +"microsoft","neutral","126617626435780608" +"microsoft","neutral","126617142761230337" +"microsoft","neutral","126616809079193600" +"microsoft","neutral","126616636621996032" +"microsoft","neutral","126615663119843328" +"microsoft","neutral","126615649077301248" +"microsoft","neutral","126615565673570305" +"microsoft","neutral","126615350916816896" +"microsoft","neutral","126614792092909568" +"microsoft","neutral","126614365280542720" +"microsoft","neutral","126614116927422464" +"microsoft","neutral","126614037608935424" +"microsoft","neutral","126613919623159808" +"microsoft","neutral","126613753499353088" +"microsoft","neutral","126613640278315008" +"microsoft","neutral","126613630551719936" +"microsoft","neutral","126613447487127553" +"microsoft","neutral","126613275331919872" +"microsoft","neutral","126611886979555328" +"microsoft","neutral","126611785666134016" +"microsoft","neutral","126611604925194240" +"microsoft","neutral","126611557370183681" +"microsoft","neutral","126611372552355841" +"microsoft","neutral","126610800382181376" +"microsoft","neutral","126610131671715840" +"microsoft","neutral","126610091855192064" +"microsoft","neutral","126610035655704577" 
+"microsoft","neutral","126609904298491904" +"microsoft","neutral","126609815278592000" +"microsoft","neutral","126608763456200704" +"microsoft","neutral","126607853640364032" +"microsoft","neutral","126607793280126976" +"microsoft","neutral","126607106953580546" +"microsoft","neutral","126606671874228225" +"microsoft","neutral","126606546556829696" +"microsoft","neutral","126606023174791168" +"microsoft","neutral","126604925881954305" +"microsoft","neutral","126604903408869378" +"microsoft","neutral","126604828150480896" +"microsoft","neutral","126604631894790144" +"microsoft","neutral","126604425832837120" +"microsoft","neutral","126604425635692545" +"microsoft","neutral","126604152536178688" +"microsoft","neutral","126603598724476928" +"microsoft","neutral","126602791807496192" +"microsoft","neutral","126602212511203328" +"microsoft","neutral","126601735417499648" +"microsoft","neutral","126601553409875968" +"microsoft","neutral","126600989150158849" +"microsoft","neutral","126600074825445376" +"microsoft","neutral","126599980847869952" +"microsoft","neutral","126599671400497153" +"microsoft","neutral","126599087389806592" +"microsoft","neutral","126598871760642048" +"microsoft","neutral","126598867847348224" +"microsoft","neutral","126598515248992257" +"microsoft","neutral","126597883477762048" +"microsoft","neutral","126597538794057728" +"microsoft","neutral","126596825875611648" +"microsoft","neutral","126596088999313408" +"microsoft","neutral","126596045575684096" +"microsoft","neutral","126595447455367168" +"microsoft","neutral","126595014846459904" +"microsoft","neutral","126593877321842690" +"microsoft","neutral","126593636627513344" +"microsoft","neutral","126593450253623297" +"microsoft","neutral","126592300070608896" +"microsoft","neutral","126592158802264064" +"microsoft","neutral","126592000395984896" +"microsoft","neutral","126591976408748032" +"microsoft","neutral","126591072884359168" +"microsoft","neutral","126591029993406464" +"microsoft","neutral","126590959290032128" +"microsoft","neutral","126590231137886209" +"microsoft","neutral","126588750628257792" +"microsoft","neutral","126588693577338881" +"microsoft","neutral","126588643782574080" +"microsoft","neutral","126588145549574144" +"microsoft","neutral","126586994536091648" +"microsoft","neutral","126586306464727041" +"microsoft","neutral","126585952394166272" +"microsoft","neutral","126585097297199104" +"microsoft","neutral","126584640453611521" +"microsoft","neutral","126583805229285376" +"microsoft","neutral","126582210420674560" +"microsoft","neutral","126582173787619328" +"microsoft","neutral","126581768529788929" +"microsoft","neutral","126581659612094464" +"microsoft","neutral","126581308158779392" +"microsoft","neutral","126581270435201024" +"microsoft","neutral","126580539636449281" +"microsoft","neutral","126580273965047810" +"microsoft","neutral","126580014819983360" +"microsoft","neutral","126579602524082176" +"microsoft","neutral","126579169646751744" +"microsoft","neutral","126579166589108224" +"microsoft","neutral","126577845421096960" +"microsoft","neutral","126577204078456832" +"microsoft","neutral","126577114005782528" +"microsoft","neutral","126577085081849856" +"microsoft","neutral","126576350067818497" +"microsoft","neutral","126576121784442880" +"microsoft","neutral","126573645421228032" +"microsoft","neutral","126573265178202112" +"microsoft","neutral","126573186774089728" +"microsoft","neutral","126572513483436032" +"microsoft","neutral","126572384126894080" 
+"microsoft","neutral","126572306230288385" +"microsoft","neutral","126572275876106240" +"microsoft","neutral","126572275016286208" +"microsoft","neutral","126570339105914881" +"microsoft","neutral","126569564963217408" +"microsoft","neutral","126569115082166273" +"microsoft","neutral","126568785921572864" +"microsoft","neutral","126568308584628224" +"microsoft","neutral","126568122261061632" +"microsoft","neutral","126567868065263616" +"microsoft","neutral","126567546165014528" +"microsoft","neutral","126566918911041536" +"microsoft","neutral","126566540010192896" +"microsoft","neutral","126566417276469248" +"microsoft","neutral","126566305716387840" +"microsoft","neutral","126565140702298112" +"microsoft","neutral","126564015873867777" +"microsoft","neutral","126564004612161536" +"microsoft","neutral","126563375189725184" +"microsoft","neutral","126563374384422912" +"microsoft","neutral","126563373230997504" +"microsoft","neutral","126563286392123392" +"microsoft","neutral","126562736242040833" +"microsoft","neutral","126562457375350785" +"microsoft","neutral","126561633978945536" +"microsoft","neutral","126561530258001920" +"microsoft","neutral","126561428319649793" +"microsoft","neutral","126559900036894720" +"microsoft","neutral","126559880860545024" +"microsoft","neutral","126557628531875840" +"microsoft","neutral","126557171742810112" +"microsoft","neutral","126555866332798976" +"microsoft","neutral","126554531713650688" +"microsoft","neutral","126553189511208960" +"microsoft","neutral","126552864876269568" +"microsoft","neutral","126552665063829504" +"microsoft","neutral","126549688857010177" +"microsoft","neutral","126548837958557696" +"microsoft","neutral","126545396913815552" +"microsoft","neutral","126545157708451842" +"microsoft","neutral","126542911654477824" +"microsoft","neutral","126542412226105345" +"microsoft","neutral","126541046615244801" +"microsoft","neutral","126538706944401409" +"microsoft","neutral","126537896999124992" +"microsoft","neutral","126536228538564608" +"microsoft","neutral","126536172867559424" +"microsoft","neutral","126535951764819968" +"microsoft","neutral","126534265205829632" +"microsoft","neutral","126534195777519616" +"microsoft","neutral","126532191831343105" +"microsoft","neutral","126528995394199552" +"microsoft","neutral","126528296354725890" +"microsoft","neutral","126528197750829056" +"microsoft","neutral","126526992609849344" +"microsoft","neutral","126525976925585408" +"microsoft","neutral","126524240324337664" +"microsoft","neutral","126523034906529793" +"microsoft","neutral","126522766261366784" +"microsoft","neutral","126522655892447233" +"microsoft","neutral","126519858035822594" +"microsoft","neutral","126519595682119681" +"microsoft","neutral","126514963924787201" +"microsoft","neutral","126514187647201280" +"microsoft","neutral","126508567053340672" +"microsoft","neutral","126508512930050048" +"microsoft","neutral","126507878382174208" +"microsoft","neutral","126507753484193792" +"microsoft","neutral","126507677919617024" +"microsoft","neutral","126507292777652224" +"microsoft","neutral","126506577946615808" +"microsoft","neutral","126500912578564097" +"microsoft","neutral","126500486047207425" +"microsoft","neutral","126500411522809856" +"microsoft","neutral","126499066229170176" +"microsoft","neutral","126498215204892672" +"microsoft","neutral","126496197220380672" +"microsoft","neutral","126494059987603456" +"microsoft","neutral","126493220279558144" +"microsoft","neutral","126488619002236928" 
+"microsoft","neutral","126487011849809920" +"microsoft","neutral","126485244651126784" +"microsoft","neutral","126484490049687552" +"microsoft","neutral","126483887923793920" +"microsoft","neutral","126483490911952896" +"microsoft","neutral","126483128163373057" +"microsoft","neutral","126482131412189184" +"microsoft","neutral","126481856639143936" +"microsoft","neutral","126481810803793921" +"microsoft","neutral","126481496147111936" +"microsoft","neutral","126480696075227137" +"microsoft","neutral","126479554217910272" +"microsoft","neutral","126479285702762496" +"microsoft","neutral","126476071238508545" +"microsoft","neutral","126474577411969024" +"microsoft","neutral","126470128589217795" +"microsoft","neutral","126467278144413696" +"microsoft","neutral","126466763746574337" +"microsoft","irrelevant","126807138587783168" +"microsoft","irrelevant","126806596662726660" +"microsoft","irrelevant","126806460146524160" +"microsoft","irrelevant","126801012852916224" +"microsoft","irrelevant","126800980783271936" +"microsoft","irrelevant","126800969521577984" +"microsoft","irrelevant","126800228560019456" +"microsoft","irrelevant","126798811262763009" +"microsoft","irrelevant","126798048289488896" +"microsoft","irrelevant","126796467213058048" +"microsoft","irrelevant","126794558456610816" +"microsoft","irrelevant","126794506266882048" +"microsoft","irrelevant","126794057979670529" +"microsoft","irrelevant","126792436176531456" +"microsoft","irrelevant","126792221730148353" +"microsoft","irrelevant","126792156852666369" +"microsoft","irrelevant","126789413706543104" +"microsoft","irrelevant","126788967692648448" +"microsoft","irrelevant","126788442356064256" +"microsoft","irrelevant","126787343704260608" +"microsoft","irrelevant","126787220022640640" +"microsoft","irrelevant","126787152708255744" +"microsoft","irrelevant","126786658866700289" +"microsoft","irrelevant","126786402837995521" +"microsoft","irrelevant","126785814876274688" +"microsoft","irrelevant","126785710920441858" +"microsoft","irrelevant","126785377863340033" +"microsoft","irrelevant","126785355310579712" +"microsoft","irrelevant","126784813066756096" +"microsoft","irrelevant","126783417152053248" +"microsoft","irrelevant","126782732373196800" +"microsoft","irrelevant","126781181466378240" +"microsoft","irrelevant","126780041857216512" +"microsoft","irrelevant","126779541720010752" +"microsoft","irrelevant","126779286458871809" +"microsoft","irrelevant","126777498716479488" +"microsoft","irrelevant","126776818941440000" +"microsoft","irrelevant","126775712525979648" +"microsoft","irrelevant","126775015021625346" +"microsoft","irrelevant","126775003927678976" +"microsoft","irrelevant","126775002082189312" +"microsoft","irrelevant","126774982935187456" +"microsoft","irrelevant","126774793746907137" +"microsoft","irrelevant","126774646715580416" +"microsoft","irrelevant","126774646690414593" +"microsoft","irrelevant","126774646568783872" +"microsoft","irrelevant","126774645474070528" +"microsoft","irrelevant","126774641044897793" +"microsoft","irrelevant","126773569345699840" +"microsoft","irrelevant","126773055816085504" +"microsoft","irrelevant","126771371954020353" +"microsoft","irrelevant","126771131884638208" +"microsoft","irrelevant","126770870709534720" +"microsoft","irrelevant","126770869254094849" +"microsoft","irrelevant","126770866435530752" +"microsoft","irrelevant","126769830568280065" +"microsoft","irrelevant","126769633301774336" +"microsoft","irrelevant","126769627484274688" 
+"microsoft","irrelevant","126769625659748353" +"microsoft","irrelevant","126769625626198016" +"microsoft","irrelevant","126769625420673024" +"microsoft","irrelevant","126769623591944192" +"microsoft","irrelevant","126769621394137088" +"microsoft","irrelevant","126769618458120192" +"microsoft","irrelevant","126769617342435329" +"microsoft","irrelevant","126769617057234944" +"microsoft","irrelevant","126769617019486209" +"microsoft","irrelevant","126769616826531840" +"microsoft","irrelevant","126769452967669761" +"microsoft","irrelevant","126768905216720896" +"microsoft","irrelevant","126768623472750592" +"microsoft","irrelevant","126768380945502208" +"microsoft","irrelevant","126767886814543872" +"microsoft","irrelevant","126767288652271617" +"microsoft","irrelevant","126767180112076800" +"microsoft","irrelevant","126765812534099969" +"microsoft","irrelevant","126765752928829441" +"microsoft","irrelevant","126765300904505344" +"microsoft","irrelevant","126764864197758976" +"microsoft","irrelevant","126764589835759616" +"microsoft","irrelevant","126764578695680000" +"microsoft","irrelevant","126764341784625152" +"microsoft","irrelevant","126763862409232384" +"microsoft","irrelevant","126763321541148672" +"microsoft","irrelevant","126762153012236288" +"microsoft","irrelevant","126762113980055552" +"microsoft","irrelevant","126761498885361664" +"microsoft","irrelevant","126761427343130624" +"microsoft","irrelevant","126761080541290496" +"microsoft","irrelevant","126760617800515584" +"microsoft","irrelevant","126759986780057600" +"microsoft","irrelevant","126759818718482432" +"microsoft","irrelevant","126759115488903168" +"microsoft","irrelevant","126758582631927809" +"microsoft","irrelevant","126757586174345216" +"microsoft","irrelevant","126756934937350144" +"microsoft","irrelevant","126756701855682560" +"microsoft","irrelevant","126756084961652736" +"microsoft","irrelevant","126755997975977984" +"microsoft","irrelevant","126755662792364032" +"microsoft","irrelevant","126754698102439936" +"microsoft","irrelevant","126754357671772160" +"microsoft","irrelevant","126754317775552513" +"microsoft","irrelevant","126753938098761728" +"microsoft","irrelevant","126753907794919424" +"microsoft","irrelevant","126752731380719616" +"microsoft","irrelevant","126752590162698241" +"microsoft","irrelevant","126752126880858112" +"microsoft","irrelevant","126751890150137856" +"microsoft","irrelevant","126751116061974528" +"microsoft","irrelevant","126749587133308928" +"microsoft","irrelevant","126749255498088448" +"microsoft","irrelevant","126746850106675200" +"microsoft","irrelevant","126746549756768258" +"microsoft","irrelevant","126745825283031040" +"microsoft","irrelevant","126745438136176640" +"microsoft","irrelevant","126744626974564352" +"microsoft","irrelevant","126743570689429504" +"microsoft","irrelevant","126743504675282944" +"microsoft","irrelevant","126743035903094785" +"microsoft","irrelevant","126742038048804866" +"microsoft","irrelevant","126741924446076928" +"microsoft","irrelevant","126741919500992512" +"microsoft","irrelevant","126740492540055552" +"microsoft","irrelevant","126738342900539392" +"microsoft","irrelevant","126737787335606273" +"microsoft","irrelevant","126737489229656066" +"microsoft","irrelevant","126736841872388096" +"microsoft","irrelevant","126736717939093504" +"microsoft","irrelevant","126736619742035968" +"microsoft","irrelevant","126736441379274752" +"microsoft","irrelevant","126734707038756864" +"microsoft","irrelevant","126734622263476224" 
+"microsoft","irrelevant","126734381309108224" +"microsoft","irrelevant","126734290850557952" +"microsoft","irrelevant","126733828000722944" +"microsoft","irrelevant","126733528758095872" +"microsoft","irrelevant","126733262822440960" +"microsoft","irrelevant","126732894092791808" +"microsoft","irrelevant","126732424460767233" +"microsoft","irrelevant","126731601584455682" +"microsoft","irrelevant","126731253490794496" +"microsoft","irrelevant","126731136318713856" +"microsoft","irrelevant","126730979485302784" +"microsoft","irrelevant","126726825505329152" +"microsoft","irrelevant","126726459246129152" +"microsoft","irrelevant","126726358800936960" +"microsoft","irrelevant","126726063484178432" +"microsoft","irrelevant","126725627758915584" +"microsoft","irrelevant","126725465611304960" +"microsoft","irrelevant","126724685315579904" +"microsoft","irrelevant","126724454649839616" +"microsoft","irrelevant","126724248256528385" +"microsoft","irrelevant","126723588324737024" +"microsoft","irrelevant","126723323471204352" +"microsoft","irrelevant","126723066528153600" +"microsoft","irrelevant","126722796599521281" +"microsoft","irrelevant","126722505128935424" +"microsoft","irrelevant","126721828432519170" +"microsoft","irrelevant","126719767590604801" +"microsoft","irrelevant","126719569179054080" +"microsoft","irrelevant","126719029921579008" +"microsoft","irrelevant","126717715657396224" +"microsoft","irrelevant","126717214211575808" +"microsoft","irrelevant","126715806565412864" +"microsoft","irrelevant","126715672981016577" +"microsoft","irrelevant","126714068093509633" +"microsoft","irrelevant","126713830184198144" +"microsoft","irrelevant","126713264582299648" +"microsoft","irrelevant","126710706354663424" +"microsoft","irrelevant","126710657948196864" +"microsoft","irrelevant","126710063497887744" +"microsoft","irrelevant","126709780038434816" +"microsoft","irrelevant","126709737222963200" +"microsoft","irrelevant","126708721328992257" +"microsoft","irrelevant","126708681239822336" +"microsoft","irrelevant","126707839912771585" +"microsoft","irrelevant","126707460244373504" +"microsoft","irrelevant","126705840521613312" +"microsoft","irrelevant","126705791527952384" +"microsoft","irrelevant","126705448438079488" +"microsoft","irrelevant","126705060708225024" +"microsoft","irrelevant","126704896383787008" +"microsoft","irrelevant","126704346573455360" +"microsoft","irrelevant","126704316005351424" +"microsoft","irrelevant","126704236057739264" +"microsoft","irrelevant","126703003351785472" +"microsoft","irrelevant","126702733834194944" +"microsoft","irrelevant","126702673830481920" +"microsoft","irrelevant","126702046350024704" +"microsoft","irrelevant","126701862383661056" +"microsoft","irrelevant","126701401425444864" +"microsoft","irrelevant","126699572490813441" +"microsoft","irrelevant","126699294987259904" +"microsoft","irrelevant","126699236615127041" +"microsoft","irrelevant","126698924709920768" +"microsoft","irrelevant","126698924621832192" +"microsoft","irrelevant","126698507657678848" +"microsoft","irrelevant","126696228745523200" +"microsoft","irrelevant","126695671163133952" +"microsoft","irrelevant","126695665769250818" +"microsoft","irrelevant","126694595026366464" +"microsoft","irrelevant","126694584322490368" +"microsoft","irrelevant","126693348735057920" +"microsoft","irrelevant","126691436744810496" +"microsoft","irrelevant","126690582893572096" +"microsoft","irrelevant","126690506976657408" +"microsoft","irrelevant","126689129131028480" 
+"microsoft","irrelevant","126689124638932993" +"microsoft","irrelevant","126689077700476929" +"microsoft","irrelevant","126689077230698496" +"microsoft","irrelevant","126688740826550272" +"microsoft","irrelevant","126688659868106752" +"microsoft","irrelevant","126687780943306752" +"microsoft","irrelevant","126687120071999490" +"microsoft","irrelevant","126687048647184384" +"microsoft","irrelevant","126686733222944768" +"microsoft","irrelevant","126686455752962048" +"microsoft","irrelevant","126686446357716992" +"microsoft","irrelevant","126686301780049920" +"microsoft","irrelevant","126685380681547777" +"microsoft","irrelevant","126685141174202369" +"microsoft","irrelevant","126684849934303233" +"microsoft","irrelevant","126684618605867008" +"microsoft","irrelevant","126682886756777984" +"microsoft","irrelevant","126682505033154560" +"microsoft","irrelevant","126681644223578113" +"microsoft","irrelevant","126681070413418496" +"microsoft","irrelevant","126680181359378432" +"microsoft","irrelevant","126680158508810240" +"microsoft","irrelevant","126679552310251521" +"microsoft","irrelevant","126679463839801344" +"microsoft","irrelevant","126679305169289216" +"microsoft","irrelevant","126679060431634432" +"microsoft","irrelevant","126679053347467264" +"microsoft","irrelevant","126678520033325057" +"microsoft","irrelevant","126678376277749760" +"microsoft","irrelevant","126678301539446784" +"microsoft","irrelevant","126677890531201024" +"microsoft","irrelevant","126677821492961280" +"microsoft","irrelevant","126677668933533696" +"microsoft","irrelevant","126677325008994306" +"microsoft","irrelevant","126676840181022720" +"microsoft","irrelevant","126676566154555395" +"microsoft","irrelevant","126676435988512768" +"microsoft","irrelevant","126675755026493440" +"microsoft","irrelevant","126675459353223168" +"microsoft","irrelevant","126675434065764352" +"microsoft","irrelevant","126675392663789569" +"microsoft","irrelevant","126675085758173187" +"microsoft","irrelevant","126674938076725248" +"microsoft","irrelevant","126673983235035138" +"microsoft","irrelevant","126673920014299137" +"microsoft","irrelevant","126673684936146944" +"microsoft","irrelevant","126673474751172608" +"microsoft","irrelevant","126673257175855106" +"microsoft","irrelevant","126673062258147328" +"microsoft","irrelevant","126671792877223936" +"microsoft","irrelevant","126671141854134273" +"microsoft","irrelevant","126671001357529089" +"microsoft","irrelevant","126671000736763904" +"microsoft","irrelevant","126670954112880640" +"microsoft","irrelevant","126669765778485248" +"microsoft","irrelevant","126669192157073408" +"microsoft","irrelevant","126668863667572736" +"microsoft","irrelevant","126668329002872833" +"microsoft","irrelevant","126668278386012160" +"microsoft","irrelevant","126666904764030977" +"microsoft","irrelevant","126666281461096448" +"microsoft","irrelevant","126666155376132097" +"microsoft","irrelevant","126666022517350400" +"microsoft","irrelevant","126665539719409664" +"microsoft","irrelevant","126665086961065985" +"microsoft","irrelevant","126665078861869056" +"microsoft","irrelevant","126664812947181568" +"microsoft","irrelevant","126664329335541760" +"microsoft","irrelevant","126664120274653184" +"microsoft","irrelevant","126663390302187521" +"microsoft","irrelevant","126663150148911105" +"microsoft","irrelevant","126662722673844224" +"microsoft","irrelevant","126662658991718400" +"microsoft","irrelevant","126662635033858049" +"microsoft","irrelevant","126662553316245504" 
+"microsoft","irrelevant","126662533347164161" +"microsoft","irrelevant","126662436966236160" +"microsoft","irrelevant","126661775922966528" +"microsoft","irrelevant","126661415510614018" +"microsoft","irrelevant","126660850051321858" +"microsoft","irrelevant","126660622883631104" +"microsoft","irrelevant","126660026013188097" +"microsoft","irrelevant","126659873579610113" +"microsoft","irrelevant","126659604628242432" +"microsoft","irrelevant","126657946758295552" +"microsoft","irrelevant","126657340920438785" +"microsoft","irrelevant","126656806368968704" +"microsoft","irrelevant","126656462566080513" +"microsoft","irrelevant","126656050664443904" +"microsoft","irrelevant","126655886893649921" +"microsoft","irrelevant","126655535545204737" +"microsoft","irrelevant","126655432361123840" +"microsoft","irrelevant","126655411871940608" +"microsoft","irrelevant","126654661322211328" +"microsoft","irrelevant","126654651654340608" +"microsoft","irrelevant","126654309894070273" +"microsoft","irrelevant","126654232538521600" +"microsoft","irrelevant","126653714357432320" +"microsoft","irrelevant","126653276040073216" +"microsoft","irrelevant","126652806546464768" +"microsoft","irrelevant","126652711188963328" +"microsoft","irrelevant","126652683040993280" +"microsoft","irrelevant","126652553038540800" +"microsoft","irrelevant","126652025181179904" +"microsoft","irrelevant","126651878351183873" +"microsoft","irrelevant","126651164325457920" +"microsoft","irrelevant","126650231159922689" +"microsoft","irrelevant","126650224625205248" +"microsoft","irrelevant","126650108640100352" +"microsoft","irrelevant","126650101304262656" +"microsoft","irrelevant","126650034052792321" +"microsoft","irrelevant","126649985897996288" +"microsoft","irrelevant","126649791299063808" +"microsoft","irrelevant","126649528924389378" +"microsoft","irrelevant","126648588129419264" +"microsoft","irrelevant","126648260352942080" +"microsoft","irrelevant","126648259040120832" +"microsoft","irrelevant","126648114886086657" +"microsoft","irrelevant","126648049459142656" +"microsoft","irrelevant","126647771821383682" +"microsoft","irrelevant","126647390282326017" +"microsoft","irrelevant","126646647856955392" +"microsoft","irrelevant","126646439924334592" +"microsoft","irrelevant","126646302032396289" +"microsoft","irrelevant","126645036426334208" +"microsoft","irrelevant","126644764383780865" +"microsoft","irrelevant","126644360434565124" +"microsoft","irrelevant","126643786557296640" +"microsoft","irrelevant","126643010296487936" +"microsoft","irrelevant","126642364667269120" +"microsoft","irrelevant","126641501978632192" +"microsoft","irrelevant","126640207167631361" +"microsoft","irrelevant","126639750756040706" +"microsoft","irrelevant","126638913145159681" +"microsoft","irrelevant","126638752167759872" +"microsoft","irrelevant","126638520034013184" +"microsoft","irrelevant","126637475153186816" +"microsoft","irrelevant","126637471676104704" +"microsoft","irrelevant","126636564469121024" +"microsoft","irrelevant","126636005922050048" +"microsoft","irrelevant","126633706566856704" +"microsoft","irrelevant","126632414830276608" +"microsoft","irrelevant","126631816873517056" +"microsoft","irrelevant","126630944999346176" +"microsoft","irrelevant","126630702392426496" +"microsoft","irrelevant","126630465246466048" +"microsoft","irrelevant","126629195546755072" +"microsoft","irrelevant","126628979636572160" +"microsoft","irrelevant","126628891929493504" +"microsoft","irrelevant","126628699402539008" 
+"microsoft","irrelevant","126628570536742912" +"microsoft","irrelevant","126626670181490688" +"microsoft","irrelevant","126626576069693440" +"microsoft","irrelevant","126626327888539648" +"microsoft","irrelevant","126625929215754240" +"microsoft","irrelevant","126625386565087232" +"microsoft","irrelevant","126625317157744640" +"microsoft","irrelevant","126625265928515584" +"microsoft","irrelevant","126624831297949696" +"microsoft","irrelevant","126623895334817792" +"microsoft","irrelevant","126622818220785664" +"microsoft","irrelevant","126622165595459584" +"microsoft","irrelevant","126622031163822081" +"microsoft","irrelevant","126622030006202368" +"microsoft","irrelevant","126621969461415936" +"microsoft","irrelevant","126621883411070976" +"microsoft","irrelevant","126621712656760832" +"microsoft","irrelevant","126621298272112643" +"microsoft","irrelevant","126620982009008129" +"microsoft","irrelevant","126620721236545536" +"microsoft","irrelevant","126620532060848129" +"microsoft","irrelevant","126619975518666752" +"microsoft","irrelevant","126618143966756864" +"microsoft","irrelevant","126618143098548224" +"microsoft","irrelevant","126617262722531328" +"microsoft","irrelevant","126616352340447233" +"microsoft","irrelevant","126615874508558336" +"microsoft","irrelevant","126615672351498240" +"microsoft","irrelevant","126615378976718848" +"microsoft","irrelevant","126615034007789569" +"microsoft","irrelevant","126614513784074240" +"microsoft","irrelevant","126614370150129664" +"microsoft","irrelevant","126614145662599169" +"microsoft","irrelevant","126614144299446272" +"microsoft","irrelevant","126614136242180097" +"microsoft","irrelevant","126613470245437440" +"microsoft","irrelevant","126613334098325504" +"microsoft","irrelevant","126612194594000896" +"microsoft","irrelevant","126612152579657728" +"microsoft","irrelevant","126611679961300993" +"microsoft","irrelevant","126611107266834433" +"microsoft","irrelevant","126610365852303361" +"microsoft","irrelevant","126608861808431107" +"microsoft","irrelevant","126607587406913536" +"microsoft","irrelevant","126606198911930368" +"microsoft","irrelevant","126606101671186432" +"microsoft","irrelevant","126605924273111042" +"microsoft","irrelevant","126605409590063104" +"microsoft","irrelevant","126604075809771520" +"microsoft","irrelevant","126601340242767872" +"microsoft","irrelevant","126599691881299968" +"microsoft","irrelevant","126599445168144384" +"microsoft","irrelevant","126598693351723010" +"microsoft","irrelevant","126598545062105088" +"microsoft","irrelevant","126597416693665793" +"microsoft","irrelevant","126596658929733632" +"microsoft","irrelevant","126596412187226112" +"microsoft","irrelevant","126596388615229441" +"microsoft","irrelevant","126595424810307584" +"microsoft","irrelevant","126592053055459328" +"microsoft","irrelevant","126591243294748672" +"microsoft","irrelevant","126590333520855040" +"microsoft","irrelevant","126590035314229249" +"microsoft","irrelevant","126589888266108929" +"microsoft","irrelevant","126589139150839808" +"microsoft","irrelevant","126589085304369152" +"microsoft","irrelevant","126588920958955521" +"microsoft","irrelevant","126588570961068032" +"microsoft","irrelevant","126586819713310720" +"microsoft","irrelevant","126586599772389376" +"microsoft","irrelevant","126586563147743232" +"microsoft","irrelevant","126585997814280192" +"microsoft","irrelevant","126585826955104256" +"microsoft","irrelevant","126585200355454976" +"microsoft","irrelevant","126583539662733312" 
+"microsoft","irrelevant","126583374096764928" +"microsoft","irrelevant","126582476121444352" +"microsoft","irrelevant","126581464052678656" +"microsoft","irrelevant","126579540070907904" +"microsoft","irrelevant","126579035093479425" +"microsoft","irrelevant","126578736148652032" +"microsoft","irrelevant","126577183060799488" +"microsoft","irrelevant","126576827476094976" +"microsoft","irrelevant","126576629395898368" +"microsoft","irrelevant","126576294359072768" +"microsoft","irrelevant","126575943706877953" +"microsoft","irrelevant","126575853818744832" +"microsoft","irrelevant","126575680585596928" +"microsoft","irrelevant","126575637942120448" +"microsoft","irrelevant","126575368692957184" +"microsoft","irrelevant","126575205048000512" +"microsoft","irrelevant","126574756307808256" +"microsoft","irrelevant","126574310176468992" +"microsoft","irrelevant","126573880285466625" +"microsoft","irrelevant","126573790980358145" +"microsoft","irrelevant","126573784961527808" +"microsoft","irrelevant","126573378302783488" +"microsoft","irrelevant","126572846272086016" +"microsoft","irrelevant","126570427479896064" +"microsoft","irrelevant","126569812120973312" +"microsoft","irrelevant","126568272819793920" +"microsoft","irrelevant","126566960979914752" +"microsoft","irrelevant","126566043928895489" +"microsoft","irrelevant","126565939075497984" +"microsoft","irrelevant","126565570740101120" +"microsoft","irrelevant","126565301801320448" +"microsoft","irrelevant","126564330933194752" +"microsoft","irrelevant","126564298209247232" +"microsoft","irrelevant","126564244329218048" +"microsoft","irrelevant","126563471662915584" +"microsoft","irrelevant","126563328213516288" +"microsoft","irrelevant","126561890825543680" +"microsoft","irrelevant","126560733126328321" +"microsoft","irrelevant","126560506097049600" +"microsoft","irrelevant","126559394136723456" +"microsoft","irrelevant","126558250194829312" +"microsoft","irrelevant","126556805877858306" +"microsoft","irrelevant","126556628173598720" +"microsoft","irrelevant","126555304212176897" +"microsoft","irrelevant","126553180711559169" +"microsoft","irrelevant","126553028730953730" +"microsoft","irrelevant","126550811894480896" +"microsoft","irrelevant","126549243061207040" +"microsoft","irrelevant","126547733359230976" +"microsoft","irrelevant","126547233473691649" +"microsoft","irrelevant","126546908142501888" +"microsoft","irrelevant","126546297082748928" +"microsoft","irrelevant","126546080384040960" +"microsoft","irrelevant","126544902107570176" +"microsoft","irrelevant","126544031365873664" +"microsoft","irrelevant","126540569844523008" +"microsoft","irrelevant","126539960890306560" +"microsoft","irrelevant","126536951984689152" +"microsoft","irrelevant","126532894272397312" +"microsoft","irrelevant","126531176243539968" +"microsoft","irrelevant","126530398317592576" +"microsoft","irrelevant","126530235402424322" +"microsoft","irrelevant","126527536313278465" +"microsoft","irrelevant","126526431298723841" +"microsoft","irrelevant","126523675364171778" +"microsoft","irrelevant","126523554568224771" +"microsoft","irrelevant","126518913294020608" +"microsoft","irrelevant","126518577263153152" +"microsoft","irrelevant","126506168184078336" +"microsoft","irrelevant","126506057613848576" +"microsoft","irrelevant","126505970317787136" +"microsoft","irrelevant","126500518515310592" +"microsoft","irrelevant","126500332078505985" +"microsoft","irrelevant","126495306681548800" +"microsoft","irrelevant","126491523020898304" 
+"microsoft","irrelevant","126489614272827392" +"microsoft","irrelevant","126488920329433088" +"microsoft","irrelevant","126487014957785088" +"microsoft","irrelevant","126485491238436866" +"microsoft","irrelevant","126484927649820673" +"twitter","positive","126883590041640960" +"twitter","positive","126883448173510656" +"twitter","positive","126883416280006656" +"twitter","positive","126883364887203840" +"twitter","positive","126883290782244864" +"twitter","positive","126883211006590976" +"twitter","positive","126883187300384768" +"twitter","positive","126882971411165185" +"twitter","positive","126882662932692992" +"twitter","positive","126882408963391488" +"twitter","positive","126881835463614464" +"twitter","positive","126881080178507776" +"twitter","positive","126880912754475008" +"twitter","positive","126880559162077184" +"twitter","positive","126880385605976064" +"twitter","positive","126879785908580352" +"twitter","positive","126879662851887104" +"twitter","positive","126878670685085696" +"twitter","positive","126877750131818497" +"twitter","positive","126877362632667136" +"twitter","positive","126877263311536128" +"twitter","positive","126877209813188608" +"twitter","positive","126877171926040576" +"twitter","positive","126877056578486272" +"twitter","positive","126876733113778176" +"twitter","positive","126876654118240257" +"twitter","positive","126876600083025920" +"twitter","positive","126876125107462144" +"twitter","positive","126876107881455616" +"twitter","positive","126875611095502848" +"twitter","positive","126875441217798144" +"twitter","positive","126875378013843456" +"twitter","positive","126874748469788672" +"twitter","positive","126874346873556993" +"twitter","positive","126873707066048513" +"twitter","positive","126873518385274882" +"twitter","positive","126873128348561409" +"twitter","positive","126872791197814784" +"twitter","positive","126872175490764802" +"twitter","positive","126870551032643584" +"twitter","positive","126870402751397889" +"twitter","positive","126869964144644097" +"twitter","positive","126869134238679042" +"twitter","positive","126868475892338688" +"twitter","positive","126868330098331648" +"twitter","positive","126867350476697601" +"twitter","positive","126865422174785536" +"twitter","positive","126864056366804992" +"twitter","positive","126863862791282688" +"twitter","positive","126863821594832897" +"twitter","positive","126863766334873600" +"twitter","positive","126863084433326080" +"twitter","positive","126862820578050048" +"twitter","positive","126862735953768448" +"twitter","positive","126862443275235328" +"twitter","positive","126862309497905152" +"twitter","positive","126862124201947136" +"twitter","positive","126861364227608577" +"twitter","positive","126860944352612353" +"twitter","positive","126860415085973504" +"twitter","positive","126860046981279744" +"twitter","positive","126859858443112449" +"twitter","positive","126859371094360064" +"twitter","positive","126858606695030784" +"twitter","positive","126858477942476800" +"twitter","positive","126858393909608448" +"twitter","positive","126857095088840706" +"twitter","positive","126857082199744513" +"twitter","negative","126883562652844033" +"twitter","negative","126883300227817472" +"twitter","negative","126883243726344193" +"twitter","negative","126882964582838272" +"twitter","negative","126882934568390656" +"twitter","negative","126882761733705728" +"twitter","negative","126881698783834112" +"twitter","negative","126881658854064128" +"twitter","negative","126881376074076161" 
+"twitter","negative","126881010301419520" +"twitter","negative","126880978185625600" +"twitter","negative","126880813991202816" +"twitter","negative","126879988602519552" +"twitter","negative","126879964619485185" +"twitter","negative","126878518310223874" +"twitter","negative","126878448575717376" +"twitter","negative","126878307693244417" +"twitter","negative","126877679826894849" +"twitter","negative","126877484271665152" +"twitter","negative","126877335399051264" +"twitter","negative","126877245347348480" +"twitter","negative","126877135926337537" +"twitter","negative","126876956443688960" +"twitter","negative","126876682207502336" +"twitter","negative","126876493153452032" +"twitter","negative","126876140269862912" +"twitter","negative","126876046028050432" +"twitter","negative","126875653210521600" +"twitter","negative","126874389210861568" +"twitter","negative","126873912624693249" +"twitter","negative","126873860745330689" +"twitter","negative","126872684658294784" +"twitter","negative","126872492118769664" +"twitter","negative","126872325663621120" +"twitter","negative","126872316142559232" +"twitter","negative","126872265328562176" +"twitter","negative","126871878886363136" +"twitter","negative","126871286545788928" +"twitter","negative","126870923591692288" +"twitter","negative","126870920018137088" +"twitter","negative","126870745258266626" +"twitter","negative","126870618825179136" +"twitter","negative","126870358816067584" +"twitter","negative","126870162510057473" +"twitter","negative","126869855621218304" +"twitter","negative","126869842769870848" +"twitter","negative","126869706639544320" +"twitter","negative","126869466054275073" +"twitter","negative","126869063023607808" +"twitter","negative","126869039233511425" +"twitter","negative","126868993263943682" +"twitter","negative","126868622026080256" +"twitter","negative","126868404182319105" +"twitter","negative","126867983455879168" +"twitter","negative","126867876115259394" +"twitter","negative","126867543955738624" +"twitter","negative","126867446220062720" +"twitter","negative","126867188677218304" +"twitter","negative","126866560756363264" +"twitter","negative","126866493370679297" +"twitter","negative","126866003094290434" +"twitter","negative","126865431142219776" +"twitter","negative","126864648577351681" +"twitter","negative","126864610446942209" +"twitter","negative","126864575508381696" +"twitter","negative","126864510194683904" +"twitter","negative","126864203557507072" +"twitter","negative","126864059206336513" +"twitter","negative","126863505851809793" +"twitter","negative","126862065649459200" +"twitter","negative","126861410864087042" +"twitter","negative","126861382078578688" +"twitter","negative","126861309185761280" +"twitter","negative","126861228797722624" +"twitter","negative","126859898897174528" +"twitter","negative","126858852976181250" +"twitter","negative","126858607789740032" +"twitter","negative","126857136855719936" +"twitter","neutral","126883719368818688" +"twitter","neutral","126883711131201536" +"twitter","neutral","126883672682004480" +"twitter","neutral","126883640671076352" +"twitter","neutral","126883630369882112" +"twitter","neutral","126883597239066625" +"twitter","neutral","126883517706674176" +"twitter","neutral","126883457614884864" +"twitter","neutral","126883452820783104" +"twitter","neutral","126883438954422274" +"twitter","neutral","126883335875203072" +"twitter","neutral","126883224587739136" +"twitter","neutral","126883185396170752" +"twitter","neutral","126883158942695425" 
+"twitter","neutral","126883124595527681" +"twitter","neutral","126883013236752384" +"twitter","neutral","126883005263392768" +"twitter","neutral","126882970811379712" +"twitter","neutral","126882885553758208" +"twitter","neutral","126882832319651840" +"twitter","neutral","126882800585539585" +"twitter","neutral","126882730154803200" +"twitter","neutral","126882726061146112" +"twitter","neutral","126882653893967872" +"twitter","neutral","126882617843924992" +"twitter","neutral","126882559522111488" +"twitter","neutral","126882542610690049" +"twitter","neutral","126882518799626241" +"twitter","neutral","126882493059170304" +"twitter","neutral","126882453943103488" +"twitter","neutral","126882427661582336" +"twitter","neutral","126882349588815873" +"twitter","neutral","126882248644493312" +"twitter","neutral","126882244982878208" +"twitter","neutral","126882193967550464" +"twitter","neutral","126882122077184000" +"twitter","neutral","126882090343079937" +"twitter","neutral","126882054259474432" +"twitter","neutral","126881996629753856" +"twitter","neutral","126881960416120832" +"twitter","neutral","126881887049351168" +"twitter","neutral","126881736364785664" +"twitter","neutral","126881682266652672" +"twitter","neutral","126881626583076864" +"twitter","neutral","126881596086288385" +"twitter","neutral","126881589606105089" +"twitter","neutral","126881580521234432" +"twitter","neutral","126881523755528192" +"twitter","neutral","126881380503273472" +"twitter","neutral","126881317894893568" +"twitter","neutral","126881309015539712" +"twitter","neutral","126881203642040320" +"twitter","neutral","126881167541665792" +"twitter","neutral","126881136398962688" +"twitter","neutral","126881090446163968" +"twitter","neutral","126881073366958080" +"twitter","neutral","126881072167399425" +"twitter","neutral","126880978273697792" +"twitter","neutral","126880926268526592" +"twitter","neutral","126880883822166017" +"twitter","neutral","126880854361391104" +"twitter","neutral","126880672190185472" +"twitter","neutral","126880656352481280" +"twitter","neutral","126880621170659328" +"twitter","neutral","126880571233280000" +"twitter","neutral","126880518410215425" +"twitter","neutral","126880481361920000" +"twitter","neutral","126880429256093696" +"twitter","neutral","126880399912742912" +"twitter","neutral","126880329913995264" +"twitter","neutral","126880253145657344" +"twitter","neutral","126880223433195520" +"twitter","neutral","126880178705141762" +"twitter","neutral","126880108718989313" +"twitter","neutral","126880102226206720" +"twitter","neutral","126880068252336128" +"twitter","neutral","126880057141641216" +"twitter","neutral","126880036488880128" +"twitter","neutral","126880035507412992" +"twitter","neutral","126880030486822912" +"twitter","neutral","126880007741128704" +"twitter","neutral","126879867731062784" +"twitter","neutral","126879768481247232" +"twitter","neutral","126879759866142720" +"twitter","neutral","126879759316697088" +"twitter","neutral","126879729189011457" +"twitter","neutral","126879677737480192" +"twitter","neutral","126879608120418305" +"twitter","neutral","126879548687130624" +"twitter","neutral","126879506521792513" +"twitter","neutral","126879491124506624" +"twitter","neutral","126879484736585729" +"twitter","neutral","126879463450488832" +"twitter","neutral","126879462041206784" +"twitter","neutral","126879413013987328" +"twitter","neutral","126879328712658944" +"twitter","neutral","126879257975734272" +"twitter","neutral","126879221724356608" 
+"twitter","neutral","126879149003509760" +"twitter","neutral","126879122298372097" +"twitter","neutral","126879106347433984" +"twitter","neutral","126879073220829184" +"twitter","neutral","126879068800024576" +"twitter","neutral","126879028706672640" +"twitter","neutral","126879022188724224" +"twitter","neutral","126878948561924096" +"twitter","neutral","126878824574095360" +"twitter","neutral","126878811009728513" +"twitter","neutral","126878688682835968" +"twitter","neutral","126878654927077376" +"twitter","neutral","126878622509309952" +"twitter","neutral","126878620953231360" +"twitter","neutral","126878545338310656" +"twitter","neutral","126878539487252480" +"twitter","neutral","126878426312351744" +"twitter","neutral","126878409023426560" +"twitter","neutral","126878311497474048" +"twitter","neutral","126878176063389696" +"twitter","neutral","126878130353876992" +"twitter","neutral","126878062846554114" +"twitter","neutral","126878058278952960" +"twitter","neutral","126877998115852288" +"twitter","neutral","126877997717405697" +"twitter","neutral","126877971637211136" +"twitter","neutral","126877965064740864" +"twitter","neutral","126877936514105344" +"twitter","neutral","126877900610875393" +"twitter","neutral","126877869547855872" +"twitter","neutral","126877729856557056" +"twitter","neutral","126877666040225792" +"twitter","neutral","126877629600108544" +"twitter","neutral","126877612214726657" +"twitter","neutral","126877585488609282" +"twitter","neutral","126877557550354433" +"twitter","neutral","126877527674322944" +"twitter","neutral","126877467507040257" +"twitter","neutral","126877441359757313" +"twitter","neutral","126877420178522113" +"twitter","neutral","126877416630136832" +"twitter","neutral","126877186689998848" +"twitter","neutral","126877128204627970" +"twitter","neutral","126877032704507904" +"twitter","neutral","126876964287033344" +"twitter","neutral","126876948206075904" +"twitter","neutral","126876905696796672" +"twitter","neutral","126876843272974337" +"twitter","neutral","126876834548822017" +"twitter","neutral","126876823723315200" +"twitter","neutral","126876778806509568" +"twitter","neutral","126876760062181376" +"twitter","neutral","126876750788567040" +"twitter","neutral","126876736137871361" +"twitter","neutral","126876733877133312" +"twitter","neutral","126876684925415425" +"twitter","neutral","126876682928926720" +"twitter","neutral","126876676822007809" +"twitter","neutral","126876593531518976" +"twitter","neutral","126876559977095168" +"twitter","neutral","126876501755957249" +"twitter","neutral","126876490364223488" +"twitter","neutral","126876467631104000" +"twitter","neutral","126876452762296321" +"twitter","neutral","126876394339831808" +"twitter","neutral","126876099786444800" +"twitter","neutral","126876078592638976" +"twitter","neutral","126876009797656576" +"twitter","neutral","126876004730933250" +"twitter","neutral","126875958929145856" +"twitter","neutral","126875958694260736" +"twitter","neutral","126875943284379649" +"twitter","neutral","126875894177480705" +"twitter","neutral","126875893552525312" +"twitter","neutral","126875887093293056" +"twitter","neutral","126875872165769216" +"twitter","neutral","126875805505691648" +"twitter","neutral","126875787818319872" +"twitter","neutral","126875761469689856" +"twitter","neutral","126875734005399552" +"twitter","neutral","126875692855078912" +"twitter","neutral","126875612341207040" +"twitter","neutral","126875579172663296" +"twitter","neutral","126875562663874560" 
+"twitter","neutral","126875457265205248" +"twitter","neutral","126875416760815616" +"twitter","neutral","126875301157404672" +"twitter","neutral","126875280018112512" +"twitter","neutral","126875274901065728" +"twitter","neutral","126875267674284032" +"twitter","neutral","126875204772311040" +"twitter","neutral","126875183725297664" +"twitter","neutral","126875160623058944" +"twitter","neutral","126875159800987649" +"twitter","neutral","126875080084033536" +"twitter","neutral","126875059477426176" +"twitter","neutral","126875035817349120" +"twitter","neutral","126874978674151424" +"twitter","neutral","126874943341330432" +"twitter","neutral","126874922017488896" +"twitter","neutral","126874909933711361" +"twitter","neutral","126874886378496000" +"twitter","neutral","126874883207610368" +"twitter","neutral","126874847614734336" +"twitter","neutral","126874811296251904" +"twitter","neutral","126874799099219969" +"twitter","neutral","126874783756455936" +"twitter","neutral","126874662268452864" +"twitter","neutral","126874568655777793" +"twitter","neutral","126874543770976256" +"twitter","neutral","126874532555399168" +"twitter","neutral","126874503455318016" +"twitter","neutral","126874467824697344" +"twitter","neutral","126874421829971968" +"twitter","neutral","126874417824399360" +"twitter","neutral","126874384139948032" +"twitter","neutral","126874374077816832" +"twitter","neutral","126874332176711681" +"twitter","neutral","126874244629016576" +"twitter","neutral","126874228296396800" +"twitter","neutral","126874200253276160" +"twitter","neutral","126874174722539520" +"twitter","neutral","126874159606267904" +"twitter","neutral","126874138836074497" +"twitter","neutral","126874058062184448" +"twitter","neutral","126874038164393984" +"twitter","neutral","126873977284079616" +"twitter","neutral","126873903552405504" +"twitter","neutral","126873883172274176" +"twitter","neutral","126873866575425536" +"twitter","neutral","126873786715873280" +"twitter","neutral","126873749558530049" +"twitter","neutral","126873686987902976" +"twitter","neutral","126873680654516224" +"twitter","neutral","126873665601146882" +"twitter","neutral","126873546017357825" +"twitter","neutral","126873484746952705" +"twitter","neutral","126873420192423936" +"twitter","neutral","126873418846044160" +"twitter","neutral","126873366304010240" +"twitter","neutral","126873364173299712" +"twitter","neutral","126873323014590464" +"twitter","neutral","126873190420062208" +"twitter","neutral","126873153015263233" +"twitter","neutral","126873112389226497" +"twitter","neutral","126873062439260160" +"twitter","neutral","126873003920330752" +"twitter","neutral","126873001198239744" +"twitter","neutral","126872982315474945" +"twitter","neutral","126872958068207616" +"twitter","neutral","126872890007236608" +"twitter","neutral","126872886748266496" +"twitter","neutral","126872872294694912" +"twitter","neutral","126872871254491137" +"twitter","neutral","126872821375827968" +"twitter","neutral","126872777130123264" +"twitter","neutral","126872773925679105" +"twitter","neutral","126872528235921409" +"twitter","neutral","126872511492263937" +"twitter","neutral","126872465635938304" +"twitter","neutral","126872432517709825" +"twitter","neutral","126872371901640705" +"twitter","neutral","126872360052736000" +"twitter","neutral","126872357099945984" +"twitter","neutral","126872338158452736" +"twitter","neutral","126872320043257857" +"twitter","neutral","126872299709280256" +"twitter","neutral","126872290968338432" 
+"twitter","neutral","126872267257950209" +"twitter","neutral","126872221292576768" +"twitter","neutral","126872201663229952" +"twitter","neutral","126872199838699520" +"twitter","neutral","126872156763201536" +"twitter","neutral","126872143081390080" +"twitter","neutral","126872087414583296" +"twitter","neutral","126872051242893312" +"twitter","neutral","126872004652580864" +"twitter","neutral","126871971827953664" +"twitter","neutral","126871955419836417" +"twitter","neutral","126871924059013120" +"twitter","neutral","126871857277308930" +"twitter","neutral","126871758757306368" +"twitter","neutral","126871747759837185" +"twitter","neutral","126871669871620096" +"twitter","neutral","126871663299137536" +"twitter","neutral","126871626133417985" +"twitter","neutral","126871606952853504" +"twitter","neutral","126871562707144704" +"twitter","neutral","126871562098982912" +"twitter","neutral","126871520785072129" +"twitter","neutral","126871511326924800" +"twitter","neutral","126871458998788096" +"twitter","neutral","126871458726162432" +"twitter","neutral","126871437322629120" +"twitter","neutral","126871411431194624" +"twitter","neutral","126871372164120576" +"twitter","neutral","126871299741069312" +"twitter","neutral","126871291998371840" +"twitter","neutral","126871201778905088" +"twitter","neutral","126871143431938048" +"twitter","neutral","126871134439346177" +"twitter","neutral","126871086863355905" +"twitter","neutral","126871066760065024" +"twitter","neutral","126871041225134080" +"twitter","neutral","126871013937000450" +"twitter","neutral","126871003132465152" +"twitter","neutral","126870943489466368" +"twitter","neutral","126870941992091648" +"twitter","neutral","126870940570226688" +"twitter","neutral","126870931040768000" +"twitter","neutral","126870883481567232" +"twitter","neutral","126870815877771264" +"twitter","neutral","126870807191363584" +"twitter","neutral","126870792960086018" +"twitter","neutral","126870788564467713" +"twitter","neutral","126870756926816256" +"twitter","neutral","126870706943295489" +"twitter","neutral","126870699108339712" +"twitter","neutral","126870669030998016" +"twitter","neutral","126870651112927232" +"twitter","neutral","126870623086592000" +"twitter","neutral","126870593764208640" +"twitter","neutral","126870550546096128" +"twitter","neutral","126870454299398144" +"twitter","neutral","126870356563722240" +"twitter","neutral","126870354722439169" +"twitter","neutral","126870255703306241" +"twitter","neutral","126870119363260416" +"twitter","neutral","126870091341115392" +"twitter","neutral","126869939536674816" +"twitter","neutral","126869876588552192" +"twitter","neutral","126869850751631361" +"twitter","neutral","126869816584839168" +"twitter","neutral","126869769608642560" +"twitter","neutral","126869765049434112" +"twitter","neutral","126869751476654080" +"twitter","neutral","126869749098487809" +"twitter","neutral","126869742068842496" +"twitter","neutral","126869644534489088" +"twitter","neutral","126869604348862464" +"twitter","neutral","126869604214652929" +"twitter","neutral","126869575202643968" +"twitter","neutral","126869488007262208" +"twitter","neutral","126869448069095424" +"twitter","neutral","126869378452037632" +"twitter","neutral","126869302887464960" +"twitter","neutral","126869296994451457" +"twitter","neutral","126869197413285888" +"twitter","neutral","126869175053467649" +"twitter","neutral","126868990730575872" +"twitter","neutral","126868950637219840" +"twitter","neutral","126868911361757185" 
+"twitter","neutral","126868888074989568" +"twitter","neutral","126868868512743425" +"twitter","neutral","126868831493820416" +"twitter","neutral","126868828457144321" +"twitter","neutral","126868783431303168" +"twitter","neutral","126868754033426434" +"twitter","neutral","126868751533617152" +"twitter","neutral","126868633195524096" +"twitter","neutral","126868627109584896" +"twitter","neutral","126868570226425856" +"twitter","neutral","126868533991849988" +"twitter","neutral","126868465377226752" +"twitter","neutral","126868429864046592" +"twitter","neutral","126868429796933632" +"twitter","neutral","126868397282689025" +"twitter","neutral","126868271625539585" +"twitter","neutral","126868239203573760" +"twitter","neutral","126868233084080130" +"twitter","neutral","126868090687459328" +"twitter","neutral","126868017106780160" +"twitter","neutral","126868008902737920" +"twitter","neutral","126867948471189504" +"twitter","neutral","126867947418427393" +"twitter","neutral","126867879382614016" +"twitter","neutral","126867869236604928" +"twitter","neutral","126867793089015808" +"twitter","neutral","126867786680111104" +"twitter","neutral","126867774738927617" +"twitter","neutral","126867767914799104" +"twitter","neutral","126867760964841473" +"twitter","neutral","126867711807598592" +"twitter","neutral","126867680966881280" +"twitter","neutral","126867577925406721" +"twitter","neutral","126867506127310848" +"twitter","neutral","126867487659786240" +"twitter","neutral","126867439177842688" +"twitter","neutral","126867417401012225" +"twitter","neutral","126867400250490880" +"twitter","neutral","126867371364319232" +"twitter","neutral","126867320005066752" +"twitter","neutral","126867304549072896" +"twitter","neutral","126867275113447424" +"twitter","neutral","126867260106227713" +"twitter","neutral","126867201776037888" +"twitter","neutral","126867198642884608" +"twitter","neutral","126867164253798400" +"twitter","neutral","126867147673714688" +"twitter","neutral","126867119592837121" +"twitter","neutral","126867067776405506" +"twitter","neutral","126867012336095232" +"twitter","neutral","126866981377941504" +"twitter","neutral","126866968912478208" +"twitter","neutral","126866968753086464" +"twitter","neutral","126866963887689728" +"twitter","neutral","126866861521502208" +"twitter","neutral","126866854240202753" +"twitter","neutral","126866804575440897" +"twitter","neutral","126866628859281408" +"twitter","neutral","126866575700664320" +"twitter","neutral","126866562111123456" +"twitter","neutral","126866557325426688" +"twitter","neutral","126866520486846465" +"twitter","neutral","126866446323171328" +"twitter","neutral","126866438517567488" +"twitter","neutral","126866422012981248" +"twitter","neutral","126866421719384065" +"twitter","neutral","126866413053939712" +"twitter","neutral","126866411275554816" +"twitter","neutral","126866376899035136" +"twitter","neutral","126866353561927680" +"twitter","neutral","126866303138013184" +"twitter","neutral","126866277213016064" +"twitter","neutral","126866234712145920" +"twitter","neutral","126866190193790976" +"twitter","neutral","126866187790450688" +"twitter","neutral","126866185764601856" +"twitter","neutral","126866177778655233" +"twitter","neutral","126866083708801024" +"twitter","neutral","126866034048241664" +"twitter","neutral","126865968021516288" +"twitter","neutral","126865954645884928" +"twitter","neutral","126865903521505280" +"twitter","neutral","126865888724004864" +"twitter","neutral","126865881069391872" 
+"twitter","neutral","126865879848853505" +"twitter","neutral","126865837800951808" +"twitter","neutral","126865802434580480" +"twitter","neutral","126865779349127170" +"twitter","neutral","126865691923062784" +"twitter","neutral","126865511878361090" +"twitter","neutral","126865453221027843" +"twitter","neutral","126865436590604288" +"twitter","neutral","126865419830177794" +"twitter","neutral","126865247465254912" +"twitter","neutral","126865215915687936" +"twitter","neutral","126865145812107264" +"twitter","neutral","126865121938116608" +"twitter","neutral","126865091026100224" +"twitter","neutral","126865038479867904" +"twitter","neutral","126865038085591041" +"twitter","neutral","126865005009309696" +"twitter","neutral","126864987078660097" +"twitter","neutral","126864979432456193" +"twitter","neutral","126864974097293312" +"twitter","neutral","126864886402777088" +"twitter","neutral","126864870032408576" +"twitter","neutral","126864861576704000" +"twitter","neutral","126864793373122560" +"twitter","neutral","126864745587412992" +"twitter","neutral","126864657817407490" +"twitter","neutral","126864641174417408" +"twitter","neutral","126864610958647296" +"twitter","neutral","126864491890749440" +"twitter","neutral","126864475247742977" +"twitter","neutral","126864423037046784" +"twitter","neutral","126864404196241408" +"twitter","neutral","126864301293182977" +"twitter","neutral","126864271501041664" +"twitter","neutral","126864244716208129" +"twitter","neutral","126864237200023553" +"twitter","neutral","126864231718076417" +"twitter","neutral","126864167343894529" +"twitter","neutral","126864141561507840" +"twitter","neutral","126864136226357248" +"twitter","neutral","126864131289661441" +"twitter","neutral","126864105347878912" +"twitter","neutral","126864096388849665" +"twitter","neutral","126864070249947136" +"twitter","neutral","126864043154751489" +"twitter","neutral","126864007784185856" +"twitter","neutral","126863975223795712" +"twitter","neutral","126863972778508289" +"twitter","neutral","126863957767110656" +"twitter","neutral","126863949584023552" +"twitter","neutral","126863942634057728" +"twitter","neutral","126863921465393152" +"twitter","neutral","126863918646820864" +"twitter","neutral","126863876066254848" +"twitter","neutral","126863870689165312" +"twitter","neutral","126863814619709441" +"twitter","neutral","126863776476708864" +"twitter","neutral","126863772877996034" +"twitter","neutral","126863571912114177" +"twitter","neutral","126863525481156608" +"twitter","neutral","126863470397366272" +"twitter","neutral","126863457642483712" +"twitter","neutral","126863410750160896" +"twitter","neutral","126863409680625664" +"twitter","neutral","126863392764989440" +"twitter","neutral","126863275450310656" +"twitter","neutral","126863240041996289" +"twitter","neutral","126863216046374912" +"twitter","neutral","126863212762247168" +"twitter","neutral","126863206193963008" +"twitter","neutral","126863190691811328" +"twitter","neutral","126863104280760320" +"twitter","neutral","126863072269840384" +"twitter","neutral","126863060794224640" +"twitter","neutral","126862999720951808" +"twitter","neutral","126862947346694144" +"twitter","neutral","126862946310692864" +"twitter","neutral","126862939159412740" +"twitter","neutral","126862902325022720" +"twitter","neutral","126862899804246016" +"twitter","neutral","126862897639993344" +"twitter","neutral","126862892128677888" +"twitter","neutral","126862842052874240" +"twitter","neutral","126862821635002368" 
+"twitter","neutral","126862767520096257" +"twitter","neutral","126862734863241217" +"twitter","neutral","126862728181714944" +"twitter","neutral","126862726311059457" +"twitter","neutral","126862714957078528" +"twitter","neutral","126862636922044417" +"twitter","neutral","126862618836221954" +"twitter","neutral","126862573697114112" +"twitter","neutral","126862560870940673" +"twitter","neutral","126862552025137152" +"twitter","neutral","126862547214286848" +"twitter","neutral","126862517216612354" +"twitter","neutral","126862494437351425" +"twitter","neutral","126862407309082625" +"twitter","neutral","126862391924375552" +"twitter","neutral","126862244427468800" +"twitter","neutral","126862244075151361" +"twitter","neutral","126862174277738496" +"twitter","neutral","126862170502860800" +"twitter","neutral","126862130136879104" +"twitter","neutral","126862106443255810" +"twitter","neutral","126862039225352192" +"twitter","neutral","126862030979334144" +"twitter","neutral","126862000981671938" +"twitter","neutral","126861997127110657" +"twitter","neutral","126861952369700865" +"twitter","neutral","126861941372239872" +"twitter","neutral","126861916286103552" +"twitter","neutral","126861895436206081" +"twitter","neutral","126861880194109440" +"twitter","neutral","126861879598530561" +"twitter","neutral","126861823797493760" +"twitter","neutral","126861820169437184" +"twitter","neutral","126861776133431296" +"twitter","neutral","126861734144245760" +"twitter","neutral","126861727374643200" +"twitter","neutral","126861715752222720" +"twitter","neutral","126861701168631808" +"twitter","neutral","126861637931118592" +"twitter","neutral","126861630695940096" +"twitter","neutral","126861453209767936" +"twitter","neutral","126861421408567296" +"twitter","neutral","126861418078277632" +"twitter","neutral","126861321995173890" +"twitter","neutral","126861294312759296" +"twitter","neutral","126861285307584514" +"twitter","neutral","126861195058757634" +"twitter","neutral","126861181108498432" +"twitter","neutral","126861149726715904" +"twitter","neutral","126861036803473408" +"twitter","neutral","126861011813810176" +"twitter","neutral","126860964992794624" +"twitter","neutral","126860955605934081" +"twitter","neutral","126860933988483073" +"twitter","neutral","126860932881186817" +"twitter","neutral","126860898567589888" +"twitter","neutral","126860835560755200" +"twitter","neutral","126860802392195072" +"twitter","neutral","126860800978722816" +"twitter","neutral","126860744913469440" +"twitter","neutral","126860714118885376" +"twitter","neutral","126860700915208193" +"twitter","neutral","126860691255721984" +"twitter","neutral","126860597013917697" +"twitter","neutral","126860563740495872" +"twitter","neutral","126860548359995393" +"twitter","neutral","126860527497515008" +"twitter","neutral","126860504240107520" +"twitter","neutral","126860495079735296" +"twitter","neutral","126860492261167104" +"twitter","neutral","126860390356357121" +"twitter","neutral","126860373117775872" +"twitter","neutral","126860341941518336" +"twitter","neutral","126860267178049536" +"twitter","neutral","126860173053669376" +"twitter","neutral","126860038525562880" +"twitter","neutral","126859978941276161" +"twitter","neutral","126859887404777472" +"twitter","neutral","126859856782163968" +"twitter","neutral","126859833088552960" +"twitter","neutral","126859789883015168" +"twitter","neutral","126859782601703424" +"twitter","neutral","126859745154957312" +"twitter","neutral","126859710740701185" 
+"twitter","neutral","126859623671152640" +"twitter","neutral","126859623532732417" +"twitter","neutral","126859604985511937" +"twitter","neutral","126859530305929216" +"twitter","neutral","126859485322035200" +"twitter","neutral","126859432511537152" +"twitter","neutral","126859340622725120" +"twitter","neutral","126859326294999041" +"twitter","neutral","126859246213136384" +"twitter","neutral","126859124016300032" +"twitter","neutral","126859115657043968" +"twitter","neutral","126859053757501440" +"twitter","neutral","126859044756520960" +"twitter","neutral","126858999512580096" +"twitter","neutral","126858961159864320" +"twitter","neutral","126858958894931968" +"twitter","neutral","126858953673027584" +"twitter","neutral","126858718762639360" +"twitter","neutral","126858698520932352" +"twitter","neutral","126858281867149312" +"twitter","neutral","126858276339056640" +"twitter","neutral","126858233032871937" +"twitter","neutral","126858194046816256" +"twitter","neutral","126858149859831808" +"twitter","neutral","126857916631355393" +"twitter","neutral","126857800411398144" +"twitter","neutral","126857676134166528" +"twitter","neutral","126857481006751744" +"twitter","neutral","126857475034071040" +"twitter","neutral","126857044677505024" +"twitter","neutral","126856873738633216" +"twitter","neutral","126856857527648256" +"twitter","neutral","126856848778342402" +"twitter","neutral","126856541453291520" +"twitter","neutral","126856421907243009" +"twitter","neutral","126856387211960320" +"twitter","neutral","126856150980374528" +"twitter","neutral","126856031367204865" +"twitter","neutral","126855856414404608" +"twitter","neutral","126855838047547392" +"twitter","neutral","126855191571070976" +"twitter","neutral","126854358817181696" +"twitter","neutral","126853913591808002" +"twitter","neutral","126853667738497025" +"twitter","neutral","126853298996252674" +"twitter","irrelevant","126883777938067457" +"twitter","irrelevant","126883741481177088" +"twitter","irrelevant","126883583691472896" +"twitter","irrelevant","126883512073719808" +"twitter","irrelevant","126883431308197888" +"twitter","irrelevant","126883226760384512" +"twitter","irrelevant","126883122519343105" +"twitter","irrelevant","126883074888826880" +"twitter","irrelevant","126882987244662784" +"twitter","irrelevant","126882954629742592" +"twitter","irrelevant","126882787029553153" +"twitter","irrelevant","126882743819833345" +"twitter","irrelevant","126882703000879105" +"twitter","irrelevant","126882629365661696" +"twitter","irrelevant","126882613569912832" +"twitter","irrelevant","126882562202271744" +"twitter","irrelevant","126882507621797889" +"twitter","irrelevant","126882498536939521" +"twitter","irrelevant","126882470703529985" +"twitter","irrelevant","126882436930994176" +"twitter","irrelevant","126882291757752320" +"twitter","irrelevant","126882264360558592" +"twitter","irrelevant","126882080050262016" +"twitter","irrelevant","126881828337483776" +"twitter","irrelevant","126881827339243521" +"twitter","irrelevant","126881659516755969" +"twitter","irrelevant","126881629145808896" +"twitter","irrelevant","126881619335327744" +"twitter","irrelevant","126881591627759616" +"twitter","irrelevant","126881518151929856" +"twitter","irrelevant","126881495481724931" +"twitter","irrelevant","126881485264400385" +"twitter","irrelevant","126881462334144513" +"twitter","irrelevant","126881427550773248" +"twitter","irrelevant","126881398316466178" +"twitter","irrelevant","126881392956153856" +"twitter","irrelevant","126881358848065536" 
+"twitter","irrelevant","126881232997974016" +"twitter","irrelevant","126881227729928193" +"twitter","irrelevant","126881216468226048" +"twitter","irrelevant","126881169169063937" +"twitter","irrelevant","126881114936717313" +"twitter","irrelevant","126881095378665473" +"twitter","irrelevant","126881035748261888" +"twitter","irrelevant","126881008002940928" +"twitter","irrelevant","126880978324037632" +"twitter","irrelevant","126880976096858112" +"twitter","irrelevant","126880901903826945" +"twitter","irrelevant","126880815928975360" +"twitter","irrelevant","126880805610987520" +"twitter","irrelevant","126880734152634368" +"twitter","irrelevant","126880709427208192" +"twitter","irrelevant","126880705996259328" +"twitter","irrelevant","126880699587371008" +"twitter","irrelevant","126880644105121792" +"twitter","irrelevant","126880580485910529" +"twitter","irrelevant","126880556775522304" +"twitter","irrelevant","126880519391686656" +"twitter","irrelevant","126880484797063168" +"twitter","irrelevant","126880477943570433" +"twitter","irrelevant","126880468833550336" +"twitter","irrelevant","126880436906491904" +"twitter","irrelevant","126880353817337856" +"twitter","irrelevant","126880217124974594" +"twitter","irrelevant","126880194588975104" +"twitter","irrelevant","126880098401001473" +"twitter","irrelevant","126880095334973440" +"twitter","irrelevant","126880015357984768" +"twitter","irrelevant","126879958529343488" +"twitter","irrelevant","126879710054580224" +"twitter","irrelevant","126879705046597632" +"twitter","irrelevant","126879692635635712" +"twitter","irrelevant","126879567385337856" +"twitter","irrelevant","126879538415271936" +"twitter","irrelevant","126879417220874240" +"twitter","irrelevant","126879341559824384" +"twitter","irrelevant","126879308663894016" +"twitter","irrelevant","126879295195987968" +"twitter","irrelevant","126879277722505216" +"twitter","irrelevant","126879219484606464" +"twitter","irrelevant","126879210177441792" +"twitter","irrelevant","126879164258201601" +"twitter","irrelevant","126879138605834240" +"twitter","irrelevant","126879046071103488" +"twitter","irrelevant","126878948431900672" +"twitter","irrelevant","126878924411125760" +"twitter","irrelevant","126878914625802241" +"twitter","irrelevant","126878849656037377" +"twitter","irrelevant","126878819826139136" +"twitter","irrelevant","126878801970995200" +"twitter","irrelevant","126878766675918848" +"twitter","irrelevant","126878720098177024" +"twitter","irrelevant","126878709192990720" +"twitter","irrelevant","126878580494962688" +"twitter","irrelevant","126878489935753216" +"twitter","irrelevant","126878477424148480" +"twitter","irrelevant","126878461464821760" +"twitter","irrelevant","126878337342771201" +"twitter","irrelevant","126878307617746944" +"twitter","irrelevant","126878294867058688" +"twitter","irrelevant","126878250541645824" +"twitter","irrelevant","126878097030123520" +"twitter","irrelevant","126878057662398464" +"twitter","irrelevant","126878055091277824" +"twitter","irrelevant","126877892855611392" +"twitter","irrelevant","126877830150762496" +"twitter","irrelevant","126877791911292928" +"twitter","irrelevant","126877775830327296" +"twitter","irrelevant","126877738702344192" +"twitter","irrelevant","126877737410502659" +"twitter","irrelevant","126877693563244544" +"twitter","irrelevant","126877625867190273" +"twitter","irrelevant","126877589955543040" +"twitter","irrelevant","126877547899269120" +"twitter","irrelevant","126877547878289408" +"twitter","irrelevant","126877547710521344" 
+"twitter","irrelevant","126877547576311808" +"twitter","irrelevant","126877547244945408" +"twitter","irrelevant","126877540928331777" +"twitter","irrelevant","126877498981089280" +"twitter","irrelevant","126877457675595776" +"twitter","irrelevant","126877421919141889" +"twitter","irrelevant","126877358740344832" +"twitter","irrelevant","126877288418639872" +"twitter","irrelevant","126877230587576320" +"twitter","irrelevant","126877113348403201" +"twitter","irrelevant","126876910788673536" +"twitter","irrelevant","126876805134159872" +"twitter","irrelevant","126876741913415681" +"twitter","irrelevant","126876728206438400" +"twitter","irrelevant","126876695989985280" +"twitter","irrelevant","126876651551338496" +"twitter","irrelevant","126876632874106881" +"twitter","irrelevant","126876630491729920" +"twitter","irrelevant","126876586170523648" +"twitter","irrelevant","126876547025084416" +"twitter","irrelevant","126876538653245440" +"twitter","irrelevant","126876463965278208" +"twitter","irrelevant","126876452326080512" +"twitter","irrelevant","126876390388793344" +"twitter","irrelevant","126876299556962305" +"twitter","irrelevant","126876220460761088" +"twitter","irrelevant","126876206003003392" +"twitter","irrelevant","126876188294643712" +"twitter","irrelevant","126876082593992704" +"twitter","irrelevant","126876080538787841" +"twitter","irrelevant","126876073337159680" +"twitter","irrelevant","126876037689774080" +"twitter","irrelevant","126875903266529281" +"twitter","irrelevant","126875893481209858" +"twitter","irrelevant","126875748484124672" +"twitter","irrelevant","126875741869719552" +"twitter","irrelevant","126875719912538112" +"twitter","irrelevant","126875637720940544" +"twitter","irrelevant","126875605395456001" +"twitter","irrelevant","126875567168561152" +"twitter","irrelevant","126875553314783232" +"twitter","irrelevant","126875508662210560" +"twitter","irrelevant","126875401887821825" +"twitter","irrelevant","126875254437056512" +"twitter","irrelevant","126875239438229504" +"twitter","irrelevant","126875209897750528" +"twitter","irrelevant","126875201072922624" +"twitter","irrelevant","126875171008163840" +"twitter","irrelevant","126875123457331200" +"twitter","irrelevant","126875039621578752" +"twitter","irrelevant","126875034135433216" +"twitter","irrelevant","126875031983759360" +"twitter","irrelevant","126875028359888896" +"twitter","irrelevant","126875011221946368" +"twitter","irrelevant","126874994180497408" +"twitter","irrelevant","126874967433428992" +"twitter","irrelevant","126874894943260674" +"twitter","irrelevant","126874749321216000" +"twitter","irrelevant","126874719772356608" +"twitter","irrelevant","126874707780841472" +"twitter","irrelevant","126874706119888896" +"twitter","irrelevant","126874654055989248" +"twitter","irrelevant","126874645587701760" +"twitter","irrelevant","126874549550714880" +"twitter","irrelevant","126874482798370816" +"twitter","irrelevant","126874479291924480" +"twitter","irrelevant","126874451500474368" +"twitter","irrelevant","126874366775525377" +"twitter","irrelevant","126874348110888960" +"twitter","irrelevant","126874313423994880" +"twitter","irrelevant","126874273280303104" +"twitter","irrelevant","126874268779810817" +"twitter","irrelevant","126874226450903041" +"twitter","irrelevant","126874165105008640" +"twitter","irrelevant","126874164039659520" +"twitter","irrelevant","126874157840474113" +"twitter","irrelevant","126874145408561152" +"twitter","irrelevant","126874136017518593" +"twitter","irrelevant","126874079239217153" 
+"twitter","irrelevant","126874040261545985" +"twitter","irrelevant","126874002084990976" +"twitter","irrelevant","126873944501399552" +"twitter","irrelevant","126873912515624960" +"twitter","irrelevant","126873902499635200" +"twitter","irrelevant","126873886938763264" +"twitter","irrelevant","126873874909507584" +"twitter","irrelevant","126873816319262721" +"twitter","irrelevant","126873785512116225" +"twitter","irrelevant","126873756437200896" +"twitter","irrelevant","126873660442148865" +"twitter","irrelevant","126873649725718528" +"twitter","irrelevant","126873596080558082" +"twitter","irrelevant","126873574895140864" +"twitter","irrelevant","126873447912587264" +"twitter","irrelevant","126873417126383616" +"twitter","irrelevant","126873407487881217" +"twitter","irrelevant","126873260385239040" +"twitter","irrelevant","126873138079346688" +"twitter","irrelevant","126873037982281729" +"twitter","irrelevant","126873004494954496" +"twitter","irrelevant","126872948022849536" +"twitter","irrelevant","126872939638439936" +"twitter","irrelevant","126872936568201216" +"twitter","irrelevant","126872919480610816" +"twitter","irrelevant","126872906738319360" +"twitter","irrelevant","126872886232363008" +"twitter","irrelevant","126872881417293824" +"twitter","irrelevant","126872771929182209" +"twitter","irrelevant","126872763221819392" +"twitter","irrelevant","126872615380987905" +"twitter","irrelevant","126872483394621440" +"twitter","irrelevant","126872365211725824" +"twitter","irrelevant","126872362007277568" +"twitter","irrelevant","126872361462005760" +"twitter","irrelevant","126872326095638528" +"twitter","irrelevant","126872241693667328" +"twitter","irrelevant","126872218025213952" +"twitter","irrelevant","126872199620591617" +"twitter","irrelevant","126872143593095168" +"twitter","irrelevant","126872127986073600" +"twitter","irrelevant","126872084679892993" +"twitter","irrelevant","126872039138131968" +"twitter","irrelevant","126872023552102400" +"twitter","irrelevant","126871950185345024" +"twitter","irrelevant","126871942799175682" +"twitter","irrelevant","126871914579898369" +"twitter","irrelevant","126871909177626626" +"twitter","irrelevant","126871907302785024" +"twitter","irrelevant","126871890890461184" +"twitter","irrelevant","126871890320035841" +"twitter","irrelevant","126871852185436160" +"twitter","irrelevant","126871831583002625" +"twitter","irrelevant","126871719557341184" +"twitter","irrelevant","126871696887132160" +"twitter","irrelevant","126871658991599616" +"twitter","irrelevant","126871512195145729" +"twitter","irrelevant","126871511305961473" +"twitter","irrelevant","126871498832097280" +"twitter","irrelevant","126871426107047936" +"twitter","irrelevant","126871400500834304" +"twitter","irrelevant","126871372109594624" +"twitter","irrelevant","126871277813239808" +"twitter","irrelevant","126871263250620416" +"twitter","irrelevant","126871213732671488" +"twitter","irrelevant","126871199362990081" +"twitter","irrelevant","126871111416819712" +"twitter","irrelevant","126871089929392128" +"twitter","irrelevant","126871084950749184" +"twitter","irrelevant","126870987559026688" +"twitter","irrelevant","126870943330091008" +"twitter","irrelevant","126870919590330369" +"twitter","irrelevant","126870916729810944" +"twitter","irrelevant","126870873176150017" +"twitter","irrelevant","126870813839335424" +"twitter","irrelevant","126870802837680128" +"twitter","irrelevant","126870727617024000" +"twitter","irrelevant","126870682767343617" +"twitter","irrelevant","126870669492359169" 
+"twitter","irrelevant","126870663758757888" +"twitter","irrelevant","126870566916456448" +"twitter","irrelevant","126870498452832256" +"twitter","irrelevant","126870493402898434" +"twitter","irrelevant","126870445659131904" +"twitter","irrelevant","126870402910797826" +"twitter","irrelevant","126870391774908416" +"twitter","irrelevant","126870381867966465" +"twitter","irrelevant","126870318764662784" +"twitter","irrelevant","126870301471551489" +"twitter","irrelevant","126870289861718016" +"twitter","irrelevant","126870079798378497" +"twitter","irrelevant","126870058050912256" +"twitter","irrelevant","126870004766478336" +"twitter","irrelevant","126869995610324993" +"twitter","irrelevant","126869990535217152" +"twitter","irrelevant","126869983769788417" +"twitter","irrelevant","126869957085634560" +"twitter","irrelevant","126869921996091392" +"twitter","irrelevant","126869901922152448" +"twitter","irrelevant","126869798419300352" +"twitter","irrelevant","126869762763522049" +"twitter","irrelevant","126869705620332544" +"twitter","irrelevant","126869683449233408" +"twitter","irrelevant","126869567061495808" +"twitter","irrelevant","126869527416938496" +"twitter","irrelevant","126869515983257600" +"twitter","irrelevant","126869499046670336" +"twitter","irrelevant","126869440276078592" +"twitter","irrelevant","126869422857142272" +"twitter","irrelevant","126869344851472384" +"twitter","irrelevant","126869327222804480" +"twitter","irrelevant","126869308029669376" +"twitter","irrelevant","126869231693348865" +"twitter","irrelevant","126869213980786688" +"twitter","irrelevant","126869163250679810" +"twitter","irrelevant","126869159341604866" +"twitter","irrelevant","126869146045644800" +"twitter","irrelevant","126869110901575680" +"twitter","irrelevant","126869094610907136" +"twitter","irrelevant","126869070292320256" +"twitter","irrelevant","126869031180451840" +"twitter","irrelevant","126869021969756161" +"twitter","irrelevant","126868980664250369" +"twitter","irrelevant","126868924590600192" +"twitter","irrelevant","126868846517817344" +"twitter","irrelevant","126868773562089472" +"twitter","irrelevant","126868680003960833" +"twitter","irrelevant","126868673213378560" +"twitter","irrelevant","126868623645089792" +"twitter","irrelevant","126868612408553472" +"twitter","irrelevant","126868611922010112" +"twitter","irrelevant","126868601721462784" +"twitter","irrelevant","126868586882007041" +"twitter","irrelevant","126868539784183808" +"twitter","irrelevant","126868539641577472" +"twitter","irrelevant","126868472515928064" +"twitter","irrelevant","126868462336348162" +"twitter","irrelevant","126868364927827968" +"twitter","irrelevant","126868357625548800" +"twitter","irrelevant","126868352835661824" +"twitter","irrelevant","126868349396324352" +"twitter","irrelevant","126868328659693568" +"twitter","irrelevant","126868322431143937" +"twitter","irrelevant","126868256479920128" +"twitter","irrelevant","126868229409865728" +"twitter","irrelevant","126868208924884993" +"twitter","irrelevant","126868204881580032" +"twitter","irrelevant","126868198032281600" +"twitter","irrelevant","126868196295843840" +"twitter","irrelevant","126868167921373185" +"twitter","irrelevant","126868148879241216" +"twitter","irrelevant","126868137827250176" +"twitter","irrelevant","126868136531197952" +"twitter","irrelevant","126867955844788225" +"twitter","irrelevant","126867858461433857" +"twitter","irrelevant","126867818242248704" +"twitter","irrelevant","126867784494891008" +"twitter","irrelevant","126867765343686656" 
+"twitter","irrelevant","126867764282540032" +"twitter","irrelevant","126867734490382336" +"twitter","irrelevant","126867715947368449" +"twitter","irrelevant","126867611546943490" +"twitter","irrelevant","126867510288072705" +"twitter","irrelevant","126867253609234433" +"twitter","irrelevant","126867181358170112" +"twitter","irrelevant","126867170742374400" +"twitter","irrelevant","126867149775044608" +"twitter","irrelevant","126867019646771201" +"twitter","irrelevant","126867015620239360" +"twitter","irrelevant","126867000030007296" +"twitter","irrelevant","126866948909842432" +"twitter","irrelevant","126866905096130560" +"twitter","irrelevant","126866882979573760" +"twitter","irrelevant","126866835449708544" +"twitter","irrelevant","126866833537110016" +"twitter","irrelevant","126866827715420160" +"twitter","irrelevant","126866789962498048" +"twitter","irrelevant","126866773705375744" +"twitter","irrelevant","126866759792852992" +"twitter","irrelevant","126866701877903361" +"twitter","irrelevant","126866689236283392" +"twitter","irrelevant","126866515122331648" +"twitter","irrelevant","126866508818296832" +"twitter","irrelevant","126866474806673408" +"twitter","irrelevant","126866454845984768" +"twitter","irrelevant","126866426521849857" +"twitter","irrelevant","126866385757417472" +"twitter","irrelevant","126866312130609152" +"twitter","irrelevant","126866267998134272" +"twitter","irrelevant","126866222355722240" +"twitter","irrelevant","126866176889454592" +"twitter","irrelevant","126866076016447489" +"twitter","irrelevant","126866058337460224" +"twitter","irrelevant","126866027316383744" +"twitter","irrelevant","126866021402427392" +"twitter","irrelevant","126865987365634048" +"twitter","irrelevant","126865977970401280" +"twitter","irrelevant","126865966624813057" +"twitter","irrelevant","126865936115445761" +"twitter","irrelevant","126865932806139904" +"twitter","irrelevant","126865890665967618" +"twitter","irrelevant","126865868599726080" +"twitter","irrelevant","126865864086667264" +"twitter","irrelevant","126865777423941633" +"twitter","irrelevant","126865704380153856" +"twitter","irrelevant","126865703969099776" +"twitter","irrelevant","126865661602447362" +"twitter","irrelevant","126865633869709312" +"twitter","irrelevant","126865576093171712" +"twitter","irrelevant","126865573228445696" +"twitter","irrelevant","126865566236553216" +"twitter","irrelevant","126865487584968704" +"twitter","irrelevant","126865416671862785" +"twitter","irrelevant","126865410598502400" +"twitter","irrelevant","126865389861871617" +"twitter","irrelevant","126865304277090304" +"twitter","irrelevant","126865296463118337" +"twitter","irrelevant","126865215492071424" +"twitter","irrelevant","126865127977918464" +"twitter","irrelevant","126865092565417984" +"twitter","irrelevant","126865045949919233" +"twitter","irrelevant","126865038676987904" +"twitter","irrelevant","126865014215802880" +"twitter","irrelevant","126864993760182272" +"twitter","irrelevant","126864955285831680" +"twitter","irrelevant","126864954140803072" +"twitter","irrelevant","126864917566455809" +"twitter","irrelevant","126864908582260736" +"twitter","irrelevant","126864848649846784" +"twitter","irrelevant","126864847399944192" +"twitter","irrelevant","126864712766980096" +"twitter","irrelevant","126864692470751232" +"twitter","irrelevant","126864673416032256" +"twitter","irrelevant","126864623252144128" +"twitter","irrelevant","126864485943214080" +"twitter","irrelevant","126864460957749248" +"twitter","irrelevant","126864441999507456" 
+"twitter","irrelevant","126864344125419520" +"twitter","irrelevant","126864341071962112" +"twitter","irrelevant","126864316120047617" +"twitter","irrelevant","126864259027181568" +"twitter","irrelevant","126864201804300289" +"twitter","irrelevant","126864188969725953" +"twitter","irrelevant","126864074045784064" +"twitter","irrelevant","126864072447766528" +"twitter","irrelevant","126864016613183489" +"twitter","irrelevant","126863938339094531" +"twitter","irrelevant","126863859075121152" +"twitter","irrelevant","126863845326209024" +"twitter","irrelevant","126863808177254400" +"twitter","irrelevant","126863734265217025" +"twitter","irrelevant","126863732931432448" +"twitter","irrelevant","126863723179675648" +"twitter","irrelevant","126863722412118017" +"twitter","irrelevant","126863708889690112" +"twitter","irrelevant","126863696726200320" +"twitter","irrelevant","126863647845789696" +"twitter","irrelevant","126863644968501250" +"twitter","irrelevant","126863594850746369" +"twitter","irrelevant","126863579914829824" +"twitter","irrelevant","126863563133431809" +"twitter","irrelevant","126863551355822080" +"twitter","irrelevant","126863538626109440" +"twitter","irrelevant","126863416211156992" +"twitter","irrelevant","126863368492564481" +"twitter","irrelevant","126863360779227136" +"twitter","irrelevant","126863360686956544" +"twitter","irrelevant","126863321327611904" +"twitter","irrelevant","126863319226265600" +"twitter","irrelevant","126863283658567680" +"twitter","irrelevant","126863189844566016" +"twitter","irrelevant","126863136111333376" +"twitter","irrelevant","126863118398787584" +"twitter","irrelevant","126863108802232320" +"twitter","irrelevant","126863108688986112" +"twitter","irrelevant","126863078112509952" +"twitter","irrelevant","126863012433899520" +"twitter","irrelevant","126862981333139457" +"twitter","irrelevant","126862918921891840" +"twitter","irrelevant","126862904514445312" +"twitter","irrelevant","126862853822099456" +"twitter","irrelevant","126862852681240576" +"twitter","irrelevant","126862832489861121" +"twitter","irrelevant","126862813355442176" +"twitter","irrelevant","126862789649248256" +"twitter","irrelevant","126862725933576192" +"twitter","irrelevant","126862722028666880" +"twitter","irrelevant","126862722007699456" +"twitter","irrelevant","126862721793789953" +"twitter","irrelevant","126862721609248769" +"twitter","irrelevant","126862719927332864" +"twitter","irrelevant","126862668039598080" +"twitter","irrelevant","126862654324211713" +"twitter","irrelevant","126862632648065025" +"twitter","irrelevant","126862595117424641" +"twitter","irrelevant","126862536162295808" +"twitter","irrelevant","126862464003477504" +"twitter","irrelevant","126862447041712128" +"twitter","irrelevant","126862429912186880" +"twitter","irrelevant","126862343148802048" +"twitter","irrelevant","126862329269858305" +"twitter","irrelevant","126862315990679552" +"twitter","irrelevant","126862308302532608" +"twitter","irrelevant","126862269148692481" +"twitter","irrelevant","126862268725080065" +"twitter","irrelevant","126862217013497856" +"twitter","irrelevant","126862201440043008" +"twitter","irrelevant","126862151393619968" +"twitter","irrelevant","126862151343284224" +"twitter","irrelevant","126862150265352193" +"twitter","irrelevant","126862116874489856" +"twitter","irrelevant","126861992001679360" +"twitter","irrelevant","126861988004499457" +"twitter","irrelevant","126861979762696192" +"twitter","irrelevant","126861960137539585" +"twitter","irrelevant","126861955762888706" 
+"twitter","irrelevant","126861933071704065" +"twitter","irrelevant","126861782378745856" +"twitter","irrelevant","126861771314184192" +"twitter","irrelevant","126861717790658561" +"twitter","irrelevant","126861611741872128" +"twitter","irrelevant","126861593249185793" +"twitter","irrelevant","126861589122002944" +"twitter","irrelevant","126861539511775232" +"twitter","irrelevant","126861496109121536" +"twitter","irrelevant","126861493059854336" +"twitter","irrelevant","126861480128819201" +"twitter","irrelevant","126861380992241664" +"twitter","irrelevant","126861357831303168" +"twitter","irrelevant","126861252134830080" +"twitter","irrelevant","126861245109387264" +"twitter","irrelevant","126861233138827264" +"twitter","irrelevant","126861218592980992" +"twitter","irrelevant","126861187605471232" +"twitter","irrelevant","126861118684676096" +"twitter","irrelevant","126861100120686592" +"twitter","irrelevant","126860970583797760" +"twitter","irrelevant","126860953450065920" +"twitter","irrelevant","126860911557345280" +"twitter","irrelevant","126860890749419521" +"twitter","irrelevant","126860877344407552" +"twitter","irrelevant","126860835892109312" +"twitter","irrelevant","126860830942834689" +"twitter","irrelevant","126860808251637762" +"twitter","irrelevant","126860789536657408" +"twitter","irrelevant","126860754912681985" +"twitter","irrelevant","126860740689797120" +"twitter","irrelevant","126860719860883458" +"twitter","irrelevant","126860653167263744" +"twitter","irrelevant","126860610020446209" +"twitter","irrelevant","126860576600227841" +"twitter","irrelevant","126860372987740160" +"twitter","irrelevant","126860329463447552" +"twitter","irrelevant","126860270181171201" +"twitter","irrelevant","126860146289803265" +"twitter","irrelevant","126860127167987712" +"twitter","irrelevant","126860114610241536" +"twitter","irrelevant","126860109090521088" +"twitter","irrelevant","126859990513369088" +"twitter","irrelevant","126859860640940032" +"twitter","irrelevant","126859857604247552" +"twitter","irrelevant","126859846506127360" +"twitter","irrelevant","126859794383515649" +"twitter","irrelevant","126859777308495872" +"twitter","irrelevant","126859668151738368" +"twitter","irrelevant","126859627051757568" +"twitter","irrelevant","126859610006110208" +"twitter","irrelevant","126859509414100992" +"twitter","irrelevant","126859503495938048" +"twitter","irrelevant","126859490061598720" +"twitter","irrelevant","126859443127332864" +"twitter","irrelevant","126859428455657472" +"twitter","irrelevant","126859363079041024" +"twitter","irrelevant","126859354614939648" +"twitter","irrelevant","126859286558158849" +"twitter","irrelevant","126859257856540672" +"twitter","irrelevant","126859211593351168" +"twitter","irrelevant","126859155175772161" +"twitter","irrelevant","126859077304320001" +"twitter","irrelevant","126858835058098176" +"twitter","irrelevant","126858789868670976" +"twitter","irrelevant","126858639855206400" +"twitter","irrelevant","126858516655898625" +"twitter","irrelevant","126858260216160256" +"twitter","irrelevant","126858248325308416" +"twitter","irrelevant","126858186962644992" +"twitter","irrelevant","126858148878368769" +"twitter","irrelevant","126858034466131969" +"twitter","irrelevant","126858032951996416" +"twitter","irrelevant","126858004690767872" +"twitter","irrelevant","126857921068941314" +"twitter","irrelevant","126857918929838080" +"twitter","irrelevant","126857746200014849" +"twitter","irrelevant","126857736238530560" +"twitter","irrelevant","126857518591901698" 
+"twitter","irrelevant","126857511230902272" +"twitter","irrelevant","126857421321797634" +"twitter","irrelevant","126857383715676160" +"twitter","irrelevant","126857361515216897" +"twitter","irrelevant","126857211921174528" +"twitter","irrelevant","126857080513638400" +"twitter","irrelevant","126857071667847168" +"twitter","irrelevant","126857049920385024" +"twitter","irrelevant","126856764242137088" +"twitter","irrelevant","126856732331884545" +"twitter","irrelevant","126856603021484032" +"twitter","irrelevant","126856425371746304" +"twitter","irrelevant","126856274531991552" +"twitter","irrelevant","126856135918620673" +"twitter","irrelevant","126856097863708672" +"twitter","irrelevant","126856097431699456" +"twitter","irrelevant","126855687060987904" +"twitter","irrelevant","126855171702661120" +"twitter","irrelevant","126854999442587648" +"twitter","irrelevant","126854818101858304" +"twitter","irrelevant","126854423317188608" \ No newline at end of file diff --git a/full-corpus.csv b/full-corpus.csv new file mode 100644 index 0000000..bf1d1de --- /dev/null +++ b/full-corpus.csv @@ -0,0 +1,5396 @@ +"Topic","Sentiment","TweetId","TweetDate","TweetText" +"apple","positive","126415614616154112","Tue Oct 18 21:53:25 +0000 2011","Now all @Apple has to do is get swype on the iphone and it will be crack. Iphone that is" +"apple","positive","126404574230740992","Tue Oct 18 21:09:33 +0000 2011","@Apple will be adding more carrier support to the iPhone 4S (just announced)" +"apple","positive","126402758403305474","Tue Oct 18 21:02:20 +0000 2011","Hilarious @youtube video - guy does a duet with @apple 's Siri. Pretty much sums up the love affair! http://t.co/8ExbnQjY" +"apple","positive","126397179614068736","Tue Oct 18 20:40:10 +0000 2011","@RIM you made it too easy for me to switch to @Apple iPhone. See ya!" +"apple","positive","126395626979196928","Tue Oct 18 20:34:00 +0000 2011","I just realized that the reason I got into twitter was ios5 thanks @apple" +"apple","positive","126394830791254016","Tue Oct 18 20:30:50 +0000 2011","I'm a current @Blackberry user, little bit disappointed with it! Should I move to @Android or @Apple @iphone" +"apple","positive","126379685453119488","Tue Oct 18 19:30:39 +0000 2011","The 16 strangest things Siri has said so far. I am SOOO glad that @Apple gave Siri a sense of humor! http://t.co/TWAeUDBp via @HappyPlace" +"apple","positive","126377656416612353","Tue Oct 18 19:22:35 +0000 2011","Great up close & personal event @Apple tonight in Regent St store!" +"apple","positive","126373779483004928","Tue Oct 18 19:07:11 +0000 2011","From which companies do you experience the best customer service aside from @zappos and @apple?" +"apple","positive","126366353757179904","Tue Oct 18 18:37:41 +0000 2011","Just apply for a job at @Apple, hope they call me lol" +"apple","positive","126366123368267776","Tue Oct 18 18:36:46 +0000 2011","RT @JamaicanIdler: Lmao I think @apple is onto something magical! I am DYING!!! haha. Siri suggested where to find whores and where to h ..." +"apple","positive","126365858481188864","Tue Oct 18 18:35:43 +0000 2011","Lmao I think @apple is onto something magical! I am DYING!!! haha. Siri suggested where to find whores and where to hide a body lolol" +"apple","positive","126360935509135362","Tue Oct 18 18:16:09 +0000 2011","RT @PhillipRowntree: Just registered as an @apple developer... Here's hoping I can actually do it... Any help, greatly appreciated!" +"apple","positive","126360398885687296","Tue Oct 18 18:14:01 +0000 2011","Wow. 
Great deals on refurbed #iPad (first gen) models. RT: Apple offers great deals on refurbished 1st-gen iPads http://t.co/ukWOKBGd @Apple" +"apple","positive","126358340220616704","Tue Oct 18 18:05:50 +0000 2011","Just registered as an @apple developer... Here's hoping I can actually do it... Any help, greatly appreciated!" +"apple","positive","126357982685569024","Tue Oct 18 18:04:25 +0000 2011","你好 ! Currently learning Mandarin for my upcoming trip to Hong Kong. I gotta hand it to @Apple iPhones & their uber useful flashcard apps " +"apple","positive","126352268705538048","Tue Oct 18 17:41:43 +0000 2011","Come to the dark side 📱“@gretcheneclark: Hey @apple, if you send me a free iPhone, I will publicly and ceremoniously burn my #BlackBerry.â€" +"apple","positive","126350302113824769","Tue Oct 18 17:33:54 +0000 2011","Hey @apple, if you send me a free iPhone (any version will do), I will publicly and ceremoniously burn my #BlackBerry." +"apple","positive","126349695676203009","Tue Oct 18 17:31:29 +0000 2011","Thank you @apple for Find My Mac - just located and wiped my stolen Air. #smallvictory #thievingbastards" +"apple","positive","126342268603998208","Tue Oct 18 17:01:58 +0000 2011","Thanks to @Apple Covent Garden #GeniusBar for replacing my MacBook keyboard/cracked wristpad during my lunch break today, out of warranty." +"apple","positive","126325800080392193","Tue Oct 18 15:56:32 +0000 2011","@DailyDealChat @apple Thanks!!" +"apple","positive","126324177501302784","Tue Oct 18 15:50:05 +0000 2011","iPads Replace Bound Playbooks on Some N.F.L. Teams http://t.co/2UXAWKwf @apple @nytimes" +"apple","positive","126323785145126912","Tue Oct 18 15:48:32 +0000 2011","@apple..good ipad" +"apple","positive","126322063332999169","Tue Oct 18 15:41:41 +0000 2011","@apple @siri is efffing amazing!!" +"apple","positive","126319186141130752","Tue Oct 18 15:30:15 +0000 2011","Amazing new @Apple iOs 5 feature. http://t.co/jatFVfpM" +"apple","positive","126318009647235072","Tue Oct 18 15:25:35 +0000 2011","RT @TripLingo: We're one of a few ""Featured Education Apps"" on the @Apple **Website** today, sweet! http://t.co/0yWvbe1Z" +"apple","positive","126315223060709376","Tue Oct 18 15:14:30 +0000 2011","We're one of a few ""Featured Education Apps"" on the @Apple **Website** today, sweet! http://t.co/0yWvbe1Z" +"apple","positive","126315011600678913","Tue Oct 18 15:13:40 +0000 2011","When you want something done right, you do it yourself... or go to @Apple. AT&T you're useless these days. #yourdaysarenumbered + +" +"apple","positive","126314687116750849","Tue Oct 18 15:12:22 +0000 2011","We did an unexpected workshop for the #iPhone4S at @apple yesterday and we got an awesome amount of info #notjustaboutthephone @gamerchik16" +"apple","positive","126312877916307458","Tue Oct 18 15:05:11 +0000 2011","<3 #ios5 @apple" +"apple","positive","126311981564178432","Tue Oct 18 15:01:37 +0000 2011","---» RT @Apple No question bro. RT @AintEeenTrippin: Should I get dis iPhone or a EVO 3D?" +"apple","positive","126307801046847488","Tue Oct 18 14:45:01 +0000 2011","RT @imightbewrong: I'm OVER people bitching about the #iPhone4S... I think it's the smartest phone I've ever had and I'm very happy. : ..." +"apple","positive","126302673820594176","Tue Oct 18 14:24:38 +0000 2011","I'm OVER people bitching about the #iPhone4S... I think it's the smartest phone I've ever had and I'm very happy. :) Way to go @Apple!" 
+"apple","positive","126301956951117826","Tue Oct 18 14:21:47 +0000 2011","@Twitter CEO points to @Apple as 'corporate mentor' as @iOS signups triple http://t.co/GCY8iphN" +"apple","positive","126287654093471745","Tue Oct 18 13:24:57 +0000 2011","At the bus with my iPhone ;) thxx @apple" +"apple","positive","126284506360578049","Tue Oct 18 13:12:27 +0000 2011","@azee1v1 @apple @umber AppStore is well done, so is iTunes on the mobile devices. I was talking about desktop app." +"apple","positive","126263834968211456","Tue Oct 18 11:50:18 +0000 2011","NYTimes: Coach Wants to See You. And Bring Your iPad. http://t.co/J2FTiEnG #iPad @apple set red 42 red 42 hut hut @NFL wish I had an #iPad" +"apple","positive","126256230397259776","Tue Oct 18 11:20:05 +0000 2011","@apple @jilive @DanielPink: Apple sells 4 million iPhone 4S units in first weekend ... Steve Jobs brilliance lives on for ever! #iphone #RVA" +"apple","positive","126213333123743744","Tue Oct 18 08:29:38 +0000 2011","@blackberry is like the #Titanic and it seems everyone is running for the @apple #iPhone life rafts and there won't be enough for everyone!" +"apple","positive","126195522691280896","Tue Oct 18 07:18:51 +0000 2011","@bkad5161 than apologize to @apple ;)" +"apple","positive","126183339945234432","Tue Oct 18 06:30:27 +0000 2011","@Apple downloads of iOS 5 are proving popular with users -- http://t.co/NSHLfiUX" +"apple","positive","126180209501286400","Tue Oct 18 06:18:01 +0000 2011","Lmfao look at the argument I had with Siri !! +@ijustine @apple http://t.co/D4VjL7SI" +"apple","positive","126164430546403328","Tue Oct 18 05:15:18 +0000 2011","Incredible: 4 million iPhone 4Ss in 3 days. 135% better than the iPhone 4 http://t.co/1FMJxTMM @apple #iphone4s" +"apple","positive","126148685737361408","Tue Oct 18 04:12:45 +0000 2011","Save me from #HP's unwanted OS! Help me buy an #iPhone! I have seen the light! #lol http://t.co/8gUP9Acz #backchannel @apple" +"apple","positive","126140794078892033","Tue Oct 18 03:41:23 +0000 2011","Well @apple fixed my #ios5 battery drain problem with a replacement iPhone 4 -- it's working like a champ now" +"apple","positive","126134400466419712","Tue Oct 18 03:15:59 +0000 2011","Currently ordering a BRAND NEW MACBOOK PRO!!! Bahhh... my MacBook is 5 years old. I'll miss it. But it's time. cc: @Apple -" +"apple","positive","126130991365500928","Tue Oct 18 03:02:26 +0000 2011","you are so blessed. @apple" +"apple","positive","126116898051076096","Tue Oct 18 02:06:26 +0000 2011","#Siri now knows who my dad, mom, brother and girlfriend is. Thanks @apple" +"apple","positive","126116614495154176","Tue Oct 18 02:05:18 +0000 2011","Well at least the @apple store has amazing call waiting music! #need4s" +"apple","positive","126112836219973632","Tue Oct 18 01:50:17 +0000 2011","#sweet... #apple replaced my glass #probono. thank you @apple" +"apple","positive","126107965991297024","Tue Oct 18 01:30:56 +0000 2011","Not Bad! 
@Apple Sells Over 4 Million #IPhones in Debut Weekend - Bloomberg http://t.co/AVSl3ygU - #smartphone #sm RT @VinodRad" +"apple","positive","126104732426186752","Tue Oct 18 01:18:05 +0000 2011","loving new technology from @apple iPhone 4s, mac air and iCloud are unreal #technology" +"apple","positive","126097426493878272","Tue Oct 18 00:49:03 +0000 2011","I'm loving this new IOS5 update :) @apple" +"apple","positive","126095744531832832","Tue Oct 18 00:42:22 +0000 2011","Another mention for Apple Store: http://t.co/fiIOApKt - RT @floridamike Once again getting great customer service from the @apple store ..." +"apple","positive","126084907343691776","Mon Oct 17 23:59:19 +0000 2011","Time to go get my iPhone 4s. Looking forward to sticking it to the man by no longer paying for most texts. Thanks @apple." +"apple","positive","126079414986485761","Mon Oct 17 23:37:29 +0000 2011","hey @apple I hate my computer i need a #mack wanna send me a free one." +"apple","positive","126076743613284354","Mon Oct 17 23:26:52 +0000 2011","Thank you @apple. My new gf(iphone4s) is great! She does everything!" +"apple","positive","126076238375817216","Mon Oct 17 23:24:52 +0000 2011","#iCloud set up was flawless and works like a champ! To the Cloud @Apple" +"apple","positive","126075534894571520","Mon Oct 17 23:22:04 +0000 2011","@Wisconsin_Mommy @Apple I'd totally email the company... I always get great service at our @Apple store!" +"apple","positive","126064519943426048","Mon Oct 17 22:38:18 +0000 2011","@apple loving the new IOS5 upgrade for the iPhone!" +"apple","positive","126063569660936193","Mon Oct 17 22:34:31 +0000 2011","The nice @apple tech support guy fixed my iTouch =D" +"apple","positive","126063358037340161","Mon Oct 17 22:33:41 +0000 2011","Once again getting great customer service from the @apple store at millenia mall." +"apple","positive","126059405941809152","Mon Oct 17 22:17:59 +0000 2011","Is it just me or is #iOS5 faster for the iPad? @apple" +"apple","positive","126059399319003136","Mon Oct 17 22:17:57 +0000 2011","I love our @apple imac even though I haven't seen my hubby in 3 days now! #geek" +"apple","positive","126057030996852737","Mon Oct 17 22:08:32 +0000 2011","making the switch from @Android to @Apple #iphone #iphone4S #smartphone #stevejobs (@ Apple Store) http://t.co/kj6pJvkH" +"apple","positive","126049183865114624","Mon Oct 17 21:37:21 +0000 2011","So THANKFUL for the incredible people @apple for going above and beyond and offering to and replacing my water-damaged Macbook Pro!!! Wow!" +"apple","positive","126040352237961217","Mon Oct 17 21:02:16 +0000 2011","New macbook is too sick @apple" +"apple","positive","126040074595999746","Mon Oct 17 21:01:10 +0000 2011","Play on ma man. Loving the camera in the #iphone4s. Well done @apple #fb http://t.co/tmdFqRe1" +"apple","positive","126034495991328768","Mon Oct 17 20:39:00 +0000 2011","So yeah... @apple #iOS5 #readinglists have changed my life. #nowicanspendevenmoretimeonmyphone." +"apple","positive","126026756623831041","Mon Oct 17 20:08:14 +0000 2011","@Apple Safari Reader owns the worldwide web" +"apple","positive","126019393460244481","Mon Oct 17 19:38:59 +0000 2011","I love @apple service . 
My case has cracked 3x and I go in and they hand me a case and I walk out" +"apple","positive","126015087386431488","Mon Oct 17 19:21:52 +0000 2011","#10twitterpeopleiwouldliketomeet +@coollike @TheGadgetShow @thelittleappkid @Jon4Lakers @BenRubery @Apple @twitter @FXhomeHitFilm (-2)" +"apple","positive","126009748020658177","Mon Oct 17 19:00:39 +0000 2011","Said to have laid out the next 4 years @apple.Jobs last iPhone is 2012 not the iPhone4S. iPhone(4G/5) 2012 is magical! http://t.co/DxxklUBp" +"apple","positive","126008369562652672","Mon Oct 17 18:55:11 +0000 2011","Kind of excited. On my way to my last class right now and then going to the @Apple store, so buy #MacOSC Snow Leopard and Lion :-)" +"apple","positive","126002597063696384","Mon Oct 17 18:32:14 +0000 2011","i used to be with @blackberry over 4-5yrs .. after all the disruptions and lost gigs thx to their service im moving to @apple #iphone" +"apple","positive","125999676972470272","Mon Oct 17 18:20:38 +0000 2011","Apple sells 4 million iPhones in 3 days @apple keep doing what you are doing, because you are doing it well! http://t.co/ZZc6bE0w" +"apple","positive","125995158679461888","Mon Oct 17 18:02:41 +0000 2011","Yessss! I'm lovin the iPhone update especially the slide down bar at top of screen =) good job @Apple." +"apple","positive","125978568726560768","Mon Oct 17 16:56:46 +0000 2011","4 millions in a weekend, 16 #iPhone4S per second. This is madness?! no, this is @Apple !!!" +"apple","positive","125974505385500672","Mon Oct 17 16:40:37 +0000 2011",".@apple you got me. I'm now invested. MacBook Pro next year. Time to get on selling more of my #android gear" +"apple","positive","125960325437722624","Mon Oct 17 15:44:16 +0000 2011","@iancollinsuk @apple I like what you did there...!" +"apple","positive","125959059957485569","Mon Oct 17 15:39:14 +0000 2011","I just sent my grandma a post card using my #CardsApp thanks @Apple" +"apple","positive","125947912306954240","Mon Oct 17 14:54:56 +0000 2011","@KostaTsetsekas @apple Putting it in the wash is kind of the equivalent to ""Will it blend?"" Glad to hear it's still alive." +"apple","positive","125947232359948288","Mon Oct 17 14:52:14 +0000 2011","Laundering Ari's iPhone not my finest moment. But after drying in bag of (organic :-) rice for 4 days it booted up!!!!!!!!!!! @apple" +"apple","positive","125943290288803841","Mon Oct 17 14:36:34 +0000 2011","Bravo, @Apple! http://t.co/BgoTzj7K" +"apple","positive","125940394566483968","Mon Oct 17 14:25:04 +0000 2011","God Bless @YouTube, @apple for #appletv & our bad ass system. LOVING #PrincessOfChina. GB to @coldplay & @rihanna too :)" +"apple","positive","125925618486489088","Mon Oct 17 13:26:21 +0000 2011","Been off twitter for a few days as I smashed my iPhone but @apple were very nice and gave me a new one :)" +"apple","positive","125924446430183425","Mon Oct 17 13:21:42 +0000 2011","Thank you @Apple iOS 5 for email pop up on the lock screen and opening it when unlocking." +"apple","positive","125922999651139584","Mon Oct 17 13:15:57 +0000 2011","One word - #wow. RT @jldavid iPhone 4S First Weekend Sales Top Four Million: http://t.co/Zx5Pw0GT (via @apple)" +"apple","positive","125909565031198720","Mon Oct 17 12:22:34 +0000 2011","This good here iPhone will do me VERY well today. Thanks to the gods that are @apple." +"apple","positive","125902301931126785","Mon Oct 17 11:53:42 +0000 2011","RT @MN2NOVA: Love ios5 Easter eggs. Pull down from middle top to bottom and see what pulls down. Awesome little feature! 
#ios5 @apple" +"apple","positive","125901202591461376","Mon Oct 17 11:49:20 +0000 2011","Love ios5 Easter eggs. Pull down from middle top to bottom and see what pulls down. Awesome little feature! #ios5 @apple" +"apple","positive","125900497327636480","Mon Oct 17 11:46:32 +0000 2011","Love #ios5 Easter eggs. Pull down from middle top to bottom and see what pulls down. Awesome little feature! @apple #lovemyiphone" +"apple","positive","125898611572740097","Mon Oct 17 11:39:02 +0000 2011","Updated my iOS and started using cloud services. Pretty bad ass @apple my #iPhone 3GS still the champ." +"apple","positive","125850288488841217","Mon Oct 17 08:27:01 +0000 2011","Gone for a run, beautiful morning , man do I love iOS 5 @apple, #iPhone" +"apple","positive","125840039031738368","Mon Oct 17 07:46:17 +0000 2011","@apple your simply the best." +"apple","positive","125794931439702016","Mon Oct 17 04:47:03 +0000 2011","I must admit @apple has made me a very happy camper! I have text tones now! Lol! Ring tone: #MakeMeProud Drakes vers! Text tone: Nicki's" +"apple","positive","125728717942161408","Mon Oct 17 00:23:56 +0000 2011","Day305, I'm thankful for the great customer service received today from @Apple via phone CS, new phone on the way #365daysofgratefulness" +"apple","positive","125727629012770816","Mon Oct 17 00:19:37 +0000 2011","S/O to @apple for replacing my phone for free" +"apple","positive","125722746100531200","Mon Oct 17 00:00:13 +0000 2011","Loving the new iPod update @apple" +"apple","positive","125717622728818688","Sun Oct 16 23:39:51 +0000 2011","@alexlindsay My wife upgraded her iPhone 4. I think Siri alone is worth the upgrade. Looking forward to @Apple continuing to enhance Siri." +"apple","positive","125714253452812288","Sun Oct 16 23:26:28 +0000 2011","RT @tomkeene Thx @instagram Thx @apple #hypo #D-76 #tri-x http://t.co/BPPJwncp" +"apple","positive","125713935344214016","Sun Oct 16 23:25:12 +0000 2011","@SteveJobs being honored tonite @Apple...A truly great loss to the world.He will so be missed" +"apple","positive","125712433087123456","Sun Oct 16 23:19:14 +0000 2011","Thx @instagram Thx @apple #hypo #D-76 #tri-x http://t.co/D7EeJHBT" +"apple","positive","125708639607599104","Sun Oct 16 23:04:09 +0000 2011","Loving my new #iPhone4S thanks @apple for #ios5" +"apple","positive","125706813583798274","Sun Oct 16 22:56:54 +0000 2011","i love this. so much. thank you @apple. http://t.co/Ui8lOEzX" +"apple","positive","125701161926930433","Sun Oct 16 22:34:27 +0000 2011","@apple the iPhone 4s is great #genius" +"apple","positive","125699573799845888","Sun Oct 16 22:28:08 +0000 2011","@apple Cards app notifies me the card I sent has arrived at local post office and should be delivered today... Sunday. Truly is #magic." +"apple","positive","125688922410975232","Sun Oct 16 21:45:48 +0000 2011","Love my new I0S5 @Apple updates. Just when I think it can't get any better somehow it simplifies my life more. That's right-it's an Apple." +"apple","positive","125681742760771584","Sun Oct 16 21:17:17 +0000 2011","@apple Siri is amazing" +"apple","positive","125680049478316032","Sun Oct 16 21:10:33 +0000 2011","@rygurl you need an @apple iphone4S with Siri!" +"apple","positive","125677424565424128","Sun Oct 16 21:00:07 +0000 2011","Meet #Siri, your new iPhone butler. 
Click the link and be amazed by all it can do: http://t.co/lvfFdCEL @Apple" +"apple","positive","125673004511412224","Sun Oct 16 20:42:33 +0000 2011","just my man @apple store in @schaumburg, whoops!!!!ðŸ˜" +"apple","positive","125667241978114048","Sun Oct 16 20:19:39 +0000 2011","So, I am using my work PC (NEVER EVER) to get a feel for it; it has the worst speakers ever!! @apple you have spoiled me!! #imamac" +"apple","positive","125664375364255744","Sun Oct 16 20:08:17 +0000 2011","I ♥ @Apple http://t.co/a8on3IAa" +"apple","positive","125662399217930240","Sun Oct 16 20:00:25 +0000 2011","@apple just got the new iOS5 upgrade with iMessage...good luck surviving now @BlackBerry" +"apple","positive","125652668080336896","Sun Oct 16 19:21:45 +0000 2011","Loving #iOS5 !! #awesome @Apple" +"apple","positive","125648027045199873","Sun Oct 16 19:03:18 +0000 2011","RT @MattyRiesz: @kathrynyee You were right, an iPhone is a must have. #addicted {WELCOME TO THE @APPLE CLUB}" +"apple","positive","125645258003464192","Sun Oct 16 18:52:18 +0000 2011","Thank you @apple for your innovations. Exhibit A: Guy playing with Facetime instead of watching game at sports bar. http://t.co/oU7K39ge" +"apple","positive","125643107260829697","Sun Oct 16 18:43:45 +0000 2011","@blackberry boo hiss!............@apple wuhu!!!!!!!! When will my berry powered technology actually work??" +"apple","positive","125633677597229056","Sun Oct 16 18:06:17 +0000 2011","@apple by far the best iPod and first time iPhone ever.... Good job guys" +"apple","positive","125633065757310976","Sun Oct 16 18:03:51 +0000 2011","Thank you Steve @apple store 5th av. http://t.co/nSAisriP" +"apple","positive","125628199269961729","Sun Oct 16 17:44:31 +0000 2011","@Apple's Siri is witchcraft. What's next @googleresearch. 2 yr lead lost?" +"apple","positive","125623745284018176","Sun Oct 16 17:26:49 +0000 2011","@Apple iOS 5 is sweet! Notifications, phone search covers mail now, wifi sync, iCloud backup and integrated Twitter are all well done." +"apple","positive","125618466353983488","Sun Oct 16 17:05:50 +0000 2011","RT @katebetts: Another great James Stewart story in today's NY Times about importance of architecture in @apple retail success http://t. ..." +"apple","positive","125610372727193601","Sun Oct 16 16:33:41 +0000 2011","Another great James Stewart story in today's NY Times about importance of architecture in @apple retail success http://t.co/Kniz452s" +"apple","positive","125608381431025664","Sun Oct 16 16:25:47 +0000 2011","I <3 @apple http://t.co/ondXWpEr" +"apple","positive","125596541028282369","Sun Oct 16 15:38:43 +0000 2011","Welcome to the twitter world @MarkStuver. This is due to #iOS5 and @apple thanks guys." +"apple","positive","125585606100267008","Sun Oct 16 14:55:16 +0000 2011","Impressive service @apple genius bar metro centre. Power cable replaced free n booked in for screen replacement for free :- D" +"apple","positive","125562428200202240","Sun Oct 16 13:23:10 +0000 2011","RT @deb_lavoy: the nice guy at the @apple store replaced my phone gratis when I showed him the hairline crack on the screen. thanks @apple" +"apple","positive","125561950376701952","Sun Oct 16 13:21:16 +0000 2011","the nice guy at the @apple store replaced my phone gratis when I showed him the hairline crack on the screen. thanks @apple" +"apple","positive","125550135911518209","Sun Oct 16 12:34:19 +0000 2011","My iPhone 4S battery lasted longer than a day. That hasn't happened since my edge iPhone. Nice job, @apple." 
+"apple","positive","125547297072357376","Sun Oct 16 12:23:02 +0000 2011","It would have taken me 15 mins to write this with my #Blackberry. Thank u @Apple 4s for converting me and showing me the grass is greener!" +"apple","positive","125539788546781185","Sun Oct 16 11:53:12 +0000 2011","RT @herahussain: @RickySinghPT got a new backside for my eye phone! V impressed with @apple" +"apple","positive","125537993942515712","Sun Oct 16 11:46:04 +0000 2011","@RickySinghPT got a new backside for my eye phone! V impressed with @apple" +"apple","positive","125537578974851072","Sun Oct 16 11:44:25 +0000 2011","#iPhone 4S in Space http://t.co/jINNHVwz this is amazing #creative @apple your products inspire people to do unbelievable things" +"apple","positive","125524107386302465","Sun Oct 16 10:50:53 +0000 2011","Thank you @apple for making my iPad 2 feel like new again with your new iOS 5!" +"apple","positive","125523414298533888","Sun Oct 16 10:48:08 +0000 2011","_ibertaddigital.tv/ iPad a briliant SteveJobs produck ,http://t.co/00ohfLY6 +@Apple present... http://t.co/DBbWSDpx" +"apple","positive","125501576952553472","Sun Oct 16 09:21:22 +0000 2011","Using my awsome iPad... I love it. I love my MacBook too and my iPod. It's all amazing! I love @apple" +"apple","positive","125501281753251840","Sun Oct 16 09:20:11 +0000 2011","@apple iOS 5 upgrade done ...... Much better feature..... Few more feature required" +"apple","positive","125495491701125120","Sun Oct 16 08:57:11 +0000 2011","New iOS 5 update is THE BEST. iloveyou @apple" +"apple","positive","125459338524499969","Sun Oct 16 06:33:31 +0000 2011","Finally got my iPhone 4S, thanks @Apple. Stupid @att. Learned my lesson." +"apple","positive","125455260801179648","Sun Oct 16 06:17:19 +0000 2011","I absolutely love my iPhone 4S. Thank you, Steve and @apple." +"apple","positive","125423290767507456","Sun Oct 16 04:10:17 +0000 2011","@apple - you have invented a product that actually gets my brother to call my parents when he gets where he's going. Amazing. #siri #ipod4s" +"apple","positive","125416879035658240","Sun Oct 16 03:44:48 +0000 2011","dammit, listening to siri is making me want to upgrade. well played @apple." +"apple","positive","125407532893224962","Sun Oct 16 03:07:40 +0000 2011","Create new folders from within your photo album in #iOS5, finally!! Thanks @Apple, that's been a thorn in my side for a while. #newfeature" +"apple","positive","125402636764712960","Sun Oct 16 02:48:13 +0000 2011","Video card on @Kimaris workstation died after just six months. So long @hp. Hello @apple." +"apple","positive","125402412147146752","Sun Oct 16 02:47:19 +0000 2011","@Blackberry & @Facebook U R really about to make me throw this @Blackberry in the trash an get an @Apple iPhone! @Facebook upload issues!" +"apple","positive","125393816470568961","Sun Oct 16 02:13:10 +0000 2011","#iOS5 update submitted to @apple! Thanks for all the support!" +"apple","positive","125356807626559488","Sat Oct 15 23:46:06 +0000 2011","Awesome service from the @apple store in pc. Thanks chris!" +"apple","positive","125343429289984000","Sat Oct 15 22:52:56 +0000 2011","RT @To1ne: .@apple thanks for fixing this... http://t.co/wTj1ogDO" +"apple","positive","125338216411828224","Sat Oct 15 22:32:14 +0000 2011","I am crazy about #iOS5 . The photo cropping is the best! @apple" +"apple","positive","125333598197911552","Sat Oct 15 22:13:53 +0000 2011",".@apple thanks for fixing this... 
http://t.co/wTj1ogDO" +"apple","positive","125330595302744064","Sat Oct 15 22:01:57 +0000 2011","Hell yes!!! Got my contacts back!! Thanks @apple" +"apple","positive","125319163366473728","Sat Oct 15 21:16:31 +0000 2011","RT @SawyerHartman: I FU*KING LOVE YOU @APPLE this phone is the best thing ever !! SIRI = BEST THING EVER MADE" +"apple","positive","125313088160411649","Sat Oct 15 20:52:23 +0000 2011","@apple has changed life." +"apple","positive","125309946723188736","Sat Oct 15 20:39:54 +0000 2011","Mad props to the @apple employee that didn't charge me to replace the back plate on my iPhone! Made my day!" +"apple","positive","125281706327552001","Sat Oct 15 18:47:41 +0000 2011","@jonsibley Actually, the @Apple mouse is pretty sweet man." +"apple","positive","125279987254300672","Sat Oct 15 18:40:51 +0000 2011","The #iphone4s is amazing. Siri's voice recognition is absolutely a step above previous attempts. Bravo @apple" +"apple","positive","125264731035537409","Sat Oct 15 17:40:13 +0000 2011","Finally got the @apple IPhone thanks to @sprint getting with the times" +"apple","positive","125256305647693825","Sat Oct 15 17:06:45 +0000 2011","#iOs5 is nice and as it had to be ! Thanks @Apple" +"apple","positive","125252188065902592","Sat Oct 15 16:50:23 +0000 2011","Good support fm Kevin @apple #Bellevue store 4 biz customers TY!" +"apple","positive","125243911538098176","Sat Oct 15 16:17:30 +0000 2011","Just downloaded IOS 5. It's better than I expected! #thankyou @apple" +"apple","positive","125238017299451905","Sat Oct 15 15:54:04 +0000 2011","@Apple: Siri is amazing!!! I'm in love!" +"apple","positive","125165176772247552","Sat Oct 15 11:04:38 +0000 2011","Love love love iOS 5!! @apple" +"apple","negative","126418790706712576","Tue Oct 18 22:06:03 +0000 2011","RT @cjwallace03: So apparently @apple put MB cap on your SMS with the new update. 25mb storage before it tells you your inbox is full. W ..." +"apple","negative","126417285559762944","Tue Oct 18 22:00:04 +0000 2011","RT @Jewelz2611 @mashable @apple, iphones r 2 expensive. Most went w/ htc/galaxy. No customer loyalty w/phone comp.." +"apple","negative","126416915664084992","Tue Oct 18 21:58:36 +0000 2011","@mashable @apple, iphones r 2 expensive. Most went w/ htc/galaxy. No customer loyalty w/phone comp.." +"apple","negative","126416109212680192","Tue Oct 18 21:55:23 +0000 2011","THiS IS WHAT WiLL KiLL APPLE http://t.co/72Jw4z5c RiP @APPLE" +"apple","negative","126411162622496768","Tue Oct 18 21:35:44 +0000 2011","@apple why my tunes no go on my iPhone? iPhone lonely without them. silly #iOS5" +"apple","negative","126410591949697024","Tue Oct 18 21:33:28 +0000 2011","@apple needs to hurry up and release #iTunesMatch" +"apple","negative","126409696553861121","Tue Oct 18 21:29:54 +0000 2011","Why is #Siri always down @apple" +"apple","negative","126408864387182593","Tue Oct 18 21:26:36 +0000 2011","I just need to exchange a cord at the apple store why do I have to wait for a genius? @apple" +"apple","negative","126408052525105153","Tue Oct 18 21:23:22 +0000 2011","@apple AirDrop #fail - Immediate ""declined your request."" every time" +"apple","negative","126407767132078082","Tue Oct 18 21:22:14 +0000 2011","good article about why @apple fucked it all up with lion and their future. http://t.co/zNDP9Vr6 #fb" +"apple","negative","126405405667627008","Tue Oct 18 21:12:51 +0000 2011","RT @radlerc: Yellowgate? 
Some iPhone 4S Users Complain of Yellow Tint to Screen http://t.co/uaqrxTNk @apple @iphone4s" +"apple","negative","126405185630253056","Tue Oct 18 21:11:59 +0000 2011","Yellowgate? Some iPhone 4S Users Complain of Yellow Tint to Screen http://t.co/uaqrxTNk @apple @iphone4s" +"apple","negative","126405040809312256","Tue Oct 18 21:11:24 +0000 2011","The one #iphone feature still missing since @apple first showed it.. Contacts pictures on the contacts list! Simple yet 5 major updates miss" +"apple","negative","126400637930979329","Tue Oct 18 20:53:55 +0000 2011","Asked siri is she dreams of electric sleep. Was disappointed that she didn't have a snippy answer. Missed opportunity @apple" +"apple","negative","126394680903614465","Tue Oct 18 20:30:14 +0000 2011","@paulens It surprises me that @Apple throws up an error alert about authorizing, and there's no ""Authorize this computer"" button." +"apple","negative","126393717421645825","Tue Oct 18 20:26:25 +0000 2011","@Lisa_Marie1987 shhhh. the evil sith lords @apple may hear you ha!" +"apple","negative","126393204550537216","Tue Oct 18 20:24:22 +0000 2011","FUCK YOU @apple DIE IN A FUCKING BLAZE INFERNO." +"apple","negative","126392402083708928","Tue Oct 18 20:21:11 +0000 2011","Oh, @apple. Steve obviously had nothing to do with iPhoto, as it's the perfect opposite of 'insanely great'. Get it fixed, please." +"apple","negative","126391082308206593","Tue Oct 18 20:15:56 +0000 2011","OMG @apple WHY THE FUCK DID YOU DELETE ALL MY MUSIC YOU DICKS" +"apple","negative","126389218284015616","Tue Oct 18 20:08:32 +0000 2011","@ryanbaldwin @apple So in iTunes I go Store -> Authorise… why doesn't it just auto-authorise it when I sign into iTunes? Grrrr..." +"apple","negative","126388194194362369","Tue Oct 18 20:04:28 +0000 2011","Seriously - I have absolutely no offing clue what @Apple means by ""authorization"", nor how to do it." +"apple","negative","126388023725268992","Tue Oct 18 20:03:47 +0000 2011","Boy, could @apple make it any harder to put my purchased music from the cloud on to my new macbook pro? ""You must authorize this computer…""" +"apple","negative","126385036441296896","Tue Oct 18 19:51:55 +0000 2011","shit, shit, shit. IOS5 update ate all my apps, data and media just like @apple said it would. This is going to take some time to rebuild." +"apple","negative","126382959711358976","Tue Oct 18 19:43:40 +0000 2011",". @apple & @AT&T u cannot tell me there isn't at least 1 64GB iPhone 4S in LA or Vegas!! Give me a fucking break!!!!" +"apple","negative","126382051661328385","Tue Oct 18 19:40:03 +0000 2011","Love @apple downloads. 4 hours and i-pad now wonky! #ripstevejobs #thenonsensepersists #neednewipadguide #fatfuckingchance" +"apple","negative","126381519513194497","Tue Oct 18 19:37:56 +0000 2011","Dear @apple My new Air is now a notbook since your update killed #wifi #bug #destroying #productivity" +"apple","negative","126380588822298625","Tue Oct 18 19:34:15 +0000 2011","I am so done with @Att and @apple 's profitering and lack of customer service, so fucking down with both!!!" +"apple","negative","126380553464315904","Tue Oct 18 19:34:06 +0000 2011","It would be great If @Apple would send my new phone. #frustrated" +"apple","negative","126377298650861568","Tue Oct 18 19:21:10 +0000 2011","@apple thank you for ruining my 3GS with #iOS5. You've just turned my phone into an utterly useless pile of shit." +"apple","negative","126375381249966080","Tue Oct 18 19:13:33 +0000 2011","@rogerweir no but I have the option of a replacement iPhone 4s ? 
+Not sure if I want one after having 2 duff iPhones. +@O2 @iphone4s @apple" +"apple","negative","126372694118768640","Tue Oct 18 19:02:52 +0000 2011","So apparently @apple put MB cap on your SMS with the new update. 25mb storage before it tells you your inbox is full. What is this 2001?" +"apple","negative","126372040696541184","Tue Oct 18 19:00:17 +0000 2011","You know @apple It's been almost a week since I paid for iTunes Match, I would really like to use it. Any ETA on a fix?" +"apple","negative","126363154837020672","Tue Oct 18 18:24:58 +0000 2011","removing all @apple shit." +"apple","negative","126361483432038400","Tue Oct 18 18:18:19 +0000 2011","So @PhoenixSwinger 's iPhone 4 is giving her a hella hard time w/ the iOS5 update @apple" +"apple","negative","126358781633368064","Tue Oct 18 18:07:35 +0000 2011","What??? I that sucks hello @apple RT @PhoneDog_Aaron Interesting note - DROID RAZR's battery isn't removable." +"apple","negative","126358301393956866","Tue Oct 18 18:05:41 +0000 2011","@Apple can't send me an iPhone preordered 1hr after launching but they cans send 5 or 10 to all the jackasses who want to shoot or blend 'em" +"apple","negative","126358272084152320","Tue Oct 18 18:05:34 +0000 2011","Gotta say the @Apple itouch iphone shuffle etc.. sound quality is AWFUL.. painfully crap. Its been a downgrade from @Sony sound quality wise" +"apple","negative","126357580741226496","Tue Oct 18 18:02:49 +0000 2011","@bisquiat @Apple the upgrade just slows down my phone so much, it's stuck half the time. uch. thankfully no other damage. sucks for you :(" +"apple","negative","126357227727626240","Tue Oct 18 18:01:25 +0000 2011","@Mayati I think @Apple didn't do such a thorough job with the step x steps for upgrade and move to iCloud. Now it's cost me mightily." +"apple","negative","126354628999778305","Tue Oct 18 17:51:05 +0000 2011","Hey @apple now I have iOS5 my iPhone doesn't include songs that are on compilation albums under the artist's name. #whaddupwitdat" +"apple","negative","126351972948393984","Tue Oct 18 17:40:32 +0000 2011","@NickTheFNicon He can send but not rcve txts so he has an apt @apple at 4pm.Then he exclaims: And I waited a whole YEAR for this phone!!LMAO" +"apple","negative","126348169826148352","Tue Oct 18 17:25:25 +0000 2011","Total chaos at @apple store regent street. Like an Ethiopian feeding station. Can't believe this is same co. that makes all that cool shit." +"apple","negative","126344426854416385","Tue Oct 18 17:10:33 +0000 2011","@FishMama: If you made a purchase, just wait for the @apple survey! hate going b/c of the bad #custserv" +"apple","negative","126343931117047808","Tue Oct 18 17:08:35 +0000 2011","Correction: @ Best Buy kudos to Chris @ Alamo Ranch S.A. TX-fixed issues couldn't resolve after 1/2 day w/ @ATT & @Apple. Hero of my day!" +"apple","negative","126343679785959424","Tue Oct 18 17:07:35 +0000 2011","@phxguy88 @Apple @BGR That's why all the ppl who stand in line for hrs to get the ""newest"" model are suckers..." +"apple","negative","126343214805426176","Tue Oct 18 17:05:44 +0000 2011","Would it kill @apple to put a braille type bump on their earbuds so we know which bud is R and L in the dark." +"apple","negative","126334597431697408","Tue Oct 18 16:31:29 +0000 2011","@APPLE Wow @MOTOROLA Just crushed your dreams...." +"apple","negative","126331480233353216","Tue Oct 18 16:19:06 +0000 2011","RT @phxguy88: Oh, just fuck you, @apple. Already?? ---> iPhone 5 on schedule for summer launch? 
http://t.co/Ofh9PTaG via @BGR" +"apple","negative","126331327271284736","Tue Oct 18 16:18:30 +0000 2011","Oh, just fuck you, @apple. Already?? ---> iPhone 5 on schedule for summer launch? http://t.co/Ofh9PTaG via @BGR" +"apple","negative","126327808803880960","Tue Oct 18 16:04:31 +0000 2011","@apple, +No, I won't wait until thursday for an available appointment just so a 'genius' can tell me I'm shit out of luck. #now" +"apple","negative","126325125749542913","Tue Oct 18 15:53:51 +0000 2011","WTF?!?! @apple the new iOS 5 doesn't allow you NOT to get push notifications from newstand? and SIRI just keeps yapping FUCK!" +"apple","negative","126324573384871936","Tue Oct 18 15:51:39 +0000 2011","@Apple, on the #iPad with #iOS5, why has the Messages Icon been included when it can't be used?" +"apple","negative","126324389741473792","Tue Oct 18 15:50:56 +0000 2011","@Steelo254 yea! I pre-order through @apple and they sorry too just like #AT&T" +"apple","negative","126324077513293824","Tue Oct 18 15:49:41 +0000 2011","Interesting... @apple now requires you to have a reservation ? #apple #iphone #4S : http://t.co/zZK4fTii" +"apple","negative","126321169468100609","Tue Oct 18 15:38:08 +0000 2011","@apple why is my iPhone battery so crappy #fail" +"apple","negative","126320033369563138","Tue Oct 18 15:33:37 +0000 2011","My @Apple @macbook keyboard will not type :(" +"apple","negative","126310645443461121","Tue Oct 18 14:56:19 +0000 2011","Why doesn't @apple iCloud sync Stickies? They've always been around, just nothing every progressed w/ them!! Why apple why? @gruber" +"apple","negative","126309616391950336","Tue Oct 18 14:52:13 +0000 2011","@apple, your new ""Save a Version"" function in Pages is absolutely the most awful, interrupting, counter-intuitive piece of crap in the world" +"apple","negative","126307071984545793","Tue Oct 18 14:42:07 +0000 2011","I hate my @apple computer. Thats 3500 dollars down the drain." +"apple","negative","126302386644975616","Tue Oct 18 14:23:30 +0000 2011","@apple my iPhone is charging very slowly!!!!!" +"apple","negative","126299379832336384","Tue Oct 18 14:11:33 +0000 2011","@Apple #iOS5 gm on ipad1 is very slow wash better on beta7/8 solve this problem, or give me the ipad2" +"apple","negative","126297326565330944","Tue Oct 18 14:03:23 +0000 2011","@apple Wish I could pick month, day, and YEAR when setting a new calendar item on my iPhone. Why hasn't the new iOS fixed this yet?!" +"apple","negative","126292335540699136","Tue Oct 18 13:43:33 +0000 2011","@Apple iTunes is the worst program ever. For such a great phone, you make some awful software." +"apple","negative","126286814578348032","Tue Oct 18 13:21:37 +0000 2011","One would think the voice recognition on the @apple tech support line would work a little better." +"apple","negative","126283602571964416","Tue Oct 18 13:08:51 +0000 2011","@apple $319 to repair my iPad 2, Apple you've lost me and my $700 a year, Android here I come!" +"apple","negative","126281019476291585","Tue Oct 18 12:58:35 +0000 2011","@azee1v1 @apple @umber Proper consolidation, proper syncing, stop losing my PURCHASED items, checkboxes that do what you think they will do." +"apple","negative","126280555980529664","Tue Oct 18 12:56:45 +0000 2011","@AsimRang @apple @umber the desktop app is wack though" +"apple","negative","126279811151831042","Tue Oct 18 12:53:47 +0000 2011","I made a reservation and yet I still have to wait in line. Humpt! 
Oh @apple (@ Apple Store w/ 2 others) http://t.co/JZmVBdNm" +"apple","negative","126258214412091392","Tue Oct 18 11:27:58 +0000 2011","Frustrated that I bought a new macbook pro from @apple only to find it doesnt ship with media! #expensivepaperweight" +"apple","negative","126257645282799616","Tue Oct 18 11:25:43 +0000 2011","Dear @apple. I had to turn off all those awesome featured you just enabled. Data plan can't handle it. 200 mb of data just because #" +"apple","negative","126251052667375616","Tue Oct 18 10:59:31 +0000 2011","#DontBeMadAtMeBecause #Android is by far better than @Apple" +"apple","negative","126247557339947008","Tue Oct 18 10:45:37 +0000 2011","@apple is there a problem with iOS 5 that is preventing audio apps and audio functions from operating properly? I keep losing sound." +"apple","negative","126243680129523712","Tue Oct 18 10:30:14 +0000 2011","@apple #apple One thing I hater about this #mouse the #buttery finish so fast am not #happy now #macworld @macworld http://t.co/5Uh4a4Vt" +"apple","negative","126240605419487232","Tue Oct 18 10:18:00 +0000 2011","Hey @Apple: stop sending your automatically-depreciate-all-iPhones-older-than-the-4s signal. My #iPhone4 is dying rapidly processor-wise." +"apple","negative","126238223537152001","Tue Oct 18 10:08:32 +0000 2011","Only thing bad about the new @canon camera is that it has two compact flash cards... not one of SD. SD goes in my @apple computer. Damn you!" +"apple","negative","126221894126022656","Tue Oct 18 09:03:39 +0000 2011","Wtf @apple 64 pages for the new terms & conditions when u update ur apps from the iTunes store. Do u really think we'll read em? Really???" +"apple","negative","126189036644728832","Tue Oct 18 06:53:05 +0000 2011","fuck u @apple" +"apple","negative","126188946974720000","Tue Oct 18 06:52:44 +0000 2011","oh and prolly ganna be late to work bc @apple has screwed me and my phone has my only alarm on it" +"apple","negative","126188717902802944","Tue Oct 18 06:51:49 +0000 2011","why the fuck dose my phone decide its just ganna freeze every time i try to update it so fucking sick of @apple" +"apple","negative","126188686453907457","Tue Oct 18 06:51:41 +0000 2011",". if u need me just text me o wait u i wont get it cuz @apple fucks me every time #fuckingpissed" +"apple","negative","126182880123695104","Tue Oct 18 06:28:37 +0000 2011","iMessage doesn't show the time a message was sent, annoying @apple" +"apple","negative","126167083334643713","Tue Oct 18 05:25:51 +0000 2011","Like @apple da fuck is this shit? http://t.co/nb4DHlSg" +"apple","negative","126163315499081728","Tue Oct 18 05:10:53 +0000 2011","RT @CircusTK: I'm wit chu!! “@ShayDiddy: Officially boycotting @ups!!! Calling @apple to curse them out next for using them wasting my t ..." +"apple","negative","126163250172801024","Tue Oct 18 05:10:37 +0000 2011","Damn it @Apple!! Whatchu done to my phone??" +"apple","negative","126158846375903233","Tue Oct 18 04:53:07 +0000 2011","Samsung seeks iPhone 4S ban in Japan and Australia +Patent war intensifies with injunction sought against @Apple http://t.co/QmwjTvnk" +"apple","negative","126155291288023040","Tue Oct 18 04:38:59 +0000 2011","@apple, why is it every time there is iOS software update my iPad goes dead and I need to totally restore? #fail" +"apple","negative","126153311521996800","Tue Oct 18 04:31:07 +0000 2011","@apple #iOS.5 has been nothing but a pain in the ass no room for my music.. Or photos.. Or apps! Can I undo this garbage??" 
+"apple","negative","126148955217203200","Tue Oct 18 04:13:49 +0000 2011","RT @RedDeerSteph: @Joelplane @apple I hear you! I've had trouble with my 3 & now 4. I've even turned down brightness. #andshuttingdownru ..." +"apple","negative","126148565302128640","Tue Oct 18 04:12:16 +0000 2011","@Joelplane @apple I hear you! I've had trouble with my 3 & now 4. I've even turned down brightness. #andshuttingdownrunningprograms #nohelp" +"apple","negative","126143926523539457","Tue Oct 18 03:53:50 +0000 2011","@albertmal88 remember @apple is evil. #icloud entering the #dropbox market" +"apple","negative","126141077131497472","Tue Oct 18 03:42:31 +0000 2011","9% now on my second full charge of the day. Pissed @Apple" +"apple","negative","126132919117938689","Tue Oct 18 03:10:06 +0000 2011","RT @ShayDiddy: @CircusTK @ups @apple both of them are bs!!! How do u tell me go between a certain time ONLY and the muh fuh is closed!" +"apple","negative","126131535211536384","Tue Oct 18 03:04:36 +0000 2011","I'm wit chu!! “@ShayDiddy: Officially boycotting @ups!!! Calling @apple to curse them out next for using them wasting my time!â€" +"apple","negative","126129938247061504","Tue Oct 18 02:58:15 +0000 2011","ugh! @apple, you’re reservation page for the iPhone is NOT working." +"apple","negative","126129582326816769","Tue Oct 18 02:56:50 +0000 2011","iTunes is @apple's worst product. Worse than the #Newton or the hockey puck mouse. It's utterly painful to use." +"apple","negative","126128599030956032","Tue Oct 18 02:52:56 +0000 2011","Deár iCloud I HATE U , AND I HOPE YOU DIE , YOU ARE THE WORST FUCKING INVENTION IN THE WORLD , FUCK YOUUUUUUUUUUUU #iCloud @apple @stevejobs" +"apple","negative","126127465155403777","Tue Oct 18 02:48:25 +0000 2011","i update to ios 5 and lose everything on my phone and it wont let me sign into my itunes account... thanks @Apple" +"apple","negative","126126605344047105","Tue Oct 18 02:45:00 +0000 2011","Suddenly lost all address book on @3GS iPhone. And someone was using my @Apple ID. It all fucked up." +"apple","negative","126121175926571009","Tue Oct 18 02:23:26 +0000 2011","oh.. my iphone is overcapacity huh?! -____- @Apple won't let me be great!!!" +"apple","negative","126118222746497025","Tue Oct 18 02:11:42 +0000 2011","Issues with updating iTunes on my windows pc - they really are not compatible.... Sent more time talking to @Apple care than using it!!" +"apple","negative","126106964420857857","Tue Oct 18 01:26:57 +0000 2011","@apple u guys are gay" +"apple","negative","126099775417364480","Tue Oct 18 00:58:23 +0000 2011","Restored my iPhone. STILL NO TEXTS. DEAR @APPLE Y NO LOVE, Y RESTRICTIONS ON MY SMS? Y NO TEXTS.... #iOS5 #iOS5atemydingo" +"apple","negative","126096173198082048","Tue Oct 18 00:44:05 +0000 2011","none of my apps work after the new ios from @apple. what do i do?!?" +"apple","negative","126094194312876032","Tue Oct 18 00:36:13 +0000 2011","Seemingly endless loop of calls to @apple, @ups, @verizonwireless to investigate my missing #iphone4s. #crankywithnophone" +"apple","negative","126089287660863488","Tue Oct 18 00:16:43 +0000 2011","@Wisconsin_Mommy @apple that's terrible! I hope you get an apology!" +"apple","negative","126088404084588546","Tue Oct 18 00:13:12 +0000 2011","@zombiebomber have been on the phone with @verizonwireless and @apple pretty much ever since then. Really annoyed." 
+"apple","negative","126085893353250816","Tue Oct 18 00:03:14 +0000 2011","@chascouponmom @apple I get they are busy w/ the new phone, but I just wanted to buy a stylus. they made me wait forever outside & never" +"apple","negative","126084068298334208","Mon Oct 17 23:55:59 +0000 2011","@apple @iphone Please deliver my daughter's i4s, she is driving me nuts #iphone" +"apple","negative","126082198720888833","Mon Oct 17 23:48:33 +0000 2011","I'm givin' this stupid @apple reserve system 2days… If I cant get reservation, I'll never buy any Apple products… (yup I hate contracts)" +"apple","negative","126082123743502336","Mon Oct 17 23:48:15 +0000 2011","@sprint @bestbuy still no word on my iPhone4s preorder. Best Buy blames Sprint, Sprint blames @apple, I just want an honest answer. #help" +"apple","negative","126079672386723840","Mon Oct 17 23:38:31 +0000 2011","Hey @apple, the SMS full message is complete shit. Yes, I'm annoyed." +"apple","negative","126075115686465536","Mon Oct 17 23:20:24 +0000 2011","Have never had such poor customer service at @Apple before! What happened? (@ Apple Store w/ 2 others) http://t.co/GKlXMUi6" +"apple","negative","126073520504569858","Mon Oct 17 23:14:04 +0000 2011","@Apple your service experience is really fucking slipping (except for cute, eyelash batting girlies). (@ Apple Store) http://t.co/dOHDEnMg" +"apple","negative","126072901144281088","Mon Oct 17 23:11:36 +0000 2011","Had ma Ipas not 24 hours an I jailbroke it...now its SHAGGED itunes won't letme restore it @apple SYM!!!!" +"apple","negative","126069614181486593","Mon Oct 17 22:58:32 +0000 2011","@hailfire101 @Irvysan They are... then @apple happened and snatched Siri so they could be douchebags and say 'it's ours!'" +"apple","negative","126068917012668416","Mon Oct 17 22:55:46 +0000 2011","What was @apple thinking making #Siri totally dependent on a network connection? Siri + @ATT = utter frustration." +"apple","negative","126063215842037760","Mon Oct 17 22:33:07 +0000 2011","Wow the Genius Bar Reservation Line @Apple is ridic right now - ___- I am not amused. #ugh" +"apple","negative","126054048972537856","Mon Oct 17 21:56:41 +0000 2011","Where is my iPhone!?!?!?! +@apple" +"apple","negative","126053722966069248","Mon Oct 17 21:55:24 +0000 2011","@apple battery life suck on iOS 5" +"apple","negative","126050114518261760","Mon Oct 17 21:41:03 +0000 2011","An apple update has seemed to render my work machine incapable of opening HDV video. Thanks @Apple. Zero useful productivity today." +"apple","negative","126044756320075776","Mon Oct 17 21:19:46 +0000 2011","@apple I committed to your cloud storage iDisk and now it's raining :-( so soon no more iDisk. Any plans for a new service for storage?" +"apple","negative","126044425964109824","Mon Oct 17 21:18:27 +0000 2011","Hey @apple why can't I share a reminder list from my iPhone? +Also why can't I login to the iCloud webpage to modify sharing from my phone?" +"apple","negative","126037831301869568","Mon Oct 17 20:52:15 +0000 2011","Apparently fuzzball crashes on #iOS5. Congratulations @apple on another incompatible upgrade" +"apple","negative","126036793970786304","Mon Oct 17 20:48:07 +0000 2011","could @apple lion please integrate vertical spaces in mission control. this horizontal business is making me nauseous" +"apple","negative","126034507475337216","Mon Oct 17 20:39:02 +0000 2011","Can someone plz explain to me whhyy @apple is only distributing 2-3 phones per day to the sprint stores. I'm really ... 
http://t.co/uf9taK8f" +"apple","negative","126031969166434304","Mon Oct 17 20:28:57 +0000 2011","GAH. @apple iOS 5 opens text messages painfully slow, on top of 3 restore attempts 2 succeed. Early adopterness gets better of me." +"apple","negative","126030936084189184","Mon Oct 17 20:24:51 +0000 2011","Dear @apple: Why did all my PDFs and ePub files disappear from ibooks in my iPhone post ios5 upgrade? This hurts. #3GS #needsomethingtoread" +"apple","negative","126029733325582336","Mon Oct 17 20:20:04 +0000 2011","“@CBM: Lies @apple. the battery on this new iPhone4S is definitely not any better.†<< check Settings: 8 times more App usage in iOS 5" +"apple","negative","126021108641181696","Mon Oct 17 19:45:48 +0000 2011","As a huge podcast fan - I really feel like @Apple dropped the ball on this one. http://t.co/wvzPrbCI via @Carrypad" +"apple","negative","126018120983904256","Mon Oct 17 19:33:56 +0000 2011","I don't really like Siri's voice. Perhaps @apple can get Star Wars voices just like @garmin http://t.co/pSUJg9pN" +"apple","negative","126016585348558848","Mon Oct 17 19:27:49 +0000 2011","“@carlton858: I really hate dealing with the brain dead people at the @apple store. For such good products, customer service sucks.†in NZ?" +"apple","negative","126014999444467712","Mon Oct 17 19:21:31 +0000 2011","I really hate dealing with the brain dead people at the @apple store. For such good products, customer service sucks." +"apple","negative","126014540721827840","Mon Oct 17 19:19:42 +0000 2011","You can make photo albums in the Photos app with iOS 5, but can't password protect them? Um... @apple, fix that. Quickly." +"apple","negative","126012822936231936","Mon Oct 17 19:12:52 +0000 2011","Anyone else seeing missing signal bars on their #iPhone 3GS with an upgrade to #iOS5? cc/ @applecanada @apple @forstall" +"apple","negative","126008913400303616","Mon Oct 17 18:57:20 +0000 2011","RT @CBM: Lies @apple. the battery on this new iPhone4S is definitely not any better." +"apple","negative","126006116168642560","Mon Oct 17 18:46:13 +0000 2011","@apple If you want to know what customers think dont send updates with the ""noreply"" return address. Who invented that anyway. #useless" +"apple","negative","126003967552524288","Mon Oct 17 18:37:41 +0000 2011","Every time I try the voice control on my iPod Touch to send an iMessage, it starts playing ""Who Knew"" by Pink. Still not Flawless @Apple" +"apple","negative","126001775626031105","Mon Oct 17 18:28:58 +0000 2011","should be studying/ doing work but no I'm hold with @apple HURRY THE FUCK UP! #nopatience" +"apple","negative","126001635162992640","Mon Oct 17 18:28:25 +0000 2011","Ok Hindi keyboard in #iOS5 is something to cheer about. But @apple what about support for 20+ missing Indian languages!? #FAIL" +"apple","negative","125999655011098624","Mon Oct 17 18:20:33 +0000 2011","RT @ScottDugas: Warning: if iphone apps spontaneously loose data, it might not be the app's fault - http://t.co/1SEsvWwm #ios5downgrade ..." +"apple","negative","125996379913986048","Mon Oct 17 18:07:32 +0000 2011","@betweensundays Ah! Yeah...should be an option...hopefully @apple figures that out. Thx" +"apple","negative","125995264325599233","Mon Oct 17 18:03:06 +0000 2011","""Waiting for items to copy"" in Itunes after everything DID copy goes on, and on, and on. Come on @Apple, what the hell?" +"apple","negative","125994965183635456","Mon Oct 17 18:01:55 +0000 2011","@Apple unhappy again with service/product quality. Won't buy @Apple again." 
+"apple","negative","125994596336533504","Mon Oct 17 18:00:27 +0000 2011","Lies @apple. the battery on this new iPhone4S is definitely not any better." +"apple","negative","125989051101741056","Mon Oct 17 17:38:25 +0000 2011","“@philipgrey: dear @apple, why you gotta go change the way <input type=""number""> is handled out of the blue?â€" +"apple","negative","125987979784224770","Mon Oct 17 17:34:09 +0000 2011","Been on hold with @apple customer service for 25 minutes. Wow, lt's like they're #timewarnercable." +"apple","negative","125987439692099584","Mon Oct 17 17:32:01 +0000 2011","anyone else stuck in duped calendar/mail/battery sucking @apple hell? #iCloud and #OSX Lion are a disappointment" +"apple","negative","125982320917364736","Mon Oct 17 17:11:40 +0000 2011","For being the inventor of the computer mouse — Why is it that @Apple has never made one that is not a complete piece of fucking shit?!!?" +"apple","negative","125980659415138304","Mon Oct 17 17:05:04 +0000 2011","@Apple you're killing me. Excited about iOS5 no longer- tragic battery drain, genius bar wants to replace the battery. Really?" +"apple","negative","125978454146551808","Mon Oct 17 16:56:18 +0000 2011","1st impressions of #iOS5- Disappointed w upgrade restore. Lot of apps & folders missing @apple do how much time went in2 orging it? #fail" +"apple","negative","125976113657823232","Mon Oct 17 16:47:00 +0000 2011","@PrJusto Don't question the @Apple they will remember your dissent!" +"apple","negative","125974886006005760","Mon Oct 17 16:42:07 +0000 2011","@apple: Multiple times siri is ""having trouble connecting to the network."" Siri needs servers (and some exercise!)" +"apple","negative","125974810021998595","Mon Oct 17 16:41:49 +0000 2011","So my iMessage still isn't working! @apple" +"apple","negative","125974351035117568","Mon Oct 17 16:40:00 +0000 2011","RT @JimMcNiel: if @apple does not resolve the #Siri network issues they will need to rename a great product to Sorri - Who else is havin ..." +"apple","negative","125973390283653120","Mon Oct 17 16:36:11 +0000 2011","dear @apple, why you gotta go change the way <input type=""number""> is handled out of the blue?" +"apple","negative","125969932285513728","Mon Oct 17 16:22:26 +0000 2011","Ugh. WTF is up with all the bottlenecks @paypal @apple?" +"apple","negative","125966385259098112","Mon Oct 17 16:08:21 +0000 2011","+1 RT @Doug_Newton: @apple PLEASE FIX #Siri!!!! She can't connect to your network!!!!!!!" +"apple","negative","125965988146585601","Mon Oct 17 16:06:46 +0000 2011","@apple PLEASE FIX #Siri!!!! She can't connect to your network!!!!!!!" +"apple","negative","125965853769478144","Mon Oct 17 16:06:14 +0000 2011","if @apple does not resolve the #Siri network issues they will need to rename a great product to Sorri - Who else is having issues?" +"apple","negative","125964314220830722","Mon Oct 17 16:00:07 +0000 2011","any chance @apple will release good headphones that dont blast music outside the ears? #iphone #ipod its quite obnoxious" +"apple","negative","125963262733991936","Mon Oct 17 15:55:56 +0000 2011","@Apple - #Siri is not working due to ""network problem""? Seriously? Can you fix this? #iPhone4s #Fail" +"apple","negative","125961999791308800","Mon Oct 17 15:50:55 +0000 2011","@trisha_ps @iCloud is Cloudy. @apple tech, not very techy." 
+"apple","negative","125961793926475776","Mon Oct 17 15:50:06 +0000 2011","@electricsoup It has to connect to @Apple to process commands, which it is failing to do right now" +"apple","negative","125960026891362304","Mon Oct 17 15:43:05 +0000 2011","See @Apple, that what's happen when you release iPhone 4S with same crappy design as the old phone!" +"apple","negative","125958961269702656","Mon Oct 17 15:38:51 +0000 2011","This. RT @bonkoif Looks like @Apple did not appropriately project the resources needed for Siri. Not very reliable" +"apple","negative","125958368773943296","Mon Oct 17 15:36:29 +0000 2011","Looks like @Apple did not appropriately project the resources needed for Siri. Not very reliable" +"apple","negative","125956505768960000","Mon Oct 17 15:29:05 +0000 2011","Spoke to an @Apple rep on phone & could hear her tv blaring in the bg. She said, ""I can't actually help u, just direct u back to the web""" +"apple","negative","125956403574747137","Mon Oct 17 15:28:41 +0000 2011","What's wrong with Siri today @apple" +"apple","negative","125956319344721920","Mon Oct 17 15:28:21 +0000 2011","@Apple Siri is down!" +"apple","negative","125954651152592896","Mon Oct 17 15:21:43 +0000 2011","I'm suspecting @Apple timed the health-decline of my iPhone 4 perfectly with the iPhone 4s release... My battery is seriously ailing." +"apple","negative","125953600861126656","Mon Oct 17 15:17:33 +0000 2011","on hold for another half hour. phone is ringing but no one is picking up.@apple you stink." +"apple","negative","125950557310562305","Mon Oct 17 15:05:27 +0000 2011","Ok this @Apple software update for the #iphone is taking waaaaay too long! +#annoyed" +"apple","negative","125945821240885248","Mon Oct 17 14:46:38 +0000 2011","@Apple would do better to inform customers properly about migrating to #iCloud - many complaints lead to #image damage http://t.co/QL7zmPpO" +"apple","negative","125943204943114240","Mon Oct 17 14:36:14 +0000 2011","@apple does iOS 5 rape your battery life? or am I just using my phone a lot since getting it" +"apple","negative","125937228328341504","Mon Oct 17 14:12:29 +0000 2011","@jane73 @Gazmaz hopefully it's a small bug @Apple must be fixing it. RIGHT APPLE }:0( +#itjustworks" +"apple","negative","125930406125117440","Mon Oct 17 13:45:23 +0000 2011","@samsung, @apple, you do realise you're just really pissing off the customer base AS WELL AS looking petty." +"apple","negative","125929899071516676","Mon Oct 17 13:43:22 +0000 2011","@Apple - why do I have to have apps I am updating in their original 'folder' location? That was painful." +"apple","negative","125929395264299009","Mon Oct 17 13:41:22 +0000 2011","@apple domain fail. Open 2 hrs late for walk in ppl. Store was empty and i only need a skin 4 my screen. You represent customer service FAIL" +"apple","negative","125920912171216896","Mon Oct 17 13:07:39 +0000 2011","@BrianMincey @apple the chipsets tear through battery life. Coworker has a htc thunderbolt that he gets about 3 hours of life." +"apple","negative","125876542600519681","Mon Oct 17 10:11:21 +0000 2011","I wish the Apple updater would stop trying to ram iTunes down my throat. I don't own a pissing iPhone @apple" +"apple","negative","125863232249405440","Mon Oct 17 09:18:27 +0000 2011","Is it me or does my #iPhone run slower since the #iOS 5 update? @apple @Verizon" +"apple","negative","125846659182764032","Mon Oct 17 08:12:36 +0000 2011","Just searching with my voice on my 2 year old nexus one @apple." 
+"apple","negative","125845538926112768","Mon Oct 17 08:08:09 +0000 2011","@apple, your update server has been timing out for a week. i had to force a restore w/the new #iOS5 software. lost all my stuff. thanks!" +"apple","negative","125836461936361472","Mon Oct 17 07:32:05 +0000 2011","#appleupdates +2 success stories- iPhone & iPad 2. +Now been on phone to Apple 45 mins. iPad 1 WILL not. Wasting my time +@apple +#Replaceit!" +"apple","negative","125826259048607744","Mon Oct 17 06:51:32 +0000 2011","Thanks to a 90 minute wait on hold, half an hour actually talking to someone, the @Apple tech decides to send me an email to fix my phone" +"apple","negative","125824148579692544","Mon Oct 17 06:43:09 +0000 2011","I am very upset about and inconvenienced by this iphone update failed backup. @iphone @apple" +"apple","negative","125822115155947520","Mon Oct 17 06:35:04 +0000 2011","but seriously @apple finance me a damn phone you greedy bastards! OR Santa...get your jolly ass in gear and get us a 4S - white plz" +"apple","negative","125819194049699840","Mon Oct 17 06:23:28 +0000 2011","My schedule is 2 busy 2 spend UNquality time with @ATT @Apple 2 recover Iphone again-The mo' mobile I get the mo' immobile I is. Sync Not!" +"apple","negative","125815370513793024","Mon Oct 17 06:08:16 +0000 2011","Pondering the iOS update... From the sounds of @MikeFerri's difficulties, I may just hold off :/ @Apple, FIX IT!" +"apple","negative","125807830363156480","Mon Oct 17 05:38:18 +0000 2011","@twalve @apple you know the issues they have with DST and alarms :p" +"apple","negative","125806568389361664","Mon Oct 17 05:33:17 +0000 2011","Why book @Apple Genius Bar if after 2.5 hours you still can't be served. Disappointing Apple Store Sydney. #thanksfornothing" +"apple","negative","125806240138928128","Mon Oct 17 05:31:59 +0000 2011","Apparently @Apple genius training doesn't extend to clocks & service. They're half an hour late for my appointment and couldn't let me know?" +"apple","negative","125799384976863232","Mon Oct 17 05:04:45 +0000 2011","Thanks @apple for an OS update that has only succesfully bricked my iPod." +"apple","negative","125794703819030528","Mon Oct 17 04:46:09 +0000 2011","screw you @apple iOS 5. I can't download this crap, and I'm tired of trying. Get this problem figured out about my connection timing out" +"apple","negative","125731810733867011","Mon Oct 17 00:36:14 +0000 2011","Wow. iCloud's mail is down. 1st the iOS 5 upgrade issues and now this. I really hope this isn't @apple without Steve Jobs." +"apple","negative","125729727653756928","Mon Oct 17 00:27:57 +0000 2011","@apple why don't you guys test your upgrades before you put them out. 15 1/2 hrs u guys wasted of my time yesterday. Thanks." +"apple","negative","125722107710672896","Sun Oct 16 23:57:40 +0000 2011","what is going on with all my @Apple products?! First my #iPad freezes (still is frozen) and then my #iPhone won't turn on!!! #techieTroubles" +"apple","negative","125717447276904448","Sun Oct 16 23:39:09 +0000 2011","#iPad in perpetual restore loop after trying to upgrade to #iOS5 :( +@apple sux without steve" +"apple","negative","125717161531551744","Sun Oct 16 23:38:01 +0000 2011","2nd @apple genius apptment running late 2nd day in row for 2nd dodgy iPod . You make mega $$$ hire some staff." +"apple","negative","125713100782575616","Sun Oct 16 23:21:53 +0000 2011","It took @apple about 7 years just to figure out how to properly syncronize an iPhone. WOW! 
#ios5" +"apple","negative","125712104253702146","Sun Oct 16 23:17:55 +0000 2011","@apple So I went to the 5th Ave store and was told I could wait about 4 hours to get a 4S. Needless to say I'm back at the hotel relaxing." +"apple","negative","125711996074209280","Sun Oct 16 23:17:30 +0000 2011","Overall I am a fan of the iPhone 4S; however, my Siri has issues connecting to the network @Apple" +"apple","negative","125710089716899840","Sun Oct 16 23:09:55 +0000 2011","Terrible customer service at Los Gatos @Apple! Why do I have to pay $199 for a bad home button on my iPhone?!?" +"apple","negative","125708348237680640","Sun Oct 16 23:03:00 +0000 2011","@Apple thank you for sucking so much. I cannot believe you would sell something online that isn't made" +"apple","negative","125706246056706049","Sun Oct 16 22:54:39 +0000 2011","Having major battery drain issue since updating iPhone 4 to iOS 5. Anyone else? @AppStore @iPhone @apple" +"apple","negative","125701785540235264","Sun Oct 16 22:36:55 +0000 2011","Dear @apple please send me my Mac Book back surely the repair is done by now... Thanks!" +"apple","negative","125691072398639104","Sun Oct 16 21:54:21 +0000 2011","@Apple, #iOS5 on iPhone2 causes Camera apperature dilation problems & freeze." +"apple","negative","125689691927351296","Sun Oct 16 21:48:52 +0000 2011","@Apple, #iOS5 #geolocation Suburbs with 2 words drops Map Pins in other States! Please fix." +"apple","negative","125681375058735104","Sun Oct 16 21:15:49 +0000 2011","Sorry @apple, but #iMessage will never be what #BBM is. Disappointed." +"apple","negative","125681125376000000","Sun Oct 16 21:14:49 +0000 2011","Iphone 4s siri doesn't work for shit @apple" +"apple","negative","125679166015283203","Sun Oct 16 21:07:02 +0000 2011","Clarence Thomas must have designed the new OSX lion, tossing decades of workflow standards, @apple bring back “save asâ€! #autoSaveSucks" +"apple","negative","125665094561574913","Sun Oct 16 20:11:07 +0000 2011","@semilshah yeah except that when you ask for a receipt, it takes them 30 minutes to figure out where it printed out. Fix that, @Apple!" +"apple","negative","125664507757461504","Sun Oct 16 20:08:48 +0000 2011","RT @JDougyJ: hey @apple do you wanna fix my fucking playlist order, you piece of shit" +"apple","negative","125663477573500930","Sun Oct 16 20:04:42 +0000 2011","Bummer! My @Yahoo! iPad fantasy football app keeps crashing; not sure if it's the app or the new @apple iOS 5. Only two apps running." +"apple","negative","125661140939321344","Sun Oct 16 19:55:25 +0000 2011","@Apple can suck my dick gotta restore my ipad #annoyed" +"apple","negative","125659125886623744","Sun Oct 16 19:47:24 +0000 2011","The line at the @apple is just STUPID" +"apple","negative","125657359841361920","Sun Oct 16 19:40:23 +0000 2011","@nansen Man, that sux! @apple io5 upgrade a total #fail for you!" +"apple","negative","125656559190683651","Sun Oct 16 19:37:12 +0000 2011","Dear @Apple Contacts, you are terrible. I just cleaned duplicates for 3 hours and they magically reappeared. Fix it." +"apple","negative","125654540455378945","Sun Oct 16 19:29:11 +0000 2011","Is it too much to ask which app is incompatible, @apple? http://t.co/yKqip7nk" +"apple","negative","125651769261965312","Sun Oct 16 19:18:10 +0000 2011","Pissed with whoever designs keyboards with @apple for not having a home and end key. working on the CLI i use those keys often" +"apple","negative","125649285667749889","Sun Oct 16 19:08:18 +0000 2011","@NoVaTyler @apple Most of mine are gone too! 
Not recovered after I saved them b4 I upgraded my iPhone! Soooo pissed!" +"apple","negative","125645811903250432","Sun Oct 16 18:54:30 +0000 2011","hey @apple do you wanna fix my fucking playlist order, you piece of shit" +"apple","negative","125643054190305280","Sun Oct 16 18:43:33 +0000 2011","Siri is having a hard time remembering who my sister is. contact card needs space for family info. #iPhone4S @apple" +"apple","negative","125642742977138689","Sun Oct 16 18:42:18 +0000 2011","#ios5 getting sloppy @apple. U need to check your QA team. Too many issues with apps, data and syncing" +"apple","negative","125641051531784192","Sun Oct 16 18:35:35 +0000 2011","RT @nansen: so this SMS full message is known @apple issue with new iOS5 update. trying restore. When all else #fail - restore." +"apple","negative","125639217090011136","Sun Oct 16 18:28:18 +0000 2011","So glad that I paid $69 for this @apple care protection plan. Been on hold for over 20 minutes now. Great customer service. #NOT" +"apple","negative","125633065878958080","Sun Oct 16 18:03:51 +0000 2011","not happy with @apple right now. the upgrade to my itouch4 took out my itouch and laptop. #help" +"apple","negative","125631239364427776","Sun Oct 16 17:56:36 +0000 2011","All I can say about @Apple right now is #GoodRiddance — See? Cancer isn't ALL BAD!" +"apple","negative","125626166492147713","Sun Oct 16 17:36:26 +0000 2011","@twitter can you look at problem on iPad where it won't open @ messages. It just crashes the app. You too @apple maybe your fault." +"apple","negative","125621144148639744","Sun Oct 16 17:16:29 +0000 2011","@apple What's the point of iCloud wireless updates when ios5 now forces me to use the computer to charge my ipod touch now? #worstupdateever" +"apple","negative","125607492356018176","Sun Oct 16 16:22:14 +0000 2011","@robert_terpstra no worries bud, ps: they're saying that the 4s is having a lot of network issues right now. (I'm @apple for mac problems)" +"apple","negative","125603435440644098","Sun Oct 16 16:06:07 +0000 2011","I hate you @apple for not sending Toledo,oh @bestbuy any @iPhone for @verizon even after I pre ordered!!!" +"apple","negative","125601235985367041","Sun Oct 16 15:57:22 +0000 2011","@pmphillips @apple If my non-Apple equipment broke this often, I guess I'd need an Asus genius bar. But it doesn't, so I don't :-)" +"apple","negative","125599423131697154","Sun Oct 16 15:50:10 +0000 2011","Why is the new @apple iPhone OS duplicating all my iCal events? There's 2 of everything. How do I fix it? (tried turning iCloud off)" +"apple","negative","125598450090917888","Sun Oct 16 15:46:18 +0000 2011","Dear @Apple, how convenient/handy/thoughtful/smart that after iPhone #ios5 update you're apps won't work anymore! (none!!!) :( #gr%\mbl^.%)" +"apple","negative","125588202286366721","Sun Oct 16 15:05:35 +0000 2011","Hey @Apple...I know Steve Jobs died & all but my Twitter for iPad app keeps crashing...Fix it...NOW..please..lol -F.Light" +"apple","negative","125586348064247808","Sun Oct 16 14:58:13 +0000 2011","@apple not happy with Apple today. I did the suggested software update and now nothing works!!!" +"apple","negative","125583385895768064","Sun Oct 16 14:46:27 +0000 2011","Smashed yet another #iPhone screen. When are @apple gonna make the phone more durable!!! Tired of fixing it myself!!!" 
+"apple","negative","125561930416013312","Sun Oct 16 13:21:11 +0000 2011","IOS5 has one problem, when i save someone in my contacts they don't show his nam becuz of the region/ +965 @apple #apple" +"apple","negative","125556679571025920","Sun Oct 16 13:00:19 +0000 2011","I sure as hell better be able to restore my calendar events. Terrible bug apple! @apple #icloud #anothericloudfail" +"apple","negative","125544764203466752","Sun Oct 16 12:12:58 +0000 2011","I really hate that @apple won't let you hide the Newsstand app or move it into a folder. It serves no purpose for me." +"apple","negative","125533730222784512","Sun Oct 16 11:29:08 +0000 2011","Life was much easier when @apple and @BlackBerry were just fruits!" +"apple","negative","125521682894041088","Sun Oct 16 10:41:15 +0000 2011","@apple No location reminders on ipad2! That was the point of updating to iOS 5" +"apple","negative","125510333078048768","Sun Oct 16 09:56:09 +0000 2011","Comparing @Office #live with @google #apps and @apple #iCloud , I do not see #iCloud leading the crowd. Do you agree ?" +"apple","negative","125475953509015552","Sun Oct 16 07:39:33 +0000 2011","Turns out my computer isn't as fixed as it is meant to be. Fuck you @Apple, I'm going back to your Genius Bar to complain. #annoyed" +"apple","negative","125471372485992448","Sun Oct 16 07:21:21 +0000 2011","If @apple doesn't fix iOS5 this week, I'm buying a droid #emptythreats" +"apple","negative","125464229577891840","Sun Oct 16 06:52:58 +0000 2011","@apple pls launch #Siri on iphone4 .. Disappointed after spending hours for upgrading to iOS5.." +"apple","negative","125458395800154112","Sun Oct 16 06:29:47 +0000 2011","@apple you can suck it you and your fucking iPhones better be glad it's all I like cause the service Is fucking terrible" +"apple","negative","125442137302110208","Sun Oct 16 05:25:10 +0000 2011","This is the first time I've been unhappy with any @apple product... Disappointed, and the Internet seems as confused as I am right now." +"apple","negative","125441478951575552","Sun Oct 16 05:22:33 +0000 2011","After a week or so with @Apple's #iOS5 I'm considering going back to #jailbroken 4.2 - same features as 5, plus more. Not as stable, though." +"apple","negative","125408962215555072","Sun Oct 16 03:13:21 +0000 2011","Had issues with #Siri reaching network today. Too many uses or my signal? @apple" +"apple","negative","125408737296003072","Sun Oct 16 03:12:27 +0000 2011","The new iOS5 completely failed at upgrading the phone. Now I'm stuck in this restore loop. Thanks @apple for wasting my day. Can i undo?" +"apple","negative","125408701166256128","Sun Oct 16 03:12:18 +0000 2011","Is it me or does #iPhone4s run a little warm sometimes? @apple" +"apple","negative","125407447383937025","Sun Oct 16 03:07:20 +0000 2011","@apple WTF why can we not add the newsstand to a folder?! We are already in your walled garden, so please don't remind us of it." +"apple","negative","125406743923671040","Sun Oct 16 03:04:32 +0000 2011","@apple just now seeing none of my Microsoft Office programs work anymore after Lion upgrade. Why??" +"apple","negative","125405260650000384","Sun Oct 16 02:58:38 +0000 2011","Dear @Apple, I don't want the newsstand icon on my screen. #notcool" +"apple","negative","125405005493706752","Sun Oct 16 02:57:37 +0000 2011","@Blackberry does take better quality photos than @Apple iPhone though. I give them that! 
#photograhersEye" +"apple","negative","125404317669785600","Sun Oct 16 02:54:53 +0000 2011","Ok @iTunesMusic idk wth is up but after syncing my phone my album art is out of order! WTF @Apple!" +"apple","negative","125400161886277632","Sun Oct 16 02:38:23 +0000 2011","I wish Siri could tweet for me . @apple fix that ." +"apple","negative","125399780527570944","Sun Oct 16 02:36:52 +0000 2011","@apple I'm still waiting for you to deliver my phone to @sprint" +"apple","negative","125394863255588864","Sun Oct 16 02:17:19 +0000 2011","I will never purchase another @Apple product for the rest of my life. I am glad Steve Jobs is fucking dead. MAY YOU FUCKING ROT IN HELL!!!!!" +"apple","negative","125394746452619265","Sun Oct 16 02:16:51 +0000 2011","Thanks to @Apple my new iMac is a brick. I cannot re-install Lion and I cannot purchase it because it was installed by the former owner." +"apple","negative","125394663573172224","Sun Oct 16 02:16:32 +0000 2011","Asked #Siri ""Where's Baby Lisa?"" and was told ""sorry, I'm having trouble connecting to the network right now."" @apple server fail. #ios5" +"apple","negative","125371779039502336","Sun Oct 16 00:45:36 +0000 2011","Only thing #iOS.5 has done is deleted my tiger woods game and made impossible to retrieve. Thanks @Apple!" +"apple","negative","125369698840887297","Sun Oct 16 00:37:20 +0000 2011","Damn you @apple for breaking wififofum! It continued to work in ios 4. Was one of my most used apps. Damn Apple's overbearing policies!" +"apple","negative","125365852487942145","Sun Oct 16 00:22:03 +0000 2011","yo, @apple, this update is a disaster" +"apple","negative","125365814579826688","Sun Oct 16 00:21:54 +0000 2011","Dear @Apple,Congratulations, I am suing my insurance company and you just managed to make it to the top of my shit list. #Ineedthosepictures" +"apple","negative","125365581170999296","Sun Oct 16 00:20:58 +0000 2011","Dear @Apple: You must let me turn off iCloud to save bandwidth and NOT delete EVERY document on my iPad. That's BULLSHIT!" +"apple","negative","125355869859876864","Sat Oct 15 23:42:23 +0000 2011","RT @JoelBurns: Dear @Apple, it's me again. Thank for beautiful new iOS features. But I miss some of the old ones. Lk making calls & texts +>(" +"apple","negative","125355139409252352","Sat Oct 15 23:39:28 +0000 2011","By far the worst experience upgrading out of any version of iOS @apple #iOS5" +"apple","negative","125347619072512000","Sat Oct 15 23:09:35 +0000 2011","What happened? RT @xCuntNuggetx: Never purchasing another item from @apple." +"apple","negative","125346783390990337","Sat Oct 15 23:06:16 +0000 2011","Never purchasing another item from @apple." +"apple","negative","125341902739484672","Sat Oct 15 22:46:53 +0000 2011","RT @leiboaz: Wow, worst @apple customer service experience ever. Arrogant and inept. Bad combo, Biltmore Apple store." +"apple","negative","125341804857008128","Sat Oct 15 22:46:29 +0000 2011","Wow, worst @apple customer service experience ever. Arrogant and inept. Bad combo, Biltmore Apple store." +"apple","negative","125338210158125056","Sat Oct 15 22:32:12 +0000 2011","Hey guys at @Apple, WTF is happening to #iPhone4, with #iOS5 suddenly fade to black and begin to vibrate continuously, I had to reboot it" +"apple","negative","125336335656558592","Sat Oct 15 22:24:45 +0000 2011","@hbogo @apple - why don't you play nice and let me use HDMI from iPad? Pretty lame." 
+"apple","negative","125334948017213441","Sat Oct 15 22:19:14 +0000 2011","Warning: if iphone apps spontaneously loose data, it might not be the app's fault - http://t.co/1SEsvWwm #ios5downgrade @apple @marcoarment" +"apple","negative","125334519254482944","Sat Oct 15 22:17:32 +0000 2011","RT @JoelBurns: Dear @Apple, it's me again. Thanks for beautiful new iOS features. But I miss some of the old ones. Like making calls & t ..." +"apple","negative","125333948556521472","Sat Oct 15 22:15:16 +0000 2011","…and only the first 23 images in my Photo Roll made it over? Seriously, @apple, how did you fuck this up so much?" +"apple","negative","125330038248849408","Sat Oct 15 21:59:44 +0000 2011","@apple why does #iPhone4 plus #iOS5 equal half the battery life?" +"apple","negative","125329867674886144","Sat Oct 15 21:59:03 +0000 2011","@apple why isn't my iTunes updating? I just wanna update my iPhone to IOS5!!! Then iTunes says that it needs to update to do it. Then fails!" +"apple","negative","125327896066785280","Sat Oct 15 21:51:13 +0000 2011","Now @apple won't let me use credit on @iTunes unless I have a credit card. Should have told me when I redeemed the cards. Sound fair to you?" +"apple","negative","125313086465904641","Sat Oct 15 20:52:22 +0000 2011","@amandbrar @apple Siri has worked for me less than 10% of the time. If service doesn't improve by Monday, I'm returning my iPhone 4S." +"apple","negative","125311989751877632","Sat Oct 15 20:48:01 +0000 2011","@azrael: RT @DavideGori: And again #MobileMe is down. Issues when syncing, accessing mail... is this a good premise for #iCloud? @Apple, ..." +"apple","negative","125309975881977857","Sat Oct 15 20:40:01 +0000 2011","need @apple help!! installing new ios on #iphone4 and am stuck on the Find My iPhone screen :( i keep clicking next but it doesn't work" +"apple","negative","125309448108519424","Sat Oct 15 20:37:55 +0000 2011","Bummer that @Apple #iCloud syncs on Windows only w/ Outlook (not Thunderbird) & IE (not Firefox, Chrome). #sadmac" +"apple","negative","125309427422203904","Sat Oct 15 20:37:50 +0000 2011","@RonakKataria @apple you heard of testing before deploying updates. Difference between microsoft & apple: None" +"apple","negative","125307394640199680","Sat Oct 15 20:29:46 +0000 2011","Disappointing visual merchandising, @apple. Simple, but lacks elegance. And sequence doesn't even tell us about #Siri. http://t.co/XV2q3gfi" +"apple","negative","125305396842856448","Sat Oct 15 20:21:49 +0000 2011","has been on terminal hold with @apple support! She must not be the only person having trouble with the new iphone and synching!" +"apple","negative","125303217214062592","Sat Oct 15 20:13:09 +0000 2011","@apple thanks apple for ios 5 and losing all my contacts!" +"apple","negative","125302079752384512","Sat Oct 15 20:08:38 +0000 2011","@apple give me my apps back!!!" +"apple","negative","125301860256063488","Sat Oct 15 20:07:46 +0000 2011","@Apple #iOS5 NONSTOP DROPPED CALLS&MUTE BUTTON RANDOMLY ACTIVATING&PERSON CANT HEAR ME, PULL PHONE FROM EAR TO BE ON A ""MOBILE ME"" SCREEN" +"apple","negative","125301393560047616","Sat Oct 15 20:05:54 +0000 2011","RT @tomcolontonio: Didn't it occur to @apple #apple that ppl share iTunes accounts amongst family's ????? +Wtf!!!!" 
+"apple","negative","125301265700892672","Sat Oct 15 20:05:24 +0000 2011","@tomcolontonio @apple #EPICFAIL" +"apple","negative","125277260872822786","Sat Oct 15 18:30:01 +0000 2011","90 minutes after being at 100% my iPhone 4 (post #ios5 upgrade) has lost 40% of its charge and the @apple store folks are out of ideas." +"apple","negative","125276004817190914","Sat Oct 15 18:25:01 +0000 2011","So ""cutting the cord"" makes podcasts even harder to manage?!? No subscriptions! WTH, @Apple? http://t.co/LD2i871x #iOS5 (via @carrypad)" +"apple","negative","125271422431014914","Sat Oct 15 18:06:49 +0000 2011","remember when people used to say ""flat the fuck out!!"" lol.. and their face while saying.... kindahow i feel about @apple" +"apple","negative","125269239207706624","Sat Oct 15 17:58:08 +0000 2011","In less than an hour my post- #iOS5 iPhone has gone from 100% to 73% battery charge and the @apple store folks have no solution for me :(" +"apple","negative","125269161327865856","Sat Oct 15 17:57:50 +0000 2011","Well @apple seems 2b screwing #iphone 4s resellars. They still dont have 4s. Yet were promised. Similar 2 Ma Bell's monopolistic prctices?" +"apple","negative","125267178336419840","Sat Oct 15 17:49:57 +0000 2011","Also @EricGreenspan, don't forget @Apple stop closing the app store after each bloody purchase..." +"apple","negative","125265721281351680","Sat Oct 15 17:44:09 +0000 2011","@apple cu" +"apple","negative","125261285083447296","Sat Oct 15 17:26:32 +0000 2011","*sigh* @apple store seems stumped what to do about my catastrophically bad battery life after upgrading to #ios5" +"apple","negative","125247130762883072","Sat Oct 15 16:30:17 +0000 2011","Hmmmm a lot of #Siri features don't work in Canada. Location and directions? Seriously @apple come on!" +"apple","negative","125245780192792576","Sat Oct 15 16:24:55 +0000 2011","@apple, #siri can't connect to the network -- it's LESS useful that VoiceControl now... :(" +"apple","negative","125245104859529216","Sat Oct 15 16:22:14 +0000 2011","@apple took my Siri and made it exclusive to iPhone 4S #SoRude" +"apple","negative","125236708403970048","Sat Oct 15 15:48:52 +0000 2011","total time on the phone with @apple: 1 hour 27 minutes. let's see how this goes." +"apple","negative","125236166151774208","Sat Oct 15 15:46:43 +0000 2011","Waited in line for nothing, fucking genius @Apple called an app for ""John"" not ""jean"" stop fucking up my name im not a French boy. #PCam ..." +"apple","negative","125232266849947648","Sat Oct 15 15:31:13 +0000 2011","Dear @apple, One day you're going to release a version of iTunes that doesn't freeze more often than it works. One day." +"apple","negative","125230743990444032","Sat Oct 15 15:25:10 +0000 2011","It's been 24hrs since I reported @ups theft of my iPhone and @apple has done nothing. No return call as promised. Nothing." +"apple","negative","125230107580317696","Sat Oct 15 15:22:38 +0000 2011","This does not bode well. After charging all night, my @Apple iPhone 4S has 25% battery after only 3hr off charger with no calls & light use" +"apple","negative","125227837438435328","Sat Oct 15 15:13:37 +0000 2011","Houston we have a problem!! My iPad has been restoring for 12+ hours after installing @apple IOS5. This can't be right...." +"apple","negative","125224588253741056","Sat Oct 15 15:00:43 +0000 2011","#Siri went down for a little while last night. Come on @apple! Get your shit together on this launch!" 
+"apple","negative","125223685194915840","Sat Oct 15 14:57:07 +0000 2011","@ford should have teamed up with @Apple instead of @microsoft to make this SYNC system in my new focus. +Car is sweet SYNC is #EpicFail" +"apple","negative","125204228967903232","Sat Oct 15 13:39:49 +0000 2011","Installed io5 - fine on ipad but wiped wife's iPhone and restore from backup failed! #Bigtrouble #help @apple" +"apple","negative","125202037293064192","Sat Oct 15 13:31:06 +0000 2011","RT @gdcurry: Really @Apple? What have you done to my iTunes now? I can't click on Music, Movies, TV Shows, or any playlists. I get a l ..." +"apple","negative","125129328446017536","Sat Oct 15 08:42:11 +0000 2011","DAMN YOU !!! @apple" +"apple","neutral","126417484017451009","Tue Oct 18 22:00:51 +0000 2011","@Late_Show I would have watched but the folks at @apple have a jihad against adobe flash. Plse consider a YouTube link in future on UR site" +"apple","neutral","126415742177513472","Tue Oct 18 21:53:56 +0000 2011","RT @rdingwell: .@Apple has a record quarter and because a bunch of professional guessers (aka analysts) wanted more, its a disappointmen ..." +"apple","neutral","126415618625912832","Tue Oct 18 21:53:26 +0000 2011","Hey @apple, androids releasing brand new state of the art phones, whens your new phone come out? What's that? (cont) http://t.co/2sko9l3d" +"apple","neutral","126414657836687362","Tue Oct 18 21:49:37 +0000 2011",".@Apple has a record quarter and because a bunch of professional guessers (aka analysts) wanted more, its a disappointment #wtf" +"apple","neutral","126410146703351808","Tue Oct 18 21:31:42 +0000 2011","@Apple how fun wouldn't it be if it was possible to integrate ( soon to be named ) with notifications?" +"apple","neutral","126409984836763648","Tue Oct 18 21:31:03 +0000 2011","Interesting read on war b/w @Apple & @Samsung- http://t.co/Vt9d24Yi -using latter, agree lack of innovation, but better specs at same price!" +"apple","neutral","126407959495442432","Tue Oct 18 21:23:00 +0000 2011","RT @adamnash: The takeaway from the @Apple earnings call? Even Apple needs a new iPhone release every 12 months to stay competitive. cc ..." +"apple","neutral","126407672521162753","Tue Oct 18 21:21:52 +0000 2011","The takeaway from the @Apple earnings call? Even Apple needs a new iPhone release every 12 months to stay competitive. cc: @hblodget" +"apple","neutral","126407511531192320","Tue Oct 18 21:21:13 +0000 2011","Today's headline: @apple reports lower 4Q earnings. Headline in 3 months: @Apple reports record Q1 earnings." +"apple","neutral","126405911697817600","Tue Oct 18 21:14:52 +0000 2011","Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/XVcch6Os #PampersHelloApps""" +"apple","neutral","126405821482532864","Tue Oct 18 21:14:31 +0000 2011","@apple expanded the app store to more than 20 new countries in the december quarter" +"apple","neutral","126405160934178816","Tue Oct 18 21:11:53 +0000 2011","@Apple will be rolling out iPhone 4S to more than 20 more countries by the end of the month." +"apple","neutral","126403530838913024","Tue Oct 18 21:05:24 +0000 2011","Is it JUST a coincidence that @Apple named their virtual assistant after Tom and Katie's robot daughter?" +"apple","neutral","126401882766839811","Tue Oct 18 20:58:51 +0000 2011","Good or Bad results for @Apple ? 
Depends on who you ask apparently http://t.co/yDH8ffxE" +"apple","neutral","126400491067416576","Tue Oct 18 20:53:20 +0000 2011","Interesting times when increasing your revenue from $20B to $28B in a recession is considered a disappointment. @apple" +"apple","neutral","126394795802370049","Tue Oct 18 20:30:42 +0000 2011","People are waiting in line @apple for phones at 1:30 on a Tuesday! Don't you people have Jobs(no pun intended?)???" +"apple","neutral","126393452324855808","Tue Oct 18 20:25:21 +0000 2011","@Apple Would much rather get outta this having a ( see pic ) and have fun with this "" Have your friends choose music"" http://t.co/7IiHbr4J" +"apple","neutral","126389413054910464","Tue Oct 18 20:09:18 +0000 2011","RT @CRMStrategies: Great Tech War of 2012 http://t.co/64jGRoBp @FastCompany profiles @Amazon @Apple @Google @Facebook on @CNBC /ht @phil ..." +"apple","neutral","126387460463788032","Tue Oct 18 20:01:33 +0000 2011","@apple gonna see if anyone stayed logged into their fb or twitter #TroubleMaker" +"apple","neutral","126386085164101634","Tue Oct 18 19:56:05 +0000 2011","@leg3nd @apple the new iPhone?" +"apple","neutral","126384526925639681","Tue Oct 18 19:49:53 +0000 2011","If you've been struggling to get hold of me, I'm back online with a new iPhone - thanks @apple" +"apple","neutral","126383125059211265","Tue Oct 18 19:44:19 +0000 2011","Hey @Apple, pretty much all your products are amazing. You blow minds every time you launch a new gizmo. That said, your hold music is crap." +"apple","neutral","126381578975842304","Tue Oct 18 19:38:11 +0000 2011","Great Tech War of 2012 http://t.co/64jGRoBp @FastCompany profiles @Amazon @Apple @Google @Facebook on @CNBC /ht @philsimon" +"apple","neutral","126380323733909504","Tue Oct 18 19:33:11 +0000 2011","@Jake_Gyllenhaal So you've made up with @Apple?" +"apple","neutral","126379730827083776","Tue Oct 18 19:30:50 +0000 2011","@Apple has a pic of Steve Jobs on their home page. #Honor" +"apple","neutral","126377120023842816","Tue Oct 18 19:20:28 +0000 2011","@Apple to shut stores!!! C'mon, Only Wednesday for @SteveJobs memorial." +"apple","neutral","126375024595705856","Tue Oct 18 19:12:08 +0000 2011","@Motorola is the Best Android-powered company that is going against @Apple." +"apple","neutral","126374630377275392","Tue Oct 18 19:10:35 +0000 2011","@apple #readyfor5.0.1 http://t.co/M2FqakYr" +"apple","neutral","126370776013213697","Tue Oct 18 18:55:15 +0000 2011","This is what I have been waiting for RT @Apple The last big project Steve Jobs was working on was the new cranial insertion device: iMplant." +"apple","neutral","126368680459251712","Tue Oct 18 18:46:55 +0000 2011","Apple Users Get The official Kalifornia Cavi App on your Apple Device now on - powered by @Apple - Download it free http://t.co/HlGnvlRw" +"apple","neutral","126368285259350017","Tue Oct 18 18:45:21 +0000 2011","The official Kalifornia Cavi Mobil App is now available on the app store - powered by @Apple - Download it free http://t.co/HlGnvlRw" +"apple","neutral","126368127524159488","Tue Oct 18 18:44:44 +0000 2011","#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/I6woujUh #PampersHelloApps" +"apple","neutral","126364189097865216","Tue Oct 18 18:29:05 +0000 2011","@Apple #RIPSteve RT @davepeck: Steve Jobs and the Seven Rules of Success http://t.co/4nLRLdWy" +"apple","neutral","126362867778859008","Tue Oct 18 18:23:50 +0000 2011","macbreak weekly @TWiT via @apple airplay in full HD on the 42"". finally! 
@leolaporte @Ihnatko @alexlindsay" +"apple","neutral","126360821419884544","Tue Oct 18 18:15:42 +0000 2011","Stay young, stay foolish. But take ownership when u f*ck up, don't complain, and never give up. #resilience @apple" +"apple","neutral","126360182308618240","Tue Oct 18 18:13:09 +0000 2011","Dear @motorola, I smell a lawsuit coming your way. cc: @apple #motoactv" +"apple","neutral","126358012343492608","Tue Oct 18 18:04:32 +0000 2011","RT @hofmania: I just paid someone to wait in line at the @apple store this morning for me on @zaarly #putapriceonconvenience #iphone htt ..." +"apple","neutral","126357527196741632","Tue Oct 18 18:02:36 +0000 2011","@apple Or @Microsoft Buying Out @RIM ?" +"apple","neutral","126353359962775552","Tue Oct 18 17:46:03 +0000 2011","@apple Why don't you have a wireless remote for Keynote presentations w a MacBook Air? Nobody wants to see the pith man navigate w a iPhone" +"apple","neutral","126351669029126144","Tue Oct 18 17:39:20 +0000 2011","RT @HaendelBalzora: dear @apple please invent ""FaceTime Voicemails."" sincerely, haendel balzora jr. #apple" +"apple","neutral","126348857071239168","Tue Oct 18 17:28:09 +0000 2011","#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/YUydxo1t #PampersHelloApps" +"apple","neutral","126347890196103168","Tue Oct 18 17:24:19 +0000 2011","dear @apple please invent ""FaceTime Voicemails."" sincerely, haendel balzora jr. #apple" +"apple","neutral","126346633721032705","Tue Oct 18 17:19:19 +0000 2011","RT @jesperordrup: Hi @apple. Household has 4 iphones, 2 ipads, 2 minis, 2 apple tv, AirPorts, Timecapsule +. Whats a usable iPhoto shari ..." +"apple","neutral","126346584068861952","Tue Oct 18 17:19:07 +0000 2011","Couldnt help it & explored the #iphone4s @apple store & talked to #Siri... Should've tried to speak to her in French?! #nexttime" +"apple","neutral","126343124174901248","Tue Oct 18 17:05:22 +0000 2011","is it just me or do other people miss the iPod symbol on the iPhone? I had the first iPod, it's what I relate to @apple #ios5 #nostalgia" +"apple","neutral","126336867477094400","Tue Oct 18 16:40:31 +0000 2011","@ATTCustomerCare @att @apple I already have an iPhone 4 with an ATT contract. Can I buy a factory UNLOCKED iPhone 4S with my contract?" +"apple","neutral","126336687382081536","Tue Oct 18 16:39:48 +0000 2011","RT @HowardMortman: .RT @edatpost: .@Apple fan Eric Holder spotted at the Genius Store http://t.co/7vZSIXpy" +"apple","neutral","126331354718801921","Tue Oct 18 16:18:36 +0000 2011","Hi @apple. Household has 4 iphones, 2 ipads, 2 minis, 2 apple tv, AirPorts, Timecapsule +. Whats a usable iPhoto sharing setup? /cc @maddox" +"apple","neutral","126330974270271488","Tue Oct 18 16:17:06 +0000 2011","RT @edatpost: Bah! Genius BAR // RT @edatpost: .@Apple fan Eric Holder spotted at the Genius Store http://t.co/FTLttLPn" +"apple","neutral","126330155441467392","Tue Oct 18 16:13:50 +0000 2011","#motoactiv? Methinks @apple and maybe @Nike are already prepping lawsuits" +"apple","neutral","126329109759524865","Tue Oct 18 16:09:41 +0000 2011","Check this video out -- moe. plays Live on iPads. Video dedicated in memory of #SteveJobs http://t.co/XSiyDnqu @ipad @mac @apple #brilliant" +"apple","neutral","126328424624160768","Tue Oct 18 16:06:58 +0000 2011","Bah! 
Genius BAR // RT @edatpost: .@Apple fan Eric Holder spotted at the Genius Store http://t.co/FTLttLPn" +"apple","neutral","126326886354784256","Tue Oct 18 16:00:51 +0000 2011","Evolution of @Apple Ads 1975-2002: http://t.co/QTOqOZ42 @thesavoia #stevejobs #tech #art" +"apple","neutral","126325069281624064","Tue Oct 18 15:53:38 +0000 2011","@apple #siri http://t.co/k7b686hX" +"apple","neutral","126324621279641601","Tue Oct 18 15:51:51 +0000 2011",".RT @edatpost: .@Apple fan Eric Holder spotted at the Genius Store http://t.co/LCTPxHF5" +"apple","neutral","126324256236765185","Tue Oct 18 15:50:24 +0000 2011","RT @edatpost: .@Apple fan Eric Holder spotted at the Genius Store http://t.co/FTLttLPn" +"apple","neutral","126323574989520896","Tue Oct 18 15:47:41 +0000 2011","RT @edatpost: .@Apple fan Eric Holder spotted at the Genius Store http://t.co/OshPyV2G" +"apple","neutral","126323533696614402","Tue Oct 18 15:47:32 +0000 2011",".@Apple fan Eric Holder spotted at the Genius Store http://t.co/FTLttLPn" +"apple","neutral","126321197062426624","Tue Oct 18 15:38:14 +0000 2011","RT @DailyDealChat: @bidmyway I like your app for the @apple iPhone very simple and easy to use!" +"apple","neutral","126320247379730432","Tue Oct 18 15:34:28 +0000 2011","@bidmyway I like your app for the @apple iPhone very simple and easy to use!" +"apple","neutral","126320076063379456","Tue Oct 18 15:33:47 +0000 2011","Thanks, @Apple. Now even more people on the bus are talking to inanimate objects. Harder than ever to tell who's crazy and who's hip." +"apple","neutral","126318553031917569","Tue Oct 18 15:27:44 +0000 2011","Noticed how @apple has added the like & tweet icons on their page? http://t.co/PZoNZeC8 #like #facebook #tweet" +"apple","neutral","126316594971422720","Tue Oct 18 15:19:57 +0000 2011","@rtimbuc thx for publicly coming out out of the @Apple closet ... :)" +"apple","neutral","126316179429134336","Tue Oct 18 15:18:18 +0000 2011","Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/2XJZaLCs #PampersHelloApps" +"apple","neutral","126315088641658881","Tue Oct 18 15:13:58 +0000 2011","@RIM to release new generation of phones in the new year to contend with @Apple... Hmmm." +"apple","neutral","126311681126187008","Tue Oct 18 15:00:26 +0000 2011","RT @TimjwDowning: #iOS 5 is a great system but it's taken all day to download @apple" +"apple","neutral","126311223343058946","Tue Oct 18 14:58:37 +0000 2011","Using @Apple's mobile @AirPort Utility http://t.co/TiDpaHYC" +"apple","neutral","126309939961536513","Tue Oct 18 14:53:31 +0000 2011","@Dropbox balked at acquisition by @Apple. My apps thank them. :) http://t.co/VsYPl9Xu" +"apple","neutral","126308556294205441","Tue Oct 18 14:48:01 +0000 2011","These cos are worth billions: @Apple, @Facebook @Google & @Amazon and they are battling for consumers http://t.co/4WxPCH9h via @fastcompany" +"apple","neutral","126302918797312000","Tue Oct 18 14:25:37 +0000 2011","Just when #TomCruise thought he had a unique name for his daughter, @Apple pulls the rug out from under him with the new #Siri App." +"apple","neutral","126302719882444801","Tue Oct 18 14:24:49 +0000 2011","I wonder If the person who has @apple is holding out for apple to buy it from them" +"apple","neutral","126301989486342145","Tue Oct 18 14:21:55 +0000 2011","iOS 5 for iPhone and iPad is Here: Apple's Best Release Yet? 
http://t.co/iLYGZ5Ox #ios5 #ios @apple #ipad #tablet" +"apple","neutral","126301301511426049","Tue Oct 18 14:19:11 +0000 2011","The 'Great Tech War': @Apple, @Facebook, @Google, @Amazon battle for the future http://t.co/sVu8cG1M via @fastcompany" +"apple","neutral","126300596633481216","Tue Oct 18 14:16:23 +0000 2011","@BeeeThree I am aye, I wasn't @apple" +"apple","neutral","126300304600866816","Tue Oct 18 14:15:13 +0000 2011","How the community saw #SteveJobs - 12 Most #Community ""iCelebrates"" Steve Jobs - http://t.co/jJRwIvmc @Apple" +"apple","neutral","126298592364331008","Tue Oct 18 14:08:25 +0000 2011","Steve Jobs' unconventional #leadership secret - ACC SmartBrief: http://t.co/CY6VxRnF > @Apple #sm #HR #NEHRA" +"apple","neutral","126294550535872512","Tue Oct 18 13:52:21 +0000 2011","RT @aplusk: Sending love & light to everyone @Apple & the entire Jobs family.Today we lost a Giant who will be missed even by those who ..." +"apple","neutral","126294304628019201","Tue Oct 18 13:51:23 +0000 2011","@apple store NY" +"apple","neutral","126293879166205952","Tue Oct 18 13:49:41 +0000 2011","Attending #mobilemem , listening to James Kelley from @apple ." +"apple","neutral","126292279009882113","Tue Oct 18 13:43:20 +0000 2011","Tell how Steve Jobs & @Apple changed your life and $5 will be donated to Pancreatic #Cancer Research! @PanCAN @Pan_Act http://t.co/BWzganje" +"apple","neutral","126292109727768576","Tue Oct 18 13:42:42 +0000 2011","yeah, @apple is updated!! planking photos, #lmaoo http://t.co/IusXJJDq" +"apple","neutral","126291860305100801","Tue Oct 18 13:41:40 +0000 2011","picking up my #iPhone4S in two hours @Apple!" +"apple","neutral","126290154737504256","Tue Oct 18 13:34:53 +0000 2011","@DickCostolo says daily #twitter signups have TRIPLED since the new @apple #iphone4S came out" +"apple","neutral","126289716097196032","Tue Oct 18 13:33:09 +0000 2011","yes, i am live tweeting my @apple tech support call. Isn't this fun?" +"apple","neutral","126283761754185728","Tue Oct 18 13:09:29 +0000 2011","@azee1v1 @apple @umber Better file info/easier to get to, duplication of files when importing. Categorizing files." +"apple","neutral","126283639959990274","Tue Oct 18 13:09:00 +0000 2011","ok @apple, now we got our iOS5 and #icloud, can we please have #LogicX" +"apple","neutral","126283441657495552","Tue Oct 18 13:08:13 +0000 2011","@AsimRang @apple @umber well so far ive seen it for apps" +"apple","neutral","126283440457912320","Tue Oct 18 13:08:13 +0000 2011","Hello again, iPhone 4!!! Thank you, @apple! Although this should have never broken in the first place! #tsk" +"apple","neutral","126283024278110208","Tue Oct 18 13:06:33 +0000 2011","@AsimRang @apple @umber when you upgrade to ios5 there is a section in app store which lets you download anything you have purchased before" +"apple","neutral","126281432644595713","Tue Oct 18 13:00:14 +0000 2011","@Apple, @Facebook, @Google and @Amazon battle in...The Great Tech War of 2012 http://t.co/uL9zZKv8 by @fmanjoo" +"apple","neutral","126280507729260544","Tue Oct 18 12:56:33 +0000 2011","@AsimRang @apple @umber the podcasts feature works just fine, music purchasing is straight forward, what else do you want?" +"apple","neutral","126279672433614848","Tue Oct 18 12:53:14 +0000 2011","“@NoVaTyler: @Foreverfit4u amazing how quickly apple went down the poop tube” It couldn't have 2do w/operator error!
#sarcasm Still ❤ @apple" +"apple","neutral","126277810431074304","Tue Oct 18 12:45:50 +0000 2011","RT @Apple We destroying you next RT @therealterica: Duh #TeamAndroid RT @JuanHustle I got a feeling we living in the last days of Blackberry" +"apple","neutral","126272713332506624","Tue Oct 18 12:25:35 +0000 2011","@KaeKaed @Apple Here is an easy way to put Newsstand in a folder http://t.co/yVXmWIbI" +"apple","neutral","126267746739699713","Tue Oct 18 12:05:51 +0000 2011","#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/AAP7StHt #PampersHelloApps" +"apple","neutral","126257394622808064","Tue Oct 18 11:24:43 +0000 2011","Been MIA from Twitter.. But now Tweeting from my new @apple iPhone! :) #finallycool #notreally well, It IS ..I'm not :) #stilllearning" +"apple","neutral","126243528832593920","Tue Oct 18 10:29:37 +0000 2011","@AsgariAJ #iPhone need to tell @Apple that" +"apple","neutral","126232037492404224","Tue Oct 18 09:43:57 +0000 2011","@Mommy_gaga, #Win an @Apple iPod Touch @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/qEATNMpI #PampersHelloApps" +"apple","neutral","126225922159427584","Tue Oct 18 09:19:39 +0000 2011","Need to update my apps. ): my apple ID is disabled.. Don't know what to do.. @apple @AppStore @itunes @Siri_Assistant" +"apple","neutral","126219340214304768","Tue Oct 18 08:53:30 +0000 2011","eating @apple xDD" +"apple","neutral","126218596786511873","Tue Oct 18 08:50:33 +0000 2011","I wonder if @Apple OSX will get built-in @Twitter @support like iOS5. That would be very helpful at the moment." +"apple","neutral","126217194173501441","Tue Oct 18 08:44:58 +0000 2011","Spell check needed when saving documents - fingers crossed Steve Jobs left that nugget of technology behind on the @Apple to do list. :)" +"apple","neutral","126213965817708544","Tue Oct 18 08:32:09 +0000 2011","@Apple Stores to close for one hour on October 19 to watch #SteveJobs #memorial http://t.co/fbjqb5RA via @9to5mac #tech #porchista" +"apple","neutral","126211975595311104","Tue Oct 18 08:24:14 +0000 2011","iPhone 4S vs Canon 5D MKII. @apple @canon @iphone http://t.co/PvKqad33" +"apple","neutral","126209902241787904","Tue Oct 18 08:16:00 +0000 2011","@Samsung wants to ban @Apple iPhone 4S in Japan and Australia. http://t.co/hWA4tWIc #smartphonewar" +"apple","neutral","126205800359280640","Tue Oct 18 07:59:42 +0000 2011","Siri says some weird things | This is my next... http://t.co/H8n7caSg via @verge @hasanga @mohoisham @apple #Siri #LMFAO" +"apple","neutral","126201991125929984","Tue Oct 18 07:44:34 +0000 2011","@apple karaoke ceel w/ @nandamaulida @putriasmara @galuhoktriana @milatus :p" +"apple","neutral","126197405015220225","Tue Oct 18 07:26:20 +0000 2011","The four American companies that have come to define 21st-century on the verge of war. @Amazon @Apple @Facebook and @G…http://t.co/PxUlr5dw" +"apple","neutral","126186795808456704","Tue Oct 18 06:44:11 +0000 2011","I have 4 @Apple products..." +"apple","neutral","126185114173583360","Tue Oct 18 06:37:30 +0000 2011","@SamsungMobileUS @Apple would disagree with you right now." +"apple","neutral","126177221571395584","Tue Oct 18 06:06:08 +0000 2011","Stay hungry, stay foolish. Steve Jobs, founder @Apple #WinningWithEL" +"apple","neutral","126171911523794944","Tue Oct 18 05:45:02 +0000 2011","RT @thejjmg: My friend @bm88 found this article on the #ios5 battery drain problem http://t.co/Iqa0ipVt @Apple ah mines being sinking..."
+"apple","neutral","126165547154018304","Tue Oct 18 05:19:45 +0000 2011","iSteps @apple store bologna http://t.co/WjjBq7Ae" +"apple","neutral","126163403063570432","Tue Oct 18 05:11:13 +0000 2011","How I manage my businesses/life: @pivotaltracker, @37signals, @twitter, @googleapps, @apple, @dropbox, @wordpress, @dreamhost THX" +"apple","neutral","126157019072835584","Tue Oct 18 04:45:51 +0000 2011","Notes for Jobs @Apple store in Palo alto downtown. http://t.co/4BJjfq4h" +"apple","neutral","126149567036137473","Tue Oct 18 04:16:15 +0000 2011","Just realized Apple doesn't even have @Apple on lock. This dude R.J. Pittman waiting for thee fattest check." +"apple","neutral","126148184358653954","Tue Oct 18 04:10:45 +0000 2011","Hey @Apple, could you hook the voice memos app on the iPhone up with fine scrubbing? Journalists need love too." +"apple","neutral","126147867478982656","Tue Oct 18 04:09:29 +0000 2011","RT @danysaadia: @ATTCustomerCare @att @apple I already have an ATT wireless line. Can I buy a factory unlocked iPhone 4S with my contract?" +"apple","neutral","126146495396319232","Tue Oct 18 04:04:02 +0000 2011","Dear @Apple… If I make love to iCloud will ALL THE THINGS have my babies?" +"apple","neutral","126141631291326464","Tue Oct 18 03:44:43 +0000 2011","POLL: http://t.co/GcrpUKwF Q:R U pissed @Apple banned this video? #Apple #iPhone4S #Siri #F8ball" +"apple","neutral","126141628372090880","Tue Oct 18 03:44:42 +0000 2011","POLL: http://t.co/9q2Er1w9 Q:R U pissed @Apple banned this video? #Apple #iPhone4S #Siri #F8ball" +"apple","neutral","126141580682854400","Tue Oct 18 03:44:31 +0000 2011","More Twitter buzz for Apple Store: http://t.co/hLybd7uc - RT @ji_lee started out as a trip to the @apple store. turned into a spontaneou..." +"apple","neutral","126141157146238976","Tue Oct 18 03:42:50 +0000 2011","My friend @bm88 found this article on the #ios5 battery drain problem http://t.co/zMX85Fkd @apple" +"apple","neutral","126134865887363072","Tue Oct 18 03:17:50 +0000 2011","""#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/r0WYm1jX #PampersHelloApps""" +"apple","neutral","126130171404230656","Tue Oct 18 02:59:10 +0000 2011","Dear @apple, why did my #Apple ID change to my colleagues' when I charged on her Mac? Now can't download or update apps. #iPhone" +"apple","neutral","126125830094061568","Tue Oct 18 02:41:55 +0000 2011","You know you've gone @Apple overboard when you misread a @Yelp review as ""iPad Thai."" #fb" +"apple","neutral","126125799744094208","Tue Oct 18 02:41:48 +0000 2011","Goodbye @Evernote, Welcome #NotesintheCloud thx @Apple" +"apple","neutral","126125182405447680","Tue Oct 18 02:39:21 +0000 2011","Will @Apple's Mac ever properly be used in the financial markets or is it purely a creatives device? Shift from IBM to Mac? #finpanther" +"apple","neutral","126124917992341504","Tue Oct 18 02:38:18 +0000 2011","But @att is ""working"" with @apple to make their iPhone say 4G RT @JFSikora: @TCPJ_Mickey I'm so glad Apple is not slinging that crap." +"apple","neutral","126122372775415808","Tue Oct 18 02:28:11 +0000 2011","started out as a trip to the @apple store. turned into a spontaneous fun evening w @MolleyMoody. we danced while trying on outfits. #girls" +"apple","neutral","126118389591711744","Tue Oct 18 02:12:21 +0000 2011","RememberiN wen @apple and @blackberry were just fruit +#nostalgia" +"apple","neutral","126113944891949056","Tue Oct 18 01:54:42 +0000 2011","@dyanna299 you mean like Muhaimin @apple.com?" 
+"apple","neutral","126110863550717952","Tue Oct 18 01:42:27 +0000 2011","The new @apple #find my friends app is great.... except the part where you have to log in everytime. Get back to me when this is changed..." +"apple","neutral","126110770864979968","Tue Oct 18 01:42:05 +0000 2011","One thing I want from #ios is searching in the add bar similar to what Firefox or droid phones do. But thnx for a talking Google @apple ..." +"apple","neutral","126110374549405696","Tue Oct 18 01:40:30 +0000 2011","Gee, I can't imagine that the maps app graphic from this Samsung website will help their court cases vs. @Apple: http://t.co/PmanMoAh" +"apple","neutral","126109969912311810","Tue Oct 18 01:38:54 +0000 2011","Would you look at that.. @SouthwestAir does @Apple style welcomes as well. http://t.co/t0TrFnk8 #customerappreciation #custserv" +"apple","neutral","126107127231152129","Tue Oct 18 01:27:36 +0000 2011","RT @fredbenenson: Hey @twitter and @apple, this is pretty dumb. http://t.co/kubkPO0T" +"apple","neutral","126106914684796928","Tue Oct 18 01:26:46 +0000 2011","got the new @apple update" +"apple","neutral","126105236229193728","Tue Oct 18 01:20:05 +0000 2011","A brick n mortar @Google store? Coming soon to London, US next? Copying @Apple business model? http://t.co/l4VTHsv5" +"apple","neutral","126105175294357505","Tue Oct 18 01:19:51 +0000 2011","Hey @twitter and @apple, this is pretty dumb. http://t.co/kubkPO0T" +"apple","neutral","126104490511319041","Tue Oct 18 01:17:08 +0000 2011","If the iPhone 4 case fits the 4S why is @nlevenbrown @apple buying a new one?" +"apple","neutral","126104322999197696","Tue Oct 18 01:16:29 +0000 2011","@apple store to get my phone fixed... ppl lined up for 4s http://t.co/aUHQLiVy" +"apple","neutral","126104244402126848","Tue Oct 18 01:16:09 +0000 2011","@Apple why no Macbook mirroring onto AppleTV via #Airplay?" +"apple","neutral","126102037057388544","Tue Oct 18 01:07:23 +0000 2011","@ATT just so I have this right.I pre-ordered an iphone last wk and it will be here in a month. I could buy in an @apple store today.really?" +"apple","neutral","126098699196698624","Tue Oct 18 00:54:07 +0000 2011","@apple please bring back the old Siri app for us old school iPhone 3 users!" +"apple","neutral","126095965391298560","Tue Oct 18 00:43:15 +0000 2011","Upgrading my #iPad to #iOS5. When @Apple said it could take a couple of hours to install and restore it, they weren't kidding." +"apple","neutral","126094392183357443","Tue Oct 18 00:37:00 +0000 2011","Yeah, there's no @apple twitter account." +"apple","neutral","126094029015355392","Tue Oct 18 00:35:33 +0000 2011","POLL: http://t.co/Wu6EsvT5 Q:Should @Microsoft consider making its own hardware for #Windows like @Apple does? #Microsoft #Windows #Appl ..." +"apple","neutral","126094027140513792","Tue Oct 18 00:35:33 +0000 2011","POLL: http://t.co/tZBleGaQ Q:Should @Microsoft consider making its own hardware for #Windows like @Apple does? #Microsoft #Windows #Appl ..." +"apple","neutral","126091878469869568","Tue Oct 18 00:27:01 +0000 2011","@ATTCustomerCare @att @apple I already have an ATT wireless line. Can I buy a factory unlocked iPhone 4S with my contract?" +"apple","neutral","126089815136538624","Tue Oct 18 00:18:49 +0000 2011","Lol bet that Android couldnt do that RT @ceejay3k: I promise this iPhone been at 1% battery for the last 3 hours. I see you @apple." +"apple","neutral","126089347639427072","Tue Oct 18 00:16:57 +0000 2011","I promise this iPhone been at 1% battery for the last 3 hours. I see you @apple." 
+"apple","neutral","126087892580827137","Tue Oct 18 00:11:10 +0000 2011","@APPLE freaks is Siri available for Iphone 4 yet?" +"apple","neutral","126086553415057408","Tue Oct 18 00:05:51 +0000 2011","#Iphone5 can you please make The battery last longer thanks @Apple" +"apple","neutral","126081812236738560","Mon Oct 17 23:47:01 +0000 2011","Hey @apple next update can we get an emoji that's pointing the middle finger please?? Thanx!!" +"apple","neutral","126079649959772160","Mon Oct 17 23:38:25 +0000 2011","U know the @apple company are giving out 500 iPads because Steve jobs past away" +"apple","neutral","126078565346312192","Mon Oct 17 23:34:07 +0000 2011","Found out the hard way today that @webex does not work with @apple ios5 yet when you are following a link for a meeting.." +"apple","neutral","126073142107045888","Mon Oct 17 23:12:34 +0000 2011","RT @shanmoorthy: Next year is going to be really interesting in tech as @Google, @Amazon, @Facebook and @Apple vie for new growth areas ..." +"apple","neutral","126072051118260225","Mon Oct 17 23:08:13 +0000 2011","Saw that @apple is covered with msgs 4 steve jobs. Nice to kno ppl r still saying #thankyousteve" +"apple","neutral","126071173640499200","Mon Oct 17 23:04:44 +0000 2011","iPhone 4 S now RESERVATION only, if you want to buy it in an @apple store tomorrow you have to reserve online after 9pm tonight" +"apple","neutral","126068964685135872","Mon Oct 17 22:55:58 +0000 2011","@apple must be launching a #searchengine - imminent surely. #AppleSearch #AppleCore #bigapple haha! Please be the latter" +"apple","neutral","126066994008162305","Mon Oct 17 22:48:08 +0000 2011","@FastCompany - The Great Tech War Of 2012: @Apple, @Facebook, @Google, and @Amazon duking it out - http://t.co/HXsAlzPv via @ADerricks" +"apple","neutral","126066452787773443","Mon Oct 17 22:45:59 +0000 2011","Apple sold 4 million iPhone 4S units on first weekend - Appletell: http://t.co/vs5r1oox #iPhone4S @Apple #fb" +"apple","neutral","126065983138955265","Mon Oct 17 22:44:07 +0000 2011","@SteveJobs : @Twitter overwhelmed by reaction to @Apple founder's death - Telegraph http://t.co/tqF6pjf5" +"apple","neutral","126065529684369408","Mon Oct 17 22:42:19 +0000 2011","@Apple please put the Tab button back in Pages for iPad. That hold my finger down, wait for the insert tab, then touch that for the tab. SMH" +"apple","neutral","126061182720278528","Mon Oct 17 22:25:02 +0000 2011","Apple shares hit new all-time intraday high: http://t.co/uY35rrRU #win @Apple" +"apple","neutral","126057389333020672","Mon Oct 17 22:09:58 +0000 2011","RT @FaithMcNally: @RhiannaRaee hanging @apple in tacoma listening to the beatles!" +"apple","neutral","126056940060155904","Mon Oct 17 22:08:11 +0000 2011","@RhiannaRaee hanging @apple in tacoma listening to the beatles!" +"apple","neutral","126055880394420224","Mon Oct 17 22:03:58 +0000 2011","In front of the @apple store. So many blue shirts I feel like I'm at a smurf reunion. #iPhone4S" +"apple","neutral","126054998080622593","Mon Oct 17 22:00:28 +0000 2011","A must read article on @fastcompany about @Apple, @Facebook, @Google, and @Amazon technology war in 2012 http://t.co/nxc7Wo4S" +"apple","neutral","126054145617694720","Mon Oct 17 21:57:04 +0000 2011","@cnbcfastmomey what price should I sell @apple for" +"apple","neutral","126052649475915776","Mon Oct 17 21:51:08 +0000 2011","Hey @Apple, when did you hire @TheRock to tighten the screws on the bottom cases of your MacBook Pros? 
#Ouch #NiceHire" +"apple","neutral","126051375422504961","Mon Oct 17 21:46:05 +0000 2011","#SteveJobs memorial at the Leawood @Apple store. http://t.co/UVgb05G6" +"apple","neutral","126049560878526464","Mon Oct 17 21:38:51 +0000 2011","Hey @apple why is Siri not available for the Mac?" +"apple","neutral","126044185815040000","Mon Oct 17 21:17:30 +0000 2011","New phone is @apple, too bad im not going to get it till tommorrow, dont feel like driving to the mall AND GG is on tonight!!!" +"apple","neutral","126044055644807169","Mon Oct 17 21:16:59 +0000 2011","waiting for Jacob @fairview @apple storeee" +"apple","neutral","126042506717704192","Mon Oct 17 21:10:50 +0000 2011","RT @DJTREBEL1: #fail i guess @apple didnt notify these clowns that the iPhone5 isnst out yet.... http://t.co/2yCDNt1B" +"apple","neutral","126041773356232704","Mon Oct 17 21:07:55 +0000 2011","#fail i guess @apple didnt notify these clowns that the iPhone5 isnst out yet.... http://t.co/2yCDNt1B" +"apple","neutral","126041570876203009","Mon Oct 17 21:07:06 +0000 2011","#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/nDRpciY9 #PampersHelloApps" +"apple","neutral","126040050441011200","Mon Oct 17 21:01:04 +0000 2011","iPhone 4S First Weekend Sales Top Four Million: http://t.co/Dm4acTYv via @Apple themselves. Big sales for the new phone." +"apple","neutral","126031463647944704","Mon Oct 17 20:26:57 +0000 2011","@WIREDInsider beautiful, yes. though it simply reminds me of @apple's spinning wheel of death. clock, reinvented: http://t.co/tJcJAqfl" +"apple","neutral","126030091892432896","Mon Oct 17 20:21:30 +0000 2011","Dear @apple: Integrated tweeting in ios5 RAWKS. Plz consider adding @ and # lookup like @twitter app. Thx! http://t.co/5uHsTEPz" +"apple","neutral","126022527578406912","Mon Oct 17 19:51:26 +0000 2011","Easiest way to use Evernote with iPhone 4S Siri. Just tell Siri to Email Evernote and say your note. @evernote @apple #evernote #siri" +"apple","neutral","126021436149211136","Mon Oct 17 19:47:06 +0000 2011","@Apple sells record 4M #iPhone4S phones in first 3 days!" +"apple","neutral","126019432194650113","Mon Oct 17 19:39:08 +0000 2011","@brady I am surprised @apple did not think of/anticipate that......" +"apple","neutral","126018538531061760","Mon Oct 17 19:35:35 +0000 2011","#iMessage is here wid iOS5 RIP #BBM @cultofmac @apple" +"apple","neutral","126017643747606528","Mon Oct 17 19:32:02 +0000 2011","It's a good thing @apple has not came out with an iCar because I would be in line for one and im sure it would be expensive" +"apple","neutral","126016494701256704","Mon Oct 17 19:27:28 +0000 2011","RT @carlmilner: RT @petapixel: iPhone 4S vs Canon 5D Mark II - A side-by-side comparison of 1080p HD video: http://t.co/qMyMIV7z << ..." +"apple","neutral","126016405085757440","Mon Oct 17 19:27:06 +0000 2011","The iPhone update has segregated my contacts between iPhone users and non-iPhone users. They just had to call out those non-users @apple" +"apple","neutral","126014643826208768","Mon Oct 17 19:20:06 +0000 2011","@apple @internode #icloud #ios5 #piracy omg no wonder ive used all my downloads!!" +"apple","neutral","126014214467895297","Mon Oct 17 19:18:24 +0000 2011","@apple Iphone's & Ipad's beautiful technology but alltogether mainly a marketing platform. It's obscure is'nt it? Apple lost it's touch." +"apple","neutral","126014102379302912","Mon Oct 17 19:17:57 +0000 2011","@JarjarTee @apple That's really good. 
Makes you happy to pay the infamous @apple tax :)" +"apple","neutral","126013626426466304","Mon Oct 17 19:16:04 +0000 2011","Hall Pass had me rolling...... @Apple bee's lookin for nookie really" +"apple","neutral","126012833128390656","Mon Oct 17 19:12:55 +0000 2011","@hanrahan52 @apple Don't say things you can't take back, Joel." +"apple","neutral","126012515019784192","Mon Oct 17 19:11:39 +0000 2011","@coatgal @apple considering his circumstances - suffering faults on/off for long periods -They replaced his phone with a new as a goodwill" +"apple","neutral","126012034545496065","Mon Oct 17 19:09:44 +0000 2011","@coatgal @apple Paul had a chat with an advisor explained the problem : cost £120 repair !! +BUT wait for it , drumroll . . . ." +"apple","neutral","126012004312956928","Mon Oct 17 19:09:37 +0000 2011","#Steve #Jobs #Tribute iPhone 4S Cases Now Available #iphonecases +http://t.co/2hyurIiq +#stevejobstributecases +@ikaveri @iamsrk @apple" +"apple","neutral","126011120694726656","Mon Oct 17 19:06:07 +0000 2011","@coatgal had a really interestng experience up in the @Apple store though, Paul was with me up there, faulty #iphone4 - but out of warranty" +"apple","neutral","126010471202566144","Mon Oct 17 19:03:32 +0000 2011","@apple @appstore @mac @iOs @Ipad @siri Will siri be available for iPad 2?" +"apple","neutral","126009386022879232","Mon Oct 17 18:59:13 +0000 2011","Can this ho Siri update your tweets? cc: @apple @iPhone_News" +"apple","neutral","126008776322064384","Mon Oct 17 18:56:48 +0000 2011","@Apple's Success is Also #Google's Gain as #Mobile #Advertising Balloons http://t.co/0400ynQY" +"apple","neutral","126007705600135168","Mon Oct 17 18:52:32 +0000 2011","@_alastair w/ respect, the old alternatives were talking to a computer in india or the geek squad. @apple does a pretty great job." +"apple","neutral","126006572420833282","Mon Oct 17 18:48:02 +0000 2011","@jackiestone1 yes! Tech peeps talk so much about algorithms, but tech should always tie back to people. @apple proves this #imediasummit" +"apple","neutral","126006349959135232","Mon Oct 17 18:47:09 +0000 2011","The great tech battle ft @facebook @amazon @apple and more http://t.co/ws4lWWXy via @fastcompany" +"apple","neutral","126004552557273088","Mon Oct 17 18:40:01 +0000 2011","Another shrewd business model move by @Amazon http://t.co/OQJRRYFq Rooting for the dark horse in the @facebook @google @apple war? #tech" +"apple","neutral","126003746135224320","Mon Oct 17 18:36:48 +0000 2011","RT @Apple sells 4 million iPhone 4S in 3 days http://t.co/vElmtzNS via @reuters" +"apple","neutral","126003567315255296","Mon Oct 17 18:36:06 +0000 2011","Flowers & Apples shower the Santa Monica @Apple store in honor of #SteveJobs http://t.co/xGf60gMs" +"apple","neutral","126001989309054976","Mon Oct 17 18:29:49 +0000 2011","is it me or @blackberry is begging ppl not to go @apple #iphone http://t.co/X3gdBUyK" +"apple","neutral","126001758853009409","Mon Oct 17 18:28:55 +0000 2011","Do people loved Steve? Check out this photo from @Apple store http://t.co/jIqBGghC" +"apple","neutral","126000843798491136","Mon Oct 17 18:25:16 +0000 2011","RT @petapixel: iPhone 4S vs Canon 5D Mark II - A side-by-side comparison of 1080p HD video: http://t.co/qMyMIV7z <<@apple Wow!!" +"apple","neutral","125999022908510209","Mon Oct 17 18:18:02 +0000 2011","Why @Apple so valuable, good analysis of trended profits, iOS vs. 
Microsoft: Comparing the bottom lines http://t.co/k9qst4R4 via @asymco" +"apple","neutral","125998732046123009","Mon Oct 17 18:16:53 +0000 2011","@audrievv @apple You can still use #MobileMe webhosting (with or without #iWeb) after converting to #iCloud." +"apple","neutral","125996412252078080","Mon Oct 17 18:07:40 +0000 2011","http://t.co/48emAEID #SteveJobs memorial @Apple #Boylston" +"apple","neutral","125996330500890624","Mon Oct 17 18:07:20 +0000 2011","I am now the proud owner of @apple tv thanks to big bro @steveeadie and family!!" +"apple","neutral","125994997609803776","Mon Oct 17 18:02:02 +0000 2011","RT @robertoafelipe: @apple predicts #Siri in 1987 in video, the video is staged for September 2011 http://t.co/tFNRP3MD" +"apple","neutral","125994518989385729","Mon Oct 17 18:00:08 +0000 2011","Head to our FB page for a link to the @Apple Community Support Page detailing how to turn off #newsstand auto renewal. + +http://t.co/65HPp0tu" +"apple","neutral","125993702782025729","Mon Oct 17 17:56:54 +0000 2011","@apple y u run out of 64gb iPhone!? Now I have to settle for 32gb. Saved 100$ though(: +#content" +"apple","neutral","125993105722839040","Mon Oct 17 17:54:31 +0000 2011","@apple is profiting a min of $400 per 4s. They sold 4 million this weekend. That's another 1.6 Billion to Apples coffers! #worldRecession?" +"apple","neutral","125992838910586880","Mon Oct 17 17:53:28 +0000 2011","Big mo: In one week, @Apple #iCloud hits 20M users; 25M use #iOS 5 http://t.co/HTpUTBFK via @cnet" +"apple","neutral","125992594395250688","Mon Oct 17 17:52:29 +0000 2011","Smart as hell. The Great Tech War Of 2012. http://t.co/0O1CfCIZ Fab Four: @apple @google @amazon @facebook via @kvox #mustread" +"apple","neutral","125991449455104000","Mon Oct 17 17:47:57 +0000 2011","@learmonth @Cue improves iMessage + engages @Apple content partners' customers better http://t.co/hhkvlwFB with social activity messaging" +"apple","neutral","125990236743405568","Mon Oct 17 17:43:07 +0000 2011","@Twitter returns a Error 500 when we try to post the link to @Apple Support Community detailing how to turn off auto renewal subscriptions." +"apple","neutral","125990217801940992","Mon Oct 17 17:43:03 +0000 2011","RT @kvox: Smart as hell. The Great Tech War Of 2012 (@fmanjoo / @FastCompany) http://t.co/XiJYUoHZ Fab Four: @apple @google @amazon @fac ..." +"apple","neutral","125989605634879488","Mon Oct 17 17:40:37 +0000 2011","@philipgrey @apple it's kinda sad that's the most relevant tweet I've seen today. Or kinda awesome. I'm not sure. #DevProblems" +"apple","neutral","125989196132388864","Mon Oct 17 17:38:59 +0000 2011","Too bad I'm not an @Apple junkie. #icloud #apple #ios5" +"apple","neutral","125989009091592192","Mon Oct 17 17:38:15 +0000 2011","@Apple buried the ability to manage #newsstand subscriptions under App Store Settings. This is inconvenient at best." +"apple","neutral","125988775548559360","Mon Oct 17 17:37:19 +0000 2011","Smart as hell. The Great Tech War Of 2012 (@fmanjoo / @FastCompany) http://t.co/XiJYUoHZ Fab Four: @apple @google @amazon @facebook" +"apple","neutral","125988651426512899","Mon Oct 17 17:36:49 +0000 2011","@apple predicts #Siri in 1987 in video, the video is staged for September 2011 http://t.co/tFNRP3MD" +"apple","neutral","125984350989860864","Mon Oct 17 17:19:44 +0000 2011","Smh at @ItsTheSituation for trying to cut in line for a 4S. Cmon mayne if @Apple's cofounder stood in line then you are too." 
+"apple","neutral","125983179877253120","Mon Oct 17 17:15:05 +0000 2011","Go big or go home @Apple users - here are some apps that make your #4s iPhone ""s-s-super"" http://t.co/t74BQyim via @mashable - MS" +"apple","neutral","125980918220464128","Mon Oct 17 17:06:06 +0000 2011","RT @TechHamlet: Thank You Steve : A Song Created for Steve Jobs http://t.co/2vtX8ZJf via @TechHamlet @AzRmusic @Apple #stevejobs #apple" +"apple","neutral","125980676653723648","Mon Oct 17 17:05:08 +0000 2011","@pkedrosky @apple I am just waiting for my conf email from iCloud." +"apple","neutral","125980615664336896","Mon Oct 17 17:04:54 +0000 2011","#wheniwas14... @Apple registered the domain name http://t.co/FPvKn1kJ, thus beginning their plan for world domination" +"apple","neutral","125979228452818944","Mon Oct 17 16:59:23 +0000 2011","Thank You Steve : A Song Created for Steve Jobs http://t.co/2vtX8ZJf via @TechHamlet @AzRmusic @Apple #stevejobs #apple" +"apple","neutral","125978290367381504","Mon Oct 17 16:55:39 +0000 2011","Twitter buzz for Best Buy: http://t.co/9SesENTu - RT @acappellamedia OH: Sales guy @BestBuy: "I'm not selling or using any @apple p..." +"apple","neutral","125975779447291904","Mon Oct 17 16:45:40 +0000 2011","Anyone know why we can only read iBooks on the iPhone and iPad? Any ideas how to read them on mac? @apple" +"apple","neutral","125974955983769603","Mon Oct 17 16:42:24 +0000 2011","Why didn't @apple provide 5GB per device for cloud backup? Why just 5GB per account? Silly." +"apple","neutral","125972882240188416","Mon Oct 17 16:34:10 +0000 2011","New Blog! http://t.co/ENCSF1Ju @VerizonWireless @Motorola @Google @Apple #Android #DroidBionic #iPhone4S #4GLTE" +"apple","neutral","125969677997453312","Mon Oct 17 16:21:26 +0000 2011","Over the weekend, @Apple sold more than 4,000,000 iPhones. That’s about 16 iPhones sold every second." +"apple","neutral","125969128514260992","Mon Oct 17 16:19:15 +0000 2011","how do you sync gmail contacts with your iphone? help please! @apple @gmail" +"apple","neutral","125967560171720704","Mon Oct 17 16:13:01 +0000 2011","Loving these names. ""@Google Chases @Apple With Ice Cream Sandwich"" - http://t.co/u3LchmGD #mobile" +"apple","neutral","125967126912712705","Mon Oct 17 16:11:18 +0000 2011","YESSS @apple fix it also slow internet RT @SprintFeed: iPhone 4S owners are you having trouble with Siri connecting to internet?" +"apple","neutral","125965569659895808","Mon Oct 17 16:05:06 +0000 2011","Going to the @apple store today to get this laptop situation figured out" +"apple","neutral","125963773176582144","Mon Oct 17 15:57:58 +0000 2011","gulp...@Apple sells 4 million iPhone 4S in 3 days - Yahoo! Finance http://t.co/Zv7DXrIA via @YahooFinance" +"apple","neutral","125962667541270528","Mon Oct 17 15:53:34 +0000 2011","Is @Apple's voice #tech really as impressive as the net thinks it is? http://t.co/Rbf23CNX #siri" +"apple","neutral","125962608519036928","Mon Oct 17 15:53:20 +0000 2011","Next year is going to be really interesting in tech as @Google, @Amazon, @Facebook and @Apple vie for new growth areas http://t.co/kkaKFsCH" +"apple","neutral","125961033348153345","Mon Oct 17 15:47:05 +0000 2011","RT @rickholgate: Packed session on @Apple and @Google at #GartnerSYM. Indicator of today's mindshare?" +"apple","neutral","125960438981734400","Mon Oct 17 15:44:43 +0000 2011","Packed session on @Apple and @Google at #GartnerSYM. Indicator of today's mindshare?" +"apple","neutral","125958702455988225","Mon Oct 17 15:37:49 +0000 2011","This!!!!!! 
RT @DennieB_15 👦👧👩👨👶👵👴👱👲👳👷💂👸👮..... Um why No black peopleâ”â”â”@Apple" +"apple","neutral","125958525708021760","Mon Oct 17 15:37:07 +0000 2011","👦👧👩👨👶👵👴👱👲👳👷💂👸👮..... Um why No black peopleâ”â”â”@Apple" +"apple","neutral","125958117086347264","Mon Oct 17 15:35:29 +0000 2011","too many companies don't have @apple management-- 60 sec vid-http://bit.ly/pwLpgQ #entrepreneur #startup @scalecomputing #pioniq" +"apple","neutral","125957972466737152","Mon Oct 17 15:34:55 +0000 2011","@BikerVox @mariomussolini @ios_notch @apple xD thats why i have a back up PC… just incase *hides gun*" +"apple","neutral","125957965109932032","Mon Oct 17 15:34:53 +0000 2011","@apple please provide support for a Bangla keyboard in iOS. It would make the life of many Bengali iOS users easier! #iphone" +"apple","neutral","125951303770845185","Mon Oct 17 15:08:25 +0000 2011","@Apple sells 4 million on the first weekend. @Samsung sold over 30 million Galaxy S II phones globally." +"apple","neutral","125950941349421057","Mon Oct 17 15:06:59 +0000 2011","Hey @Apple - need more servers for Siri traffic? I'm sure Google can help you out with that..." +"apple","neutral","125950505389273090","Mon Oct 17 15:05:15 +0000 2011","I wish someone would gift me an unlocked iPhone 4s. #iPhone @Apple" +"apple","neutral","125950026181648385","Mon Oct 17 15:03:20 +0000 2011","So apparently that's not the correct @apple" +"apple","neutral","125949784677810176","Mon Oct 17 15:02:23 +0000 2011","I love @apple & I love my phone.. but ever since this damn update my texts keep saying they're full when there are none & that's a PROBLEM!!" +"apple","neutral","125948450620702720","Mon Oct 17 14:57:05 +0000 2011","#opinion @Samsung need to be careful in @Apple patents issues - it looks a lot like you 'learned' a lot by making their chips!" +"apple","neutral","125944856504827904","Mon Oct 17 14:42:48 +0000 2011","Now who can guarantee that @apple hd nothing 2 do with @Blackberry RIM server failure?!Days after, over 4million iPhone 4S sold in 3days!?!" +"apple","neutral","125944293671182336","Mon Oct 17 14:40:34 +0000 2011","RT @Porter_Anderson: ""3 principal global players will be active in every market...@Amazon, @Apple, & @Kobo."" @MikeShatzkin http://t.co/1 ..." +"apple","neutral","125943115449253888","Mon Oct 17 14:35:53 +0000 2011","RT @bsdalton: @apple underplayed the power of the iPad via @mitchjoel <yup #SOCAPac11" +"apple","neutral","125943078837161984","Mon Oct 17 14:35:44 +0000 2011","Ya hoo! @Apple sells 4 million iPhone 4S in 3 days http://t.co/rvdjSMMi (via @news360app) $AAPL" +"apple","neutral","125943020767019008","Mon Oct 17 14:35:30 +0000 2011","@apple underplayed the power of the iPad via @mitchjoel <yup #SOCAPac11" +"apple","neutral","125940398915977217","Mon Oct 17 14:25:05 +0000 2011","""3 principal global players will be active in every market...@Amazon, @Apple, & @Kobo."" @MikeShatzkin http://t.co/1ndxcMO1 #dbw #metacon" +"apple","neutral","125940300987371521","Mon Oct 17 14:24:42 +0000 2011","dear invisible intern/assistant: please go pick up my computer from the @apple hospital so i can upgrade to iOS5. thanks!" +"apple","neutral","125939862078619648","Mon Oct 17 14:22:57 +0000 2011","#Siri I could have done with out its not that great #iPhone4S I hear the #iPhone5 is coming out #2013 @apple.inc" +"apple","neutral","125939833775460352","Mon Oct 17 14:22:50 +0000 2011","Wow... @Apple sold 4 million iPhone 4S in a single weekend... 
http://t.co/WDLHZsza" +"apple","neutral","125938918540574720","Mon Oct 17 14:19:12 +0000 2011","OH: Sales guy @BestBuy: ""I'm not selling or using any @apple products during my mourning period over Job's passing."" hmmm.." +"apple","neutral","125938325151432706","Mon Oct 17 14:16:51 +0000 2011","i wish my iPOD had a feature, when it's on the dock to scroll the artist, album, and song title in huge font across it... @Apple" +"apple","neutral","125936985796919296","Mon Oct 17 14:11:31 +0000 2011","@Apple main office is #nowplaying +A milli - Lil Wayne." +"apple","neutral","125936323273048065","Mon Oct 17 14:08:53 +0000 2011","@iphone4S #IBM @apple iphone4s. Siri is not Watson @IBMWatson" +"apple","neutral","125935636300570624","Mon Oct 17 14:06:10 +0000 2011","RT @TravisDommert: Is leaving Steve's photo on the @apple home page what he would really want? In his honor...let's get back to innovati ..." +"apple","neutral","125935627056324609","Mon Oct 17 14:06:07 +0000 2011","RT @TravisDommert:Is leaving Steve's photo on the @apple home page what he would really want? In his honor...let's get back to #innov..." +"apple","neutral","125935503752171520","Mon Oct 17 14:05:38 +0000 2011","Is leaving Steve's photo on the @apple home page what he would really want? In his honor...let's get back to innovation! Eh, @mark_newton?" +"apple","neutral","125935314878476289","Mon Oct 17 14:04:53 +0000 2011","@apple @iphone sold more because its in more retail duhhhh. Also woz is correct people dont want tech answer just simple child like ones ok!" +"apple","neutral","125933630613766144","Mon Oct 17 13:58:11 +0000 2011","Is at the @apple store waiting for a poorly iMac to be seen by a genius. Hopefully this will be painless. Not too bad in here." +"apple","neutral","125932876721168384","Mon Oct 17 13:55:12 +0000 2011","@Apple: Surely iOS5 with it's integrated @Twitter would greatly benefit from moving the # to at least the '123' keyboard page? #simplethings" +"apple","neutral","125930342891790337","Mon Oct 17 13:45:08 +0000 2011","@iancollinsuk @Apple what's incredible, is that people still buy into apple as world beater's.....not true anymore." +"apple","neutral","125930143066759169","Mon Oct 17 13:44:20 +0000 2011","@Apple very disappointed Hogwarts not choice on world clock on iPhone" +"apple","neutral","125930002607906816","Mon Oct 17 13:43:46 +0000 2011","Great video @apple 1987 predicting tablets & a form of #siri at the end of this article RT @jedsinger: (cont) http://t.co/RDBeuCtt" +"apple","neutral","125927540249473024","Mon Oct 17 13:33:59 +0000 2011","IHS: iPhone 4S shows key design and component changes. http://t.co/6tfiGiDV @apple #apple #iphome4S #ios5" +"apple","neutral","125927536847880192","Mon Oct 17 13:33:59 +0000 2011","IHS: iPhone 4S shows key design and component changes. http://t.co/gZmdzvLe @apple #apple #iphome4S #ios5" +"apple","neutral","125927533614084097","Mon Oct 17 13:33:58 +0000 2011","IHS: iPhone 4S shows key design and component changes. http://t.co/UxfA8Ukg @apple #apple #iphome4S #ios5" +"apple","neutral","125927530761953281","Mon Oct 17 13:33:57 +0000 2011","IHS: iPhone 4S shows key design and component changes. http://t.co/Y3Mali6y @apple #apple #iphome4S #ios5" +"apple","neutral","125927399010467840","Mon Oct 17 13:33:26 +0000 2011","RT @iancollinsuk: Apple have just announced they sold 4 million phones this weekend. Incredible. That recession continues to bite @apple ..." 
+"apple","neutral","125927174514540544","Mon Oct 17 13:32:32 +0000 2011","Great post on the upcoming Great Tech Wars of 2012! @amazon @apple @google @facebook http://t.co/OSeqhT7W via @cbillich" +"apple","neutral","125927032185044992","Mon Oct 17 13:31:58 +0000 2011","RT @iancollinsuk: Apple have just announced they sold 4 million phones this weekend. Incredible. That recession continues to bite @Apple" +"apple","neutral","125926624930693121","Mon Oct 17 13:30:21 +0000 2011","Apple have just announced they sold 4 million phones this weekend. Incredible. That recession continues to bite @apple #recession" +"apple","neutral","125922989844856833","Mon Oct 17 13:15:54 +0000 2011","RT @jldavid: iPhone 4S First Weekend Sales Top Four Million: http://t.co/O7zqnY7K (via @apple)" +"apple","neutral","125922500839342080","Mon Oct 17 13:13:58 +0000 2011","RT @TalkYo_ISH: Dear @Apple Can We Get Some Afro-American Emoticons? -Management" +"apple","neutral","125922174648324096","Mon Oct 17 13:12:40 +0000 2011","#BreakingNews @Gizmodo reports: @Apple #iPhone 4S sold over $4 million over the weekend! #TrueStory + http://t.co/BEj32s6a" +"apple","neutral","125921393350160384","Mon Oct 17 13:09:34 +0000 2011","iPhone 4S First Weekend Sales Top Four Million: http://t.co/O7zqnY7K (via @apple)" +"apple","neutral","125920729194704896","Mon Oct 17 13:06:55 +0000 2011","Video: SIRI also gets angry. http://t.co/ksX2XZoR @chrisvoss @apple #apple #siri #iphone4S #Ai" +"apple","neutral","125920725595983874","Mon Oct 17 13:06:55 +0000 2011","Video: SIRI also gets angry. http://t.co/Ku288WX6 @chrisvoss @apple #apple #siri #iphone4S #Ai" +"apple","neutral","125920721200361472","Mon Oct 17 13:06:54 +0000 2011","Video: SIRI also gets angry. http://t.co/Xx1chZbN @chrisvoss @apple #apple #siri #iphone4S #Ai" +"apple","neutral","125920717966544896","Mon Oct 17 13:06:53 +0000 2011","Video: SIRI also gets angry. http://t.co/tzigD99b @chrisvoss @apple #apple #siri #iphone4S #Ai" +"apple","neutral","125920716297211904","Mon Oct 17 13:06:52 +0000 2011","@bxlewi1 hey...do you know why @Apple hasn't added 4G support yet? Seems like an odd omission for such an advanced device." +"apple","neutral","125919221845721090","Mon Oct 17 13:00:56 +0000 2011","Really @Apple and @ATT - Removing the 3G/EDGE switch on iOS 5? You do realize that you work on the 3G network in places often, right?" +"apple","neutral","125918906215968771","Mon Oct 17 12:59:41 +0000 2011","Apple sold FOUR MILLION iPhone 4S's in the first weekend: http://t.co/DdU2qM7O (via @Apple) #phonerecord #twiceiphone4sales" +"apple","neutral","125918450920062977","Mon Oct 17 12:57:52 +0000 2011","iPhone 4S 8MP camera : It´s a SONY. http://t.co/X1J8HNZu @sony @apple #apple #iphone4S #cameras" +"apple","neutral","125918447979872258","Mon Oct 17 12:57:52 +0000 2011","iPhone 4S 8MP camera : It´s a SONY. http://t.co/6uYIZeAK @sony @apple #apple #iphone4S #cameras" +"apple","neutral","125918444762828800","Mon Oct 17 12:57:51 +0000 2011","iPhone 4S 8MP camera : It´s a SONY. http://t.co/njTHOi2x @sony @apple #apple #iphone4S #cameras" +"apple","neutral","125918441013133312","Mon Oct 17 12:57:50 +0000 2011","iPhone 4S 8MP camera : It´s a SONY. http://t.co/BIqBEhTb @sony @apple #apple #iphone4S #cameras" +"apple","neutral","125917264267579393","Mon Oct 17 12:53:09 +0000 2011","@Bestbuy or @Apple to purchase my imac, iPad, and printer? #StudentDiscounts?" +"apple","neutral","125917174618525696","Mon Oct 17 12:52:48 +0000 2011","iPhone 4S: Setup SIRI for your Tweets. 
http://t.co/fZ216OiJ @apple #apple #iphone #siri" +"apple","neutral","125917170571026432","Mon Oct 17 12:52:47 +0000 2011","iPhone 4S: Setup SIRI for your Tweets. http://t.co/eyHmHSTM @apple #apple #iphone #siri" +"apple","neutral","125917164535418880","Mon Oct 17 12:52:46 +0000 2011","iPhone 4S: Setup SIRI for your Tweets. http://t.co/ws7LINTS @apple #apple #iphone #siri" +"apple","neutral","125917160982855680","Mon Oct 17 12:52:45 +0000 2011","iPhone 4S: Setup SIRI for your Tweets. http://t.co/uecqHL6k @apple #apple #iphone #siri" +"apple","neutral","125915210337890304","Mon Oct 17 12:45:00 +0000 2011","Ever since I cleaned my Mighty Mouse ball with alcohol to get it rolling again, any time I click the ball it acts like a left click. @Apple" +"apple","neutral","125908946702696448","Mon Oct 17 12:20:06 +0000 2011","@apple So why is Siri only available on the iPhone 4S ? - http://t.co/QZuNgyLW" +"apple","neutral","125907633466130432","Mon Oct 17 12:14:53 +0000 2011","Lol and what do those look like? RT @TalkYo_ISH: Dear @Apple Can We Get Some Afro-American Emoticons? -Management" +"apple","neutral","125892140940267522","Mon Oct 17 11:13:20 +0000 2011","Cool @apple / Steve Jobs infographic http://t.co/Prb5yx4X @MickH87 - timeline history" +"apple","neutral","125891898517889024","Mon Oct 17 11:12:22 +0000 2011","Talking @Apple keynotes over lunch with @yuvalariav. Challenged him to check something out ;-)" +"apple","neutral","125887065861787648","Mon Oct 17 10:53:10 +0000 2011","Does the iPhone4S no longer have that stupid 'network lost' pop up - possibly the most annoying thing ever that you cannot turn off @apple ?" +"apple","neutral","125882473312817152","Mon Oct 17 10:34:55 +0000 2011","Steve's last Design is apparently the @apple #apple #iPhone 5 | http://t.co/DIfQTmwm" +"apple","neutral","125878880916611072","Mon Oct 17 10:20:38 +0000 2011","OK @apple your accessories policy is out of hand. I need a powered line in to use the mic? Are you KIDDING ME?" +"apple","neutral","125866627337162752","Mon Oct 17 09:31:57 +0000 2011","A court in Sydney banned the sale of @Samsung 's Galaxy Tab in Australia over intellectual property infringement claims by @Apple #IP_wars" +"apple","neutral","125866368758333440","Mon Oct 17 09:30:55 +0000 2011","@Apple received 1m pre-orders in a single day for its new iPhone 4S #consumer_demand #steve_jobs" +"apple","neutral","125859792802693120","Mon Oct 17 09:04:47 +0000 2011","@apple can you get your iphone to stop vibrating when you get a text?" +"apple","neutral","125859488728236032","Mon Oct 17 09:03:35 +0000 2011","@Mommy_gaga, #Win an @Apple iPod Touch and get the @Pampers Hello World Baby Memories App! http://t.co/qEATNMpI #PampersHelloApps" +"apple","neutral","125854430171111424","Mon Oct 17 08:43:29 +0000 2011","#Siri on @Apple #iPhone4S can allow unauthorized access to texts and emails, even when the phone is locked!! http://t.co/0q5dLt22" +"apple","neutral","125840474132066304","Mon Oct 17 07:48:01 +0000 2011","If you sign up for iClould, will you also get iRain? @apple #icloud." +"apple","neutral","125830917578162176","Mon Oct 17 07:10:03 +0000 2011","@Apple Not Likely to Win Appstore Suit Case Against @Amazon http://t.co/GNcDGQsc" +"apple","neutral","125829040740368384","Mon Oct 17 07:02:35 +0000 2011","Say no to 1,000 things. What are you saying ""no"" to? 
7 Rules of Success from Steve Jobs of @apple #prodmgmt #management #innovation" +"apple","neutral","125828984293425152","Mon Oct 17 07:02:22 +0000 2011","ok it is back @iphone @apple" +"apple","neutral","125826820057731074","Mon Oct 17 06:53:46 +0000 2011","Cat accusing tiger for roaring #Samsung on @Apple" +"apple","neutral","125824709421039616","Mon Oct 17 06:45:23 +0000 2011","@apple and @nintendo need to partner up for the app store" +"apple","neutral","125823389804929024","Mon Oct 17 06:40:08 +0000 2011","@apple stuck it to @amazon when it unveiled iCloud, showing that the placement of 'i' before the name of even a failed effort yields success" +"apple","neutral","125821979797364736","Mon Oct 17 06:34:32 +0000 2011","@nascarwriter right where I left it last week... #iphone4S #Apple @Apple http://t.co/ksJh8BAS" +"apple","neutral","125821218258550784","Mon Oct 17 06:31:30 +0000 2011","@paulbentleymelb @apple I think they call that profit-making 'genius'" +"apple","neutral","125817967240949760","Mon Oct 17 06:18:35 +0000 2011","What's up with @apple stock going up after #SteveJobs death? #Apple #AppleStock" +"apple","neutral","125815990620659713","Mon Oct 17 06:10:44 +0000 2011","@Apple needs to take down the Steve Jobs tribute and get back to promoting the brand - put the Cloud on the front page and start explaining" +"apple","neutral","125811943054393344","Mon Oct 17 05:54:39 +0000 2011","This is better than *every* magazine having their own #app! @EatMagazine @Apple supports magazines. #Ipad upgrade adds ""Newsstand"" #app" +"apple","neutral","125807897568481280","Mon Oct 17 05:38:34 +0000 2011","@twalve too busy having slappy hissy-fits with Samsung? @Apple" +"apple","neutral","125804983185719297","Mon Oct 17 05:26:59 +0000 2011","Fregon el nuevo facebook para iPod/iPhone haha alomejor ya tiene rato pero yo lo acabo de actualizar @apple" +"apple","neutral","125803571601080320","Mon Oct 17 05:21:23 +0000 2011","RT @MarcMonster: Siri keeps trying to send me to escort services....is that legal? @Apple" +"apple","neutral","125803457155301376","Mon Oct 17 05:20:56 +0000 2011","Siri keeps trying to send me to escort services....is that legal? @Apple" +"apple","neutral","125801811817922561","Mon Oct 17 05:14:23 +0000 2011","Watching iGenius on @Discovery. Based on first 13 minutes, @Apple never should have gone public in 1980." +"apple","neutral","125793487479259136","Mon Oct 17 04:41:19 +0000 2011","Ok, so it was a lion and not a leopard. Too much upgrading this weekend. @apple" +"apple","neutral","125792596114161665","Mon Oct 17 04:37:46 +0000 2011","@virgojud81 well maybe if you would set ya @apple products to send push notifications it would go through." +"apple","neutral","125792107930714113","Mon Oct 17 04:35:50 +0000 2011","@Apple's #Siri is great but #Google #Android always had that functionality youtu.be/gGbYVvU0Z5s #justsaying" +"apple","neutral","125727869363163137","Mon Oct 17 00:20:34 +0000 2011","#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! 
http://t.co/zRBak7kL #PampersHelloApps" +"apple","neutral","125727349034598401","Mon Oct 17 00:18:30 +0000 2011","hmm it seems i should have checked the identity of @apple first…" +"apple","neutral","125727044263874560","Mon Oct 17 00:17:17 +0000 2011","@apple out of respect, I really do think the Apple logo should be revised to the one you have no doubt seen… http://t.co/xNKwgxme" +"apple","neutral","125726769297891330","Mon Oct 17 00:16:12 +0000 2011","#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/en0iacwR #PampersHelloApps" +"apple","neutral","125725274317914112","Mon Oct 17 00:10:15 +0000 2011","@boothelk did you know the first @apple home computer to use a mouse was called 'the lisa' ??" +"apple","neutral","125721197437648896","Sun Oct 16 23:54:03 +0000 2011","What? Are we not buying enough iPads already @apple ? Give us a break while we learn to cope with a post Steve Apple http://t.co/w7GTxQZ0" +"apple","neutral","125708425752612864","Sun Oct 16 23:03:18 +0000 2011","@Apple do not buy an iPhone 4s that isn't in the store. THEY ARE NOT MADE YET AND DEFRAUDING YOU" +"apple","neutral","125703536632807424","Sun Oct 16 22:43:53 +0000 2011","@Apple needs to give me a contract deal i get them new customers all the time #teamiphone" +"apple","neutral","125698733768843264","Sun Oct 16 22:24:48 +0000 2011","@HTC vs @Apple who winning" +"apple","neutral","125695449423286272","Sun Oct 16 22:11:45 +0000 2011","Steve Jobs Biopic Coming Soon - @Apple #Apple #SteveJobs http://t.co/DxZBa5bG" +"apple","neutral","125695107734319104","Sun Oct 16 22:10:23 +0000 2011","@Jhoodmartin @rolandsmartin yes salute to Steve Jobs and @apple. I'm tweeting and you are watching on the IPad. Let's go Schaub time to lead" +"apple","neutral","125694587313467393","Sun Oct 16 22:08:19 +0000 2011","WOW DOGS!! + +@Apple w/Starr earlier - upstairs in the huge soho store and all of a sudden she sprung to attention... http://t.co/b2PLMYVD" +"apple","neutral","125692890474233856","Sun Oct 16 22:01:35 +0000 2011","Digital X Worldwide | Today Is Steve Jobs Day In California @apple http://t.co/QSCHuMIN" +"apple","neutral","125692845054115842","Sun Oct 16 22:01:24 +0000 2011","Watching @cotmtulsa webcast on Joseph from my iPad on my @apple TV. I love technology. http://t.co/mpBZyl72" +"apple","neutral","125691975474229248","Sun Oct 16 21:57:56 +0000 2011","Comparing the @apple & @BlackBerry apps/interfaces, can anyone imagine how @BlackBerry could have done worse? #sadreally" +"apple","neutral","125690764331196416","Sun Oct 16 21:53:08 +0000 2011","Waiting patiently for me at the @apple store. @Badog420 http://t.co/NaFaeXuh" +"apple","neutral","125689905954299904","Sun Oct 16 21:49:43 +0000 2011","@fundingroadmap @apple also knowing that this is no ending http://t.co/q08cBcuE" +"apple","neutral","125687710705926144","Sun Oct 16 21:41:00 +0000 2011","Siri wasn't able to help me find the g-spot....bush league @apple" +"apple","neutral","125686643960193024","Sun Oct 16 21:36:45 +0000 2011","@Apple MacBook Air.. satiliktir." +"apple","neutral","125685016389894144","Sun Oct 16 21:30:17 +0000 2011","RT @Porter_Anderson: ""3 principal global players will be active in every market...@Amazon, @Apple, & @Kobo."" #fbf11 #publaunch @MikeShat ..." 
+"apple","neutral","125679996420374530","Sun Oct 16 21:10:20 +0000 2011","@apple http://t.co/2YcsCG68 via @donorschoose" +"apple","neutral","125677838295764992","Sun Oct 16 21:01:46 +0000 2011","I just told my @apple 4S she was a stupid bitch and her reply was if you insist #lol" +"apple","neutral","125674121722998785","Sun Oct 16 20:47:00 +0000 2011","Why can't I check just Notes (without mail) on Lion for #iCloud like I do on the iOS devices @apple ?" +"apple","neutral","125669834922008576","Sun Oct 16 20:29:58 +0000 2011","You need to ask #Siri her favorite color and the average speed of an unladen swallow. Seems Someone @apple is a Monty python fan" +"apple","neutral","125667332931596290","Sun Oct 16 20:20:01 +0000 2011","""3 principal global players will be active in every market...@Amazon, @Apple, & @Kobo."" #fbf11 #publaunch @MikeShatzkin http://t.co/1ndxcMO1" +"apple","neutral","125666909080387584","Sun Oct 16 20:18:20 +0000 2011","If I was @Apple, I'd publicly denounce and file suit against @Nuance. Then buy 'em on the cheap. #Siri" +"apple","neutral","125665930339565568","Sun Oct 16 20:14:27 +0000 2011","@NoVaTyler I lost a lot that I paid for @apple @WordsWFriends etc! #nothappy oh well, poop happens" +"apple","neutral","125664999036301312","Sun Oct 16 20:10:45 +0000 2011","The folks at @apple are awesome! So glad I a #mactard" +"apple","neutral","125664891691474944","Sun Oct 16 20:10:19 +0000 2011","Waiting at end of 20 person line after checking in on time for my ""genius bar"" appt. @apple store...I do not find this system ""genius"" #FAIL" +"apple","neutral","125663967296229376","Sun Oct 16 20:06:39 +0000 2011","While the improvements in @Apple Mac OS 10.7 #Lion are great, the full-screen implementation on 2 monitors is broken." +"apple","neutral","125661036891226113","Sun Oct 16 19:55:00 +0000 2011","@mcp111 @apple @AppStore Why are you asking us? Apple don't read messages on Twitter... and Apple aren't @Apple either" +"apple","neutral","125660067482697729","Sun Oct 16 19:51:09 +0000 2011","@nansen @apple I have not heard of that. Hubby had no problems- I have not done it yet." +"apple","neutral","125657950185463808","Sun Oct 16 19:42:44 +0000 2011","RT @TerryStorch: With @twitter and @apple partnership, would be great if SMS from 40404 to iOS devices would use imessage." +"apple","neutral","125647236418912256","Sun Oct 16 19:00:10 +0000 2011","Dear @apple can I still use #iWeb for my #mobileme website if I convert to iCloud with my Apple ID for mail and iCal??? #help #dtv thanks!" +"apple","neutral","125645376790331392","Sun Oct 16 18:52:46 +0000 2011","#California Governor @JerryBrown declares today #SteveJobsDay in his State. Music video tribute http://t.co/mNr1HIx2 @mashable @cnet @apple" +"apple","neutral","125642256114909184","Sun Oct 16 18:40:22 +0000 2011","Lmao RT @RobynheaRtz: He asked Siri where's the hoes at & she told him where the nearest escorts were I love @apple" +"apple","neutral","125642161659199488","Sun Oct 16 18:40:00 +0000 2011","@Apple, why do you have such a beef with @Windows? #cantwealljustgetalong" +"apple","neutral","125642041140060160","Sun Oct 16 18:39:31 +0000 2011","He asked Siri where's the hoes at & she told him where the nearest escorts were I love @apple" +"apple","neutral","125640758966484992","Sun Oct 16 18:34:25 +0000 2011","What's up #twitpeeps? How's every @apple user liking iOS 5?" 
+"apple","neutral","125640679325052929","Sun Oct 16 18:34:06 +0000 2011","“@42Hermy: @Reddclay81 now that were done prasing @Apple 's #iOS5 + +Everyone go subscribe to notsosuperpro! #shamlessselfpromotionâ€" +"apple","neutral","125640515021578240","Sun Oct 16 18:33:27 +0000 2011","RT @42Hermy: @Reddclay81 now that were done prasing @Apple 's #iOS5 + +Everyone go subscribe to notsosuperpro! #shamlessselfpromotion" +"apple","neutral","125638955952640000","Sun Oct 16 18:27:16 +0000 2011","@Reddclay81 now that were done prasing @Apple 's #iOS5 + +Everyone go subscribe to notsosuperpro! #shamlessselfpromotion" +"apple","neutral","125633549847117824","Sun Oct 16 18:05:47 +0000 2011","on that @Apple shit hard, change my twitter name to @_iFynest" +"apple","neutral","125633468708302848","Sun Oct 16 18:05:27 +0000 2011","I want Siri on my iPhone4 ! @apple" +"apple","neutral","125633233982459904","Sun Oct 16 18:04:31 +0000 2011","So @apple the new iMessage is great and all, but is there a way to default to SMS when I'm not on wifi? I have limited data." +"apple","neutral","125632582795804672","Sun Oct 16 18:01:56 +0000 2011","Right. Migrate from older mbp to imac using ethernet not firewire. Thanks Drew for saving me some $. @apple http://t.co/IAHHaGXy" +"apple","neutral","125631556051140608","Sun Oct 16 17:57:51 +0000 2011","Dear @apple, yesterday you sucked. Today you're awesome! Thanks LP Apple store, crash and burn mich ave store." +"apple","neutral","125630955154190336","Sun Oct 16 17:55:29 +0000 2011","@apple store #Towson waiting at Genius bar - huge crowd waiting for #iPhone 4s @michaelmark @nicolelil http://t.co/x92sweCh" +"apple","neutral","125630836245676033","Sun Oct 16 17:55:00 +0000 2011","@Apple's Steve Jobs Gets His Day | PCWorld http://t.co/6gT6w2Od" +"apple","neutral","125630016485732352","Sun Oct 16 17:51:44 +0000 2011","Back to the Future 2 where @Apple didn't even have IPhone" +"apple","neutral","125629788563050496","Sun Oct 16 17:50:50 +0000 2011","Why didn't @Apple merge the address and search bars in Safari ala Crome in iOS 5? #simplethings" +"apple","neutral","125627732032888833","Sun Oct 16 17:42:40 +0000 2011","@reddclay81 Well then! + +P.S. Did you respond so fast because of @Apple 's #iOS5 ?" +"apple","neutral","125626286939979776","Sun Oct 16 17:36:55 +0000 2011","@adityasood @apple @Quora I totally agree, was talking w/friends about that yesterday. Siri Everywhere!" +"apple","neutral","125625630254567424","Sun Oct 16 17:34:18 +0000 2011","How much do @Apple’s factories cost? | asymco http://t.co/GH0tvqMO" +"apple","neutral","125625566203346944","Sun Oct 16 17:34:03 +0000 2011","@Apple hits @Samsung in court again - http://t.co/l6brF3yB" +"apple","neutral","125620113582993408","Sun Oct 16 17:12:23 +0000 2011","@Mvm549 well, if it told you what you did to it or how it happened, it would be concrete evidence that @Apple is logging your life. :l" +"apple","neutral","125617451705712640","Sun Oct 16 17:01:49 +0000 2011","U pick Camera&Iphone4s orCamera&Ipad2?????????????? @apple" +"apple","neutral","125617133123153921","Sun Oct 16 17:00:33 +0000 2011","@Apple, Y U NO LET ME PRE-ORDER IPHONE 4S?" +"apple","neutral","125616747863736320","Sun Oct 16 16:59:01 +0000 2011","@Blogger loves @Apple iPhone. My blogpost finally done through iPhone apps." +"apple","neutral","125614951787266048","Sun Oct 16 16:51:52 +0000 2011","Can someone explain to me how the hell iMessage differs from a text message? 
@Apple #fb" +"apple","neutral","125614910804738049","Sun Oct 16 16:51:43 +0000 2011","Can someone explain to me how the hell iMessage differs from a text message? @Apple" +"apple","neutral","125598495137726464","Sun Oct 16 15:46:29 +0000 2011","@Apple, feel free to accidently ship me a new iPhone 4s #nobigdeal" +"apple","neutral","125596991290998784","Sun Oct 16 15:40:30 +0000 2011","@Apple I asked Siri 'if she swallowed' she relied 'Lets see' - wtf do i do now? wank over my phone?" +"apple","neutral","125595669145722880","Sun Oct 16 15:35:15 +0000 2011","#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/8MaDmqPc #PampersHelloApps" +"apple","neutral","125595441562783744","Sun Oct 16 15:34:21 +0000 2011","@apple How much is the student discount on the MacBook Air in Canada? Your reps have said both $50 and $100. Which is it? Thanks! #laptop" +"apple","neutral","125591434056318977","Sun Oct 16 15:18:25 +0000 2011","@apple @AppStore @ihelpline when r u going to provide the ability to password protect iPad email???" +"apple","neutral","125589884445536257","Sun Oct 16 15:12:16 +0000 2011","has the new iOS5 changed the way the iPad charges? I've noticed I can charge from my laptop & iPhone charger now @toptweets @BBCClick @apple" +"apple","neutral","125589258898644992","Sun Oct 16 15:09:47 +0000 2011","You know what would be awesome? If @QVC sold @apple iPads on easy pay. #yesplease" +"apple","neutral","125588749454278656","Sun Oct 16 15:07:45 +0000 2011","Thanks @apple my girlfriend likes #siri more than me now." +"apple","neutral","125588587180863489","Sun Oct 16 15:07:07 +0000 2011","@Apple store dude ""i hear an accent where ru from?"" me ""no from ct, but ur not 1st 2 tell me that"" dude ""maybe its... http://t.co/93Ztje8p" +"apple","neutral","125587186723725312","Sun Oct 16 15:01:33 +0000 2011","SpeechTrans http://t.co/TW3SzX61 on @Apple App store listed as the #best bi-directional #Speech To Speech #Translator -- Posts on Twitter" +"apple","neutral","125586682790674434","Sun Oct 16 14:59:33 +0000 2011","@CoLDSToRAGE Haven't had a chance to check out the #android overhaul but ebooks are likely a response to @apple newsstand baked into #iOS5" +"apple","neutral","125583717354831872","Sun Oct 16 14:47:46 +0000 2011","@apple no Siri support for Canada sucks #Siri but loving the new iPhone upgrade from my 3GS #iphone4s" +"apple","neutral","125581507355086848","Sun Oct 16 14:38:59 +0000 2011","Awesome documentary about how apple was created.. #stevejobs #apple @apple http://t.co/Qetk8ds2" +"apple","neutral","125581280430669824","Sun Oct 16 14:38:05 +0000 2011","Oops! Apparently @apple is some handle-squatter. Tweetbarassment." +"apple","neutral","125564573167263746","Sun Oct 16 13:31:41 +0000 2011","This video brought a small tear to my eye http://t.co/3va2ninp. Watch to @SteveWoz opinion of #SteveJobs and @Apple" +"apple","neutral","125546017205665792","Sun Oct 16 12:17:57 +0000 2011","this reader button for safari is perfect for fic @apple #ios5 #stevejobsday" +"apple","neutral","125545914864640000","Sun Oct 16 12:17:33 +0000 2011","MacBook Pros constrained, new models appear in Apple’s inventory system @apple @macbookpro... http://t.co/blGr9Ue5" +"apple","neutral","125544363945230336","Sun Oct 16 12:11:23 +0000 2011","A Great iPhone 4S Review @engadget @iphone @apple http://t.co/gX07Qywe" +"apple","neutral","125541112491425792","Sun Oct 16 11:58:28 +0000 2011","@apple Give Siri back!" 
+"apple","neutral","125537487455137793","Sun Oct 16 11:44:04 +0000 2011","@Apple has lunched its new OS: iSO 5" +"apple","neutral","125528344480587776","Sun Oct 16 11:07:44 +0000 2011","Bouquet for Jobs @Apple Store Nagoya http://t.co/pmEcxPug" +"apple","neutral","125527718203887616","Sun Oct 16 11:05:14 +0000 2011","New #iOS5 notification center is a big improvement, but @apple missed an easy trick by not making it fully available on lock screen." +"apple","neutral","125512197135806464","Sun Oct 16 10:03:34 +0000 2011","@apple http://t.co/LKm2JzIN" +"apple","neutral","125498684401135616","Sun Oct 16 09:09:52 +0000 2011","The battle of voice controlled AI platforms. If they were mobile OS ind - same q's as in IE antitrust case in EU? Vlingo v @google v @apple" +"apple","neutral","125496516000485376","Sun Oct 16 09:01:15 +0000 2011","SpeechTrans http://t.co/TW3SzX61 on @Apple App store listed as the #best bi-directional #Speech To Speech #Translator - Posts on Twitter" +"apple","neutral","125493419522002944","Sun Oct 16 08:48:57 +0000 2011","#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/gIHYpzWO #PampersHelloApps" +"apple","neutral","125493125098635265","Sun Oct 16 08:47:47 +0000 2011","Or hide it.. RT @LucasBlack: Dear @Apple.Re: Newsstand. Don't advertise magazines for free:they're NOT free.Also,allow us to remove the app." +"apple","neutral","125489264157917184","Sun Oct 16 08:32:26 +0000 2011","#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/fnbKNwmj #PampersHelloApps Ends10/21" +"apple","neutral","125486845768368128","Sun Oct 16 08:22:50 +0000 2011","@Mommy_gaga #Win an @Apple iPod Touch @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/qEATNMpI #PampersHelloApps" +"apple","neutral","125460019859820544","Sun Oct 16 06:36:14 +0000 2011","I have to change my homepage from @apple. Steve Jobs looks like an owl" +"apple","neutral","125457535951060993","Sun Oct 16 06:26:22 +0000 2011","@msantram I want @apple to integrate Siri with @Quora." +"apple","neutral","125448837404954624","Sun Oct 16 05:51:48 +0000 2011","@MisThang2u nope they wouldn't even let me & my mom switch @apple she has an upgrade mine isn't do til marchî˜" +"apple","neutral","125433354488254464","Sun Oct 16 04:50:16 +0000 2011","@Apple iOS5 is all well and good and has nice new features, but I'm still waiting on an app that will go to work for me." +"apple","neutral","125432626482917376","Sun Oct 16 04:47:23 +0000 2011","As tribute to Steve @apple, i'd consider renaming #Siri to Steve - he'll get your #Jobs done! Forever remembered for his legacy." +"apple","neutral","125432518324400128","Sun Oct 16 04:46:57 +0000 2011","can't wait for @Spotify and @Apple to integrate with Siri." +"apple","neutral","125425087800291328","Sun Oct 16 04:17:25 +0000 2011","@rjpittman Why you won't use your @apple account." +"apple","neutral","125424738662223872","Sun Oct 16 04:16:02 +0000 2011","I hope @apple will release an app for Siri for us non-apple 4s users. #apple #iphone #siri" +"apple","neutral","125419216227667968","Sun Oct 16 03:54:05 +0000 2011","strange, i cannot put my newsstand app into any group. @apple wants to be our new paperboy #iOS #iphone" +"apple","neutral","125417324621737985","Sun Oct 16 03:46:34 +0000 2011","In case you were curious like me.... IPhone 4S: Which Apple #iPhone is Best for Price, Features? 
http://t.co/d1LonCzc @Apple #HealthIT #EHR" +"apple","neutral","125416866243018753","Sun Oct 16 03:44:45 +0000 2011","Lemme just say iOS5 is mad cool BUT it needs A LOT of bug fixes @Apple" +"apple","neutral","125416811490578435","Sun Oct 16 03:44:32 +0000 2011","I don't get it. Instead of @apple dropping the 16GB iPhone 4 down to $99, they release an 8GB one for $99???" +"apple","neutral","125410153196560384","Sun Oct 16 03:18:05 +0000 2011","Dear @Apple iMessage client for desktop please :)" +"apple","neutral","125409201907437569","Sun Oct 16 03:14:18 +0000 2011","#Siri is kinda slow, tho impressed it got my accent and commands first time out. @apple" +"apple","neutral","125406528487424003","Sun Oct 16 03:03:40 +0000 2011","Why doesn't the Genius have songs for Watch the Throne yet? HUH? @APPLE ?????" +"apple","neutral","125406003863883776","Sun Oct 16 03:01:35 +0000 2011","""#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/eYLm3pav #PampersHelloApps""" +"apple","neutral","125405939015757824","Sun Oct 16 03:01:20 +0000 2011","SpeechTrans http://t.co/TW3SzX61 on @Apple App store listed as the #best bi-directional #Speech To Speech #Translator ---- Posts on Twitter" +"apple","neutral","125402652610797569","Sun Oct 16 02:48:16 +0000 2011","@Blackberry -vs- @Apple iPhone?" +"apple","neutral","125398913313284096","Sun Oct 16 02:33:25 +0000 2011","At @Apple HQ" +"apple","neutral","125398813543374848","Sun Oct 16 02:33:02 +0000 2011","Our trip to SF and Silicon Valley is about to end. Visit @Apple HQ is one of the final stops. http://t.co/ikoOqdoc" +"apple","neutral","125394805449699329","Sun Oct 16 02:17:05 +0000 2011","#iPhone4S in #Space http://t.co/glOaXhLe via @youtube + +#nasa @apple" +"apple","neutral","125379023307153408","Sun Oct 16 01:14:23 +0000 2011","RT @stalkertard: hey @apple, how about if IPhone4S = ""IPhone 4 Steve"" and you donate a small portion of the proceeds towards cancer rese ..." +"apple","neutral","125373658389692416","Sun Oct 16 00:53:04 +0000 2011","""#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/DLLabuAK #PampersHelloApps""" +"apple","neutral","125373427661029376","Sun Oct 16 00:52:09 +0000 2011","Do wish @hudl would come up with a native @apple Mac version of their video editor especially with HD video. But love everything @hudl" +"apple","neutral","125372628746768384","Sun Oct 16 00:48:58 +0000 2011","@_courtneytaylor @apple did you get a 4S?" +"apple","neutral","125369026351349760","Sun Oct 16 00:34:39 +0000 2011","@apple should make Siri avalible for all devices not exclusive to the iPhone 4S" +"apple","neutral","125368381728763904","Sun Oct 16 00:32:06 +0000 2011","Still a line @apple store Hillsdale mall #94402 http://t.co/wzVxNz8E" +"apple","neutral","125367492976717824","Sun Oct 16 00:28:34 +0000 2011","@apple http://t.co/awASwV5u" +"apple","neutral","125366519680086016","Sun Oct 16 00:24:42 +0000 2011","@Apple Why do you have a volume control in the fast app switcher!? Buttons. Volume buttons. You have them. Give me a brightness slider! #wtf" +"apple","neutral","125364154629492737","Sun Oct 16 00:15:18 +0000 2011","Proof that my dad loves my sis more. RT @AnnaCorinne16 woke up to my dad at my door w my new iphone. he surprised me & went to @apple!" +"apple","neutral","125361209137565696","Sun Oct 16 00:03:36 +0000 2011","@apple iphone 4s arrival on Tuesday!! 
#iPhone4S" +"apple","neutral","125360952878182400","Sun Oct 16 00:02:35 +0000 2011","Too many iPhones. @apple @dcoult #FirstWorldProblems http://t.co/Ck4yfFSY" +"apple","neutral","125360877359742976","Sun Oct 16 00:02:16 +0000 2011","Dear @apple, as a hoh/deaf user, can you please add options for stronger and longer vibrate for #iphone4s ? Thank you! #hoh #deaf #ally" +"apple","neutral","125359697770450944","Sat Oct 15 23:57:35 +0000 2011","At the Genius Bar... Waiting @apple store" +"apple","neutral","125359622193295360","Sat Oct 15 23:57:17 +0000 2011","no kidding “@thejabberwockey: Also @EricGreenspan, don't forget @Apple stop closing the app store after each bloody purchase...â€" +"apple","neutral","125357901580746752","Sat Oct 15 23:50:27 +0000 2011","So, what is behind that giant silver door at the @apple store? #MacLife" +"apple","neutral","125351067620880385","Sat Oct 15 23:23:18 +0000 2011","@robcollingridge @Apple @Google and with a URL that works http://t.co/vGSf529q" +"apple","neutral","125350537821569024","Sat Oct 15 23:21:11 +0000 2011","@robcollingridge @Apple @Google Been looking at Samsung ChatON tonight, very promising. Integrated with key apps http://t.co/DZsGPhqt" +"apple","neutral","125347828754169856","Sat Oct 15 23:10:25 +0000 2011","@thejcurt unlocking it at @apple store. RIP, Mr. Jobs." +"apple","neutral","125345723020607488","Sat Oct 15 23:02:03 +0000 2011","Siri is cool but more importantly handy and FUNNY. @Apple please configure it for Canada. What's with the racism, EH?" +"apple","neutral","125344722196766720","Sat Oct 15 22:58:05 +0000 2011","RT @Apple Yea, our stores are open 7 days a week RT @Wasalu_Flora Me too? RT @DJ_BMONEY iPad ..+ iPhone 4s..= My Christmas gifts to my self." +"apple","neutral","125344351218974720","Sat Oct 15 22:56:36 +0000 2011","#IOS5 needed to include a landscape mode for reminders @Apple" +"apple","neutral","125336929825849344","Sat Oct 15 22:27:07 +0000 2011","De la nada mi #iPhone4 se fue a negro y empezó a vibrar, hubo que resetearlo, que onda con #iOS5 WTFH @Apple" +"apple","neutral","125336798690942977","Sat Oct 15 22:26:36 +0000 2011","http://t.co/CvEKdOIQ line at @Apple northpark #dallas mall. #4GSiPhone" +"apple","neutral","125335012961828866","Sat Oct 15 22:19:30 +0000 2011","@davidcohen Sounds like 2nd day strategy isn't working. We got ours yesterday - in and out in no time. @Apple staff awesome/efficient!" +"apple","neutral","125332871174037504","Sat Oct 15 22:10:59 +0000 2011","@sprint is slow, slow, slow. beware when signing with #SlowSprint. @apple #iphone #iphone4 #iphone4s #Siri #android #TheBufferingNetwork" +"apple","neutral","125332609428496384","Sat Oct 15 22:09:57 +0000 2011","Waiting on line @Apple store at west 14th Street." +"apple","neutral","125327281181835264","Sat Oct 15 21:48:47 +0000 2011","In honor of Steve Jobs. @apple #RIPSteve http://t.co/NyjhkNoU" +"apple","neutral","125326760769372160","Sat Oct 15 21:46:42 +0000 2011","when is @apple gonna update the fact that i can't see emoji on twitter for mac?" +"apple","neutral","125325397712846848","Sat Oct 15 21:41:17 +0000 2011","RT @ajrogers Purdie Rogers tribute to #SteveJobs and @apple. We spray painted the Macintosh Plus w.. http://t.co/AlDicTd1" +"apple","neutral","125324916009615360","Sat Oct 15 21:39:23 +0000 2011","RT @ajrogers Purdie Rogers tribute to #SteveJobs and @apple. We spray painted the Macintosh Plus w.. 
http://t.co/oMXjhZh0" +"apple","neutral","125321084525490176","Sat Oct 15 21:24:09 +0000 2011","@apple so why is the ios support for 2nd gen ipods discontinued? It is only 3 years old? Give us an ios update please!" +"apple","neutral","125318029390249984","Sat Oct 15 21:12:01 +0000 2011","Just had a look on eBay for iPhone 4s and some have 13,16,24 bids at well over £500 some £530 (16gb white) , they are £499 delivered @apple" +"apple","neutral","125317541504626688","Sat Oct 15 21:10:04 +0000 2011","#SaturdayCute: Shit That #Siri Says. @Apple designers knew what people were going to ask the new digital assistant... http://t.co/Ecji1iIo" +"apple","neutral","125317300860620801","Sat Oct 15 21:09:07 +0000 2011","@thatstevegray done that buddy, just surprised that there's no option from @apple & if you turn it back on it brings back the pics u delete" +"apple","neutral","125315460030922752","Sat Oct 15 21:01:48 +0000 2011","SpeechTrans http://t.co/TW3SzX61 on @Apple App store listed as the #best bi-directional #Speech To Speech #Translator --- Posts on Twitter" +"apple","neutral","125315263183851521","Sat Oct 15 21:01:01 +0000 2011","So @apple changed the look of their on/off toggle. I wonder how long it will take for all the hip web apps to follow..." +"apple","neutral","125315080081518592","Sat Oct 15 21:00:17 +0000 2011","I want everybody on earth to iMessage me. Is there anyway the world can iMessage me without giving them my phone number??? @Apple" +"apple","neutral","125312357797863425","Sat Oct 15 20:49:28 +0000 2011","RT @MyMelange: Wall to wall people, security and a huge line outside just to get in the @Apple store. http://t.co/i1khkgJm" +"apple","neutral","125309663913840640","Sat Oct 15 20:38:46 +0000 2011","Whoa! Line at @apple store in Glendale for iPhone 4S http://t.co/yZlpogXB" +"apple","neutral","125305567148388352","Sat Oct 15 20:22:29 +0000 2011","Follow me and I follow you!! @iOS5 @fansoflilwayne @follow @apple" +"apple","neutral","125300705836793856","Sat Oct 15 20:03:10 +0000 2011","Salesperson at #soho @Apple store ""it doesn't matter how many people are lined up out front. They're not gonna get one"" #iphone4s" +"apple","neutral","125300603059576833","Sat Oct 15 20:02:46 +0000 2011","@apple and I'm looking at the outside & all you see is sticky notes about steve jobs" +"apple","neutral","125283873331494913","Sat Oct 15 18:56:17 +0000 2011","@samsung and @apple never rest as they battle in court yet again over tablets.(not the pills)" +"apple","neutral","125281502866059264","Sat Oct 15 18:46:52 +0000 2011","Iphone update is great but seriously @apple let me delete stuff I dont want like gamecenter. Steve wont mind..." +"apple","neutral","125278676949544960","Sat Oct 15 18:35:38 +0000 2011","@rjakesuk mine is past the warranty and I don't have AppleCare. #ios5 @apple" +"apple","neutral","125276525472911360","Sat Oct 15 18:27:05 +0000 2011","If @Apple could put the USB female connector in cars would be #Dope" +"apple","neutral","125275795252977664","Sat Oct 15 18:24:11 +0000 2011","What do we have to do to get Flash on #iPad? @Apple please please please change your mind. Thank you in advance! @kusi_sk @PeterNemcok" +"apple","neutral","125275280678993920","Sat Oct 15 18:22:09 +0000 2011","@ireton Perhaps it's time for @Apple to come out with an app called iSuck that lets you know which apps are killing your battery life" +"apple","neutral","125273317673414656","Sat Oct 15 18:14:21 +0000 2011","20 min line @apple store @short pump." 
+"apple","neutral","125270965268643840","Sat Oct 15 18:05:00 +0000 2011","Lol @Apple support playing Mary J Blige during the wait time." +"apple","neutral","125267017942052866","Sat Oct 15 17:49:19 +0000 2011","RT @TheMacMob: Sprint, Verizon & AT&T @Apple Stores http://t.co/ZOzR0UUz" +"apple","neutral","125266503657472000","Sat Oct 15 17:47:16 +0000 2011","@xwordy iCloud was new; I didn't need weather or stocks but they were there pre #ios5 so I'm not hopeful that this will help @apple" +"apple","neutral","125261029834899456","Sat Oct 15 17:25:31 +0000 2011","Thinking more, I want Android to do an iMessage service & for @Google & @Apple to interconnect. They would if the cared about their users." +"apple","neutral","125260105154437121","Sat Oct 15 17:21:50 +0000 2011","@apple in Austin: domain location - umbrellas are nice but being staffed properly would be better." +"apple","neutral","125257803790159873","Sat Oct 15 17:12:42 +0000 2011","bored - need a new intriguing #app for my #iPad. Any suggestions? @apple #yyc @Gadget_Guy" +"apple","neutral","125252442836320256","Sat Oct 15 16:51:24 +0000 2011","@KeepnUpWitTBoyd @AppStore @iTunesMusic they fixed a bug Thursday after I had done my update? Thus my issue, spent Fri @apple #iOS5" +"apple","neutral","125250721280040961","Sat Oct 15 16:44:33 +0000 2011","And @Apple I still have an old MBP from 2006 which I use - which can't run #Lion, I still like to sync it!" +"apple","neutral","125250617911418881","Sat Oct 15 16:44:09 +0000 2011","@moLifer26 @apple When we find people standing in the middle of blood-soaked rooms saying, ""Siri told me to?"" #noreallyihearvoices" +"apple","neutral","125250078108684288","Sat Oct 15 16:42:00 +0000 2011","...now with Siri were talking to our iPhones... how will we ever know if people are really truly crazy? :P +#iphone4s #Siri #apple @apple" +"apple","neutral","125246898830458880","Sat Oct 15 16:29:22 +0000 2011","@notleifgarrett @apple you'll be able to afford that mansion one day." +"apple","neutral","125245892814045184","Sat Oct 15 16:25:22 +0000 2011","@fabi_m not that I know of. I think it is only on iOS, should get @apple to make a desktop app :)" +"apple","neutral","125244798671142912","Sat Oct 15 16:21:01 +0000 2011","@notleifgarrett @apple wished their on hold music played ""Pictures of You""." +"apple","neutral","125231250247135233","Sat Oct 15 15:27:11 +0000 2011","@RealEstateGuyWI All about yesterday's @Apple upgrade. Helpful but maddening" +"apple","neutral","125222749034659840","Sat Oct 15 14:53:24 +0000 2011","after being on hold with @apple for the last 30 mins, i really like their music selection of on-hold music. White Stripes, Ray Lamontagne..." +"apple","neutral","125218106778992640","Sat Oct 15 14:34:57 +0000 2011","""#Win an @Apple iPod Touch from @Mommy_gaga, get the @Pampers Hello World Baby Memories App! http://t.co/g8Uof7uz #PampersHelloApps""" +"apple","neutral","125211793655218178","Sat Oct 15 14:09:52 +0000 2011","@vlingo is a POOR substitute for Siri!! Yo @APPLE, gimmie Siri!!!!" +"apple","neutral","125196751387889665","Sat Oct 15 13:10:06 +0000 2011","@Apple Scrapple. (:" +"apple","neutral","125193298624258049","Sat Oct 15 12:56:23 +0000 2011","@tvnewschick @apple Oh no! Why not?! I want it to be love-love-love! It's a lot of dough to spend otherwise..." +"apple","neutral","125184976579862530","Sat Oct 15 12:23:18 +0000 2011","One of the great #entrepreneurs has died. #Steve #Jobs has passed away. 
Our hearts go out to his family and everyone at @Apple" +"apple","neutral","125085987431923713","Sat Oct 15 05:49:58 +0000 2011","@fashionNOGuilt haha! tomorrow should be less hectic too! 'cause everyone and their moms was at the @apple store today! ;]" +"apple","irrelevant","126405660308021248","Tue Oct 18 21:13:52 +0000 2011","Casi 30 años usando PC, de ellos casi 4 Mac y tengo q decir que el Macbook Air 11"" de @apple es de largo lo mejor. No imagino como mejorarlo" +"apple","irrelevant","126403953058529280","Tue Oct 18 21:07:05 +0000 2011","Ud alguna vez ha llamado al servicio de @apple support? Se nota!! Con razon le gusta apple : @" +"apple","irrelevant","126402391259103232","Tue Oct 18 21:00:53 +0000 2011","@waze_it @macneo Bellissimo video. Sono sempre più convinto di prendere un @Apple iPhone e gettare via il @Nokia N97 che mi ritrovo." +"apple","irrelevant","126399172495679488","Tue Oct 18 20:48:05 +0000 2011","ipad: ""mi aparato favorito de la vida"" frase celebre de Azay Carrasco @apple #ipad" +"apple","irrelevant","126394266145665025","Tue Oct 18 20:28:35 +0000 2011","Cenaze nedeniyle kapalıyız' @apple http://t.co/j4L892Kk" +"apple","irrelevant","126391727408947200","Tue Oct 18 20:18:30 +0000 2011","Dancing bear @apple store Lincoln center http://t.co/XBmjUpMq" +"apple","irrelevant","126387209824776192","Tue Oct 18 20:00:33 +0000 2011","#BlackBerry #RIM esta tratando de abarcar el mercado imitando a android y en ciertas cosas a @Apple pero sinceramente no creo que llegue :)" +"apple","irrelevant","126385587740610563","Tue Oct 18 19:54:06 +0000 2011","iPhoneã®ã‚¢ãƒ‰ãƒ¬ã‚¹ã¯@apple.ne.jpã«ã™ã‚Œã°ã„ã„ã®ã«(´ι_`)" +"apple","irrelevant","126381904621600768","Tue Oct 18 19:39:28 +0000 2011","sail phoenix next stop----> after quimera rock @apple house" +"apple","irrelevant","126379095004160001","Tue Oct 18 19:28:18 +0000 2011","che palle sto cacchio di #WP7 di merda con un #Whatsapp che funziona di cacca per colpa del sistema di #notifica! @apple send me an #iphone" +"apple","irrelevant","126373281099026432","Tue Oct 18 19:05:12 +0000 2011","Twitter intègré dans ios5, mais le correcteur français ne connaît pas ce mot et propose titrer... Peut mieux faire comme intégration @Apple" +"apple","irrelevant","126367728754884609","Tue Oct 18 18:43:08 +0000 2011","Chill lol RT @apple: 😂😂😂😂😂😂😂😂 RT @Styl_Standin: What is imessage??" +"apple","irrelevant","126362562865528832","Tue Oct 18 18:22:37 +0000 2011","@LennyDubz you do know that @apple is not the company. and why u blasting me on #twitter???" +"apple","irrelevant","126360606042374144","Tue Oct 18 18:14:50 +0000 2011","@bassponton @iphoneclub @kpn @iphone @apple ja. En dan heb je voor dat geld ook nog een datalimiet...." +"apple","irrelevant","126355839274594304","Tue Oct 18 17:55:54 +0000 2011","Y sigue sin avisarme cuando me mencionan en Twitter... #pidounparche #actualizenlaapp @apple @steve_jobs #mugrero" +"apple","irrelevant","126355573586399232","Tue Oct 18 17:54:50 +0000 2011","Kan @Samsung och @Apple sluta slÃ¥ss om kunderna i rättssalen och med världens bästa produkter istället sÃ¥ blir det mycket bättre!" +"apple","irrelevant","126352049070809089","Tue Oct 18 17:40:50 +0000 2011","Y @Apple? 
""@BlackberryVzla: RIM anuncia más de 150 millones de dispositivos Vendidos, 1 Billón de Apps Descargadas ymás http://t.co/5IFKXvg4" +"apple","irrelevant","126346705292640257","Tue Oct 18 17:19:36 +0000 2011","Wat een belachelijke prijzen KPN-prijzen iPhone 4S duiken op http://t.co/ep9kZUPb via @iPhoneclub @kpn @iphone @apple" +"apple","irrelevant","126346563147673600","Tue Oct 18 17:19:02 +0000 2011","Fullscreen überall nur der #AppStore hat keine Option dafür. Warum? @apple" +"apple","irrelevant","126346004688674816","Tue Oct 18 17:16:49 +0000 2011","Love how @rogersbiz tell u they're not allowed to sell the 4s for $ more than @Apple but yet want $100 more for the 64G" +"apple","irrelevant","126342441057001472","Tue Oct 18 17:02:39 +0000 2011","Me acabo de convertir en un #fanboy de @apple. El ipad2 con el cristal roto y me lo cambian aunque no lo cubre la garantía. Increíble!" +"apple","irrelevant","126340074777489408","Tue Oct 18 16:53:15 +0000 2011","@apple bees w. @DEEJAY_TINYT :)" +"apple","irrelevant","126332817134190592","Tue Oct 18 16:24:25 +0000 2011","Complimenti sinceri ad @apple per #IOS5 va benissimo su #iPhone e #iPad" +"apple","irrelevant","126331879883415552","Tue Oct 18 16:20:41 +0000 2011","RT @supaluga: RT @modalnho: New mov: ""ショウケース@apple store 銀座"" 記録 / starring:ichiro_, REPEAT PATTERN, moda → http://t.co/u82a1Ivk" +"apple","irrelevant","126319126913363968","Tue Oct 18 15:30:01 +0000 2011","Apple vende 4 millones de iPhone 4S en tres días http://t.co/FH5T5Mi6 #turismo #martesviajero @apple" +"apple","irrelevant","126313259572793345","Tue Oct 18 15:06:42 +0000 2011","Mooi verhaal over @Dropbox en hoe zij een geweldig aanbod van @apple Steve Jobs hebben geweigerd in het verleden. http://t.co/eARoBIne!" +"apple","irrelevant","126310736577298432","Tue Oct 18 14:56:40 +0000 2011","@imightbewrong @apple @inthequeencity #teamapple" +"apple","irrelevant","126307114959372289","Tue Oct 18 14:42:17 +0000 2011","RT @modalnho: New mov: ""ショウケース@apple store 銀座"" 記録 / starring:ichiro_, REPEAT PATTERN, moda → http://t.co/u82a1Ivk" +"apple","irrelevant","126304243144597505","Tue Oct 18 14:30:52 +0000 2011","Verzoek aan @apple voor iphone: koppel agenda aan bellen. Zo kun je in afspraak opgeslagen telnummer meteen aantikken en bellen. #thnx" +"apple","irrelevant","126303928039116800","Tue Oct 18 14:29:37 +0000 2011","cc @apple http://t.co/8TnodVS8" +"apple","irrelevant","126290039138291712","Tue Oct 18 13:34:26 +0000 2011","ã•ã£ãスパルガ先生ã¨ãƒ©ãƒ¼ãƒ¡ãƒ³å±‹ã§modaã«ã¤ã„ã¦èªžã£ã¦ã„ãŸ@modalnho""ショウケース@apple store 銀座"" 記録 / starring:ichiro_,REPEAT PATTERN,COFFEE SHOPS,moda → http://t.co/zbI6rZ5s" +"apple","irrelevant","126271901340401665","Tue Oct 18 12:22:21 +0000 2011","En @Apple mucho diseño pero llevo media hora esperando a mi clase que poca formalidad" +"apple","irrelevant","126270990459219968","Tue Oct 18 12:18:44 +0000 2011","So ya my ITunes is in Japanese??? +@apple +#lostasamothafucka" +"apple","irrelevant","126264647652343808","Tue Oct 18 11:53:32 +0000 2011","ãŠã£ã¨ãƒ¼ã€å‡ºæ¼”è€…ã‚·ãƒ§ãƒƒãƒ—ã‚¹ãŒæŠœã‘ã¦ãŸ! -- New mov: ""ショウケース@apple store 銀座"" 記録 / starring:ichiro_,REPEAT PATTERN,COFFEE SHOPS,moda → http://t.co/kmJ0o1Ja" +"apple","irrelevant","126264035007143936","Tue Oct 18 11:51:06 +0000 2011","@Apple -Handy: @iPhone -4S-Besitzer schimpfen über Netzprobleme - http://t.co/abHuRvyc" +"apple","irrelevant","126263600548556800","Tue Oct 18 11:49:22 +0000 2011","@Apple erneut im Fadenkreuz - Umweltverschmutzung? 
@Apple -Zulieferer muss Produktion stoppen http://t.co/aH9KgrLY" +"apple","irrelevant","126260304819662849","Tue Oct 18 11:36:17 +0000 2011","RT @modalnho: New mov: ""ショウケース@apple store 銀座"" 記録 / starring:ichiro_, REPEAT PATTERN, moda → http://t.co/kmJ0o1Ja" +"apple","irrelevant","126236984644612096","Tue Oct 18 10:03:37 +0000 2011","iPadã®ãƒ“ジãƒã‚¹æ´»ç”¨ã‚»ãƒŸãƒŠãƒ¼@Appleストア銀座ãªã†ã€‚イシン(株)ã®é«˜æœ¨ã•んを見ã«ããŸã‚ˆã€‚" +"apple","irrelevant","126232767821381632","Tue Oct 18 09:46:51 +0000 2011","New mov: ""ショウケース@apple store 銀座"" 記録 / starring:ichiro_, REPEAT PATTERN, moda → http://t.co/kmJ0o1Ja" +"apple","irrelevant","126215978341236736","Tue Oct 18 08:40:08 +0000 2011","@GuySie Heb je gisteren Tros Radar gezien over Apple. Garantie werd niet nagekomen door @Apple en devices zeer snel kapot." +"apple","irrelevant","126195701704163328","Tue Oct 18 07:19:34 +0000 2011","@Notre4Dame @apple haha no fuck that its still took sux and a half hours and i have to be up at 7" +"apple","irrelevant","126186608113356800","Tue Oct 18 06:43:26 +0000 2011","RT @aranzibia: Las aplicaciones más populares para iPhone (vía AppleWeblog) http://t.co/m68Tzn1K #iphone @apple" +"apple","irrelevant","126175729024122880","Tue Oct 18 06:00:12 +0000 2011","Las aplicaciones más populares para iPhone (vía AppleWeblog) http://t.co/m68Tzn1K #iphone @apple" +"apple","irrelevant","126173465253384193","Tue Oct 18 05:51:12 +0000 2011","#iOS5 op iPad 2 geïnstalleerd. Tijdrovend en bepaald niet probleemloos. @Apple gaat meer en meer de #Windows kant op. ;-(" +"apple","irrelevant","126164921485492224","Tue Oct 18 05:17:15 +0000 2011","@Twitter CEO @DickC on @Apple, Privacy, Free Speech and @Google; Far From IPO http://t.co/ztq9Sx7x" +"apple","irrelevant","126149195957673984","Tue Oct 18 04:14:46 +0000 2011","@Karofsky @Apple how's that pear coming? oops i mmeant apple!" +"apple","irrelevant","126138637652992001","Tue Oct 18 03:32:49 +0000 2011","Por cierto @Apple, me sigue gustando mas MobileMe que iCloud. La sincronización de mis ordenadores, la gestión de carpetas... nada que ver." +"apple","irrelevant","126097345124368385","Tue Oct 18 00:48:44 +0000 2011","Welp. RT @Tree @Apple you didn't fall too far from me, huh? RT @NikkiNotNiki Nia Long's mom is gorgeous.." +"apple","irrelevant","126054622564589569","Mon Oct 17 21:58:58 +0000 2011","جهاز من كل ثلاثه تحمل iOS 5 +""@TechCrunch: iOS 5 Already Installed On 1 In 3 Eligible Devices http://t.co/2Hijv2LK by @grg"" @apple @ios5" +"apple","irrelevant","126054568273518592","Mon Oct 17 21:58:46 +0000 2011","@Apple Mooi systeem hoor dat films huren, t zou alleen leuker zijn als je ze ook werkelijk kan bekijken!!! #NAAIDOZEN http://t.co/Qg0RYdVX" +"apple","irrelevant","126042611709521921","Mon Oct 17 21:11:15 +0000 2011","@Apple podría lanzar un #iPad Mini en el 2012, según los últimos rumores. http://t.co/3mkPJTFX" +"apple","irrelevant","126024290201124864","Mon Oct 17 19:58:26 +0000 2011","Viens de tester #ios5 sur un 3GS et franchement c'est pas mal du tout ! @apple" +"apple","irrelevant","126022958710915072","Mon Oct 17 19:53:09 +0000 2011","http://t.co/k8RQwXty +para que descargues apps para tu dispositivo #iOS de @Apple." +"apple","irrelevant","126022708524888064","Mon Oct 17 19:52:09 +0000 2011","Por fin Adobe para #iOS, al parecer @Apple se reconcilia en esta nueva gestión." +"apple","irrelevant","126005063595466753","Mon Oct 17 18:42:02 +0000 2011","Lekkere garantie volgens #radar bij @apple. 
#Ned1" +"apple","irrelevant","126001383869644800","Mon Oct 17 18:27:25 +0000 2011","RT @IsmaelLaRosa: En la tienda de @Apple WoooooW Que legado dejaste #SteveJobs iAdmire U. Está Repletoooo iFull LOL" +"apple","irrelevant","126000885485678592","Mon Oct 17 18:25:26 +0000 2011","En la tienda de @Apple WoooooW Que legado dejaste #SteveJobs iAdmire U. Está Repletoooo iFull LOL" +"apple","irrelevant","125998496535937024","Mon Oct 17 18:15:57 +0000 2011","querida @apple, até quando o steve vai ficar na sua pagina inicial?" +"apple","irrelevant","125996653990772737","Mon Oct 17 18:08:37 +0000 2011","@MissHell23 Umm, pretty sure I typed @Apple, not you, but okies, why'd you do it, then? *grin*" +"apple","irrelevant","125993886249267200","Mon Oct 17 17:57:37 +0000 2011","Contra todo pronóstico @Samsung podría ser el fabricante del #chipA6 para @Apple http://t.co/6utPqa5w" +"apple","irrelevant","125992545552576512","Mon Oct 17 17:52:19 +0000 2011","Perry ricks @Apple @Lenox http://t.co/9kYgOvB1" +"apple","irrelevant","125991634855923712","Mon Oct 17 17:48:41 +0000 2011","@pnt21 ドンマイー@(・â—・)@Apple社ä¿è¨¼è–„ã„ã‹ã‚‰æ°—ã‚’ã¤ã‘ã¦ã­" +"apple","irrelevant","125990804488601600","Mon Oct 17 17:45:23 +0000 2011","Descuentos y offertas para applicaciones de @apple http://t.co/8VNd57qk #mobil" +"apple","irrelevant","125982640263274496","Mon Oct 17 17:12:56 +0000 2011","#MobileMe, #iCloud, #AppleID... cómo sobrevivir a la transición de las cuentas de @Apple http://t.co/3lMPiCjK" +"apple","irrelevant","125973789526863872","Mon Oct 17 16:37:46 +0000 2011","@Bonita1108 es pura envidia xq @Apple no te ha regalado nada con o sin fallas #VivaBlackBerry" +"apple","irrelevant","125970459404673026","Mon Oct 17 16:24:32 +0000 2011","RT @mpastrana: RT @apple no pasa nada, todo esta bien, no pasa nada, todo esta en orden http://t.co/zwECWhD" +"apple","irrelevant","125968277083136000","Mon Oct 17 16:15:53 +0000 2011","Se tan arruinando mis audifonos d mi #iphone @apple quien me quiere regalar unos? http://t.co/56eeE9uY" +"apple","irrelevant","125967413299773440","Mon Oct 17 16:12:26 +0000 2011","Asi o mas mala la #version5.0 de @apple..." +"apple","irrelevant","125967315488608257","Mon Oct 17 16:12:03 +0000 2011","@macworld_brasil vcs conseguiram fazer funcionar o find my Mac? O meu sempre pede para reportar o erro a @Apple !" +"apple","irrelevant","125965369532878849","Mon Oct 17 16:04:19 +0000 2011","> 4 mln iPhones verkocht in eerste weekend: http://t.co/1pPZz1oX (via @apple)" +"apple","irrelevant","125965364667486209","Mon Oct 17 16:04:17 +0000 2011","> 4 mln iPhones verkocht in eerste weekend: http://t.co/OP26muCr (via @apple)" +"apple","irrelevant","125959699089719297","Mon Oct 17 15:41:47 +0000 2011","Curso de DJ na @apple em breve, informaçoes 30114020 ou 91287199 +msn: dj_sandrocosta@hotmail.com +@djtetekerbes" +"apple","irrelevant","125959482588143616","Mon Oct 17 15:40:55 +0000 2011","@dgosset @lolopb oh putain… j'ai vidé mon spam il y a peu… Le mail de applecert@apple.com est dans ce qu'il reste…" +"apple","irrelevant","125957826500771840","Mon Oct 17 15:34:20 +0000 2011","RT @BikerVox: http://t.co/EVItL2eO @KentStoffels @MArioMUssolini @iOS_Notch @Apple" +"apple","irrelevant","125957742698561537","Mon Oct 17 15:34:00 +0000 2011","http://t.co/EVItL2eO @KentStoffels @MArioMUssolini @iOS_Notch @Apple" +"apple","irrelevant","125948329694724097","Mon Oct 17 14:56:36 +0000 2011","¿Quién dijo crisis? 
@Apple ha vendido 4 millones de unidades del #iPhone4S en sólo tres días" +"apple","irrelevant","125930962545672192","Mon Oct 17 13:47:35 +0000 2011","Resultado de ventas de @apple 4millones de #iPhone4s vendidos" +"apple","irrelevant","125928640394432513","Mon Oct 17 13:38:22 +0000 2011","@hervepierre mdr! ouais une mauvaise expérience avec les vendeurs @Apple store m'a fait hair l'iphone ;)" +"apple","irrelevant","125910538550124545","Mon Oct 17 12:26:26 +0000 2011","bagi yg acc USnya di disabled sama apple,email aja itunes store supportnya minta di re enabled lagi +iTunesStoreSupport@apple.com" +"apple","irrelevant","125877369796968448","Mon Oct 17 10:14:38 +0000 2011","PederÄine umobolne, oću APDEJT ne FORMAT SVEGA ISUS VAM JEBO MATER @apple" +"apple","irrelevant","125873952953352192","Mon Oct 17 10:01:03 +0000 2011","O poder de #pensardiferente: a mágica de #SteveJobs foi transformar egomania e obsessão em trunfos http://t.co/qkUoeEzv @apple @ipad @ipod" +"apple","irrelevant","125862601677737985","Mon Oct 17 09:15:57 +0000 2011","講習中@apple shop GINZA" +"apple","irrelevant","125857117407166464","Mon Oct 17 08:54:09 +0000 2011","#epicfail @apple bij upgraden naar IOS5 instellingen kwijt en ebooks verloren. het kost me dus geld zo'n upgrade, WAARDELOOS!" +"apple","irrelevant","125827656238379008","Mon Oct 17 06:57:05 +0000 2011","Hey @apple , je sais que tu es encore un peu remué, mais tu pourrais ajouter le verbe avoir à ton dictionnaire ?" +"apple","irrelevant","125826633713201152","Mon Oct 17 06:53:01 +0000 2011","@chachasikes @iphone @apple on the iphone4s???" +"apple","irrelevant","125825293473685505","Mon Oct 17 06:47:42 +0000 2011","@ferchowii @Adr_Mart_Com @Apple no tengo" +"apple","irrelevant","125815316596002816","Mon Oct 17 06:08:03 +0000 2011","追悼ジョブズæ°â€¦@Apple Store#fb http://t.co/V07PBNdP" +"apple","irrelevant","125797001337122817","Mon Oct 17 04:55:16 +0000 2011","^___^ que amable es la gente de @apple" +"apple","irrelevant","125724424774221826","Mon Oct 17 00:06:53 +0000 2011","Como puedo checar mi correo si tengo cuenta @apple.com?? :S osea no q onda con eso!! jajaja" +"apple","irrelevant","125707107495452673","Sun Oct 16 22:58:04 +0000 2011","RT @Leahjanell: This Moment w/ @Shyah & @DianiGroves is brought to you by @NBA @Apple @Riesling oh and http://t.co/Dp4XiDs2" +"apple","irrelevant","125705666592641024","Sun Oct 16 22:52:21 +0000 2011","This Moment w/ @Shyah & @DianiGroves is brought to you by @NBA @Apple @Riesling oh and http://t.co/tmQ50JRU" +"apple","irrelevant","125705646942330880","Sun Oct 16 22:52:16 +0000 2011","Partner with @apple. I'd pay $50 for aniBulb. "@washingtonpost: Would you pay $25 for this lightbulb? http://t.co/zGFsehAO"" +"apple","irrelevant","125699684693065728","Sun Oct 16 22:28:34 +0000 2011","todos quieren destruir a @blackberryhelp ves que estas en la mala @sebitasrodri mira que @apple ya tiene chat" +"apple","irrelevant","125695680135172096","Sun Oct 16 22:12:40 +0000 2011","@chefreggie06 @rolandsmartin @Apple get ready Tampa Bay the Martin's are coming to town! Bring it on Buccaneers!" +"apple","irrelevant","125695094836826112","Sun Oct 16 22:10:20 +0000 2011","@heidiknyc @apple @Badog420 Cute dinner when ur back from shanghai xx" +"apple","irrelevant","125653144993660928","Sun Oct 16 19:23:38 +0000 2011","@apple - dette blir for dumt. 
http://t.co/zqKMkq91" +"apple","irrelevant","125650076759638016","Sun Oct 16 19:11:27 +0000 2011","ninguem desliga o 3g do celular -.- “@giuchavez: Olha aqui @apple: na proxima atualizaçao eu quero um atalho pra ligar/desligar a 3g Ok ..." +"apple","irrelevant","125647972087242754","Sun Oct 16 19:03:05 +0000 2011","Teach Different: iTeach Therefore iAm http://t.co/yozd8HCx @apple @teachforus @teachforamerica #ipad #stevejobs" +"apple","irrelevant","125622089502830592","Sun Oct 16 17:20:14 +0000 2011","@Apple et je devrais arrêter de vous glorifier et ça c'est pas cool..." +"apple","irrelevant","125621628917915648","Sun Oct 16 17:18:24 +0000 2011","yo @Apple va falloir faire quelque chose pour l'app vidéo. Le fait de pas pouvoir laisser la vidéo jouer en faisant autre chose < diarrhée." +"apple","irrelevant","125602732278169601","Sun Oct 16 16:03:19 +0000 2011","Quiero migrar @Apple!" +"apple","irrelevant","125595437938905088","Sun Oct 16 15:34:20 +0000 2011","O #itunes acaba de me avisar q esta disponível a versão 5 pra atualizar meu #Iphone 4. Como não cair de amores pela @apple e #SteveJobs ?!!" +"apple","irrelevant","125590191502131200","Sun Oct 16 15:13:29 +0000 2011","@apple pratet om en versjon av iPod touch til barn. De ombestemte seg da tittelen nærmet seg #iTouch Kids." +"apple","irrelevant","125568051277086721","Sun Oct 16 13:45:31 +0000 2011","Olha aqui @apple: na proxima atualizaçao eu quero um atalho pra ligar/desligar a 3g Ok ? Aff fazem tudo pela metade !!! Steve,da um jeito ai" +"apple","irrelevant","125542968844226560","Sun Oct 16 12:05:50 +0000 2011","interesante por decirlo menos sabes que la cámara del #iphone4s le pertenece a #sony ... mmm ojalá ande bien y no sea mala jugada de @apple" +"apple","irrelevant","125542941287649280","Sun Oct 16 12:05:44 +0000 2011","Oja, ik ben gelijk al mijn apps kwijt bedankt @apple ." +"apple","irrelevant","125534067495141376","Sun Oct 16 11:30:28 +0000 2011","#Deal & #Schnäppchen Update: [iPad] iPad 1. Generation generalüberholt ab 389 € @Apple - http://t.co/xDtdZurD" +"apple","irrelevant","125526544377577472","Sun Oct 16 11:00:35 +0000 2011","@ChArEnCe RaMoS @apple jane @heiMachO @tintin_Julielmo new followers..:) na follow back ko na po kayo..:)" +"apple","irrelevant","125521498055254016","Sun Oct 16 10:40:31 +0000 2011","[iPad] iPad 1. Generation generalüberholt ab 389 € @Apple: Der Tablet von Apple bleibt weiterhin recht preisstab... http://t.co/h8ANhAVu" +"apple","irrelevant","125521344342392832","Sun Oct 16 10:39:55 +0000 2011","Kan @Apple de #tekstaanvulling op #iphone en #ipad niet omdraaien? #eigenteksteerst #weetapplehetaltijdbeter" +"apple","irrelevant","125516954407677952","Sun Oct 16 10:22:28 +0000 2011","[iPad] iPad 1. Generation generalüberholt ab 389 € @Apple http://t.co/tOv1xKQ8" +"apple","irrelevant","125490577130258432","Sun Oct 16 08:37:39 +0000 2011","@HAZI クック@apple.com?" +"apple","irrelevant","125485687339352064","Sun Oct 16 08:18:13 +0000 2011","@Le_M_Poireau Mais moi je voudrais pas que @apple donne mon fichier @LaLibreBe. Marre d'être spammée par les journaux désespérés." +"apple","irrelevant","125421514928558080","Sun Oct 16 04:03:14 +0000 2011","O #piratasdainformatica só não disse se o Bill Gates tbm é dono da @Apple" +"apple","irrelevant","125411589905068033","Sun Oct 16 03:23:47 +0000 2011","Né @Apple, vai trabalhar no iPhone5" +"apple","irrelevant","125409044222586880","Sun Oct 16 03:13:40 +0000 2011","I guess I should go @hublot @apple and @ZegnaOfficial shopping next week. 
Birthdays only come once a year! Lol" +"apple","irrelevant","125406418777018368","Sun Oct 16 03:03:14 +0000 2011","ASSISTINDO O FILME CHAPA!! PIRATAS DA INFORMÃTICA ESSE FILME E FODA VIVA @APPLE" +"apple","irrelevant","125405429583970305","Sun Oct 16 02:59:18 +0000 2011","Un abrazo a tod@s desde el @Apple Store en #NewYork. Es un honor sumarse a la gente que honra la memora y el #genio de #SteveJobs." +"apple","irrelevant","125375333162684416","Sun Oct 16 00:59:43 +0000 2011","iphone行列並んã ãƒ¼@apple store銀座 ã©ã®ãらã„ã‹ã‹ã‚‹ã‹ã­ã€‚16Gã—ã‹ãªã„ãã†ã§ã™ã€‚" +"apple","irrelevant","125361267555835905","Sun Oct 16 00:03:49 +0000 2011","New Arrivals and 70 percent off Clearance of Matching G http://t.co/qQcwyl4b@Lds @Scriptures @Apple @E @Book" +"apple","irrelevant","125353260520443904","Sat Oct 15 23:32:00 +0000 2011","Today very cheap for Patio Furniture San Anton http://t.co/Sd3i1bjm@Plastic @Apple @Bags" +"apple","irrelevant","125352405482217473","Sat Oct 15 23:28:37 +0000 2011","Causando na loja da @apple" +"apple","irrelevant","125346522618535937","Sat Oct 15 23:05:14 +0000 2011","Fico manso os preços desses IPHONEs,absurdos R$ 2.500! Assim não dá Fía @apple, Só se eu fizesse programa na Beira Mar :(" +"apple","irrelevant","125339193802100736","Sat Oct 15 22:36:07 +0000 2011","Find more the best Angel Christmas Tree O http://t.co/d5G1rY5d@Starfrit @Apple @Peeler" +"apple","irrelevant","125336882862231552","Sat Oct 15 22:26:56 +0000 2011","Find more the best Maytag Washer Control Board deal.Check pric http://t.co/SNdkAg3e@Best @Apple @Peeler" +"apple","irrelevant","125333140414808065","Sat Oct 15 22:12:03 +0000 2011","Find more the best Cyst On Knee deal.Check price of best Cyst On Knee and offers now. http://t.co/qzYnNuvI@Automatic @Apple" +"apple","irrelevant","125325092841467904","Sat Oct 15 21:40:05 +0000 2011","Do not check other site for Uhmw Conveyor.Now you go best site of Uhmw Conveyor. http://t.co/8RMV5D2q@9 @Pin @Cable @Apple" +"apple","irrelevant","125321769203666944","Sat Oct 15 21:26:52 +0000 2011","Do not check other site for Ed Hardy Black Long Sl http://t.co/hengQGqp@Unlocked @Apple @Touch @Phones" +"apple","irrelevant","125319263027343360","Sat Oct 15 21:16:55 +0000 2011","RT @ThBenkoe: hey @apple! dass man selektiv keine einzelnen fotos (peinliche, überflüssige etc.) aus dem fotostream löschen kann, ist do ..." +"apple","irrelevant","125305753903964161","Sat Oct 15 20:23:14 +0000 2011","Esperaba mas de "IOS 5" @Apple" +"apple","irrelevant","125301831286013952","Sat Oct 15 20:07:39 +0000 2011","Hello you are find Mesh Jacket Motorcycle? Our site have more Mesh http://t.co/tYg4Ualc@Ace @Hardware @Apple @Pee" +"apple","irrelevant","125275735815491584","Sat Oct 15 18:23:57 +0000 2011","to reiniciando meu computador. ó as idéia @apple" +"apple","irrelevant","125232405517844481","Sat Oct 15 15:31:46 +0000 2011","@chriswglowe @apple @gadgetlab you can tell me tonight ;-) cracking open the vodka tonight if you fancy ?" +"apple","irrelevant","125228207002759168","Sat Oct 15 15:15:05 +0000 2011","Buy more products of Hemp Jewelry Instructions now.Free ship http://t.co/928bPS4t@Lehmans @Apple @Peeler" +"apple","irrelevant","125219664488960000","Sat Oct 15 14:41:09 +0000 2011","@rogriffo VC Viu ontem A @Apple Estava dando IPhone 3 No Site dos estados Unidos" +"apple","irrelevant","125209676416679936","Sat Oct 15 14:01:27 +0000 2011","Sabias que.... 
RT @MkDirecto Los usuarios del #iPad desayunan y cenan con la #tableta de @Apple: http://t.co/eYXCQQmK" +"apple","irrelevant","125206785584922624","Sat Oct 15 13:49:58 +0000 2011","Today very cheap for Hemp For Fuel.And Free shipping for Hemp For Fuel too. http://t.co/8gyJLbMW@Farberware @Apple @Peeler" +"apple","irrelevant","125184213342367744","Sat Oct 15 12:20:16 +0000 2011","Discount Hemp Knots today.Cheap price too.Save money for best Hemp Knots. http://t.co/9H3jopLq@Avanti @Apple @Peeler" +"apple","irrelevant","125082707389718529","Sat Oct 15 05:36:56 +0000 2011","Today very cheap for 2198597 Icemaker.And Free shipping for 2198597 Icemaker too. http://t.co/6CLkWAiO@French @Apple @Peeler" +"google","positive","126534770095169536","Wed Oct 19 05:46:54 +0000 2011","Took me months to decide, but tonight I have chosen #google over Amazon S3 for image storage solution." +"google","positive","126534201880219648","Wed Oct 19 05:44:39 +0000 2011","RT @spacecanard Today's the Day. ICS <3 #android #google #samsung" +"google","positive","126534020367519744","Wed Oct 19 05:43:56 +0000 2011","The new Nexus looks good! UI feel a bit confusing but I guess Android users are ok with that! Now, try it :) #Google #android #nexusprime" +"google","positive","126533948925952000","Wed Oct 19 05:43:38 +0000 2011","Very nice trivia game by #Google. Helps you use their search better http://t.co/ikN087ZE" +"google","positive","126533885109600256","Wed Oct 19 05:43:23 +0000 2011","Very nice trivia game by #Google. Helps you use their search better http://t.co/c5Tu3dZK #fb" +"google","positive","126533562781544448","Wed Oct 19 05:42:06 +0000 2011","Cant build a website. Get a #Google website #Free ! http://t.co/7qUy4uOU" +"google","positive","126533349727666176","Wed Oct 19 05:41:16 +0000 2011","#Android ICS is looking pretty good. Not quite sure if its worth the 4.0 numbering leap. #google" +"google","positive","126533268119109632","Wed Oct 19 05:40:56 +0000 2011","@Tita_Ramos #Google it =)" +"google","positive","126533166352699392","Wed Oct 19 05:40:32 +0000 2011","RT @p1j Android 4.0 Ice Cream Sandwich is Here with a Google Nexus in new Avtar! http://t.co/MTu7tGx7 #android #google #nexus #BigMAMA" +"google","positive","126532897715929088","Wed Oct 19 05:39:28 +0000 2011","Me too RT @MohamedG: Exciting day, It's Ice cream Sandwich day ;) #Google #Android" +"google","positive","126531552367751169","Wed Oct 19 05:34:07 +0000 2011","WOW the new #Google #Nexus is just beautiful. totally gonna boost google's market share in the smart phone market." +"google","positive","126531180907601920","Wed Oct 19 05:32:39 +0000 2011","hahaha integrated data usage manager... in sucha brilliant way, sleek design, superb and I'm only watching videos LOL #Google" +"google","positive","126530945976238080","Wed Oct 19 05:31:43 +0000 2011","I fancy an Ice Cream Sandwich #google #android. Wonder it will works on my HTC Desire?" +"google","positive","126530924576907264","Wed Oct 19 05:31:37 +0000 2011","Google Ice Cream Sandwich - sounds yummy! What do you think? http://t.co/gMsgXcq8 +#google #Android #ice cream sandwich" +"google","positive","126530807891374082","Wed Oct 19 05:31:10 +0000 2011","#Google Me!!!!!!!!" +"google","positive","126530189579649024","Wed Oct 19 05:28:42 +0000 2011","Exciting day, It's Ice cream Sandwich day ;) #Google #Android" +"google","positive","126530027939569665","Wed Oct 19 05:28:04 +0000 2011","Android 4.0 Ice Cream Sandwich is Here with a Google Nexus in new Avtar! 
http://t.co/mSWX6Eo6 #android #google #nexus #BigMAMA" +"google","positive","126529770778411008","Wed Oct 19 05:27:02 +0000 2011","@Bla1ze I was referring to #Google & #Icecreamsandwich. It looks amazing. What imo android was missing. I ... http://t.co/rE4nLZgI" +"google","positive","126528982807089152","Wed Oct 19 05:23:54 +0000 2011","http://t.co/QV4m1Un9 Forget the phone.. Nice UI. Liking the Scroll Feature #android #google #nexus" +"google","positive","126528978239496194","Wed Oct 19 05:23:53 +0000 2011","Simply Amazing! http://t.co/9qmG3irA #Google #Samsung #Galaxy #Nexus #Prime #Android4 #IceCreamSandwich" +"google","positive","126528804192653312","Wed Oct 19 05:23:12 +0000 2011","#Google has finally unveiled the much awaited #Android OS Ice Cream Sandwich & it sure looks good! http://t.co/sNL38EM1" +"google","positive","126528264117293056","Wed Oct 19 05:21:03 +0000 2011","#google Finally!! All searches for logged in users will be on https! http://t.co/RaUeRrtF" +"google","positive","126526219587039233","Wed Oct 19 05:12:56 +0000 2011","#RIM should really take an in debt look at #Google's strategy... They released the #NDK for #Icecreamsandwich 3 HOURS after release. #ics" +"google","positive","126525469897146368","Wed Oct 19 05:09:57 +0000 2011","RT @vattam: Man! I've fallen in love with Galaxy Nexus! #google #samsung #galaxynexus #android" +"google","positive","126525368860540928","Wed Oct 19 05:09:33 +0000 2011","When in doubt... #Google it!!!" +"google","positive","126525172969766912","Wed Oct 19 05:08:46 +0000 2011","[GReader Share] Five Reasons Why Google is Winning the War in Photosharing http://t.co/coQCLb9R - #photography #fotografia #google+" +"google","positive","126524301259194368","Wed Oct 19 05:05:18 +0000 2011","RT @Assim99: Dear #Google, I want the Galaxy Nexus NOW. Please send it to me by email or something - I know you have the technology" +"google","positive","126523731710443521","Wed Oct 19 05:03:02 +0000 2011","RT @shagorikah: Telegraph reports that the biggest threat to #facebook is its power users have shifted to #twitter or #google+ http://t. ..." +"google","positive","126523530903953408","Wed Oct 19 05:02:15 +0000 2011","Sorry #Apple #Google and #Samsung just made you look bad. #Android is king" +"google","positive","126523525598162944","Wed Oct 19 05:02:13 +0000 2011","Telegraph reports that the biggest threat to #facebook is its power users have shifted to #twitter or #google+ http://t.co/JHdTYKCV" +"google","positive","126522990585315328","Wed Oct 19 05:00:06 +0000 2011","#Facebook power users 'have gone to #Google+ and #Twitter' - Telegraph http://t.co/vwCiOv1f Who are they? #socialmedia" +"google","positive","126522810821644288","Wed Oct 19 04:59:23 +0000 2011","Not impressed much with the new Android update. But good signs: a readable font, emphasis on design, and less nerdiness. #google" +"google","positive","126522714713370624","Wed Oct 19 04:59:00 +0000 2011","Video: #Google wallet, see the wow effect http://t.co/mcW78VtG" +"google","positive","126522621251682304","Wed Oct 19 04:58:38 +0000 2011","When I'm about to tweet something and I cant remember how to spell I go straight to #Google" +"google","positive","126522262768726016","Wed Oct 19 04:57:12 +0000 2011","Dear #Google, I want the Galaxy Nexus NOW. Please send it to me by email or something - I know you have the technology" +"google","positive","126521286053724160","Wed Oct 19 04:53:19 +0000 2011","#GOOGLE #ANDROID 4.0 out ;-) Available with #SAMSUNG nexus!" 
+"google","positive","126520029410885632","Wed Oct 19 04:48:20 +0000 2011","Maybe not the most efficient way to browse, but fun: #Google releases an Infinite Digital Bookcase http://t.co/X1qzeX7f by @MeghanKel" +"google","positive","126519483752914944","Wed Oct 19 04:46:10 +0000 2011","@jowyang it's difficult to pass up #google+ considering the social seo benefits of google+ vs facebook" +"google","positive","126519329025040384","Wed Oct 19 04:45:33 +0000 2011","Ice Cream Sandwich to stop carriers bullying smartphone users #google #android http://t.co/BZNy74Nn" +"google","positive","126519123772588032","Wed Oct 19 04:44:44 +0000 2011","I agree with @wellis68 that the ""Hangouts"" on #Google+ are friggin' awesome!â€" +"google","positive","126519017405030400","Wed Oct 19 04:44:19 +0000 2011","http://t.co/Pl4ZeEvH - Ice Cream Sandwich to stop carriers bullying smartphone users #google #android http://t.co/BgKiMGJF" +"google","positive","126518882939838464","Wed Oct 19 04:43:46 +0000 2011","#google #galaxynexus #icecream great" +"google","positive","126516914678808578","Wed Oct 19 04:35:57 +0000 2011","The next #Google #Nexus phone is here!!. http://t.co/edSwoXoC" +"google","positive","126516779886456832","Wed Oct 19 04:35:25 +0000 2011","Google Earth Helps Locate Salmonella Hotspots http://t.co/mcz9RSDf #google" +"google","positive","126516304336257025","Wed Oct 19 04:33:32 +0000 2011","#ThingsWeAllHate that person that acts like they know every fucking thing bitch you not #Google" +"google","positive","126515760855134208","Wed Oct 19 04:31:22 +0000 2011","#samsung galaxy nexus = orbital laser. #apple iphone 4s = wooden stick #google" +"google","positive","126514474378203136","Wed Oct 19 04:26:15 +0000 2011","/want http://t.co/srbm3V0K #Google #Nexus #Samsung #Galaxy" +"google","positive","126513620686352384","Wed Oct 19 04:22:52 +0000 2011","Ice Cream Sandwich u r really delicious Can ditch iPhone for u that's for sure. #Google launches #Android4 aka #IceCreamSandwich :)" +"google","positive","126513526385819648","Wed Oct 19 04:22:29 +0000 2011","#google+ (: lovin it" +"google","positive","126513425043030016","Wed Oct 19 04:22:05 +0000 2011","""Samsung and Google have closely collaborated to push the mobile experience forward. We are pleased to ... http://t.co/ecT8fYIA #Google+" +"google","positive","126512728297844736","Wed Oct 19 04:19:19 +0000 2011","RT @RoycinD: FINALLY ! Power + Volume = Screenshot. No more rooting required! #Google #ICS #galaxynexus" +"google","positive","126512631937904640","Wed Oct 19 04:18:56 +0000 2011","I want the new #google #galaxynexus. Android 4.0 is going to be the fucks" +"google","positive","126512208451600384","Wed Oct 19 04:17:15 +0000 2011","#Google #Nexus press conference http://t.co/2WV01o5v #slick" +"google","positive","126511545160171520","Wed Oct 19 04:14:37 +0000 2011","S/O to #google for getting me through high school and college....you are appreciated" +"google","positive","126511426926944256","Wed Oct 19 04:14:09 +0000 2011","iScream u scream v all scream #android #IceCreamSandwich. A job well done by #google and a major game changer in the #mobile #OS space" +"google","positive","126511000907288576","Wed Oct 19 04:12:27 +0000 2011","@immad_immad #google, always thinking ahead" +"google","positive","126510977335300096","Wed Oct 19 04:12:22 +0000 2011","@VentureBeat I like the #google virtual bookcase thank you for sharing" +"google","positive","126510551789604864","Wed Oct 19 04:10:40 +0000 2011","#Google's #IceCreamSandwich was revealed. 
I'm getting an #Android whenever I get a new phone, but I'm keeping my #iPhone for now." +"google","positive","126509929619132417","Wed Oct 19 04:08:12 +0000 2011","#Android #Google Ice Cream Sandwich Feature Closer Look - Roboto Type Face Is ""A Pleasure"" To Read http://t.co/DJsp2G5B #DhilipSiva" +"google","positive","126509528287166464","Wed Oct 19 04:06:36 +0000 2011","Fantastic work from #Samsung and #Google on the #GalaxyNexus superphone and the new #Android #ICS. Very impressive." +"google","positive","126508393203642368","Wed Oct 19 04:02:06 +0000 2011","... I think I should work for #MelaleucaTheWellnessCompany ... Probably the best company to work for .. oh and #Google" +"google","positive","126507456019968000","Wed Oct 19 03:58:22 +0000 2011","i love #google" +"google","positive","126506850781888512","Wed Oct 19 03:55:58 +0000 2011","Can't wait for Ice Cream Sandwich. http://t.co/RtDzjq2U #android #google" +"google","positive","126506410195431424","Wed Oct 19 03:54:13 +0000 2011","i need a hookup, stop teasing me #google http://t.co/07YR58wL i want the new #nexus" +"google","positive","126506064387637249","Wed Oct 19 03:52:50 +0000 2011","well.....#google's #icecreamsandwich and new accompanying phone are, as usual, neat, but nothing ground-breaking" +"google","positive","126505384428052481","Wed Oct 19 03:50:08 +0000 2011","#Android #Google Device Frame Generator Updated For Galaxy Nexus - Your Screenshots Just Got Prettier http://t.co/9bWt2W6z #DhilipSiva" +"google","positive","126505187752943616","Wed Oct 19 03:49:21 +0000 2011","Mmm... #SmarterPhone RT @Android: Introducing Ice Cream Sandwich, the delicious new version of Android: http://t.co/AXl2K1Gs #ICS #google" +"google","positive","126505144878772224","Wed Oct 19 03:49:11 +0000 2011","So so excited for all of the new Android features! Awesomeness :) #Android #ICS #Google #GalaxyNexus" +"google","positive","126504782465732608","Wed Oct 19 03:47:45 +0000 2011","#IceCreamSandwich went way and beyond what I expected. Can't wait to get it on my Nexus to play with! #Google" +"google","positive","126504452680187905","Wed Oct 19 03:46:26 +0000 2011","Check this video out -- Introducing Galaxy Nexus. Simple, beautiful, beyond smart http://t.co/fiTI6ZIA é€éŽ @youtube #android #nexus #google" +"google","positive","126504346639802368","Wed Oct 19 03:46:01 +0000 2011","Cream on the inside, clean on the outside. Ice Cream phone job. #Google" +"google","positive","126504216004005888","Wed Oct 19 03:45:30 +0000 2011","#Google+ great for small businesses? I think so. The platform has some really beneficial features. Thoughts? http://t.co/FytcyEG2" +"google","positive","126504013939216384","Wed Oct 19 03:44:41 +0000 2011","RT @Mr_Lister: Loves the new presentations tool for Google Docs - particularly themes and adding video via URL #google #sd33 #edchat #ed ..." +"google","positive","126503946092158976","Wed Oct 19 03:44:25 +0000 2011","brilliant WebGL Bookcase #google http://t.co/CR3A6k9U" +"google","positive","126503805369069568","Wed Oct 19 03:43:52 +0000 2011","#Google's like my wife - searches things for me and completes my sentences." +"google","positive","126503790412181504","Wed Oct 19 03:43:48 +0000 2011","#Android 4.0 Ice Cream: Introducing Galaxy Nexus. 
Simple, beautiful, beyond smart http://t.co/vUWgyx3N #Google" +"google","positive","126503029548654593","Wed Oct 19 03:40:47 +0000 2011","http://t.co/Izh7KaiU #google #nexus #prime <3 #android" +"google","positive","126502415322193920","Wed Oct 19 03:38:20 +0000 2011","Interesting bookcase..RT @VentureBeat: #Google releases an Infinite Digital Bookcase http://t.co/urxJixCe by @MeghanKel" +"google","positive","126502014560649216","Wed Oct 19 03:36:45 +0000 2011","#google is my bff rite about now!" +"google","positive","126501732443361280","Wed Oct 19 03:35:37 +0000 2011","have to finish faster the only way is to google it!!!! #google :D" +"google","positive","126501468902658048","Wed Oct 19 03:34:35 +0000 2011","Good to see #Google finally turning their focus on user experience. http://t.co/AYhOKgo8 #IceCreamSandwich #Android" +"google","positive","126500614552289282","Wed Oct 19 03:31:11 +0000 2011","ICS looks awesome, please let me have it on my phone! #android #google #motorola" +"google","positive","126499965869625345","Wed Oct 19 03:28:36 +0000 2011","Iphone 4s almost had me ... ALMOST !! Ice Cream Sandwich FTW !! http://t.co/p32Ehduq #android #google" +"google","positive","126499581520384001","Wed Oct 19 03:27:05 +0000 2011","RT @Sarahhhhh112: @_xmas_carroll Yeah I got #skype and #google+ and #twitter so who needs facebook?" +"google","positive","126499428965158912","Wed Oct 19 03:26:28 +0000 2011","There you go - according to http://t.co/4Y8aS4zz #Google+ is builtin to the OS/Nexus line? Smart move :)" +"google","positive","126499145014980608","Wed Oct 19 03:25:21 +0000 2011","Oh yes, and now - The Android Beam! Alright, I made the right decision to be on Team #Google, Team #Android!" +"google","positive","126499143282737152","Wed Oct 19 03:25:20 +0000 2011","I just add my profile on the #Google+ WebGL Globe project. Add yours ! http://t.co/UWeYalKb via @AddThis" +"google","positive","126498734409396224","Wed Oct 19 03:23:43 +0000 2011","Thanks Android for admitting you're ugly. New font is a good start! #ics #google" +"google","positive","126498608815149056","Wed Oct 19 03:23:13 +0000 2011","Ice Cream Sandwich's Face Unlock really works! http://t.co/2SjN2BzZ #icecreamsandwich #google #galaxynexus" +"google","positive","126498587499696128","Wed Oct 19 03:23:08 +0000 2011","I'm so ready for Ice Cream Sandwich! #ICS #Nexus #Google #Android #SiriWho? http://t.co/Cjonncvk @GetGlue #Android" +"google","positive","126497976314109952","Wed Oct 19 03:20:42 +0000 2011","Mmmm Ice Cream Sandwich #Android #Google" +"google","positive","126497860752646146","Wed Oct 19 03:20:14 +0000 2011","Just seen a taste of #Google's ice cream sandwich, now I want a bite" +"google","positive","126497655785402368","Wed Oct 19 03:19:26 +0000 2011","Probably the best #IceCreamSandwich (yet)! #Samsung and #Google's event live blog by @engadget! Why haven't I switched to #Android again?" +"google","positive","126496772586610688","Wed Oct 19 03:15:55 +0000 2011","RT @YashasJoshi: #android 4.0!! Ice Cream Sandwich will now make all other smartphone operating systems look outdated. #Androidbeam Wo ..." +"google","positive","126496739531304960","Wed Oct 19 03:15:47 +0000 2011","Instant photo sharing and the people app are what's best in Ice Cream Sandwich, IMO. #google #ics" +"google","positive","126496342901133313","Wed Oct 19 03:14:12 +0000 2011","The new android #Nexus phone makes the #iPhone look like a cheap toy at dollar store! 
#tcot #android #google #RealEstate @Burkepatch" +"google","positive","126496262668292096","Wed Oct 19 03:13:53 +0000 2011","Sweet Ice Cream Sandwich RT @Pocketlint Google Android 4.0 Ice Cream Sandwich officially detailed #google #ics http://t.co/iUY0DF5S" +"google","positive","126496155856142336","Wed Oct 19 03:13:28 +0000 2011","""@SamsungMobileCA: Raise your hand if you now want an #Android4 #IceCreamSandwich powered #GALAXYNexus phone! #Google #Samsung""-I do!" +"google","positive","126496005809127424","Wed Oct 19 03:12:52 +0000 2011","Siri, which #Android device should I replace my #iPhone with? #Apple vs. #Google" +"google","positive","126495843116265475","Wed Oct 19 03:12:13 +0000 2011","The new Google Nexus page is live! - http://t.co/zrqoeWxI #nexus #google #android #icecreamsandwich" +"google","positive","126495812724338688","Wed Oct 19 03:12:06 +0000 2011","I'm most excited about Android beam & face detection unlock for Android ICS. #AndroidBeam #Google #IceCreamSandwich" +"google","positive","126495283176685569","Wed Oct 19 03:10:00 +0000 2011","Are you using #Google+ and #Linkedin yet? These are essential tools to get your company and/or you on the FIRST page of Google. Contact..." +"google","positive","126494976396898305","Wed Oct 19 03:08:47 +0000 2011","RT @SebasCoulombe: Thanks to Samsung and Google, I'm now craving for some Ice Cream Sandwich ! #IceCreamSandwich #Google #Samsung" +"google","positive","126494883367235585","Wed Oct 19 03:08:25 +0000 2011","Introducing Galaxy Nexus. Simple, beautiful, beyond smart http://t.co/qaMj3MeR #android #ICS #google #samsung" +"google","positive","126494838689513473","Wed Oct 19 03:08:14 +0000 2011","So glad #Google adopted a design philosophy for #Android 4.0 a.k.a It shows soooo much! This is the polish I've been waiting for!" +"google","positive","126494645281755136","Wed Oct 19 03:07:28 +0000 2011","Thoughts on Android 4.0 #ICS? Excited to play with the new features. #android #google" +"google","positive","126494573966016512","Wed Oct 19 03:07:11 +0000 2011","Register now for your Galaxy Nexus :) http://t.co/37WkIoXC #Android #Google" +"google","positive","126494442290020352","Wed Oct 19 03:06:39 +0000 2011","Wow! RT @dalmaer: WebGL infinite bookcase UI http://t.co/aijYsfHO #google" +"google","positive","126494344566943744","Wed Oct 19 03:06:16 +0000 2011","#Google + #Samsung = Perfect #Icecream sandwich #GalaxyNexus" +"google","positive","126494339248562176","Wed Oct 19 03:06:15 +0000 2011","Thanks to Samsung and Google, I'm now craving for some Ice Cream Sandwich ! #IceCreamSandwich #Google #Samsung" +"google","positive","126494280318582784","Wed Oct 19 03:06:01 +0000 2011","Loved the Google release of Android OS 4.0 Ice Cream Sandwich! #Samsung #Android #Google #IceCreamSandwich #amazing!" +"google","positive","126494221879357440","Wed Oct 19 03:05:47 +0000 2011","Google ICS looks awesome, can't wait til it gets ported over to my evo, face unlock?! ... #android #google" +"google","positive","126494100252925954","Wed Oct 19 03:05:18 +0000 2011","#ICS is Sick, good work #google team" +"google","positive","126494070385283072","Wed Oct 19 03:05:11 +0000 2011","I gotta say, Google's got some pretty catchy advertisements for Android and Chrome. #google #android" +"google","positive","126493889761787904","Wed Oct 19 03:04:28 +0000 2011","@jessecablek I want it too. Now just wondering who will be carrying it in November. 
Or direct purchase from #google / #samsung" +"google","positive","126493850914131968","Wed Oct 19 03:04:18 +0000 2011","#Google's #Nexus really i wanna it now #Awesome" +"google","positive","126493722916560896","Wed Oct 19 03:03:48 +0000 2011","Well the event is now over, time to change my pants #Google #Android #Samsung #Jizz" +"google","positive","126493648757071873","Wed Oct 19 03:03:30 +0000 2011","I'm an iOs user but the #ICS is awesome, great job #google" +"google","positive","126493639605092352","Wed Oct 19 03:03:28 +0000 2011","Oh yeah I'm in I want one now #Google really did a great job #ICS" +"google","positive","126493624270725120","Wed Oct 19 03:03:24 +0000 2011","RT @ThatTechGuy92: LITERALLY HAD MY MIND BLOWN AWAY JUST NOW!!!!! #Google #Samsung" +"google","positive","126493538945994752","Wed Oct 19 03:03:04 +0000 2011","@BrandonMiniman @Jaime_Rivera I'm an iOs user but the #ICS is awesome, great job #google" +"google","positive","126493495933403136","Wed Oct 19 03:02:54 +0000 2011","#google + #motorola + #verizon = perfect combination! <3" +"google","positive","126493472931844096","Wed Oct 19 03:02:48 +0000 2011","Loved the introduction of the Google Galaxy Nexus & Ice Cream Sandwich. Really looking forward to my new phone #android #ics #nexus #google" +"google","positive","126493405760069632","Wed Oct 19 03:02:32 +0000 2011","GO, GO, GO Ice Cream Sandwich developers! #Android SDK r14 is LIVE @ http://t.co/OeUrmtLX! #Google" +"google","positive","126493371790397440","Wed Oct 19 03:02:24 +0000 2011","#android 4.0!! Ice Cream Sandwich will now make all other smartphone operating systems look outdated. #Androidbeam Wowww.. #google" +"google","positive","126493365775777792","Wed Oct 19 03:02:23 +0000 2011","Galaxy Nexus = #Google #Android #Samsung - the dream team! :) http://t.co/R5juJSBr" +"google","positive","126493265661919232","Wed Oct 19 03:01:59 +0000 2011","yumyumyum...I want Ice Cream Sandwich... #google #android" +"google","positive","126493192110612480","Wed Oct 19 03:01:41 +0000 2011","Soon. Getting ready for the party. #google #android #icecreamsandwich http://t.co/eZjCzLt6" +"google","positive","126493141271449600","Wed Oct 19 03:01:29 +0000 2011","RT @SamsungMobileCA: Raise your hand if you now want an #Android4 #IceCreamSandwich powered #GALAXYNexus phone! #Google #Samsung" +"google","positive","126493101807239168","Wed Oct 19 03:01:20 +0000 2011","All this talk about about how good #icecreamsandwich looks is making me hungry #google #omnomnomnom" +"google","positive","126493037110099968","Wed Oct 19 03:01:04 +0000 2011","O. M. G. #Google is nuts with #ICS and this new phone!" +"google","positive","126493034014720000","Wed Oct 19 03:01:04 +0000 2011","iPhone users find the nearest trash for your device it's the correct place ! #Android #Apple #Google #iPhone" +"google","positive","126492983968268289","Wed Oct 19 03:00:52 +0000 2011","#Google / #Samsung #Galaxy #Nexus #ICS press release is now done, I'll go back to my regular tweeting intervals now :) I WANT IT!" +"google","positive","126492981460082688","Wed Oct 19 03:00:51 +0000 2011","Raise your hand if you now want an #Android4 #IceCreamSandwich powered #GALAXYNexus phone! #Google #Samsung" +"google","positive","126492970793971712","Wed Oct 19 03:00:49 +0000 2011","Ready for some Ice Cream Sandwich! #google #android" +"google","positive","126492852615262208","Wed Oct 19 03:00:20 +0000 2011","Alright, well #Apple just got its ass kicked in the face by #Google so hard with its new Googles #Nexus phone. 
It will be brilliant." +"google","positive","126492838870515713","Wed Oct 19 03:00:17 +0000 2011","LITERALLY HAD MY MIND BLOWN AWAY JUST NOW!!!!! #Google #Samsung" +"google","positive","126492656124702721","Wed Oct 19 02:59:33 +0000 2011","What I think about #AndroidIceCreamSandwich ? Thats the most delicious piece of sandwich ever. Get me it real fast #GOOGLE !" +"google","positive","126492628152889344","Wed Oct 19 02:59:27 +0000 2011","iphone users i reaaly feel sorry for you after i saw ice-cream sandwich #Android today, #Google #apple #Android #iPhone" +"google","positive","126492595500236800","Wed Oct 19 02:59:19 +0000 2011","Pretty big deal from #Samsung and #Google def can't wait to get my hands on the #GalaxyNexus and #ICS" +"google","positive","126492495394775043","Wed Oct 19 02:58:55 +0000 2011","I cannot wait for a new #ICS phone!!! #Google #android" +"google","positive","126492415749128192","Wed Oct 19 02:58:36 +0000 2011","RT @eboyee: Dostupan SDK - WOW R.E.S.P.E.C.T. #Google #IceCreamSandwich" +"google","positive","126492266016673794","Wed Oct 19 02:58:00 +0000 2011","Dostupan SDK - WOW R.E.S.P.E.C.T. #Google #IceCreamSandwich" +"google","positive","126492259993657345","Wed Oct 19 02:57:59 +0000 2011","OMG the #Android 4.0 SDK is available NOW @ http://t.co/OeUrmtLX!! #Google" +"google","positive","126492242373386240","Wed Oct 19 02:57:55 +0000 2011","The new #Samsungnexus and 4.0 OS make the #iPhone4s look like an old bag phone. Nice job #Google and #Samsung." +"google","positive","126492101549621249","Wed Oct 19 02:57:21 +0000 2011","Calling all #marketing executives in #houston - sign up for #google #training - http://t.co/vo542MrZ - I can't wait to be there for the week" +"google","positive","126492040954527744","Wed Oct 19 02:57:07 +0000 2011","Android Beam - sharing contacts, articles, files, and games, pretty cool #Google #ICS" +"google","positive","126492009748889600","Wed Oct 19 02:56:59 +0000 2011","Slick beam capabilities included in Android 4.0. Touch 2 devices together and select ""Beam"" and content is transferred #Google #Android #ICS" +"google","positive","126491707247300608","Wed Oct 19 02:55:47 +0000 2011","really digging the #UI of of ""#Quick #Response"" from #Google android great for politely declining calls lol" +"google","positive","126491689396338688","Wed Oct 19 02:55:43 +0000 2011","goodbye Bump ... #Google #ICS" +"google","positive","126491556831182848","Wed Oct 19 02:55:11 +0000 2011","@WPdesignteam @joebelfiore @windowsphone - You guys just taught #Google #Android how to design a decent phone. So much Metro. Good Job guys!" +"google","positive","126491420260442112","Wed Oct 19 02:54:39 +0000 2011","People App in #ICS looks promising! As usual #Google is good at integrating different apps and make it a total new user experience!" +"google","positive","126491304036278272","Wed Oct 19 02:54:11 +0000 2011","On a positive note, advanced control of your data usage is a really great idea ;) #google #nexus" +"google","positive","126491109982613505","Wed Oct 19 02:53:25 +0000 2011","#google #icecreamsandwich totally blowing #Apple iShit away..." +"google","positive","126491084854530049","Wed Oct 19 02:53:19 +0000 2011","I love Ice Cream Sandwich :) #Android #Google #Samsung" +"google","positive","126491009558392832","Wed Oct 19 02:53:01 +0000 2011","#NerdBoner in full effect. Watching #Samsung #Google #NexusPrime #IceCreamSandwich live event! 
+http://t.co/Rk9kPHss" +"google","positive","126490970803027969","Wed Oct 19 02:52:52 +0000 2011","Great, live contact management, quick contact card, add people directly to.your home screen #Android4.0 #Google" +"google","positive","126490696214511616","Wed Oct 19 02:51:46 +0000 2011","#Google has done it again ..... #IceCream Sandwich" +"google","positive","126490548306579457","Wed Oct 19 02:51:11 +0000 2011","New contact app builds on a magazine style layout and aggregates contact info from multiple sources. Very slick. #Google #Android #ICS" +"google","positive","126490282878443520","Wed Oct 19 02:50:08 +0000 2011","Nexus Prime looks NICE. #google #android" +"google","positive","126490137944268800","Wed Oct 19 02:49:33 +0000 2011","New Galaxy Nexus: Video looks awesome! #nexus #samsung #google #android bit.ly/nEJbyE" +"google","positive","126489924596793345","Wed Oct 19 02:48:42 +0000 2011","Infinite Spiral Bookcase for #Google Books looks amazing, interested to see where it is headed: http://t.co/RXWD6sN8" +"google","positive","126489848004608000","Wed Oct 19 02:48:24 +0000 2011","New Galaxy Nexus: App Improvements - Inbuilt Panoramic Pictures #nexus #samsung #google #android bit.ly/nEJbyE" +"google","positive","126489841096597504","Wed Oct 19 02:48:22 +0000 2011","With the new #Google #Android 4.0 Ice-Cream Sandwich, you can take Panorama picture right Out-of-the-Box... SWEET!" +"google","positive","126489713782685696","Wed Oct 19 02:47:52 +0000 2011","RT @RespectMaSwagg: #ThingsWeAllLove ---->> #Google" +"google","positive","126489671730597888","Wed Oct 19 02:47:42 +0000 2011","Camera app now has Panorama capabilities! #Google #Android #ICS" +"google","positive","126489424807735296","Wed Oct 19 02:46:43 +0000 2011","I want some of these #google toys" +"google","positive","126489397507014656","Wed Oct 19 02:46:37 +0000 2011","Good god the camera app in Icecream Sandwich is amazing.. and that camera is FAST! #samsung #google #android #galaxyNexus" +"google","positive","126489393526616064","Wed Oct 19 02:46:36 +0000 2011","Better gallery app to be included with Android 4.0 as well. #Google #Android #ICS" +"google","positive","126489263025033216","Wed Oct 19 02:46:05 +0000 2011","#ics shutter speed is insane! Wow! Two thumbs up #google" +"google","positive","126489200567664640","Wed Oct 19 02:45:50 +0000 2011","Android 4.0 FINALLY includes photo editing tools. #Google #Android #ICS" +"google","positive","126489182263721984","Wed Oct 19 02:45:45 +0000 2011","New Galaxy Nexus: OS /Hardware Improvements - Camera Zero Shutter Lag #nexus #samsung #google #android bit.ly/nEJbyE" +"google","positive","126489085102661632","Wed Oct 19 02:45:22 +0000 2011","Data Usage feature looks amazing and useful! #Google #GalaxyNexus" +"google","positive","126489066077302784","Wed Oct 19 02:45:18 +0000 2011","Wow, very fast camera on Galaxy Nexus - faster than N9 i think :o #google" +"google","positive","126488933772169216","Wed Oct 19 02:44:46 +0000 2011","RT @Mrgareth: ""Top Three 'playing nice' scores were #Google, #Salesforce and #Xero"" in @itnews_au ""Which #Clouds Play Nice"" report: http ..." +"google","positive","126488863374983168","Wed Oct 19 02:44:29 +0000 2011","The new camera app along with the ""zero delay"" camera on the Galaxy Nexus is very nice! 
#Google #Android #ICS" +"google","positive","126488589046517760","Wed Oct 19 02:43:24 +0000 2011","Unbelievable Data Usage info on Ice Cream Sandwich + +#icecreamsandwich #googland #google #android #ics" +"google","positive","126488572772622336","Wed Oct 19 02:43:20 +0000 2011","Data usage in ICS.. Looks like I don't need Watchdog 3G Pro anymore.. hah! #Android #ICS #Google" +"google","positive","126488519265894400","Wed Oct 19 02:43:07 +0000 2011","This Data Usage feature looks amazing on ICS. #android #samsung #google #GALAXYNexus" +"google","positive","126488454921068544","Wed Oct 19 02:42:52 +0000 2011","Icecream Sandwich looking good so far. #android #google" +"google","positive","126488447098695680","Wed Oct 19 02:42:50 +0000 2011","@kymbersanden #google my favorite study guide" +"google","positive","126488384410619906","Wed Oct 19 02:42:35 +0000 2011","Ice cream Sandwich looks awesome #Google #ICS." +"google","positive","126488304341360640","Wed Oct 19 02:42:16 +0000 2011","New Galaxy Nexus: OS Improvements - Data Usage tools inbuilt #nexus #samsung #google #android bit.ly/nEJbyE" +"google","positive","126488294325370880","Wed Oct 19 02:42:14 +0000 2011","#Google #Android Icecream Sandwich OS for Nexus Prime Released! [LIVE BLOG] | http://t.co/LF03ZKUz #technology #hk" +"google","positive","126488249739915264","Wed Oct 19 02:42:03 +0000 2011","what i love about #GOOGLE it always shows me that am not the only retarded person on this planet when am searching for stupid things :D" +"google","positive","126488202935664640","Wed Oct 19 02:41:52 +0000 2011","RT @SamsungMobileCA: #GALAXYNexus has arrived! #Samsung #Google" +"google","positive","126488018616987648","Wed Oct 19 02:41:08 +0000 2011","New calendar app with pinch to zoom capabilities. Far superior to the current one! #Google #Android #ICS" +"google","positive","126487844830191617","Wed Oct 19 02:40:26 +0000 2011","New Galaxy Nexus: App Improvements - Gmail offline search of last 30 days #nexus #samsung #google #android bit.ly/nEJbyE" +"google","positive","126487523366150144","Wed Oct 19 02:39:10 +0000 2011","I love how it syncs your #Chrome bookmarks with your phone! #icecreamsandwich #google" +"google","positive","126487508589621248","Wed Oct 19 02:39:06 +0000 2011","Thank god Gmail app has been redesigned under Android 4.0. Conversations automatically expanded. #Google #Android #ICS" +"google","positive","126487332865056768","Wed Oct 19 02:38:24 +0000 2011","Seriously!? Why do I still find the yellow pages ph book at my doorstep? These days I just #google it on my iphone." +"google","positive","126487172487462912","Wed Oct 19 02:37:46 +0000 2011","#excellent #AngryBirds #Google #Chrome #Commercial 1080p HD http://t.co/LLsLrxFM via @youtube" +"google","positive","126487153655029760","Wed Oct 19 02:37:42 +0000 2011","Watching the Android announcement from Google and Samsung. IceCream Sandwich looks really interesting. http://t.co/oeZK9iV1 #google #android" +"google","positive","126487090723700737","Wed Oct 19 02:37:27 +0000 2011","As an iPhone user, I think the Samsung Nexus Galaxy looks great. And Ice Cream Sandwich looks fantastic. http://t.co/Z9IDoNi7 #Google" +"google","positive","126487082905518081","Wed Oct 19 02:37:25 +0000 2011","WHAT!!!!!! #Google #Chrome syncs with your #Android device's bookmarks!?!?! NICE!" +"google","positive","126486849706401792","Wed Oct 19 02:36:29 +0000 2011","The Samsung Galaxy Nexus and Ice Cream Sandwich are sick! 
#android #icecreamsandwich #google" +"google","positive","126486332104130561","Wed Oct 19 02:34:26 +0000 2011","Inline keyboard spell check is nice #ICS with native voice dictation. #Android #Google" +"google","positive","126486322675318784","Wed Oct 19 02:34:23 +0000 2011","I can't wait to get the new #Galaxy #Nexus with #IceCreamSandwich... best yet it supports my #Google Wallet http://t.co/bqMuHxkK via @cnet" +"google","positive","126485171280166913","Wed Oct 19 02:29:49 +0000 2011","ICS on the Galaxy Nexus has very smooth and responsive screen and widget navigation. #Google #Android #ICS" +"google","positive","126484977176158208","Wed Oct 19 02:29:03 +0000 2011","New Galaxy Nexus: OS Improvements - Built on Ice Cream Sandwich version of Android #nexus #samsung #google #android bit.ly/nEJbyE" +"google","positive","126484769285480448","Wed Oct 19 02:28:13 +0000 2011","New Galaxy Nexus: OS Improvements - Stacks & Scrolling introduced for widgets for richer content #samsung #google #android bit.ly/nEJbyE" +"google","positive","126484631389356032","Wed Oct 19 02:27:40 +0000 2011","Updated live wallpaper. Virtual buttons that disappear when you are in full screen mode. More awesome widgets. #Android #Google" +"google","negative","126534476875567104","Wed Oct 19 05:45:44 +0000 2011","There are too many #google services that do not work when you are a #googleapps user. #googleplus above all + profiles + #plusone" +"google","negative","126533446293127168","Wed Oct 19 05:41:39 +0000 2011","Starting to doubt #google android policy: no released android sources since 2.3.3! Curious for an opensource project to say the least..." +"google","negative","126532210210783232","Wed Oct 19 05:36:44 +0000 2011","Youtube video views are still not updating properly. #google #youtube #fail" +"google","negative","126527166195314688","Wed Oct 19 05:16:41 +0000 2011","Did #Google really just highlight a new font #Roboto as part of their #ICS announcement for #Android...?! Seriously." +"google","negative","126526648928579584","Wed Oct 19 05:14:38 +0000 2011","RT @businessuplift: Why #facebook and #google have got it wrong http://t.co/PDbF7H3f" +"google","negative","126520550876127232","Wed Oct 19 04:50:24 +0000 2011","#google+ is really confusing.." +"google","negative","126520337289580544","Wed Oct 19 04:49:33 +0000 2011","is mulling #google+ but hmmm, just aint shore bout nuther social media profile." +"google","negative","126518920122335233","Wed Oct 19 04:43:55 +0000 2011","ATHEN's review of #Google Docs #accessibility shows a lot of work needs to be done. http://t.co/rad9WdSw #a11y HT @chadleaman" +"google","negative","126517970179600385","Wed Oct 19 04:40:09 +0000 2011","Gonna dump one of my gmail accounts. not only do I get tons of spam I get tons of false positives. Not cool. #Google" +"google","negative","126517608697708545","Wed Oct 19 04:38:43 +0000 2011","The awkward moment when google #chrome take a hell of a lot time to load #googleplus #google" +"google","negative","126516572343910400","Wed Oct 19 04:34:36 +0000 2011","#Google #Cloud Service Blackouts Threaten Cloud Users: During a 30-day period in August and September, for ... http://t.co/G0ElVRVJ #TCN" +"google","negative","126516408317251585","Wed Oct 19 04:33:56 +0000 2011","Why Google+ Has Uphill Battle Vs. 
Facebook: Sean Parker http://t.co/L8RSX1Yf #google" +"google","negative","126515822750478337","Wed Oct 19 04:31:37 +0000 2011","RT @LCmediaHouse: 4chan's Chris Poole: #Facebook & #Google Are Doing It Wrong http://t.co/ZIxfa91N | via @RWW #socialmedia" +"google","negative","126515697181409280","Wed Oct 19 04:31:07 +0000 2011","Lecture Notes in Financial Economics pdf ebook: http://t.co/7gJQvEwN #google" +"google","negative","126514511862706176","Wed Oct 19 04:26:24 +0000 2011","I hate tron theme #Google #ICS" +"google","negative","126507644742672384","Wed Oct 19 03:59:07 +0000 2011","Now, really #google how hypocritical is this? http://t.co/6nHuYFeh #analytics" +"google","negative","126506609823334400","Wed Oct 19 03:55:00 +0000 2011","Codename or not, awesome or not, I have a hard time taking ANYTHING called 'Ice Cream Sandwich' seriously. #google #android #sticky #melts" +"google","negative","126505594290057216","Wed Oct 19 03:50:58 +0000 2011","RT @IAmDkT: The only letdown of the Samsung Galaxy Nexus phone is the 5.0 MP Camera. #ICS #Samsung #SamsungGalaxyNexus #SGN #Android #Google" +"google","negative","126503098071007232","Wed Oct 19 03:41:03 +0000 2011","#Google introduce new Android tablet called 'Ice Cream Sandwich'? I'm a #Singaporean, and I'm (very) confused." +"google","negative","126502732369629184","Wed Oct 19 03:39:36 +0000 2011","#ICS introduces Facial recognition to unlock phone, however - the demo didn't go as planned.http://engt.co/q2KeC7 #Google Goof up! #Android" +"google","negative","126502274204831744","Wed Oct 19 03:37:47 +0000 2011","Ice cream sandwich, really #google?" +"google","negative","126501869920075776","Wed Oct 19 03:36:10 +0000 2011","@kursed #Google is terrible at presentations. Hated the terrible camera interference from the huge screen behind the speakers. #headache" +"google","negative","126499555742203904","Wed Oct 19 03:26:58 +0000 2011","just watched the #GalaxyNexus announcement by #Google and #Samsung.. The only thing keeping me from buying? It's only on #Verizon. #FAIL" +"google","negative","126497996350304257","Wed Oct 19 03:20:47 +0000 2011","@MrTonySays I tried to #Google the word #Visigoth and apparently #Google didn't know either cause my phone froze." +"google","negative","126497929593761792","Wed Oct 19 03:20:31 +0000 2011","RT @SHlFT: dear #google & #samsung... learn some presentation 101 first. please." +"google","negative","126497514168922112","Wed Oct 19 03:18:52 +0000 2011","The only letdown of the Samsung Galaxy Nexus phone is the 5.0 MP Camera. #ICS #Samsung #SamsungGalaxyNexus #SGN #Android #Google" +"google","negative","126497333209858049","Wed Oct 19 03:18:09 +0000 2011","#Google, I've blocked your global IP network from all my sites. J/K. Stop being evil." +"google","negative","126496987192373248","Wed Oct 19 03:16:46 +0000 2011","BOO to no GoogleMusic news, US carrier news, or price points. #google #ICS #android" +"google","negative","126495219058348032","Wed Oct 19 03:09:45 +0000 2011","From it's laughable file: #google will #encrypt #search results BUT only if U have ""logged in"" so they can maximize their #data #collection!" +"google","negative","126494895501348864","Wed Oct 19 03:08:27 +0000 2011","@adiman_ #google #fail" +"google","negative","126494402381225984","Wed Oct 19 03:06:30 +0000 2011","+1 RT @SHlFT: dear #google & #samsung... learn some presentation 101 first. please." +"google","negative","126494319749238784","Wed Oct 19 03:06:10 +0000 2011","dear #google & #samsung... learn some presentation 101 first. 
please." +"google","negative","126494303173361664","Wed Oct 19 03:06:06 +0000 2011","+1 RT @tah_med: the background is SO horrible for this stream #Samsung #Google #NexusPrime they should've seen what #Adobe did at #AdobeMAX" +"google","negative","126494261922381824","Wed Oct 19 03:05:56 +0000 2011","WHERE THE HECK IS THE @ANDROID SOURCE CODE ?? WTF #GOOGLE" +"google","negative","126493910037037058","Wed Oct 19 03:04:32 +0000 2011","Not greatly impressed with #Google and #Samsung presentation skills." +"google","negative","126493616519647232","Wed Oct 19 03:03:23 +0000 2011","Android ice-cream sandwich's ""Face Unlock"" fails to perform recognition when demo'ed on stage :D +#google #Nexus http://t.co/CSAeFRHc" +"google","negative","126493030185316352","Wed Oct 19 03:01:03 +0000 2011","good job, #google. did anyone get when any of this shit is actually shipping? #infofail" +"google","negative","126492741764005890","Wed Oct 19 02:59:54 +0000 2011","We need dates! #google #samsung #launchfail" +"google","negative","126492244151771137","Wed Oct 19 02:57:55 +0000 2011","Also, the #Google problem. @chucktodd: Problem for Santorum's electability argument: the double-digit loss in 2006" +"google","negative","126491870900666368","Wed Oct 19 02:56:26 +0000 2011","They need to talk about the battery life impact of #AndroidBeam. It can't be good news. #Google #Android #ICS" +"google","negative","126491825660887040","Wed Oct 19 02:56:16 +0000 2011","another android demo #fail! #google" +"google","negative","126491776482689024","Wed Oct 19 02:56:04 +0000 2011","Too bad #google didn't have a tribute graphic for #DennisRitchie." +"google","negative","126491509527805952","Wed Oct 19 02:55:00 +0000 2011","Presentation fail. #Google #Samsung" +"google","negative","126491502493962240","Wed Oct 19 02:54:58 +0000 2011","The #Google guys are illustrating the perils of a live demonstration admirably. #GalaxyNexus #ICS #Android" +"google","negative","126491480087986176","Wed Oct 19 02:54:53 +0000 2011","Android Crashing: 1, Google 'Quick Response' Feature: 0 #Google #ICS #Android #QuickResponse" +"google","negative","126490767345725441","Wed Oct 19 02:52:03 +0000 2011","Why is #Google advertising Adwords Express on #Bing? Hmmm, I know Ballmer is happy :-D. #Google, your already a monopoly." +"google","negative","126490750958567424","Wed Oct 19 02:51:59 +0000 2011","#Google People app...the next revolution in stalking." +"google","negative","126490477288636416","Wed Oct 19 02:50:54 +0000 2011","first thing on #google's to do list: hire a performance coach." +"google","negative","126489936944832512","Wed Oct 19 02:48:45 +0000 2011","#Samsung, #Google Unveil Phone http://t.co/hOB37hbO" +"google","negative","126488700975726593","Wed Oct 19 02:43:51 +0000 2011","Why is #Google whoring for SAMSUNG? #ICS" +"google","negative","126487669533442048","Wed Oct 19 02:39:45 +0000 2011","If #Google don't fix pinch-zoom on #Android 2.3.6 soon, I might have to leave the fold and go #CyanogenMod" +"google","negative","126487604035198976","Wed Oct 19 02:39:29 +0000 2011","facial recognition failed #IceCreamSandwich jajajajajajaj #FacialUnlock #samsumg #Google" +"google","negative","126487206570373120","Wed Oct 19 02:37:54 +0000 2011","Ouch, crash and burn. #Android facial recognition in #IceCreamSandwich didn't work during the presentation. 
http://t.co/6vsasZyT #Google" +"google","negative","126487152338026496","Wed Oct 19 02:37:41 +0000 2011","Google is gonna need to do better than this to beat #iOS #Android #icecreamsandwich #Google http://t.co/tBNyX7p7" +"google","negative","126486838549557248","Wed Oct 19 02:36:26 +0000 2011","RT @RoycinD: Face Unlock ! #ICS Literally knows your face! #Google #galaxynexus FAILED !" +"google","negative","126486654830645249","Wed Oct 19 02:35:43 +0000 2011","#fail on the face recognition not working #google #samsung" +"google","negative","126486634458914816","Wed Oct 19 02:35:38 +0000 2011","The face recognition unlock would be cool if it works. #android #google #icecreamsandwich" +"google","neutral","126535080557551616","Wed Oct 19 05:48:08 +0000 2011","On Google+ then go here http://t.co/oTBwkxo9 #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126535043588964352","Wed Oct 19 05:47:59 +0000 2011","Who else is on g+ ? Add for add? Let me know. #TFB #TAF #google+" +"google","neutral","126535016049160192","Wed Oct 19 05:47:53 +0000 2011","Have anyone of you found a Twitter to Google+ tool? #Google #Twitter" +"google","neutral","126534917290070016","Wed Oct 19 05:47:29 +0000 2011","#Google unveils next iteration of #Android - #icecreamsandwich - with several revamps incl facial recognition security http://t.co/xMrZVkav" +"google","neutral","126534362148782080","Wed Oct 19 05:45:17 +0000 2011","Google Offers to compete with Groupon, Living Social - Pittsburgh Post-Gazette http://t.co/Wwj3xvdm #google" +"google","neutral","126534144170790912","Wed Oct 19 05:44:26 +0000 2011","What does it take to get a job at #Google? #recruiting http://t.co/eVjiH7oD" +"google","neutral","126534083797987328","Wed Oct 19 05:44:11 +0000 2011","Samsung Galaxy Nexus, Android Ice Cream Sandwich unveiled http://t.co/wWrYcM04 #android #google #samsung" +"google","neutral","126534080627093504","Wed Oct 19 05:44:10 +0000 2011","http://t.co/e5ClGzsI #google" +"google","neutral","126533944832311298","Wed Oct 19 05:43:37 +0000 2011","#Google #Ice Cream Sandwich (#Android 4.0): a hands-on ##screenshot #gallery http://t.co/bpZRz337" +"google","neutral","126533740628422656","Wed Oct 19 05:42:49 +0000 2011","Now it's time for some Ice Cream Sandwich! #Samsung launched it's latest Android Galaxy Nexus phone powered by #Google" +"google","neutral","126533730671149056","Wed Oct 19 05:42:47 +0000 2011","in the theater .. there are are a good 300 ppl here. #guae #google #dubai http://t.co/OjykYcfo" +"google","neutral","126533567311392769","Wed Oct 19 05:42:07 +0000 2011","#Android #Google Samsung Galaxy Nexus official hands-on [Video] http://t.co/6GX3R9MT #DhilipSiva" +"google","neutral","126533547803680770","Wed Oct 19 05:42:03 +0000 2011","RT @NeowinFeed: Android 4.0 Ice Cream Sandwich announcement round-up #android #google http://t.co/L5m9uwD7 #neowin" +"google","neutral","126533491495157760","Wed Oct 19 05:41:49 +0000 2011","RT @iPadPowerUsers On Google+ then go here http://t.co/1OGs2xA9 #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126533487590248448","Wed Oct 19 05:41:48 +0000 2011","Did #Facebook Just Beat #Google At Its Own Game? http://t.co/SiJsB3ME #socialmedia" +"google","neutral","126533485837037568","Wed Oct 19 05:41:48 +0000 2011","Did #Facebook Just Beat #Google At Its Own Game? 
http://t.co/xVonCBeb #socialmedia" +"google","neutral","126532820507176960","Wed Oct 19 05:39:09 +0000 2011","#Google Says ICS Is Coming To The #Nexus S And “Theoretically†Should Work With 2.3 Devices http://t.co/TY0HDQAm" +"google","neutral","126532798994583552","Wed Oct 19 05:39:04 +0000 2011","Unwrapping Ice Cream Sandwich on the Galaxy Nexus http://t.co/pw4mfbSe #Google" +"google","neutral","126532569259970560","Wed Oct 19 05:38:10 +0000 2011","Bitch please your vagina has been used more than #google ." +"google","neutral","126532361587396610","Wed Oct 19 05:37:20 +0000 2011","#Google #IceCreamSandwich Debuts as IPhone Sets Record: Tech. http://t.co/TrENUYF4" +"google","neutral","126532316859338752","Wed Oct 19 05:37:09 +0000 2011","RT @aljonkar: The new Samsung Galaxy nexus.. + +http://t.co/UtbpXrsj + +#Android +#Samsung +#Google +#WoW + +CC @kha1989led +@eisaahmad" +"google","neutral","126532178082410496","Wed Oct 19 05:36:36 +0000 2011","#Google search to have default encryption http://t.co/rwgKPbGu" +"google","neutral","126532141302558720","Wed Oct 19 05:36:28 +0000 2011","#google- http://t.co/Hkk3qMMe" +"google","neutral","126532121773883392","Wed Oct 19 05:36:23 +0000 2011","Google Defaults to Encrypted HTTPS Searches for Logged In Users #Google http://t.co/6yFZvoxS" +"google","neutral","126532064823619584","Wed Oct 19 05:36:09 +0000 2011","#Samsung & #Google unveil latest #Android #phone using #IceCreamSandwich http://t.co/eHoeBMDW" +"google","neutral","126532054136524800","Wed Oct 19 05:36:07 +0000 2011","RT @webseoanalytics: #SEO Tip:Google considers the first lines of the page important.Put menu,headers+important text on top. #google" +"google","neutral","126532019999096832","Wed Oct 19 05:35:59 +0000 2011","@WEirDOsRunSHIT lol #google" +"google","neutral","126531893649874945","Wed Oct 19 05:35:28 +0000 2011","@tando_m @nonimsi sorry to butt in but there is one :) I can't remember what it's called though. #Google" +"google","neutral","126531814071336960","Wed Oct 19 05:35:09 +0000 2011","#Android #Google Android 4.0 Ice Cream Sandwich SDK now Available for Download http://t.co/Fue4VXxe #DhilipSiva" +"google","neutral","126531693522857984","Wed Oct 19 05:34:41 +0000 2011","GMusic Unites #Google #Music with #Apple iOS [#Apps] http://t.co/R0ih6ePa #republished" +"google","neutral","126531628087513088","Wed Oct 19 05:34:25 +0000 2011","#SEO Tip:Google considers the first lines of the page important.Put menu,headers+important text on top. #google" +"google","neutral","126531561691693056","Wed Oct 19 05:34:09 +0000 2011","On Google+ then go here http://t.co/SxmLkaRR #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126531560194326530","Wed Oct 19 05:34:09 +0000 2011","#SEOTIPS To find out what web pages of your website are indexed by #Google simply Google --> site:http://t.co/xlsu6iPa" +"google","neutral","126531556255870976","Wed Oct 19 05:34:08 +0000 2011","RT @Infograhpic: The Perks of Working at #Google, #Facebook, #Twitter and More [INFOGRAPHIC] http://t.c... @MediaTip" +"google","neutral","126531542733430784","Wed Oct 19 05:34:05 +0000 2011","#Android #Google Ice Cream Sandwich Is Finally Official, And Here's What To Expect http://t.co/Wxhb8JXO #DhilipSiva" +"google","neutral","126531524945395713","Wed Oct 19 05:34:01 +0000 2011","Empire Avenue. The Social Stock Market. What Is Your Online Activity Worth? 
http://t.co/AKlnmKAi #twitter #facebook #Google+ #socialmedia" +"google","neutral","126531522487533568","Wed Oct 19 05:34:00 +0000 2011","RT @NeowinFeed: Google shows off new Ice Cream Sandwich features #google #android http://t.co/LlJpGM2a #neowin" +"google","neutral","126531297249202176","Wed Oct 19 05:33:06 +0000 2011","The Perks of Working at #Google, #Facebook, #Twitter and More [INFOGRAPHIC] http://t.co/YT87krbP #sm #socialmedia... http://t.co/1yCwl874" +"google","neutral","126531285475799040","Wed Oct 19 05:33:03 +0000 2011","#Google To Begin #Encrypting Searches & Outbound Clicks By Default With SSL Search - http://t.co/vc573Pte" +"google","neutral","126531278517448705","Wed Oct 19 05:33:02 +0000 2011","RT @GLaraLopez: The Perks of Working at #Google, #Facebook, #Twitter and More [INFOGRAPHIC] http://t.co/0KcgbMWF #sm #socialmedia #redes ..." +"google","neutral","126530795987939328","Wed Oct 19 05:31:07 +0000 2011","#seo #serps Ballmer: 70% Of The Time, Google & Bing Are The Same, So Try Bing! by @dannysullivan http://t.co/ZE9AiMYI #google #yahoo #bing" +"google","neutral","126530766829129728","Wed Oct 19 05:31:00 +0000 2011","Unwrapping Ice Cream Sandwich on the Galaxy Nexus: (Cross-posted on the Official Google blog)Bea... http://t.co/K10SvnNr #google #mobile" +"google","neutral","126530553477468160","Wed Oct 19 05:30:09 +0000 2011","G#Music Unites #Google Music with #Apple iOS [#Apps] http://t.co/3ze3feV4" +"google","neutral","126530551384506368","Wed Oct 19 05:30:08 +0000 2011","G#Music Unites #Google Music with #Apple iOS [#Apps] http://t.co/S8V8vfmh" +"google","neutral","126530550394650625","Wed Oct 19 05:30:08 +0000 2011","The Perks of Working at #Google, #Facebook, #Twitter and More [INFOGRAPHIC] http://t.co/0KcgbMWF #sm #socialmedia #redessociales" +"google","neutral","126530532640169986","Wed Oct 19 05:30:04 +0000 2011","#Twitter and #Google 'just can't agree' on Realtime Search deal - Computerworld : http://t.co/XNj052NO #av" +"google","neutral","126530487840813056","Wed Oct 19 05:29:53 +0000 2011","Google To Begin Encrypting Searches & Outbound Clicks By Default With SSL Search http://t.co/C0Ykfjla #google #search" +"google","neutral","126530423026233346","Wed Oct 19 05:29:38 +0000 2011","Who are the top folks on #Google+? See this tag cloud: http://t.co/rp7LSoEE via +@jowyang" +"google","neutral","126529904731881472","Wed Oct 19 05:27:34 +0000 2011","#Google unveils what's new in #Android 4 'Ice Cream Sandwich' http://t.co/uOLAYieA" +"google","neutral","126529833487446016","Wed Oct 19 05:27:17 +0000 2011","#GOOGLE me if u dnt understand me Ơ̴̴͡.̮Ơ̴̴̴͡" +"google","neutral","126529492582797313","Wed Oct 19 05:25:56 +0000 2011","Is #Google #Flight Search a threat to the OTA and #travel #Meta #Search #market in India? Check- http://t.co/RuhglJMt #News" +"google","neutral","126529490737303552","Wed Oct 19 05:25:56 +0000 2011","@attentionspan i don't think the link you posted suggests that #google+ traffic is in downward spiral, just corrected for novelty" +"google","neutral","126529470814363648","Wed Oct 19 05:25:51 +0000 2011","So, @Gizmodo @TechCrunch -- answer the question we really want to know -- ICS + Nexus vs. iPhone 4S? 
#Google #Apple" +"google","neutral","126529468981452800","Wed Oct 19 05:25:50 +0000 2011","#ThatsDisrespectful Like using #Google to search for #Bing" +"google","neutral","126529403067969537","Wed Oct 19 05:25:35 +0000 2011","#Samsung Galaxy Nexus #smartphone, Android 4.0 Ice Cream Sandwich unveiled by #Google… http://t.co/nds5B4ej #BT" +"google","neutral","126529224642281472","Wed Oct 19 05:24:52 +0000 2011","Galaxy Nexus Vs iPhone 4S: Which smartphone wins? http://t.co/bF6QUCwt via @productreviews #galaxynexus #iphone4s #google #apple #ics" +"google","neutral","126529168715431936","Wed Oct 19 05:24:39 +0000 2011","Samsung, Google Unveil Phone http://t.co/tzj1bYNc +#Google" +"google","neutral","126529111819681792","Wed Oct 19 05:24:25 +0000 2011","#SEO Tip:Make sure that your important keywords have high keyword density in your pages #google #bing #yahoo" +"google","neutral","126529099018674176","Wed Oct 19 05:24:22 +0000 2011","Android Ice Cream Sandwich is out! #android #Google" +"google","neutral","126529019284946944","Wed Oct 19 05:24:03 +0000 2011","Android Ice Cream Sandwich and Galaxy Nexus: Everything You Need to Know - http://t.co/MSqsVMDK #google" +"google","neutral","126528767228248064","Wed Oct 19 05:23:03 +0000 2011","New #Google #Nexus -> http://t.co/qR1eijhw" +"google","neutral","126528551766867968","Wed Oct 19 05:22:12 +0000 2011","#Google Docs #Updates Presentations With Real-Time #Collaboration, New Themes, Transitions and More [Updates] http://t.co/JWSPetbd" +"google","neutral","126528448050118656","Wed Oct 19 05:21:47 +0000 2011","Google Adsense Tools http://t.co/2IjBZdvn #Adsense #Google #Tools" +"google","neutral","126528318542589952","Wed Oct 19 05:21:16 +0000 2011","RT @LocalJoost: #ICS"" putting people at the heart""? You have GOT to be joking #Google. Now even copying slogans? Ah I get it, THOSE are ..." +"google","neutral","126528307004051458","Wed Oct 19 05:21:13 +0000 2011","#Google #Docs presentations get real-time collaboration, transition effects and more: Google… http://t.co/COSF8WUS" +"google","neutral","126528290352660480","Wed Oct 19 05:21:09 +0000 2011","Online Music Industry is witnessing the astounding revolution. Can #Google manage to seal the supreme console? http://t.co/QcDddYrn" +"google","neutral","126528288729473024","Wed Oct 19 05:21:09 +0000 2011","Online Music Industry is witnessing the astounding revolution. Can #Google manage to seal the supreme console? http://t.co/iUi7JtCM" +"google","neutral","126528287831891968","Wed Oct 19 05:21:09 +0000 2011","On Google+ then go here http://t.co/63B8wYPw #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126528286871396352","Wed Oct 19 05:21:09 +0000 2011","Online Music Industry is witnessing the astounding revolution. Can #Google manage to seal the supreme console? http://t.co/Jrc4qstJ" +"google","neutral","126528248329940992","Wed Oct 19 05:20:59 +0000 2011","Your face unlocks the phone, as facial recognition replaces a traditional pass code. #Google Nexus Prime smartphone" +"google","neutral","126528157573578754","Wed Oct 19 05:20:38 +0000 2011","Android 4.0 Ice Cream Sandwich announcement round-up #android #google http://t.co/ExcNswwx" +"google","neutral","126527897065373696","Wed Oct 19 05:19:36 +0000 2011","#Android: This Is the #Samsung Galaxy Nexus, #Google's New Official Android Phone(via @Gizmodo) - The... 
http://t.co/wS4GS9uk" +"google","neutral","126527760662413312","Wed Oct 19 05:19:03 +0000 2011","Android 4.0 Ice Cream Sandwich announcement round-up #android #google http://t.co/3SvjOtjT" +"google","neutral","126527757151776768","Wed Oct 19 05:19:02 +0000 2011","Android 4.0 Ice Cream Sandwich announcement round-up #android #google http://t.co/8DjJHFGu" +"google","neutral","126527756036091904","Wed Oct 19 05:19:02 +0000 2011","Android 4.0 Ice Cream Sandwich announcement round-up #android #google http://t.co/L5m9uwD7 #neowin" +"google","neutral","126527743528673280","Wed Oct 19 05:18:59 +0000 2011","#LOL #google http://t.co/aeJ1Vrgj" +"google","neutral","126527523529043968","Wed Oct 19 05:18:07 +0000 2011","#Android #Google The Galaxy Nexus' System Apps Have Been Dumped http://t.co/LCZNPDHR #DhilipSiva" +"google","neutral","126527262190346240","Wed Oct 19 05:17:04 +0000 2011","#Google has made search more #secure, this change #encrypts your search queries and Google's result pages. Read more: http://t.co/Rnnjwo6r" +"google","neutral","126527122218041345","Wed Oct 19 05:16:31 +0000 2011","Android 4.0 OS Ice Cream Sandwich and the Galaxy Nexus is out.seems that smartphones are named by hungry developers #Google+" +"google","neutral","126527077905207296","Wed Oct 19 05:16:20 +0000 2011","Anyone seen this? If your brand needs to track #SEO keyword landing through to a conversion, well #Google just capped …http://t.co/g4coBopL" +"google","neutral","126526945008689152","Wed Oct 19 05:15:49 +0000 2011","RT @whymicrosoft: Google’s Ad revenue for Q3 was 96% of total revenue. So, are #Google customers the commodity being sold to advertisers???" +"google","neutral","126526850452299776","Wed Oct 19 05:15:26 +0000 2011","Ice Cream Sandwich arrives http://t.co/Z4j7WhmS #google #icecream #android #samsung" +"google","neutral","126526838251065344","Wed Oct 19 05:15:23 +0000 2011","it's in the curve baby http://t.co/ORDg1z2F #google #nexus #android #samsung" +"google","neutral","126526793380409344","Wed Oct 19 05:15:12 +0000 2011","Is Google planning to launch its own iTunes rival? - The Times of India http://t.co/1x73o5Yz #google" +"google","neutral","126526782001262592","Wed Oct 19 05:15:10 +0000 2011","#Android #Google We saw Ice Cream Sandwich on a Phone, but What About a Tablet? http://t.co/6McjpONv #DhilipSiva" +"google","neutral","126526594973052928","Wed Oct 19 05:14:25 +0000 2011","#SEO tip:in multilang sites create folders for each lang.submit the urls in #google #webmaster tools+Use GeographicTarget" +"google","neutral","126526457974489088","Wed Oct 19 05:13:52 +0000 2011","Going in with #mikedunn one of the #chosenfewdjs all in with #chicago #housemusic! Might want to #google him!" +"google","neutral","126526371785752576","Wed Oct 19 05:13:32 +0000 2011","#Google releases 50 new features for presentations in #GoogleDocs http://t.co/0x6crRao" +"google","neutral","126525872915226624","Wed Oct 19 05:11:33 +0000 2011","Ballmer: 70% Of The Time, #Google & #Bing Are The Same, So Try Bing!: http://t.co/rA5PgOsD" +"google","neutral","126525730526998528","Wed Oct 19 05:10:59 +0000 2011","Recap: Google will host G-South Africa in JHB on 3rd and 4th November. http://t.co/A88ZBr2O #google" +"google","neutral","126525539442892800","Wed Oct 19 05:10:13 +0000 2011","Believe it or not #political process is changing across all countries. 
Thanks to new social media #Facebook , #twitter, #google+" +"google","neutral","126525506018492416","Wed Oct 19 05:10:06 +0000 2011","#Google #Ice Cream Sandwich (#Android 4.0): a #hands_on #screenshot #gallery http://t.co/61bZRSWI via @engadget" +"google","neutral","126525491954987008","Wed Oct 19 05:10:02 +0000 2011","Logitech Revue Companion Box with Google TV and Keyboard Controller by #Logitech #Google #TVBox via #amazon http://t.co/NkPjGP3W" +"google","neutral","126525475723022336","Wed Oct 19 05:09:58 +0000 2011","#Google #Ice Cream Sandwich (#Android 4.0): a #hands_on #screenshot #gallery http://t.co/E8dCQo7s" +"google","neutral","126525465715425282","Wed Oct 19 05:09:56 +0000 2011","#Google announces NFC-based #Android Beam for sharing between phones" +"google","neutral","126525210483638272","Wed Oct 19 05:08:55 +0000 2011","If you need to modify your payment details that are on your phone for the Android Market, go here: http://t.co/z4gS3PxC #Google" +"google","neutral","126525169756946432","Wed Oct 19 05:08:45 +0000 2011","#SEO #backlinks #google #index Free Tool http://t.co/1tTp1axq" +"google","neutral","126525031135195136","Wed Oct 19 05:08:12 +0000 2011","Samsung Galaxy Nexus, Google Android 4.0 Live Blog : http://t.co/wwSYXAbn #android #google #mobile #news" +"google","neutral","126525007525462016","Wed Oct 19 05:08:07 +0000 2011","#Android #Google Ice Cream Sandwich Feature Closer Look - Resizable Widgets, Folders, Multitasking, And... http://t.co/CQCaWORD #DhilipSiva" +"google","neutral","126524889296412672","Wed Oct 19 05:07:38 +0000 2011","Google encrypting search for signed-in users http://t.co/R2xDFqRs #encryption #google #search #secure" +"google","neutral","126524784120037376","Wed Oct 19 05:07:13 +0000 2011","#Google #Android #IceCreamSandwich features - http://t.co/qXFdDrt4. #ICS" +"google","neutral","126524764071264257","Wed Oct 19 05:07:09 +0000 2011","On Google+ then go here http://t.co/RCSwZlxK #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126524648874717184","Wed Oct 19 05:06:41 +0000 2011","#ICS"" putting people at the heart""? You have GOT to be joking #Google. Now even copying slogans? Ah I get it, THOSE are not patented ;)" +"google","neutral","126524290945384449","Wed Oct 19 05:05:16 +0000 2011","The new Samsung Galaxy nexus.. + +http://t.co/UtbpXrsj + +#Android +#Samsung +#Google +#WoW + +CC @kha1989led +@eisaahmad" +"google","neutral","126524242635403264","Wed Oct 19 05:05:04 +0000 2011","#Google and #Samsung unveil #Galaxy #Nexus, #Android 4 at event: http://t.co/oTqfW9XQ | #CTIA #Hong #Kong #conference #environments" +"google","neutral","126524229997953024","Wed Oct 19 05:05:01 +0000 2011","New Solar plant seen on I-15 on the way to Vegas, invested in by Google. http://t.co/Pzw8OVnm RT #eco #google #solar #greenpower" +"google","neutral","126524074179567617","Wed Oct 19 05:04:24 +0000 2011","#SEO Tip:Meta nofollowed or robots.txt blocked pages DO receive pagerank juice when they get links. #google" +"google","neutral","126524003266473984","Wed Oct 19 05:04:07 +0000 2011","What Is Ice Cream Sandwich? - http://t.co/otA4rqi9 #Google" +"google","neutral","126523791735144448","Wed Oct 19 05:03:17 +0000 2011","RT @/iPadPowerUsers-On Google+ then go here http://t.co/tMGHv24E #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126523776195231744","Wed Oct 19 05:03:13 +0000 2011","RT @MAGGadget: Fight: Galaxy Nexus vs. Droid RAZR vs. 
iPhone 4S http://t.co/P3qHh63d #Android #iPhone #Google" +"google","neutral","126523549493112832","Wed Oct 19 05:02:19 +0000 2011","RT @ZDNet: #Google steps up its privacy game & launches Search Encryption #GoodToKnow privacy & personal data control http://t.co/t05c34yY" +"google","neutral","126523519357030400","Wed Oct 19 05:02:12 +0000 2011","@Bravo u should have a cast of Latina #housewives WE have some pretty successful Latina Women in this world #GOOGLE them.." +"google","neutral","126523435772948480","Wed Oct 19 05:01:52 +0000 2011","RT @iPadPowerUsers On Google+ then go here http://t.co/r4bIDq9D #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126523421231300608","Wed Oct 19 05:01:48 +0000 2011","@loribooLIVE like Manny from degrassi (: #google her !" +"google","neutral","126523245649342464","Wed Oct 19 05:01:07 +0000 2011","And here's the run-down on #Android4.0 aka #IceCreamSandwich http://t.co/f1V3HJAe #google" +"google","neutral","126522997522702336","Wed Oct 19 05:00:07 +0000 2011","Here's what we know about the #GalaxyNexus http://t.co/7bYXbQVY #google #android" +"google","neutral","126522979080339456","Wed Oct 19 05:00:03 +0000 2011","Tips to make #Google always #crawl your #website http://t.co/SVYSckH7" +"google","neutral","126522978971299840","Wed Oct 19 05:00:03 +0000 2011","Do the Creep #ahh @JokeAholic_P and @AmiraAldewick #google+dates" +"google","neutral","126522853779705856","Wed Oct 19 04:59:33 +0000 2011","RT @xenophin: #Samsung and #Google's Ice Cream Sandwich event is at 3am UK time. Waking up to a lot of news then ... #nonUSAtimezonel ..." +"google","neutral","126522802466598912","Wed Oct 19 04:59:21 +0000 2011","wait.... since when does putting honeycomb and gingerbread together make ice cream sandwich? #google ..." +"google","neutral","126522667703599104","Wed Oct 19 04:58:49 +0000 2011","#Android #Google Samsung Galaxy Nexus Coming Soon To Three UK http://t.co/FQbfdiYE #DhilipSiva" +"google","neutral","126522664939565057","Wed Oct 19 04:58:48 +0000 2011","#Android #Google Nexus S Getting Ice Cream Sandwich, OS “Theoretically†Should Work on Any Android... http://t.co/kqBWWiyd #DhilipSiva" +"google","neutral","126522423771267072","Wed Oct 19 04:57:51 +0000 2011","@Wilko_LR ""Google to launch iTunes AND Spotify rival? http://t.co/SralgsCi†need to start using #google+" +"google","neutral","126522225674301440","Wed Oct 19 04:57:03 +0000 2011","#Google to stop giving referral data - by moving to #SSL? http://t.co/wTjiX29b" +"google","neutral","126521973672120320","Wed Oct 19 04:56:03 +0000 2011","How to use Google Alerts to... http://t.co/crMAaAGQ #seo #search #google #hack #sem #it #business #web #marketing #online #yes #hot #winning" +"google","neutral","126521718272573440","Wed Oct 19 04:55:02 +0000 2011","Samsung Galaxy Nexus vs. 
Galaxy S II, Nexus One and Nexus S (macro hands-on) http://t.co/F9l7GnWT #Google #NexusS" +"google","neutral","126521582809128961","Wed Oct 19 04:54:30 +0000 2011","“@rianru: #Google предÑтавит новую верÑию #Android 4.0 http://t.co/ttgycujs†+ +#Vfacebook" +"google","neutral","126521573330010112","Wed Oct 19 04:54:28 +0000 2011","RT @iPadPowerUsers On Google+ then go here http://t.co/nCqrwPYb #GPlus #Googleplus #Google #teamfollowback…" +"google","neutral","126521564979150848","Wed Oct 19 04:54:26 +0000 2011","#SEO Tip: #javascript can be a friend +an enemy.Use it wisely to block the content u want #google #bing #yahoo" +"google","neutral","126521489116766208","Wed Oct 19 04:54:08 +0000 2011","On Google+ then go here http://t.co/XP4apWAH #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126521384657616896","Wed Oct 19 04:53:43 +0000 2011","quick hands on #Galaxy #Nexus by #Samsung #google #android #ICS #4 #icecreamsandwich http://t.co/22cF2m6d" +"google","neutral","126521228524658688","Wed Oct 19 04:53:06 +0000 2011","#Google #NexusPrime Spec sheet: http://t.co/iuYKWVRQ #Android #Mobile" +"google","neutral","126521130197585920","Wed Oct 19 04:52:42 +0000 2011","Why not send a Google card this Christmas? http://t.co/zSfuV48d #xmas #google #squidoo" +"google","neutral","126521102783619072","Wed Oct 19 04:52:36 +0000 2011","#Google and #Samsung reveal the #NexusPrime running the #IceCreamSandwich...#Mobile #Android" +"google","neutral","126520984122572801","Wed Oct 19 04:52:07 +0000 2011","#Google confirms #Nexus S will get #Ice_Cream_Sandwich -- for real this time (#Gingerbread devices, too) http://t.co/4x69YDfE" +"google","neutral","126520920352358401","Wed Oct 19 04:51:52 +0000 2011","#Samsung, #Google Android 4 reveal: Live Event Blog by #engadet http://t.co/DL68lKQU" +"google","neutral","126520914413236224","Wed Oct 19 04:51:51 +0000 2011","RT @bleublancrouge_: Surprising !Google's Management Isn't Using #Google+ : http://ow.ly/6QuOk @degusta" +"google","neutral","126520707969581056","Wed Oct 19 04:51:02 +0000 2011","#Android #Google Ice Cream Sandwich new features not mentioned during webcast http://t.co/m2UGgiev #DhilipSiva" +"google","neutral","126520695218913282","Wed Oct 19 04:50:59 +0000 2011","#Android #Google Ice Cream Sandwich UI enhancements – a new spin on Android http://t.co/DAPpm4av #DhilipSiva" +"google","neutral","126520611190218752","Wed Oct 19 04:50:39 +0000 2011","Ice Cream Sandwitch comes alive today #android #google" +"google","neutral","126520531934654465","Wed Oct 19 04:50:20 +0000 2011","RT @tatn: #Google, #Samsung unveil Ice Cream Sandwich-powered Galaxy Nexus http://t.co/qtiCdIvd via @CNET #Android" +"google","neutral","126520511483219968","Wed Oct 19 04:50:15 +0000 2011","#Android #Google Texas Instruments: “It’s Not the Number of Cores, It’s Sophistication†http://t.co/aXDKhkb0 #DhilipSiva" +"google","neutral","126520508903718912","Wed Oct 19 04:50:14 +0000 2011","#Android #Google Android Beam finally makes NFC social http://t.co/P9PMpCFl #DhilipSiva" +"google","neutral","126520504822661120","Wed Oct 19 04:50:13 +0000 2011","#Android #Google Texas Instruments confirms Galaxy Nexus has newer OMAP4460 processor http://t.co/sh316tBN #DhilipSiva" +"google","neutral","126520501442056193","Wed Oct 19 04:50:12 +0000 2011","The competition: #Google introduces #Android 4.0 Ice Cream Sandwich and the Galaxy Nexus http://t.co/4vGGHF43" +"google","neutral","126520500246687745","Wed Oct 19 04:50:12 +0000 2011","#Android #Google Video: Google’s 
Nexus site updated with new device and ICS features http://t.co/rZBmyLTV #DhilipSiva" +"google","neutral","126520116795015169","Wed Oct 19 04:48:41 +0000 2011","I would also like ice Cream Sandwich on my phone #google #android http://t.co/uTHuUECJ" +"google","neutral","126520107089395712","Wed Oct 19 04:48:38 +0000 2011","#google Unwrapping Ice Cream Sandwich on the Galaxy Nexus http://t.co/YLRL4lZy" +"google","neutral","126520074717765632","Wed Oct 19 04:48:31 +0000 2011","RT @raovallab: How #Google Ventures Chooses Which #Startups Get Its $200 Million http://t.co/UGsBdxQm via @mashbusiness @mashable" +"google","neutral","126520020871294977","Wed Oct 19 04:48:18 +0000 2011","RT @stuartmiles: http://t.co/hM2x3yim via @pocketlint #moto #google #android" +"google","neutral","126519912821825538","Wed Oct 19 04:47:52 +0000 2011","RT @AndroidTopNews: Nexus Prime rumored to be released on Nov. 3 http://t.co/77HbvLjI #News #google #Ice_Cream_Sandwich #ics #Nexus_Prime" +"google","neutral","126519469500669952","Wed Oct 19 04:46:06 +0000 2011","Samsung Galaxy Nexus & Android 4.0 Quick Specs - http://t.co/s1QfUznv #galaxynexus #android #ics #samsung #google" +"google","neutral","126519331956862976","Wed Oct 19 04:45:34 +0000 2011","Android 4.0 SDK now available #google #android http://t.co/TBtuewi1" +"google","neutral","126519306832982016","Wed Oct 19 04:45:28 +0000 2011","RT @webseoanalytics: The most important KPIs in #Google #Analytics: http://t.co/uUzhIMGv #marketing #webmaster" +"google","neutral","126519290722648064","Wed Oct 19 04:45:24 +0000 2011","It's not Google vs Apple. It is Apple & Google vs the old way - GigaOm http://t.co/LJlc6YzS #google" +"google","neutral","126519251631747072","Wed Oct 19 04:45:14 +0000 2011","RT @TecBuzz: Google steps up its privacy game, launches Good To Know http://t.co/22QbKMpG #google" +"google","neutral","126519229515169792","Wed Oct 19 04:45:09 +0000 2011","#Android #Google What’s New in Ice Cream Sandwich Android 4.0 http://t.co/z76pv0QV #DhilipSiva" +"google","neutral","126519227006988288","Wed Oct 19 04:45:08 +0000 2011","#Android #Google Galaxy Nexus http://t.co/GXVKqi6z #DhilipSiva" +"google","neutral","126519072195223552","Wed Oct 19 04:44:32 +0000 2011","Hmm, Google I really need a new phone. Please release the Galaxy Nexus asap! #android #google" +"google","neutral","126519044798029824","Wed Oct 19 04:44:25 +0000 2011","The most important KPIs in #Google #Analytics: http://t.co/uUzhIMGv #marketing #webmaster" +"google","neutral","126519034488426496","Wed Oct 19 04:44:23 +0000 2011","#Google confirms that the Nexus S will actually get the ice cream sandwich update." 
+"google","neutral","126519019699314688","Wed Oct 19 04:44:19 +0000 2011","http://t.co/Pl4ZeEvH - Android 4.0 SDK now available #google #android http://t.co/sVlrVxTR" +"google","neutral","126518955526455296","Wed Oct 19 04:44:04 +0000 2011","RT @theunlockr: Nexus S will get Ice-Cream Sandwich too - #Google - http://t.co/Igq9kdIL #android #samsung #ics" +"google","neutral","126518773401391104","Wed Oct 19 04:43:20 +0000 2011","@TreyRatcliff Congrats on making it to the #google #android 4.0 demo :) http://t.co/EPIv7ny5 #icecreamsandwich" +"google","neutral","126518709358575616","Wed Oct 19 04:43:05 +0000 2011","Is it bad if I just want it because its called the #icecreamsandwich ?http://t.co/Wt3C2rP1 #android #sansung #google" +"google","neutral","126518669554626560","Wed Oct 19 04:42:56 +0000 2011","Nexus S will get Ice-Cream Sandwich too - #Google - http://t.co/Igq9kdIL #android #samsung #ics" +"google","neutral","126518471893848064","Wed Oct 19 04:42:08 +0000 2011","#Android #Google Android 4.0 Ice Cream Sandwich – All New Core Applications Overview http://t.co/XLtT7Wr9 #DhilipSiva" +"google","neutral","126518469507289090","Wed Oct 19 04:42:08 +0000 2011","#Android #Google New UI and Core Features in Android 4.0 Ice Cream Sandwich Overview http://t.co/BD2omnIb #DhilipSiva" +"google","neutral","126518466315431936","Wed Oct 19 04:42:07 +0000 2011","#Android #Google Android 4.0 Ice Cream Sandwich Has Hardware Acceleration http://t.co/ijWr1mUJ #DhilipSiva" +"google","neutral","126518461055774721","Wed Oct 19 04:42:06 +0000 2011","#Android #Google Ice Cream Sandwich Feature Closer Look - Redesigned Lock Screen And Virtual Button Bar http://t.co/FqsPN4An #DhilipSiva" +"google","neutral","126518313160409088","Wed Oct 19 04:41:31 +0000 2011","RT @Androidheadline Three UK to carry the Samsung Galaxy Nexus http://t.co/bLtq3VrK #android #Samsung #UK #telecom #biznews #Google #news" +"google","neutral","126518252355584001","Wed Oct 19 04:41:16 +0000 2011","RT @bytenow: The next pure #Google phone is the #Galaxy #Nexus by #Samsung. It's fast and has a nice screen - http://t.co/9RwvaUkq" +"google","neutral","126518222487961600","Wed Oct 19 04:41:09 +0000 2011","The next pure #Google phone is the #Galaxy #Nexus by #Samsung. 
It's fast and has a nice screen - http://t.co/9RwvaUkq" +"google","neutral","126518218343972864","Wed Oct 19 04:41:08 +0000 2011","On Google+ then go here http://t.co/18qxaSp2 #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126518146063532032","Wed Oct 19 04:40:51 +0000 2011","#so #society #network #google #empowerment http://t.co/bYn1njee notes #Facebook should not be taken lightly" +"google","neutral","126518121946292224","Wed Oct 19 04:40:45 +0000 2011","Like to get my hands on an ice cream sandwich #android #google" +"google","neutral","126518025863168000","Wed Oct 19 04:40:22 +0000 2011","#google Unwrapping Ice Cream Sandwich on the Galaxy Nexus http://t.co/Ac8JPjPK" +"google","neutral","126517966589267968","Wed Oct 19 04:40:08 +0000 2011","#Android #Google HTC on ICS: 'reviewing its features and functionality to determine our upgrade plans' http://t.co/GrhboOar #DhilipSiva" +"google","neutral","126517947329036288","Wed Oct 19 04:40:03 +0000 2011","HTC on ICS: 'reviewing its features and functionality to determine our upgrade plans' http://t.co/UdkGr3Cb #android #google" +"google","neutral","126517930027515904","Wed Oct 19 04:39:59 +0000 2011","RT @Stephaniezogrh Samsung, Google Unveil Latest Android OS, Phone - PCWorld http://t.co/0dkQs3h3 #Android #Samsung #Apple #Google #tech" +"google","neutral","126517793792344064","Wed Oct 19 04:39:27 +0000 2011","@rohit_bmw Rohit, thanks for being part of my #google+ network!" +"google","neutral","126517583347326976","Wed Oct 19 04:38:37 +0000 2011","Ice Cream Sandwich is out! http://t.co/YyceZpDy #android #ios #ics #google" +"google","neutral","126517558152134656","Wed Oct 19 04:38:31 +0000 2011","RT @BuzzzIt: BuzZz...: Google Wallet Introduction Video http://t.co/Q00gYwQH #Google #Wallet" +"google","neutral","126517526363504641","Wed Oct 19 04:38:23 +0000 2011","Google encrypts searches: http://t.co/q8inLdFy #Google" +"google","neutral","126517510915899392","Wed Oct 19 04:38:19 +0000 2011","Samsung, Google Unveil Latest Android OS, Phone - PCWorld #google" +"google","neutral","126517487503294464","Wed Oct 19 04:38:14 +0000 2011","#Samsung #Galaxy Nexus to Launch on Three of UK http://t.co/xaXnOPO6 #talkandroid #google #networkspecs #nttdocomo" +"google","neutral","126517179272278016","Wed Oct 19 04:37:00 +0000 2011","Great Mission :: Ice Cream Sandwich, #Google mission was to build a mobile OS that works on both phones and tablets" +"google","neutral","126517076889321472","Wed Oct 19 04:36:36 +0000 2011","RT @lcashdol: Too bad #google didn't have a tribute graphic for #DennisRitchie." +"google","neutral","126516982936895488","Wed Oct 19 04:36:13 +0000 2011","Like traditional VC firms, Google Ventures makes its investment decisions solely ... #google #ventures #spencer http://t.co/TBGqSFos" +"google","neutral","126516936862474241","Wed Oct 19 04:36:02 +0000 2011","Noarchive is used to restrict search engines from saving a cached copy of the page #Google" +"google","neutral","126516898845310977","Wed Oct 19 04:35:53 +0000 2011","#Google to begin defaulting logged-in #users to #secure #search: According to a blog post by… http://t.co/64Cj3GuH" +"google","neutral","126516776652640256","Wed Oct 19 04:35:24 +0000 2011","#Google Wallet now supported by seven new retailers [video] http://t.co/LCrWw4ld via @BGR" +"google","neutral","126516749242871809","Wed Oct 19 04:35:18 +0000 2011","WOW that #AmberCole video hit #Google!! I'm done haha....." 
+"google","neutral","126516744742387712","Wed Oct 19 04:35:17 +0000 2011","RT @AccessNetworks: We're waiting patiently for Google+ business pages. Anyone else wanting to jump on board? #avtweeps #google+ http:// ..." +"google","neutral","126516481679818752","Wed Oct 19 04:34:14 +0000 2011","RT @MarcDaley: #Samsung #Google announce new phone and then the #Apple #AppleStore goes down. + +A little strange or am I reading too much ..." +"google","neutral","126516342319890432","Wed Oct 19 04:33:41 +0000 2011","Galaxy Nexus Promotion Video http://t.co/JIV8z67u #samsung #google #android #ics #smartphone" +"google","neutral","126516212749443072","Wed Oct 19 04:33:10 +0000 2011","#Android #Google Ice Cream Sandwich Official Video http://t.co/ppH5CW1p #DhilipSiva" +"google","neutral","126516054452211712","Wed Oct 19 04:32:32 +0000 2011","Google releases 50 new features for presentations in Google Docs: Google is rolling out a s... http://t.co/HkCBc7pm #google+ #googleplus" +"google","neutral","126515992737218560","Wed Oct 19 04:32:17 +0000 2011","My next phone samsung galaxy nexus. Ice cream sandwich 4.0. #android #google http://t.co/mjYSLzie" +"google","neutral","126515637978796032","Wed Oct 19 04:30:53 +0000 2011","Ice Cream Sandwich Android: #IceCreamSandwichAndroid #Ice #Cream #Sandwich #Android http://t.co/pVRaHhxR #google #youtube" +"google","neutral","126515624984846336","Wed Oct 19 04:30:50 +0000 2011","RT @fucktec: #Google confirms #Nexus S will get #Ice_Cream_Sandwich -- for real this time (#Gingerbread devices, too) http://t.co/RajAK9P2" +"google","neutral","126515576507084800","Wed Oct 19 04:30:38 +0000 2011","I'm not a windows phone fan, but I'm a #Metro UI fan, which, IMO, has been fortunately (or sadly) copied by #Google into #ICS." +"google","neutral","126515415592607744","Wed Oct 19 04:30:00 +0000 2011","RT @LCmediaHouse: #Google vs Facebook: Get the Whole Picture for Your Marketing http://t.co/tm2xaijB #SocialMedia" +"google","neutral","126515341567344641","Wed Oct 19 04:29:42 +0000 2011","I have been watching Captain American movie I will give you what I think of #Google Ice Cream Sandwich going to watch it now" +"google","neutral","126515215255871488","Wed Oct 19 04:29:12 +0000 2011","#google #galaxy #nexus intro video #samsung #ICS #android #4 http://t.co/tOGMpP8I" +"google","neutral","126515104723374080","Wed Oct 19 04:28:46 +0000 2011","#Samsung #Galaxy #Nexus hands-on (video) http://t.co/9MNNerc5 #mobile #android4 #galaxynexus #google" +"google","neutral","126514999375048705","Wed Oct 19 04:28:21 +0000 2011","#Samsung #Google announce new phone and then the #Apple #AppleStore goes down. + +A little strange or am I reading too much into this?" +"google","neutral","126514945373376512","Wed Oct 19 04:28:08 +0000 2011","Fight: Galaxy Nexus vs. Droid RAZR vs. iPhone 4S http://t.co/P3qHh63d #Android #iPhone #Google" +"google","neutral","126514719828885504","Wed Oct 19 04:27:14 +0000 2011","Unwrapping Ice Cream Sandwich on the Galaxy Nexus: (Cross-posted on the Google Mobile blog) +Beaming a vi... http://t.co/Q2mvqxJf #google" +"google","neutral","126514719770161152","Wed Oct 19 04:27:14 +0000 2011","#Google #News: Unwrapping Ice Cream Sandwich on the Galaxy Nexus: (Cross-posted on the Google Mob... http://t.co/0PkK7J5D #harshgandhitk" +"google","neutral","126514718188900352","Wed Oct 19 04:27:14 +0000 2011","#Google #News: Unwrapping Ice Cream Sandwich on the Galaxy Nexus: (Cross-posted on the Google Mob... 
http://t.co/AgC6PCz7 #harshgandhitk" +"google","neutral","126514697875894272","Wed Oct 19 04:27:09 +0000 2011","On Google+ then go here http://t.co/AwDFxDcI #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126514441314500608","Wed Oct 19 04:26:07 +0000 2011","#Android #Google Download: Android 4.0 Ice Cream Sandwich SDK System Dump http://t.co/VyyAsBM6 #DhilipSiva" +"google","neutral","126514438680489985","Wed Oct 19 04:26:07 +0000 2011","#Android #Google Nexus Site Updated With ICS and Galaxy Nexus, Register to be Notified http://t.co/GHPDQALt #DhilipSiva" +"google","neutral","126514435949993984","Wed Oct 19 04:26:06 +0000 2011","#Android #Google Official Promo Video Reveals Verizon Wireless Destination http://t.co/xKH7UGY4 #DhilipSiva" +"google","neutral","126513983338450944","Wed Oct 19 04:24:18 +0000 2011","#google's galaxy #nexus just came out with #anroid4.0. http://t.co/p5kuIoox" +"google","neutral","126513869467299840","Wed Oct 19 04:23:51 +0000 2011","RT @EcommNewsUpdate Steve Yegge's Google Plus Rant - Google's Pathetic Afterthought http://t.co/4Tt5OsvN #blog #facebook #google #googleplus" +"google","neutral","126513499890397185","Wed Oct 19 04:22:23 +0000 2011","BuzZz...: Google Wallet Introduction Video http://t.co/Q00gYwQH #Google #Wallet" +"google","neutral","126513430508212224","Wed Oct 19 04:22:07 +0000 2011","Google is rolling out a suite of new changes to its Google Docs service, including 50 new features for ... http://t.co/3j4Vd8WF #Google+" +"google","neutral","126513429103128577","Wed Oct 19 04:22:06 +0000 2011","Ballmer also said that the company has a ""variety of different ways"" to respond to Hangouts, the group ... http://t.co/brBjtB86 #Google+" +"google","neutral","126513426968215552","Wed Oct 19 04:22:06 +0000 2011","As far as I can tell, Google engineer Steve Yegge never intended to become famous for criticizing the c... http://t.co/SO1rCq0y #Google+" +"google","neutral","126513317320736768","Wed Oct 19 04:21:40 +0000 2011","#Google confirms #Nexus S will get #Ice_Cream_Sandwich -- for real this time (#Gingerbread devices, too) http://t.co/apLj6Tsk via @engadget" +"google","neutral","126513312857985024","Wed Oct 19 04:21:38 +0000 2011","#Google confirms #Nexus S will get #Ice_Cream_Sandwich -- for real this time (#Gingerbread devices, too) http://t.co/RajAK9P2" +"google","neutral","126513307589935104","Wed Oct 19 04:21:37 +0000 2011","www.geoloco.tv - #Geoloco, future of tech, brands, ads in location revolution - 11-3 San Francisco - keynotes #Google, #Facebook, #Starbucks" +"google","neutral","126513117944496128","Wed Oct 19 04:20:52 +0000 2011","#Google confirms #Nexus S will get Ice Cream Sandwich -- for real this time (#Gingerbread… http://t.co/vIoR326D" +"google","neutral","126513115830554625","Wed Oct 19 04:20:51 +0000 2011","#Samsung Galaxy #Nexus hands-on (#video) http://t.co/w4muSKEm #android #galaxynexus #galaxynexus #google" +"google","neutral","126513110264717312","Wed Oct 19 04:20:50 +0000 2011","#Google announces #NFC-based #Android Beam for sharing between phones http://t.co/VonJhtjE #android40 #androidbeam" +"google","neutral","126512932933734402","Wed Oct 19 04:20:08 +0000 2011","#Android #Google First Official Galaxy Nexus / Ice Cream Sandwich Promo Video With Feature Highlights... http://t.co/LtrC34VG #DhilipSiva" +"google","neutral","126512842194161664","Wed Oct 19 04:19:46 +0000 2011","the announcement is in hong kong....... 
gotcha #icecreamsandwhich #android #galaxynexus #mynextphone #google #ics #droid" +"google","neutral","126512471845515264","Wed Oct 19 04:18:18 +0000 2011","Discover #Keywords & #Business Critical Information to be No 1 on #Google read aspenIbiz #blog post http://t.co/Rotiieo0" +"google","neutral","126512460629946370","Wed Oct 19 04:18:15 +0000 2011","Texas Instruments confirms Galaxy Nexus has newer OMAP4460 processor http://t.co/wqQV4lyu #google #googlenexus" +"google","neutral","126512277720535040","Wed Oct 19 04:17:32 +0000 2011","Samsung and #Google Announce Galaxy Nexus and #Android 4.0–Ice Cream Sandwich http://t.co/g6EOfUYY #news #android40" +"google","neutral","126512269847826432","Wed Oct 19 04:17:30 +0000 2011","Android 4.0 SDK now available #google #android http://t.co/P05YR6PC" +"google","neutral","126512265036959744","Wed Oct 19 04:17:29 +0000 2011","Android 4.0 SDK now available #google #android http://t.co/dB2Rzsu5 #neowin" +"google","neutral","126512261543116800","Wed Oct 19 04:17:28 +0000 2011","Android 4.0 SDK now available #google #android http://t.co/LzJmTGQ9" +"google","neutral","126512199857475585","Wed Oct 19 04:17:13 +0000 2011","#Google thinks your digital books belong on a digital #bookcase, digitally (video) http://t.co/Qtt22akr" +"google","neutral","126512130752126976","Wed Oct 19 04:16:57 +0000 2011","And my favorite, ""is @danielltosh gay?"" #Google #autocomplete" +"google","neutral","126511907569020928","Wed Oct 19 04:16:03 +0000 2011","How do you get pink eye? #Google #autocomplete" +"google","neutral","126511799947366401","Wed Oct 19 04:15:38 +0000 2011","#Google unveils Ice Cream Sandwich, SDK is available now http://t.co/JHGldej9" +"google","neutral","126511624256372736","Wed Oct 19 04:14:56 +0000 2011","RT @rishij_pune: #google must b having sum underlying reasoning 4 naming products after eatables #eclair #gingerbread #icecream #rawados ..." +"google","neutral","126511578777526273","Wed Oct 19 04:14:45 +0000 2011","#Android + #Google have all of the tools to reshape the market. I should probably take a break from all this #ICS talk. http://t.co/D6T2XYAD" +"google","neutral","126511507088478208","Wed Oct 19 04:14:28 +0000 2011","@LegiondaChosen1: Check out ""Big Hit feat. LegiondaChosen1 - I'm On 1 - Freestyle"" - http://t.co/IfxU5Za0 #Google Me!!" +"google","neutral","126511425760935936","Wed Oct 19 04:14:09 +0000 2011","On Google+ then go here http://t.co/5tbv933W #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126511400825790464","Wed Oct 19 04:14:03 +0000 2011","#google #icecream #galaxy #nexus http://t.co/gkoHBpN8" +"google","neutral","126511380365983744","Wed Oct 19 04:13:58 +0000 2011","RT @VentureBeat: #Google releases an Infinite Digital Bookcase http://t.co/gVbdhLiu by @MeghanKel" +"google","neutral","126511358861778944","Wed Oct 19 04:13:53 +0000 2011","Why are manhole covers round? #Google #autocomplete" +"google","neutral","126511243371610113","Wed Oct 19 04:13:25 +0000 2011","RT @NeowinFeed Google shows off new Ice Cream Sandwich features #google #android http://t.co/TAJFjSD9 #neowin" +"google","neutral","126510976358035456","Wed Oct 19 04:12:21 +0000 2011","China Machado, I was googling earlier the women that resembles my momma) #iphoneartists #conceptualart #google http://t.co/TZvjaQlI" +"google","neutral","126510915771301888","Wed Oct 19 04:12:07 +0000 2011","RT @Div_Spartan: Fellow Michiganders, do the right thing. Get online! 
#Google helping #Michigan businesses get online http://t.co/npz2KwmQ" +"google","neutral","126510815246422016","Wed Oct 19 04:11:43 +0000 2011","RT @jessvpeterson: Ever post something to the wrong circles? #google+" +"google","neutral","126510801803673600","Wed Oct 19 04:11:40 +0000 2011","#Samsung #google #nexus" +"google","neutral","126510781801046016","Wed Oct 19 04:11:35 +0000 2011","DAMN!!! #Galaxy #Nexus #Google #Android http://t.co/DL3cNtEU time for me to upgrade my Nexus S xD" +"google","neutral","126510731549085697","Wed Oct 19 04:11:23 +0000 2011","Anyone use #Google+?" +"google","neutral","126510402359140352","Wed Oct 19 04:10:05 +0000 2011","#Android #Google Need Proof of Verizon’s Galaxy Nexus? http://t.co/jcP7bkvg #DhilipSiva" +"google","neutral","126510393114898432","Wed Oct 19 04:10:02 +0000 2011","Galaxy Nexus #GalaxyNexus #Android #Google http://t.co/AWJHxwGT #followme" +"google","neutral","126510377558216704","Wed Oct 19 04:09:59 +0000 2011","Android Ice Cream Sandwich adds Face Unlock feature http://t.co/45HxZE0o #Tech #google" +"google","neutral","126510284536942592","Wed Oct 19 04:09:36 +0000 2011","#Google releases an Infinite Digital Bookcase http://t.co/12AuhZ6o by @MeghanKel" +"google","neutral","126510143541231616","Wed Oct 19 04:09:03 +0000 2011","#Google encrypts search data against hackers, marketers 'howl': http://t.co/nFwcQD9R | #divisions" +"google","neutral","126510049039368192","Wed Oct 19 04:08:40 +0000 2011","#Google Android Ice Cream Sandwich OS http://t.co/VhntGols #technology #androidicecream #nexusprime #sandwichos" +"google","neutral","126509936518762496","Wed Oct 19 04:08:13 +0000 2011","#Android #Google Video: Samsung Galaxy Nexus Announcement and Official Promo http://t.co/4ErExB4T #DhilipSiva" +"google","neutral","126509922505596928","Wed Oct 19 04:08:10 +0000 2011","#Android #Google ICS Has Completely Revamped Your Contacts Into The ""People"" app http://t.co/QDoGji0U #DhilipSiva" +"google","neutral","126509783665745920","Wed Oct 19 04:07:37 +0000 2011","http://t.co/pUvXwjjc Is it #Google #bookmark or #links that bring site #traffic blogspot http://t.co/KPdXWPEB wp http://t.co/31g0geqy" +"google","neutral","126509619819462657","Wed Oct 19 04:06:58 +0000 2011","#Facebook vs. #Google+ : Which #SocialMedia Platform is the Best for You? | @customerthink http://t.co/QqkviaAn" +"google","neutral","126509495907135489","Wed Oct 19 04:06:28 +0000 2011","Android 4.0 Ice Cream Sandwich Official, SDK Now Available http://t.co/9zUBrPE2 #tech #google" +"google","neutral","126509474935611392","Wed Oct 19 04:06:23 +0000 2011","How #Google Ventures Chooses Which Startups Get Its $200 Million http://t.co/FCWXoUd8 via @mashbusiness @mashable" +"google","neutral","126509381327134720","Wed Oct 19 04:06:01 +0000 2011","#goodtoknow #privacy #google http://t.co/Ye337YR6" +"google","neutral","126509283260108800","Wed Oct 19 04:05:38 +0000 2011","Samsung Galaxy Nexus: Android 4.0 Smartphone Unveiled : http://t.co/b5jmtr1L #android #google #mobile #news" +"google","neutral","126509226720903168","Wed Oct 19 04:05:24 +0000 2011","Samsung And Google Announce Galaxy Nexus http://t.co/I6r95xgK #samsung #galaxy #nexus #google #android #ICS #news #tech" +"google","neutral","126509135842914304","Wed Oct 19 04:05:03 +0000 2011","Newest version of my (and @latc214's) phone (and a new version of Android) announced today. 
http://t.co/7acjrKIN #nexus #google #samsung" +"google","neutral","126508885992415232","Wed Oct 19 04:04:03 +0000 2011","#Google's #Android 4.0 Development Plan: Make Android Prettier http://t.co/7X0OFPuq @paidcontent" +"google","neutral","126508842367463424","Wed Oct 19 04:03:53 +0000 2011","#Google springs #IceCreamSandwich on Samsung #GalaxyNexus smartphone. Android tablets soon? http://t.co/yyU5q0I7 (TabTimes)" +"google","neutral","126508789254979584","Wed Oct 19 04:03:40 +0000 2011","It tells you where to find your next drink. RT @RoycinD: What the fuck is a Barometer doing in a phone? Eh? #ICS #google #galaxynexus" +"google","neutral","126508753997668352","Wed Oct 19 04:03:32 +0000 2011","Good To Read: Google steps up its privacy game, launches Good To Know (ZDNet - Oct.18) #google http://t.co/qKthMjwo" +"google","neutral","126508642060083200","Wed Oct 19 04:03:05 +0000 2011","Thank you, Internet, for allowing me to be Bunny Watson for my friends. #google #imdb #allmusic" +"google","neutral","126508398522019840","Wed Oct 19 04:02:07 +0000 2011","#Google weiterhin auf der Erfolgsspur http://t.co/lf3RkTcJ" +"google","neutral","126508044187217920","Wed Oct 19 04:00:42 +0000 2011","Well we should knw that this isnt the app from #google unlike #Apple." +"google","neutral","126508037400825857","Wed Oct 19 04:00:41 +0000 2011","Samsung e o Google lançam hoje o Galaxy Nexus e o Android 4.0 http://t.co/PW4KRE0F #google #samsung" +"google","neutral","126508035416928256","Wed Oct 19 04:00:40 +0000 2011","RT @BatzayaB: #Google #Android Ice Cream Sandwich 4.0 (photos) http://t.co/XlJBYgIS" +"google","neutral","126507982543532034","Wed Oct 19 04:00:28 +0000 2011","#Google #Android Ice Cream Sandwich 4.0 (photos) http://t.co/iK3Iq8KR" +"google","neutral","126507925337411584","Wed Oct 19 04:00:14 +0000 2011","#Google #Galaxy #Nexus, Miam ! + +http://t.co/8UpG3BOg" +"google","neutral","126507911844339712","Wed Oct 19 04:00:11 +0000 2011","On Google+ then go here http://t.co/VS0UJozD #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126507657472393216","Wed Oct 19 03:59:10 +0000 2011","#Google Unwrapping Ice Cream Sandwich on the Galaxy Nexus http://t.co/D7ntPaIt" +"google","neutral","126507418539671552","Wed Oct 19 03:58:13 +0000 2011","Google Galaxy Nexus http://t.co/vWpuA2gt #google #galaxynexus #android #ICS" +"google","neutral","126506926069645312","Wed Oct 19 03:56:16 +0000 2011","Some people should not post replies in #Google+ threads. Their posts only continue to weaken their creditbility." +"google","neutral","126506902145347584","Wed Oct 19 03:56:10 +0000 2011","#Android #Google Android 4.0 (Ice Cream Sandwich) SDK and ADT 14 Released http://t.co/wG9xoNkg #DhilipSiva" +"google","neutral","126506895124086784","Wed Oct 19 03:56:08 +0000 2011","#Android #Google Official Galaxy Nexus Site Goes Live, Highlights Ice Cream Sandwich And Galaxy Nexus... http://t.co/4ijvupvq #DhilipSiva" +"google","neutral","126506846499520512","Wed Oct 19 03:55:57 +0000 2011","Reading: Android 4.0 'Ice Cream Sandwich' Features Announced By Google (HuffPost- Oct.18) #google http://t.co/wmqNPcLV" +"google","neutral","126506795958153216","Wed Oct 19 03:55:45 +0000 2011","Galaxy Nexus #Android ICS barometer will undoubtedly be used to add 3D to #Google Maps." 
+"google","neutral","126506701259157504","Wed Oct 19 03:55:22 +0000 2011","#Google+" +"google","neutral","126506651271430144","Wed Oct 19 03:55:10 +0000 2011","@dwtalker that's some tough competition right there http://t.co/XP9LhjpR #TechWar #Google #Facebook #Apple #Amazon" +"google","neutral","126506310228377600","Wed Oct 19 03:53:49 +0000 2011","#Android #Google Calling all speed demons - Galaxy Nexus! http://t.co/kma7ZnAf #DhilipSiva" +"google","neutral","126506306201858049","Wed Oct 19 03:53:48 +0000 2011","#Android #Google Android Beam makes NFC for more than paying for things http://t.co/dxrbxjkB #DhilipSiva" +"google","neutral","126506273662447616","Wed Oct 19 03:53:40 +0000 2011","Calling all speed demons - Galaxy Nexus! http://t.co/7OyVuqdq #android #google" +"google","neutral","126506272152502272","Wed Oct 19 03:53:40 +0000 2011","Android Beam makes NFC for more than paying for things http://t.co/3wKMrcTL #android #google" +"google","neutral","126506232432439296","Wed Oct 19 03:53:30 +0000 2011","#Google Docs presentations get real-time collaboration, transition effects and more #SocialMedia #SMM http://t.co/ekdJXo5h" +"google","neutral","126505781553147904","Wed Oct 19 03:51:43 +0000 2011","Yeni Galaxy Nexus - http://t.co/QZcbA4Bx #google #galaxynexus" +"google","neutral","126505768299147264","Wed Oct 19 03:51:40 +0000 2011","www.google.com/googlenexus #google #android #icecreamsandwich #googlenexus" +"google","neutral","126505670446022656","Wed Oct 19 03:51:16 +0000 2011","RT @MikeMcCready: Don’t Cast Your Google Adwords Net Too Wide | Mike McCready http://t.co/ms06fehi #google #adwords" +"google","neutral","126505612690456576","Wed Oct 19 03:51:03 +0000 2011","#Google, #Samsung unveil Ice Cream Sandwich powered Galaxy Nexus.. #Android4.0" +"google","neutral","126505607116238848","Wed Oct 19 03:51:01 +0000 2011","I'm thinking seriously to make my own page in #wikipedia just to find myself in #google." +"google","neutral","126505469601775616","Wed Oct 19 03:50:28 +0000 2011","#Google is introducing #GalaxyNexus. You can unlock you phone by smile. Other then that....get an iPhone." +"google","neutral","126505424823402496","Wed Oct 19 03:50:18 +0000 2011","Unwrapping Ice Cream Sandwich on the Galaxy Nexus http://t.co/9IvTJkit #Google" +"google","neutral","126505412068511745","Wed Oct 19 03:50:15 +0000 2011","Unwrapping Ice Cream Sandwich on the Galaxy Nexus http://t.co/QoXd3I0s #google" +"google","neutral","126505392191705088","Wed Oct 19 03:50:10 +0000 2011","Update : Google Officially calls Android Ice Cream Sandwich, Shouts Android 4.0 http://t.co/zzRuwrSk #google #ics #android4" +"google","neutral","126505364307984384","Wed Oct 19 03:50:03 +0000 2011","116 new #jobs posting for #google http://t.co/XfG6l29D #jobely" +"google","neutral","126505359161573377","Wed Oct 19 03:50:02 +0000 2011","Is Google Analytics Reverse-Engineering Facebook?: http://t.co/mJHRoa7L #google #facebook #media #digitalstrategy" +"google","neutral","126505133587709953","Wed Oct 19 03:49:08 +0000 2011","RT “@Ant0ineH: Twitter Will Beat #Google+ And #Facebook With Simplicity- #Twitter CEO http://t.co/melkHdb7†Beat? You aren't competitors…" +"google","neutral","126505125853401088","Wed Oct 19 03:49:07 +0000 2011","RT @meko2301: #Google #Android #ICS #IceCreamSandwich" +"google","neutral","126505094664568833","Wed Oct 19 03:48:59 +0000 2011","who the hell is #ambercole?? i dont feel like i care enough to #google this bitch so ill ask you guys!" 
+"google","neutral","126505086041067520","Wed Oct 19 03:48:57 +0000 2011","Posting on #twitter about #google+ suggesting I add Tom from #myspace I think the universe just exploded. #justsayin" +"google","neutral","126504998799552513","Wed Oct 19 03:48:36 +0000 2011","#Pearson and #Google take advantage of the profound lack of LMS PD with their new offering http://t.co/4RsRDU4G Also do not do reporting" +"google","neutral","126504923687960576","Wed Oct 19 03:48:18 +0000 2011","Unwrapping Ice Cream Sandwich on the Galaxy Nexus http://t.co/qukDb5lb #google #mobile" +"google","neutral","126504910844989440","Wed Oct 19 03:48:15 +0000 2011","#Google #Android #ICS #IceCreamSandwich" +"google","neutral","126504893354741760","Wed Oct 19 03:48:11 +0000 2011","http://t.co/pUvXwjjc How do #CMM & #BBC get #massive #hits? Is #Google @ work? blogspot http://t.co/KPdXWPEB wp http://t.co/31g0geqy" +"google","neutral","126504842469457922","Wed Oct 19 03:47:59 +0000 2011","Google Reveals the Details of Android OS 4.0, Samsung Galaxy Nexus http://t.co/KyxuNP8I #google #android #samsung" +"google","neutral","126504774047772672","Wed Oct 19 03:47:43 +0000 2011","#google must b having sum underlying reasoning 4 naming products after eatables #eclair #gingerbread #icecream #rawadosa #android" +"google","neutral","126504709036056576","Wed Oct 19 03:47:27 +0000 2011","Installing the Ice Cream Sandwich SDK #android #google" +"google","neutral","126504479595044864","Wed Oct 19 03:46:32 +0000 2011","nothing like saying 'screw you' to a bad real estate company on 7 different social pages! #google #citysearch #yellowpages #yelp #yahoo ..." +"google","neutral","126504427275300864","Wed Oct 19 03:46:20 +0000 2011","Cool Infographic: Perks working for the big techs like #Google & #Facebook | http://t.co/lNEgf7Kn" +"google","neutral","126504377862205441","Wed Oct 19 03:46:08 +0000 2011","On Google+ then go here http://t.co/K6BggvDx #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126504319859175424","Wed Oct 19 03:45:54 +0000 2011","#Google To Begin Encrypting Searches & Outbound Clicks By Default With SSL Search http://t.co/xW7vh75e" +"google","neutral","126504126665334784","Wed Oct 19 03:45:08 +0000 2011","#droidtweak #Video: #IceCreamSandwich on the #GalaxyNexus http://t.co/vBcl0OzP #googlephone #nexus1 #NexusS #google" +"google","neutral","126504115508494337","Wed Oct 19 03:45:06 +0000 2011","Galaxy Nexus – the first Ice Cream Sandwich phone coming to Verizon http://t.co/BiPvBdxz #android #google" +"google","neutral","126504077831045122","Wed Oct 19 03:44:57 +0000 2011","is oxycodone and nyquil a bad mix? lol .. I don't think so but.. #ineedadoctor and I'm to lazy to use #google" +"google","neutral","126503993768804352","Wed Oct 19 03:44:37 +0000 2011","Big Money... 
http://t.co/Ms6AwiX4 #seo #search #google #hack #sem #it #business #web #marketing #online #yes #hot #winning" +"google","neutral","126503947560165376","Wed Oct 19 03:44:26 +0000 2011","RT @RUILIFESTYLE: #GOOGLE THE MIXTAPE http://t.co/v8zTtNVV" +"google","neutral","126503706760974337","Wed Oct 19 03:43:28 +0000 2011","#Android #Google Meet the Samsung Galaxy Nexus and Ice Cream Sandwich http://t.co/2VWXzReW #DhilipSiva" +"google","neutral","126503703514578944","Wed Oct 19 03:43:27 +0000 2011","#Samsung Galaxy Nexus is now confirmed http://t.co/v4LR34p3 #googlenews #mobile #android40icecreamsandwich #google" +"google","neutral","126503693355982849","Wed Oct 19 03:43:25 +0000 2011","#Android #Google Android 4.0 SDK now available http://t.co/HI1cQ2si #DhilipSiva" +"google","neutral","126503688595447810","Wed Oct 19 03:43:24 +0000 2011","#Android #Google Samsung Galaxy Nexus release details still in short supply http://t.co/s6A7Y9WT #DhilipSiva" +"google","neutral","126503683142852608","Wed Oct 19 03:43:23 +0000 2011","#Android #Google Samsung Galaxy Nexus hands-on roundup http://t.co/RE1EnpaU #DhilipSiva" +"google","neutral","126503627706732544","Wed Oct 19 03:43:09 +0000 2011","Don’t Cast Your Google Adwords Net Too Wide | Mike McCready http://t.co/ms06fehi #google #adwords" +"google","neutral","126503364103118848","Wed Oct 19 03:42:06 +0000 2011","Unwrapping Ice Cream Sandwich on the Galaxy Nexus http://t.co/TCKtgpBH #Google" +"google","neutral","126503349657935872","Wed Oct 19 03:42:03 +0000 2011","Google details Android 4.0 Ice Cream Sandwich, offers SDK http://t.co/mShfEQe6 #Android #Google" +"google","neutral","126503348613558272","Wed Oct 19 03:42:03 +0000 2011","Google details Android 4.0 Ice Cream Sandwich, offers SDK http://t.co/66fHUs6n #Android #Google" +"google","neutral","126503090487705601","Wed Oct 19 03:41:01 +0000 2011","Dennis Goedegebuure’s Thoughts on Changes in #Google Analytics: I contacted Dennis and asked… http://t.co/okCwNrpq" +"google","neutral","126503088461848576","Wed Oct 19 03:41:01 +0000 2011","Article Recap on the #Google Analytics ""Secure"" Change: Here are some articles we found after… http://t.co/RPTLWMLo" +"google","neutral","126502770130960384","Wed Oct 19 03:39:45 +0000 2011","#Google releases an Infinite Digital Bookcase http://t.co/FbvZq8Yp by @MeghanKel" +"google","neutral","126502761608122368","Wed Oct 19 03:39:43 +0000 2011","Check out "" @Legiondachosen1 ft. Big Hit - My Life's A Movie (snippet) - Prod. by @LegiondaChosen1"" - http://t.co/XjTCc7j7 #Google Me!!" +"google","neutral","126502730264088576","Wed Oct 19 03:39:35 +0000 2011","Massive Galaxy Nexus/Ice Cream Sandwich Recap. 
- http://t.co/I8SW5Udz #galaxynexus #Google #ics #Samsung" +"google","neutral","126502630578069504","Wed Oct 19 03:39:12 +0000 2011","#Android #Google Three UK to carry Samsung Galaxy Nexus http://t.co/Lt4m2HH4 #DhilipSiva" +"google","neutral","126502626085969920","Wed Oct 19 03:39:11 +0000 2011","#Android #Google Google updates Nexus site with Galaxy Nexus details http://t.co/BeckhmA1 #DhilipSiva" +"google","neutral","126502616128684032","Wed Oct 19 03:39:08 +0000 2011","Three UK to carry Samsung Galaxy Nexus http://t.co/SwvAcbxC #android #google" +"google","neutral","126502614086070273","Wed Oct 19 03:39:08 +0000 2011","Google updates Nexus site with Galaxy Nexus details http://t.co/LaSn4ol0 #android #google" +"google","neutral","126502326356815872","Wed Oct 19 03:37:59 +0000 2011","RT @tatn: #Google, #Samsung unveil Ice Cream Sandwich-powered Galaxy Nexus http://t.co/uY7KWJiY via @CNET #Android" +"google","neutral","126502296916987904","Wed Oct 19 03:37:52 +0000 2011","#Google, #Samsung Add ’Ice Cream Sandwich’ Phone http://t.co/oOdugpqG $AAPL $GOOG #hitech #gadgets #mkt #digital" +"google","neutral","126502291799945217","Wed Oct 19 03:37:51 +0000 2011","#google+ ...thoughts? to sign up or not to sign up, that is the question..." +"google","neutral","126502250041454593","Wed Oct 19 03:37:41 +0000 2011","#Google Ice Cream Sandwich, Nexus Prime Launch [LIVE BLOG] http://t.co/YD4FBog3 #uncategorized #android" +"google","neutral","126502128737976321","Wed Oct 19 03:37:12 +0000 2011","#Google has been released #Android 4.0 platform http://t.co/mvdCD0v8" +"google","neutral","126502101944778752","Wed Oct 19 03:37:06 +0000 2011","#Google Defaults to Encrypted HTTPS #Searches for Logged In Users [#Security] http://t.co/kzMZDxmE" +"google","neutral","126502100308996097","Wed Oct 19 03:37:05 +0000 2011","#Android #Google Samsung's Galaxy Nexus - It's Official, It's Headed For Global Availability, And It's... http://t.co/TI9Hy8LW #DhilipSiva" +"google","neutral","126501988132327425","Wed Oct 19 03:36:38 +0000 2011","Personal-finance #security #startup @BillGuard raises $10mn from #KhoslaVentures, Peter Thiel’s @FoundersFund n #Google http://t.co/UdlOQFsA" +"google","neutral","126501882331017216","Wed Oct 19 03:36:13 +0000 2011","#google just invented #mango taste #icecream. even the #roboto is so #segoe. but it's ok. every oem will have to pay #ms anyway... LOL." 
+"google","neutral","126501647378690048","Wed Oct 19 03:35:17 +0000 2011","Embed Google Calendar To Your Wordpress Post: http://t.co/ah8km2JF #wordpress #google" +"google","neutral","126501582274707457","Wed Oct 19 03:35:02 +0000 2011","#Google To Begin Encrypting Searches & Outbound Clicks By Default With SSL Search http://t.co/bf5zVm0U @searchengineland" +"google","neutral","126501535160074240","Wed Oct 19 03:34:50 +0000 2011","#samsung #google #galaxynexus We got the lowdown after the Hong Kong press release, check it out: http://t.co/OWhYR0YJ" +"google","neutral","126501463752060928","Wed Oct 19 03:34:33 +0000 2011","#google #IceCreamSandwich #Samsung #Updates #Android | +Galaxy Nexus with Ice Cream Sandwich: pictures http://t.co/lfpPk5Yk" +"google","neutral","126501463726899200","Wed Oct 19 03:34:33 +0000 2011","#google #IceCreamSandwich #Samsung #Updates #Android | +Galaxy Nexus with Ice Cream Sandwich: pictures http://t.co/qD1bqVm8" +"google","neutral","126501463672369152","Wed Oct 19 03:34:33 +0000 2011","#google #IceCreamSandwich #Samsung #Updates #Android | +Galaxy Nexus with Ice Cream Sandwich: pictures http://t.co/eizExUn0" +"google","neutral","126501463663976448","Wed Oct 19 03:34:33 +0000 2011","#google #IceCreamSandwich #Samsung #Updates #Android | +Galaxy Nexus with Ice Cream Sandwich: pictures http://t.co/DGGsNOh2" +"google","neutral","126501463659790337","Wed Oct 19 03:34:33 +0000 2011","#google #IceCreamSandwich #Samsung #Updates #Android | +Galaxy Nexus with Ice Cream Sandwich: pictures http://t.co/XPGHLC2o" +"google","neutral","126501463626223616","Wed Oct 19 03:34:33 +0000 2011","#google #IceCreamSandwich #Samsung #Updates #Android | +Galaxy Nexus with Ice Cream Sandwich: pictures http://t.co/rUkTyUJC" +"google","neutral","126501463554924544","Wed Oct 19 03:34:33 +0000 2011","#google #IceCreamSandwich #Samsung #Updates #Android | +Galaxy Nexus with Ice Cream Sandwich: pictures http://t.co/3GC0UEPM" +"google","neutral","126501463529754624","Wed Oct 19 03:34:33 +0000 2011","#google #IceCreamSandwich #Samsung #Updates #Android | +Galaxy Nexus with Ice Cream Sandwich: pictures http://t.co/STIfQWTU" +"google","neutral","126501463487815680","Wed Oct 19 03:34:33 +0000 2011","#google #IceCreamSandwich #Samsung #Updates #Android | +Galaxy Nexus with Ice Cream Sandwich: pictures http://t.co/8fiwDhcC" +"google","neutral","126501428897382400","Wed Oct 19 03:34:25 +0000 2011","#SEO Tip:Redirect http://t.co/IWeHzOqX to http://t.co/YQYNjBdy by using 301 redirection. #google #yahoo #bing #web" +"google","neutral","126501392163684353","Wed Oct 19 03:34:16 +0000 2011","#Google #Cloud Epson Artisan 837: According to Epson you can also upgrade the printer to support Google Clo... http://t.co/D6gFyji3 #TCN" +"google","neutral","126501360089825280","Wed Oct 19 03:34:09 +0000 2011","Chrome Experiment - WebGL Bookcase - http://t.co/1GNsOwmu #Google" +"google","neutral","126501176559677441","Wed Oct 19 03:33:25 +0000 2011","Word of Mouth and the Internet - YouTube http://t.co/UCD9sDx4 #google #searching #wom" +"google","neutral","126501155999203328","Wed Oct 19 03:33:20 +0000 2011","@AtlantaSnoop yea I've seen my #location waaaay off on #google sites. Not a big deal but still strange. 
#spookygoogle" +"google","neutral","126500867280093184","Wed Oct 19 03:32:11 +0000 2011","#Android #Google Galaxy Nexus Site is Live: Register and Relive the New Features http://t.co/7llRSpUO #DhilipSiva" +"google","neutral","126500856026771456","Wed Oct 19 03:32:09 +0000 2011","On Google+ then go here http://t.co/iLjdQBpT #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126500659833995265","Wed Oct 19 03:31:22 +0000 2011","#Google & Samsung announced their (hopeful) rebuttal to iPhone 4S tonight... site shows nice specs but no carriers yet. http://t.co/Z8SDgUT9" +"google","neutral","126500532478148610","Wed Oct 19 03:30:51 +0000 2011","RT @GWGoddess: We could have told you this two years ago! http://t.co/6BZ6W7cf @Novell #GroupWise #Google" +"google","neutral","126500276994711553","Wed Oct 19 03:29:50 +0000 2011","@Affan Fact! They still do everything better though #google" +"google","neutral","126500259974234112","Wed Oct 19 03:29:46 +0000 2011","@jpobrien11 or windows bridge for windows phones and don't try and pretend apple didn't steal the notification bar from #google #droidtweets" +"google","neutral","126500105762250752","Wed Oct 19 03:29:10 +0000 2011","What could a bookcase look like in 10 years...maybe this? http://t.co/i6YWQ7oR #google #books" +"google","neutral","126499963176886272","Wed Oct 19 03:28:36 +0000 2011","And thanks to TWIT's AAA crew for the live coverage of the #Google / Samsung announcement!" +"google","neutral","126499722805522433","Wed Oct 19 03:27:38 +0000 2011","Check%RT @CadientGroup: $6% of women use #Google for info on health care vs 28% of men -> http://t.co/QEfhPgX2 (via @nicolaziady) #hcsmeu" +"google","neutral","126499712164560896","Wed Oct 19 03:27:36 +0000 2011","Infinite digital dusting RT @VentureBeat: #Google releases an Infinite Digital Bookcase http://t.co/8m7gN6iN" +"google","neutral","126499577443532801","Wed Oct 19 03:27:04 +0000 2011","The competition: #Google introduces #Android 4.0 Ice Cream Sandwich and the Galaxy Nexus http://t.co/PCu88z7j" +"google","neutral","126499521822867458","Wed Oct 19 03:26:50 +0000 2011","Hide the women and children, break out the guns, #Google is going to encrypt your searches by default! http://t.co/2VbCGLgp #SEO" +"google","neutral","126499346022805504","Wed Oct 19 03:26:08 +0000 2011","#Android #Google Android 4.0 Ice Cream Sandwich SDK is available today http://t.co/mTnbj9hD #DhilipSiva" +"google","neutral","126499314393546752","Wed Oct 19 03:26:01 +0000 2011","RT @BrightSideNews: @Google and @Samsung Announce the #Galaxy #Nexus http://t.co/dZHvFxVh #Android #ICS #Icecreamsandwich #Google #Samsung" +"google","neutral","126499307284217856","Wed Oct 19 03:25:59 +0000 2011","So #Google #Droid is built (in part) using the bouncy castle C# api. C# is a Microsoft programming language. 
+1 for all you #Linux Fanboys" +"google","neutral","126499293895987201","Wed Oct 19 03:25:56 +0000 2011","My thoughts on tonight's #Google Ice Cream Sandwich and #Samsung Galaxy Nexus talk: http://t.co/gu7ScKXL" +"google","neutral","126499217807122433","Wed Oct 19 03:25:38 +0000 2011","#Galaxy #Nexus Officially Announced At Hong Kong Event http://t.co/wMy6LoCd #google" +"google","neutral","126498840831475712","Wed Oct 19 03:24:08 +0000 2011","#Android #Google Ice Cream Sandwich SDK now available http://t.co/K0tksZ1O #DhilipSiva" +"google","neutral","126498825992019969","Wed Oct 19 03:24:05 +0000 2011","Ice Cream Sandwich SDK now available http://t.co/ZOgECNER #android #google" +"google","neutral","126498759143211008","Wed Oct 19 03:23:49 +0000 2011","@csg122 http://t.co/TBxLrvin Hopefully you check twitter often enough to find this article. :D #google #ICS" +"google","neutral","126498660811935744","Wed Oct 19 03:23:25 +0000 2011","Why is everyone hating on #Android #IceCreamSandwich? #ICS #everyoneisacritic #os #google" +"google","neutral","126498593770184704","Wed Oct 19 03:23:09 +0000 2011","#Android #Google Galaxy Nexus Press Release http://t.co/JKrzYGBO #DhilipSiva" +"google","neutral","126498481534808064","Wed Oct 19 03:22:42 +0000 2011","engineers #google Manny Marroquin #MannyMarroquin" +"google","neutral","126498259228303360","Wed Oct 19 03:21:49 +0000 2011","@Google and @Samsung Announce the #Galaxy #Nexus http://t.co/dZHvFxVh #Android #ICS #Icecreamsandwich #Google #Samsung" +"google","neutral","126498101124005889","Wed Oct 19 03:21:12 +0000 2011","Will the #Galaxy #Nexus be coming to #Sprint? No mention of price, carriers, or source release for ICS. Give us those info #Google!" +"google","neutral","126497969972330497","Wed Oct 19 03:20:40 +0000 2011","RT @aalkhubaizi: Introducing Android 4.0, Ice Cream SandwichIntroducing Android 4.0, Ice Cream Sandwich http://t.co/L4Hqkv0c #Google #An ..." +"google","neutral","126497822009860096","Wed Oct 19 03:20:05 +0000 2011","#Advertising #Blog Today's blog covers the #IGen and #Google's ""Parisian Love"" Debating with @smarch323 for your votes! 
http://t.co/x3JY4b2L" +"google","neutral","126497642493648897","Wed Oct 19 03:19:22 +0000 2011","RT @DarkoIvancevic: How #Google Ventures Chooses Which Startups Get Its $200 Million http://t.co/9WDpnVDR" +"google","neutral","126497423815213056","Wed Oct 19 03:18:30 +0000 2011","Siri's Search Capabilities vs Google Voice Search on Android - Tested http://t.co/YKiFTufX #apple #google #voip" +"google","neutral","126497346870718464","Wed Oct 19 03:18:12 +0000 2011","#Google announces #NFC-based #Android Beam for sharing between phones http://t.co/5PAI7r8y" +"google","neutral","126497339933327360","Wed Oct 19 03:18:10 +0000 2011","#Google announces #NFC-based #Android Beam for sharing between phones http://t.co/zYKEr3ax via @engadget" +"google","neutral","126497339073495040","Wed Oct 19 03:18:10 +0000 2011","#Android #Google Android 4.0 Ice Cream Sandwich Official, SDK Now Available http://t.co/rXuPIW2U #DhilipSiva" +"google","neutral","126497335831306240","Wed Oct 19 03:18:09 +0000 2011","On Google+ then go here http://t.co/kDHaUfDl #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126497288301457408","Wed Oct 19 03:17:58 +0000 2011","Samsung Galaxy Nexus - the new official Google smartphone with Android 4.0 ""Ice-Cream Sandwich"" http://t.co/S8hSL06q #Google #Android" +"google","neutral","126497256382803968","Wed Oct 19 03:17:50 +0000 2011","#Google releases the latest version of #Android 4.0 #IceCreamSandwich http://t.co/fB7y5E1e" +"google","neutral","126497160345829376","Wed Oct 19 03:17:27 +0000 2011","Also, I maybe missing something here. But I thought #Google bought #Motorola Mobile? So, what's with all the #Samsung Love??" +"google","neutral","126497100866387969","Wed Oct 19 03:17:13 +0000 2011","ahahaa for the twitpic... the power of #google" +"google","neutral","126497008197435392","Wed Oct 19 03:16:51 +0000 2011","Google unveils Ice Cream Sandwich, SDK is available now http://t.co/901Ay8Qr via @talkandroid #ics #google #android #samsung #galaxynexus" +"google","neutral","126496951746301952","Wed Oct 19 03:16:38 +0000 2011","RT @whymicrosoft: Google’s Ad revenue for Q3 was 96% of total revenue. So, are #Google customers the commodity being sold to advertisers?" +"google","neutral","126496930204352512","Wed Oct 19 03:16:33 +0000 2011","#Google #Samsung #GalaxyNexus http://t.co/1C5iEpPk" +"google","neutral","126496891797118976","Wed Oct 19 03:16:23 +0000 2011","RT @mashable #Google Launches #Android Ice Cream Sandwich OS http://t.co/zxfeb0ld" +"google","neutral","126496853742198784","Wed Oct 19 03:16:14 +0000 2011","RT @mcala: #windowsphone Metro style has obviously impressed somebody at #Google. #imitation #flattery #Android 4.0" +"google","neutral","126496835933179904","Wed Oct 19 03:16:10 +0000 2011","#Android #Google Galaxy Nexus and Ice Cream Sandwich Promo Video Released, Verizon Spotted in... 
http://t.co/1niTrTzu #DhilipSiva" +"google","neutral","126496835920609280","Wed Oct 19 03:16:10 +0000 2011","#Google announces #NFC-based #Android Beam for sharing between phones http://t.co/dduu9ffd" +"google","neutral","126496802634612736","Wed Oct 19 03:16:02 +0000 2011","RT @dalmaer: WebGL infinite bookcase UI http://t.co/bvu1qtkT #google" +"google","neutral","126496726143086592","Wed Oct 19 03:15:44 +0000 2011","#TeamGoogleNexus RT @B__Y #Google + #Samsung = Perfect #Icecream sandwich #GalaxyNexus" +"google","neutral","126496688163655680","Wed Oct 19 03:15:35 +0000 2011","#Google does away with #Buzz - http://t.co/uqMIQbBT #SearchEngine #Search #SocialNetworking #Social #Network" +"google","neutral","126496634120056832","Wed Oct 19 03:15:22 +0000 2011","Add me to your circles on #Google+" +"google","neutral","126496571541045248","Wed Oct 19 03:15:07 +0000 2011","HNews: Google, Samsung unveil Ice Cream Sandwich-powered Galaxy Nexus http://t.co/elh77r4p #google #.net" +"google","neutral","126496553811709952","Wed Oct 19 03:15:03 +0000 2011","#Google Buzz to the junkyard along with four other services including Jaiku http://t.co/XzxMHuuu > liked Jaiku" +"google","neutral","126496553748803586","Wed Oct 19 03:15:03 +0000 2011","#Google Buzz to the junkyard along with four other services including Jaiku http://t.co/lm79ZlWA > liked Jaiku" +"google","neutral","126496493648609280","Wed Oct 19 03:14:48 +0000 2011","#Google (and #Twitter ) get ready for the debut of #IceCreamSandwich http://t.co/T36KZ6gf" +"google","neutral","126496395132796929","Wed Oct 19 03:14:25 +0000 2011","#SEO Tip:Examine #Google #Analytics reports frequently.Check the conversion and bounce rate+analyze traffic sources" +"google","neutral","126496303738912769","Wed Oct 19 03:14:03 +0000 2011","3 new stuffs today, #Motorola #razr, #Samsung #Galaxy #Nexus, #Google #Android 4.0 #IceCreamSandwich #ICS ~" +"google","neutral","126496230984519680","Wed Oct 19 03:13:46 +0000 2011","Ever post something to the wrong circles? 
#google+" +"google","neutral","126496228572790784","Wed Oct 19 03:13:45 +0000 2011","Introducing Android 4.0, Ice Cream SandwichIntroducing Android 4.0, Ice Cream Sandwich http://t.co/L4Hqkv0c #Google #Android" +"google","neutral","126496131554344960","Wed Oct 19 03:13:22 +0000 2011","Calling All #IceCreamSandwich Lovers | #Google & #Samsung Like in Hong Kong - A new look at what's new from Android http://t.co/rUkTyUJC" +"google","neutral","126496131545960449","Wed Oct 19 03:13:22 +0000 2011","Calling All #IceCreamSandwich Lovers | #Google & #Samsung Like in Hong Kong - A new look at what's new from Android http://t.co/STIfQWTU" +"google","neutral","126496131529183232","Wed Oct 19 03:13:22 +0000 2011","Calling All #IceCreamSandwich Lovers | #Google & #Samsung Like in Hong Kong - A new look at what's new from Android http://t.co/3GC0UEPM" +"google","neutral","126496131441102848","Wed Oct 19 03:13:22 +0000 2011","Calling All #IceCreamSandwich Lovers | #Google & #Samsung Like in Hong Kong - A new look at what's new from Android http://t.co/qD1bqVm8" +"google","neutral","126496131411742721","Wed Oct 19 03:13:22 +0000 2011","Calling All #IceCreamSandwich Lovers | #Google & #Samsung Like in Hong Kong - A new look at what's new from Android http://t.co/eizExUn0" +"google","neutral","126496131382382592","Wed Oct 19 03:13:22 +0000 2011","Calling All #IceCreamSandwich Lovers | #Google & #Samsung Like in Hong Kong - A new look at what's new from Android http://t.co/XPGHLC2o" +"google","neutral","126496131378188289","Wed Oct 19 03:13:22 +0000 2011","Calling All #IceCreamSandwich Lovers | #Google & #Samsung Like in Hong Kong - A new look at what's new from Android http://t.co/lfpPk5Yk" +"google","neutral","126496131374006272","Wed Oct 19 03:13:22 +0000 2011","Calling All #IceCreamSandwich Lovers | #Google & #Samsung Like in Hong Kong - A new look at what's new from Android http://t.co/DGGsNOh2" +"google","neutral","126496131353018368","Wed Oct 19 03:13:22 +0000 2011","Calling All #IceCreamSandwich Lovers | #Google & #Samsung Like in Hong Kong - A new look at what's new from Android http://t.co/8fiwDhcC" +"google","neutral","126496068467822593","Wed Oct 19 03:13:07 +0000 2011","#Google, #Samsung unveil Ice Cream Sandwich-powered Galaxy Nexus http://t.co/qtiCdIvd via @CNET #Android" +"google","neutral","126495741014315008","Wed Oct 19 03:11:49 +0000 2011","Motorola Droid Razr vs. Samsung Galaxy Nexus (chart) - CNET (blog) http://t.co/oRkTnT7R #nexus #one #google" +"google","neutral","126495706356789248","Wed Oct 19 03:11:41 +0000 2011","""#Google, Samsung unveil Ice Cream Sandwich-powered #Galaxy #Nexus"" - http://t.co/r8f0jmGY" +"google","neutral","126495690636529664","Wed Oct 19 03:11:37 +0000 2011","#Microsoft's Ballmer: We’re beating #Google in the #cloud http://t.co/XIq3v54f" +"google","neutral","126495653747634176","Wed Oct 19 03:11:29 +0000 2011","Guess what I spotted a couple days ago... didn't expect to see one especially in the West Village #Google http://t.co/Xop25kvU" +"google","neutral","126495340026273792","Wed Oct 19 03:10:13 +0000 2011","#Google officially announces Ice Cream Sandwich http://t.co/8U9XMNz9 #smartphones #tablets #news #android" +"google","neutral","126495319792959488","Wed Oct 19 03:10:09 +0000 2011","20% searches on #Google are related to location. 33% mobile search queries have local intent." 
+"google","neutral","126495268538560512","Wed Oct 19 03:09:56 +0000 2011","RT @ChWilhelm: #Google Wallet adds #coupons, rewards, and more #retail partners http://t.co/1s3wWs54 #li" +"google","neutral","126495169536208896","Wed Oct 19 03:09:33 +0000 2011","@OccupyWallSt #Apple stock DOWN despite #iPhone monumental SALES = #ocuupy #GOOGLE & #Verizon--i smell a #conspiracy!" +"google","neutral","126495125303083009","Wed Oct 19 03:09:22 +0000 2011","#Google announces #Android #Beam for sharing between Android Phones. Based on NFC technology. http://t.co/bc7yhhR4" +"google","neutral","126495072102522880","Wed Oct 19 03:09:10 +0000 2011","How to Create your own Google Maps http://t.co/yyNzSP8J #google #maps" +"google","neutral","126495001868906496","Wed Oct 19 03:08:53 +0000 2011","Sucks that I just threw down $3 for a panorama app when #ICS will have it as standard. Oh #Google, you pleasant bastards, you!" +"google","neutral","126494909548077056","Wed Oct 19 03:08:31 +0000 2011","It Is Indeed #Apple & #Google vs #TheOldWay!" +"google","neutral","126494729474015232","Wed Oct 19 03:07:48 +0000 2011","@Ayy_Queen #google it lol espn too" +"google","neutral","126494691016441857","Wed Oct 19 03:07:39 +0000 2011","The better question is if (and when) current devices will get the update. #Android #Google #IceCreamSandwich" +"google","neutral","126494645403398145","Wed Oct 19 03:07:28 +0000 2011","I just noticed that #google+ has introduced real time search." +"google","neutral","126494532857643008","Wed Oct 19 03:07:01 +0000 2011","$6% of women use #Google for info on health care vs 28% of men -> http://t.co/PM6wxvQb (via @nicolaziady) #hcsmeu #epharma" +"google","neutral","126494358508806144","Wed Oct 19 03:06:19 +0000 2011","Just Been To The Future And Back lol... Just Watch Something Happening Tomorrow Morning! #Google #Android" +"google","neutral","126494314057568256","Wed Oct 19 03:06:09 +0000 2011","Thank you for the laughs #Google, ""tapping my peepz"", white suit, face unlock fail, using an LED screen for a video cast. Love it anyway <3" +"google","neutral","126494270650724353","Wed Oct 19 03:05:58 +0000 2011","The Great Tech War Of 2012 - http://t.co/OY8PuVhP #Apple #Facebook #Google #Amazon" +"google","neutral","126494239143116801","Wed Oct 19 03:05:51 +0000 2011","Does Ice Cream Sandwich have a ""private browsing mode"" for, you know, ""me time?"" #google #android" +"google","neutral","126494171065364480","Wed Oct 19 03:05:35 +0000 2011","Soooooo @VerizonWireless... All eyes on you! When can we order? #Nexus #Samsung #Google #ICS" +"google","neutral","126494094276042752","Wed Oct 19 03:05:16 +0000 2011","Android Beam, panorama, Gmail offline, lockscreen, contacts, facial recognition unlock... #IceCreamSandwich #NexusPrime #Google" +"google","neutral","126493962499391488","Wed Oct 19 03:04:45 +0000 2011","Looking at some of the new features of #ICS it looks like #Google got some inspiration from #CyanogenMod." +"google","neutral","126493954429550593","Wed Oct 19 03:04:43 +0000 2011","All the info on Ice Cream Sandwich here : http://t.co/f0rNfGPA @twandroid #google #nexusprime #ics #android" +"google","neutral","126493895625424896","Wed Oct 19 03:04:29 +0000 2011","WebGL infinite bookcase UI http://t.co/bvu1qtkT #google" +"google","neutral","126493811638677504","Wed Oct 19 03:04:09 +0000 2011","RT @charmybird Soon. Getting ready for the party. 
#google #android #icecreamsandwich http://t.co/puMwmM9m" +"google","neutral","126493785533321216","Wed Oct 19 03:04:03 +0000 2011","RT @pretty_browniee: #oomf gone get it next week i promise! Man you better #Google tht!!!" +"google","neutral","126493567182061569","Wed Oct 19 03:03:11 +0000 2011","@AndroidPolice Duly noted -- I hope AOSP is updated with ICS 4.0. #Android #Google" +"google","neutral","126493558365630465","Wed Oct 19 03:03:09 +0000 2011","is #google the next innovators ?" +"google","neutral","126493553055633408","Wed Oct 19 03:03:07 +0000 2011","On Google+ then go here http://t.co/hhAmwm8K #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126493550618742784","Wed Oct 19 03:03:07 +0000 2011","#Android #Google Android 4.0 Platform and Updated SDK Tools http://t.co/0jRKtQ2S #DhilipSiva" +"google","neutral","126493370028797953","Wed Oct 19 03:02:24 +0000 2011","Recent study of #Google CTR reveals that top three positions #in #GoogleSearch receive 35% of traffic. @Econsultancy http://t.co/Z2bh6oH1" +"google","neutral","126493352370765824","Wed Oct 19 03:02:19 +0000 2011","#Google Ice Cream Sandwich, Nexus Prime Launch [LIVE BLOG] http://t.co/kj2y69HM" +"google","neutral","126493335518068736","Wed Oct 19 03:02:15 +0000 2011","RT @charlieroffe: Welcome Ice Cream Sandwich! #android #google #icecreamsandwich http://t.co/IFlB5EhJ" +"google","neutral","126493327540502528","Wed Oct 19 03:02:14 +0000 2011","Amazes me, I set up a #GMail address and have yet to use it even once and it's getting a ton of spam #Google" +"google","neutral","126493322268250114","Wed Oct 19 03:02:12 +0000 2011","Haven't even freshened up, due to #Google's #IceCreamSandwichEvent. #OffToShower now." +"google","neutral","126493312650719232","Wed Oct 19 03:02:10 +0000 2011","The Ice Cream Sandwich presentation was very buggy, however 4.0 looks much more promising than expected. #Android #GalaxyNexus #Google" +"google","neutral","126493280832724992","Wed Oct 19 03:02:02 +0000 2011","Recent study of #Google CTR reveals that top three positions in #GoogleSearch receive 35% of traffic.... http://t.co/IHts3PyW" +"google","neutral","126493225811841024","Wed Oct 19 03:01:49 +0000 2011","Hey, #google! You lied. #android 4.0 platform is not up as a link yet. But I found it. http://t.co/v5WG698V #ics #sdk" +"google","neutral","126493189254291457","Wed Oct 19 03:01:41 +0000 2011","#google #yahoo #bing Ballmer on Bing, Xbox, Apple...oh, and Yahoo, too http://t.co/CfHu4jBP #seo #serps" +"google","neutral","126493174477758464","Wed Oct 19 03:01:37 +0000 2011","So is the #galaxynexus coming to @sprint any info @android #google #android" +"google","neutral","126493155855052800","Wed Oct 19 03:01:33 +0000 2011","#Google Google Event: Vorstellung des Samsung Galaxy Nexus (Caschys Blog): o3:30 klingelte mein Wecker. Googles ... http://t.co/QnVDVcMX" +"google","neutral","126493015907897344","Wed Oct 19 03:00:59 +0000 2011","That was good but when is it coming out? What carriers? What pricepoints? #android #google" +"google","neutral","126492985834733568","Wed Oct 19 03:00:52 +0000 2011","Welcome Ice Cream Sandwich! #android #google #icecreamsandwich http://t.co/IFlB5EhJ" +"google","neutral","126492945057718272","Wed Oct 19 03:00:42 +0000 2011","@YoungBasedSonic #Google" +"google","neutral","126492886517809152","Wed Oct 19 03:00:28 +0000 2011","RT @tiffanyk: [#Infographic] #Google+ cost half a billion to build, driving users to #1 most followed user: Mark Zuckerberg. http://t.co ..." 
+"google","neutral","126492864174755841","Wed Oct 19 03:00:23 +0000 2011","#ai #google #ai #samsung" +"google","neutral","126492839998783488","Wed Oct 19 03:00:17 +0000 2011","What? No ""one more thing!"" ? #Samsung #Google #Nexus" +"google","neutral","126492795027456000","Wed Oct 19 03:00:07 +0000 2011","For all the bookworms out there! http://t.co/Mx2bMy3o #Google’s Infinite Bookcase: An Abstract Browser For Limitless Libraries #technews" +"google","neutral","126492775930798080","Wed Oct 19 03:00:02 +0000 2011","Can Bill Gross Take On #Facebook, #Twitter & #Google+ With Chime.in? - Forbes http://t.co/pG5uHrmI #socialmedia" +"google","neutral","126492775842713600","Wed Oct 19 03:00:02 +0000 2011","RT @NeowinFeed: Ice Cream Sandwich to stop carriers bullying smartphone users #google #android http://t.co/HHfJbBHU #neowin" +"google","neutral","126492759262633984","Wed Oct 19 02:59:58 +0000 2011","Just wonder how much time it will take throw an update of #android ICS on Nexus S !!! Please #google make it quick this time" +"google","neutral","126492737766834177","Wed Oct 19 02:59:53 +0000 2011","Ice Cream Sandwich to stop carriers bullying smartphone users #google #android http://t.co/a0wncHmI" +"google","neutral","126492735598366720","Wed Oct 19 02:59:52 +0000 2011","Ice Cream Sandwich to stop carriers bullying smartphone users #google #android http://t.co/HHfJbBHU #neowin" +"google","neutral","126492726601584640","Wed Oct 19 02:59:50 +0000 2011","How much house can i afford #Google" +"google","neutral","126492719987179520","Wed Oct 19 02:59:49 +0000 2011","#google #icecreamsandwich event featured #minecraft" +"google","neutral","126492692984238080","Wed Oct 19 02:59:42 +0000 2011","#Android Ice Cream Sandwich #Gmail reminds me a lot of #iOS mail app #CopyAndPaste #Google" +"google","neutral","126492604799004672","Wed Oct 19 02:59:21 +0000 2011","Samsung Galaxy Nexus Official, Launches Worldwide November - http://t.co/UX1CiB6A #galaxynexus #samsung #google #ics" +"google","neutral","126492471759867904","Wed Oct 19 02:58:50 +0000 2011","@eboyee S tim da http://t.co/hX21e1JK daje Server Error :) #Google #IceCreamSandwich" +"google","neutral","126492440914956290","Wed Oct 19 02:58:42 +0000 2011","Engadget following #android #galaxy #nexus event #google #samsung #ICS http://t.co/vZx25ht8" +"google","neutral","126492364138221569","Wed Oct 19 02:58:24 +0000 2011","RT: Collobrate and present. New version of Google presentations. http://t.co/xmF34T27 VIA @googledownunder #google #googlepresentations" +"google","neutral","126492325219278848","Wed Oct 19 02:58:15 +0000 2011","Sdk available now, so you can develop apps for the phone and tablets right now. #Android4.0 #Google" +"google","neutral","126492310866362368","Wed Oct 19 02:58:11 +0000 2011","Great hardware and great new version of Android. I want it now. API is available now at least. #Google #Android #ICS #Development" +"google","neutral","126492274766004226","Wed Oct 19 02:58:03 +0000 2011","@notch Minecraft Mobile was just mentioned by #Google while they were demoing ICS on the Nexus Prime!" +"google","neutral","126492105026703360","Wed Oct 19 02:57:22 +0000 2011","#Google unveils Android 4.0 ‘Ice Cream Sandwich’ http://t.co/dciffpI1 via @BGR" +"google","neutral","126492088270454785","Wed Oct 19 02:57:18 +0000 2011","Hey #google ,when unveiling a new product,use a backdrop w/ lots of spatial-temporal high resolution activity. Video codecs will love it. 
;)" +"google","neutral","126492059308785664","Wed Oct 19 02:57:11 +0000 2011","Android beam, share any piece of information.from.one Android device to another by simply touching the devices. #Android4.0 #Google" +"google","neutral","126491876860755969","Wed Oct 19 02:56:28 +0000 2011","New Galaxy Nexus: App Improvements - Beam - sharing using NFC #nexus #samsung #google #android bit.ly/nEJbyE" +"google","neutral","126491752260575232","Wed Oct 19 02:55:58 +0000 2011","@jcmwright we are a going over to #Google right now, and we are in Maryland (#MICA). Catch up with us this week sometime #edu11" +"google","neutral","126491726276853760","Wed Oct 19 02:55:52 +0000 2011","Mashable! - Google Ice Cream Sandwich, Nexus Prime Launch [LIVE BLOG] #google #android #ice http://t.co/jtE7VuDK" +"google","neutral","126491658341715969","Wed Oct 19 02:55:36 +0000 2011","The new #google phone. Yoooo http://t.co/sr4sCat7" +"google","neutral","126491610199490560","Wed Oct 19 02:55:24 +0000 2011","New Galaxy Nexus: App Improvements - Quick Response to unwanted calls, predefined SMS's #nexus #samsung #google #android bit.ly/nEJbyE" +"google","neutral","126491517924810752","Wed Oct 19 02:55:02 +0000 2011","RT @jcmwright: Would love to hear from other small colleges who went #Google recently. Particularly interested in the learning tech prep ..." +"google","neutral","126491513038442496","Wed Oct 19 02:55:01 +0000 2011","Many new features for Android. #NexusPrime #Google" +"google","neutral","126491459078729728","Wed Oct 19 02:54:48 +0000 2011","#Samsung and #Google's Ice Cream Sandwich event #liveblog! http://t.co/iG3Gi4qd #android #icecreamsandwich" +"google","neutral","126491255436881920","Wed Oct 19 02:54:00 +0000 2011","Phone app redesigned to be unified with google voice, etc. Has fast forward and rewind capabilities as well. #Google #Android #ICS" +"google","neutral","126491202810953728","Wed Oct 19 02:53:47 +0000 2011","New Galaxy Nexus: App Improvements - New visual voicemail within the phone app #nexus #samsung #google #android bit.ly/nEJbyE" +"google","neutral","126490806315008000","Wed Oct 19 02:52:12 +0000 2011","RT @thedroidguy: #Google announces dates for 2012 IO http://t.co/90xoKyQN" +"google","neutral","126490801982275584","Wed Oct 19 02:52:11 +0000 2011","#Android #Google Google officially announces Ice Cream Sandwich http://t.co/Ij292Ye3 #DhilipSiva" +"google","neutral","126490797225934850","Wed Oct 19 02:52:10 +0000 2011","#Android #Google Samsung Galaxy Nexus launching in US, Europe and Asia this November http://t.co/NLdTU5oY #DhilipSiva" +"google","neutral","126490763151421440","Wed Oct 19 02:52:02 +0000 2011","#Google's open source search to end: http://t.co/h8PYttyr" +"google","neutral","126490712299675649","Wed Oct 19 02:51:50 +0000 2011","Google officially announces Ice Cream Sandwich http://t.co/u702sosO #android #google" +"google","neutral","126490709921497088","Wed Oct 19 02:51:49 +0000 2011","Samsung Galaxy Nexus launching in US, Europe and Asia this November http://t.co/SzchgKIf #android #google" +"google","neutral","126490662416826369","Wed Oct 19 02:51:38 +0000 2011","Hugo is buddies with 50 Cent? 
#google #android" +"google","neutral","126490566837026817","Wed Oct 19 02:51:15 +0000 2011","New Galaxy Nexus: App Improvements - New People App to improve contact information #nexus #samsung #google #android bit.ly/nEJbyE" +"google","neutral","126490278143082496","Wed Oct 19 02:50:07 +0000 2011","@5in_n_the_Air #GOOGLE time kml" +"google","neutral","126490265463701504","Wed Oct 19 02:50:04 +0000 2011","#Google announces dates for 2012 IO http://t.co/90xoKyQN" +"google","neutral","126490235877081088","Wed Oct 19 02:49:56 +0000 2011","I'm thinking about Google http://t.co/ACjRL4FO @GetGlue #Google" +"google","neutral","126490166897541120","Wed Oct 19 02:49:40 +0000 2011","Samsung Galaxy Nexus announced, full specs available http://t.co/VNGazg48 #samsung #galaxy #nexus #google #android #ics #thetechcheck" +"google","neutral","126490158815125504","Wed Oct 19 02:49:38 +0000 2011","Samsung Galaxy Nexus announced, full specs available http://t.co/0Nd3LiwV #samsung #galaxy #nexus #google #android #ics #thetechcheck" +"google","neutral","126490134869852161","Wed Oct 19 02:49:32 +0000 2011","#google #galaxy #nexus, I WANT A PHONE PLEASE TALK ABOUT OTHER FEATURES NOW THE CAMERA IS ONLY ONE PART!" +"google","neutral","126490118050684928","Wed Oct 19 02:49:28 +0000 2011","Funny how #samsung didn't have the #facebook app on their #galaxynexus demo but instead had #google+ LOOL." +"google","neutral","126490034865045504","Wed Oct 19 02:49:09 +0000 2011","On Google+ then go here http://t.co/86xwWN2d #GPlus #Googleplus #Google #teamfollowback #socialnetwork" +"google","neutral","126490011901231104","Wed Oct 19 02:49:03 +0000 2011","New Galaxy Nexus: Hardware Improvements - Camera captures 1080p video #nexus #samsung #google #android bit.ly/nEJbyE" +"google","neutral","126489950614073345","Wed Oct 19 02:48:48 +0000 2011","Video of livestream of the Google/Samsung announcement froze. Damn my slow internet connection. #Google #Samsung" +"google","neutral","126489908998176769","Wed Oct 19 02:48:39 +0000 2011","1080p video recording, continuous focus, zoom while recording and time lapse included in Android 4.0. #Google #Android #ICS" +"google","neutral","126489751325908992","Wed Oct 19 02:48:01 +0000 2011","The new panorama feature in #Android Ice Cream Sandwich is pretty cool. Saw a glitch in the live presentation though. #Google" +"google","neutral","126489719012990976","Wed Oct 19 02:47:53 +0000 2011","Built in panorama in camera app #Android 4.0 #ICS #Google" +"google","neutral","126489665116192768","Wed Oct 19 02:47:40 +0000 2011","Google, Samsung unveil Galaxy Nexus phone running Android 4.0 http://t.co/C4elM79o"" via:@appleinsider #tech #google #Dubai #beirut" +"google","neutral","126489609889783808","Wed Oct 19 02:47:27 +0000 2011","Panorama + +#icecreamsandwich #googland #google #android #ics" +"google","neutral","126489523776536576","Wed Oct 19 02:47:07 +0000 2011","No face tagging in the new #Android gallery? #Google" +"google","neutral","126489506672160768","Wed Oct 19 02:47:03 +0000 2011","did he just say hipster filters? #google #android" +"google","neutral","126489489328705536","Wed Oct 19 02:46:58 +0000 2011","how come when #google talks about their #android camera app it's boring, but when #apple does it's ZOMG!!!1" +"google","neutral","126489300828307456","Wed Oct 19 02:46:14 +0000 2011","Ballmer: 70% Of The Time, #Google & #Bing Are The Same, So Try Bing! 
by @dannysullivan http://t.co/9eudwSAD" +"google","neutral","126489263490596864","Wed Oct 19 02:46:05 +0000 2011","#Google WebGL Bookcase: Google WebGL Bookcase. Google developed a WebGL app that shows a 3D model of a bookcase ... http://t.co/y8HC6kzB" +"google","neutral","126489146029129729","Wed Oct 19 02:45:37 +0000 2011","Tap to focus and face recognition #ics #galaxynexus #google" +"google","neutral","126489064319881216","Wed Oct 19 02:45:17 +0000 2011","[#Infographic] #Google+ cost half a billion to build, driving users to #1 most followed user: Mark Zuckerberg. http://t.co/CzMfvnLQ #irony" +"google","neutral","126489048717074432","Wed Oct 19 02:45:13 +0000 2011","http://t.co/18xg3ivo! #Google’s Ice Cream Sandwich Official http://t.co/7h5YdQCN #android #icecreamsandwich" +"google","neutral","126488983164289026","Wed Oct 19 02:44:58 +0000 2011","Watching ice cream sandwich. So far it only melted when attempting facial recognition during unlock :) YouTube.com/android #Samsung #google" +"google","neutral","126488935026266112","Wed Oct 19 02:44:46 +0000 2011","#SamsungGalaxyNexus #google #android #IceCreamSandwich. New browser http://t.co/Zx6pIo6f" +"google","neutral","126488912037289984","Wed Oct 19 02:44:41 +0000 2011","now watching live stream of #Google new mobile #Nexus" +"google","neutral","126488727315943425","Wed Oct 19 02:43:57 +0000 2011","Damn group meeting disturbin me from watchin the #google event" +"google","neutral","126488649905864705","Wed Oct 19 02:43:38 +0000 2011","New tools giving the user the ability to restrict mobile data usage. #Google #Android #ICS" +"google","neutral","126488582218203136","Wed Oct 19 02:43:22 +0000 2011","Article discussing how the four great tech companies will compete in the marketplace: http://t.co/YFWF9iye #Apple #Google #Facebook #Amazon" +"google","neutral","126488561888399360","Wed Oct 19 02:43:17 +0000 2011","#Google releases an Infinite Digital Bookcase RT @VentureBeat http://t.co/YWtVgwJY by @MeghanKel" +"google","neutral","126488352135450625","Wed Oct 19 02:42:27 +0000 2011","#SamsungGalaxyNexus #google #android #Face Unlock.Ice Cream Sandwich literally knows your face. http://t.co/vUyxwrxW" +"google","neutral","126487924043821057","Wed Oct 19 02:40:45 +0000 2011","#Android Ice Cream Sandwich features face-recognition to unlock your phone, no more entering a pin or password #fb #google" +"google","neutral","126487912433975297","Wed Oct 19 02:40:43 +0000 2011","Email with 2 line preview, offline search of any amount of emails you want. #Android4.0 #Google" +"google","neutral","126487846038147073","Wed Oct 19 02:40:27 +0000 2011","Can search the last 30 days of Gmail, whether connected or not. #Google #Android #ICS" +"google","neutral","126487807156944899","Wed Oct 19 02:40:17 +0000 2011","Ha! The demo fail is still resonating.. Kinda funny, but on a global scale. Cool feature, though! #android #google #galaxynexus" +"google","neutral","126487788433584129","Wed Oct 19 02:40:13 +0000 2011","Are these guests on Samsung and Google event mostly Chinese? Wow! They're boring. #Google #Samsung" +"google","neutral","126487744787660800","Wed Oct 19 02:40:03 +0000 2011","#ICS show-off froze my browser. Well, time for sleep I guess. 
#Android #Google #Samsung" +"google","neutral","126487541569433600","Wed Oct 19 02:39:14 +0000 2011","RT @VentureBeat: #Google releases an Infinite Digital Bookcase http://t.co/BfFAEApW by @MeghanKel" +"google","neutral","126487465203736576","Wed Oct 19 02:38:56 +0000 2011","RT @adamdince: #Google might reverse its new data encryption policy if big brands threaten to cut their PPC spend. @Google biting the ha ..." +"google","neutral","126487372039847937","Wed Oct 19 02:38:34 +0000 2011","#Google thinks your digital books belong on a digital bookcase - a #WebGL #Chrome experiment http://t.co/2LqpqF6F" +"google","neutral","126487167823388673","Wed Oct 19 02:37:45 +0000 2011","Demo crime for #google dude in white suit. Face recognition for unlocking doesn't work if you wear make up apparently :-)" +"google","neutral","126487043462266880","Wed Oct 19 02:37:15 +0000 2011","But does request desktop site work with hulu?!?!? #ICS #Google" +"google","neutral","126486964408033280","Wed Oct 19 02:36:56 +0000 2011","#SamsungGalaxyNexus #google #android #IceCreamSandwich http://t.co/z2uzAnHD" +"google","neutral","126486941364527104","Wed Oct 19 02:36:51 +0000 2011","Addicted to this new game #tripletown on #Google+," +"google","neutral","126486926827065344","Wed Oct 19 02:36:48 +0000 2011","RT @VentureBeat: #Google releases an Infinite Digital Bookcase http://t.co/DNwy6mqx by @MeghanKel" +"google","neutral","126486798640754688","Wed Oct 19 02:36:17 +0000 2011","#SamsungGalaxyNexus #google #android #IceCreamSandwich http://t.co/6YidLr0N" +"google","neutral","126486616343724032","Wed Oct 19 02:35:34 +0000 2011","The lock screen now has facial recognition capability! #Google #Android #ICS" +"google","neutral","126486384902017024","Wed Oct 19 02:34:38 +0000 2011","here comes facial recognition! Face unlock +#icecreamsandwich #googland #google #android #ics" +"google","neutral","126486348713570304","Wed Oct 19 02:34:30 +0000 2011","#Google and #Samsung Reveals Galaxy Nexus Phone http://t.co/REnJ0C0Z #mobile #software #android #mobiledevice" +"google","neutral","126486111689256960","Wed Oct 19 02:33:33 +0000 2011","#Google Google’s Infinite Bookcase: An Abstract Browser For Limitless Libraries http://t.co/MyLzFMRq" +"google","neutral","126486051530354689","Wed Oct 19 02:33:19 +0000 2011","#Measure your #Marketing.Get the FREE Analytics Campaign Tagger sheet for #Google #Analytics now! http://t.co/8WC3Y5kK" +"google","neutral","126485712836112384","Wed Oct 19 02:31:58 +0000 2011","#Samsung and #Google introduce #GALAXY #Nexus http://t.co/2rvF0tZp via @androidcentral #Android" +"google","neutral","126485684113522689","Wed Oct 19 02:31:51 +0000 2011","""Kanye West has added you to his circles."" yeah, right. #galaxynexus #google #ics" +"google","neutral","126485085942845440","Wed Oct 19 02:29:29 +0000 2011","Launching November in Japan #SamsungGalaxyNexus #google #android" +"google","neutral","126484200269426688","Wed Oct 19 02:25:57 +0000 2011","#SamsungGalaxyNexus #google #android http://t.co/GIhTHeIJ" +"google","neutral","126484162302586880","Wed Oct 19 02:25:49 +0000 2011","#google +1 Stickers came with this month's Adsense cheque . 
http://t.co/AOFYoJmE" +"google","neutral","126484021369778177","Wed Oct 19 02:25:15 +0000 2011","#Android #Google Samsung Galaxy Nexus Gallery http://t.co/wUnmSQL6 #DhilipSiva" +"google","neutral","126484018211454976","Wed Oct 19 02:25:14 +0000 2011","#Android #Google Samsung and Google introduce GALAXY Nexus http://t.co/KkuepMVW #DhilipSiva" +"google","irrelevant","126535062148759552","Wed Oct 19 05:48:04 +0000 2011","Ho come l'impressione che la recente evoluzione di #facebook abbia creato un ulteriore solco con #google+ ...la vittoria finale??" +"google","irrelevant","126534927637417984","Wed Oct 19 05:47:32 +0000 2011","#YahooAnswers #Google y #Wikipedia son parte del staff de Dios, lo sé." +"google","irrelevant","126534908670783489","Wed Oct 19 05:47:27 +0000 2011","#Google stellt #Android 4.0 Ice Cream Sandwich offiziell vor, #SDK jetzt verfügbar http://t.co/R6aEQ4fH" +"google","irrelevant","126534871299538944","Wed Oct 19 05:47:18 +0000 2011","#google обьÑвил об обновлении #Nexus_S до #ICS . Ждем порта 4.0 на #SGS ." +"google","irrelevant","126534769105305600","Wed Oct 19 05:46:54 +0000 2011","#Google y #Samsung presentan #NexusGalaxy el #Android más #potente del #mercado y el primero en llevar el nuevo #I http://t.co/FYw5K9g7" +"google","irrelevant","126534678156029953","Wed Oct 19 05:46:32 +0000 2011","RT @OliPalko: RT @ishpconsult Der langsame Tod von SEO #Google -Suche für eingeloggte User verschlüsselt - http://t.co/SSdxjoWd" +"google","irrelevant","126534649995464704","Wed Oct 19 05:46:26 +0000 2011","Microsoft Word - Ch 3 Solutions IE done pdf ebook: http://t.co/h4X0gHBl #google" +"google","irrelevant","126534648800096256","Wed Oct 19 05:46:25 +0000 2011","Echelon Quality Management System pdf ebook: http://t.co/OudMwlne #google" +"google","irrelevant","126534648611340288","Wed Oct 19 05:46:25 +0000 2011","Microsoft Word - Ch 3 Solutions IE done pdf ebook: http://t.co/nZTGU900 #google" +"google","irrelevant","126534647264972800","Wed Oct 19 05:46:25 +0000 2011","Echelon Quality Management System pdf ebook: http://t.co/sPVYoB8r #google" +"google","irrelevant","126534525089091584","Wed Oct 19 05:45:56 +0000 2011","#google Actualité Google Panda: Arrivé pendant l'été 2011 en France, Google Panda a été conçu afin ... http://t.co/N5hSOroi #googlepanda" +"google","irrelevant","126534223950651392","Wed Oct 19 05:44:44 +0000 2011","ahora si, a ver lo que presento #google sobre #android ya lo vieron? alguna sorpresa?" +"google","irrelevant","126534054739836929","Wed Oct 19 05:44:04 +0000 2011","GMusic une Google Music con Apple iOS [Apps] #Apple #Google #iOS http://t.co/k2T4ztCY" +"google","irrelevant","126534037929074688","Wed Oct 19 05:44:00 +0000 2011","Das neue #Google #Galaxy #Nexus haut mich nicht so sehr vom Hocker. Ist meiner Meinung nach nur ein wenig besser als das #SGS2." +"google","irrelevant","126533966156148736","Wed Oct 19 05:43:43 +0000 2011","Samsung и Google предÑтавили новый Ñмартфон Galaxy Nexus http://t.co/x1i0MXhA #samsung #google" +"google","irrelevant","126533775411781632","Wed Oct 19 05:42:57 +0000 2011","Jetzt gerade angeschaut und +1t #adwords #guide #google #adwords #guide http://t.co/S87lNq2u" +"google","irrelevant","126533688321253376","Wed Oct 19 05:42:36 +0000 2011","AsiaClassifiedToday: Asian stocks rise on Europe debt plan hopes - Belleville News Democrat: ... 
http://t.co/RQhgAIBF #asia #google #ads" +"google","irrelevant","126533686282825728","Wed Oct 19 05:42:36 +0000 2011","AsiaClassifiedToday: TH Peng Joins Grey as Chairman and CEO of Grey Greater China - MarketWat... http://t.co/Hd4pi9lM #asia #google #ads" +"google","irrelevant","126533684252774401","Wed Oct 19 05:42:35 +0000 2011","AsiaClassifiedToday: YHOO Q3 Conf Call: $20.4B Asian Assets, $19.5B Market Cap - Barron's (bl... http://t.co/nc7hVH84 #asia #google #ads" +"google","irrelevant","126533682273071105","Wed Oct 19 05:42:35 +0000 2011","AsiaClassifiedToday: AdAsia 2011: A heavy dose for the creative minds - http://t.co/ZJ9k6FW6: A... http://t.co/BVaY8XPB #asia #google #ads" +"google","irrelevant","126533680301752320","Wed Oct 19 05:42:34 +0000 2011","AsiaClassifiedToday: Point Park summit reflects Asian culture - The Globe: Point Park summit ... http://t.co/fBdw211P #asia #google #ads" +"google","irrelevant","126533636576129024","Wed Oct 19 05:42:24 +0000 2011","@DaphneDijkerman Wat let je om die vraag te stellen bij Blik op de weg, er is vast wel een site waar dat kan #google" +"google","irrelevant","126533618997792768","Wed Oct 19 05:42:20 +0000 2011","RT @oli2be: For Twitter, free speech is what matters — not real names. http://t.co/DAnykTjl /@factsandtools #Twitter #Google" +"google","irrelevant","126533452618137600","Wed Oct 19 05:41:40 +0000 2011","#Google lanserar #Android 4.0 / Ice Cream Sandwich med tillhörande SDK: http://t.co/U62wsEoU" +"google","irrelevant","126533364789424129","Wed Oct 19 05:41:19 +0000 2011","#Google no compartirá las consultas de búsquedas con otros sitios web http://t.co/fNPiO6wz" +"google","irrelevant","126533325702701056","Wed Oct 19 05:41:10 +0000 2011","“جوجل†تطلق “Android Ice Cream Sandwichâ€ ÙˆÙ…ÙØ§Ø¬Ø£Ø© لمستخدمي “Android 2.3″ @7ki3arabi http://t.co/Ytx4jHZm #google #android" +"google","irrelevant","126533318928896001","Wed Oct 19 05:41:08 +0000 2011","Google помнит о тебе, нищеброд. http://t.co/LtWQVySF #Ð¡Ð¾Ð±Ñ‹Ñ‚Ð¸Ñ #4.0 #Android #Google" +"google","irrelevant","126533134819921920","Wed Oct 19 05:40:24 +0000 2011","“@rianru: #Google и #Samsung предÑтавили новую верÑию операционной ÑиÑтемы - Android 4.0 Ice Cream Sandwich http://t.co/HvtFSCXz†ÑÑндвич!?" +"google","irrelevant","126533119573630976","Wed Oct 19 05:40:21 +0000 2011","ну ÑобÑтвенно, товарищи, фотки интерфейÑа #Google #GalaxyNexus кто еще не видел: http://t.co/NS2jNRL7" +"google","irrelevant","126533008760111104","Wed Oct 19 05:39:54 +0000 2011","#Google lanserar #Galaxy Nexus - tillgänglig i Europa frÃ¥n och med november: http://t.co/fVyNoW2p" +"google","irrelevant","126532931236794369","Wed Oct 19 05:39:36 +0000 2011","بشكل عام أرى أنه مؤتمر مخيب للآمال مثل مؤتمر آبل، لا جديد من ناحية الهاردوير، ÙˆØ§Ù„Ø³ÙˆÙØªÙˆÙŠØ± مشابه لأندرويد 3.0 #Samsung #Google #Android" +"google","irrelevant","126532704287199232","Wed Oct 19 05:38:42 +0000 2011","RT @ishpconsult Der langsame Tod von SEO #Google -Suche für eingeloggte User verschlüsselt - http://t.co/SSdxjoWd" +"google","irrelevant","126532561315958784","Wed Oct 19 05:38:08 +0000 2011","#Google Phil Hellmuth and Sam Farha – poker fight: http://t.co/vwaiQDlT – Cost-free Poker University with... http://t.co/F9A0NJnX #wikileaks" +"google","irrelevant","126532543158820864","Wed Oct 19 05:38:03 +0000 2011","RT @LoyaltyFamU: ] #money #jobs Network Accounts Submit Or Propose ur bids now http://t.co/YF8fmEaH via: #google @LoyaltyFamU" +"google","irrelevant","126532472258301952","Wed Oct 19 05:37:46 +0000 2011","Salut les twittos! 
Un petit résumé de cet conf' ? :) #ICS #GOOGLE" +"google","irrelevant","126532467954950144","Wed Oct 19 05:37:45 +0000 2011","RT @ijnetEs: Cinco herramientas de #Google que todo periodista debe conocer. http://t.co/ELypfnyG #periodismodigital" +"google","irrelevant","126532295665532928","Wed Oct 19 05:37:04 +0000 2011","#Google Google Event: Ice Cream Sandwich (Caschys Blog): ... Widgets lassen sich jetzt auch unter [...] XING-Pro... http://t.co/Ozqy3UG4" +"google","irrelevant","126532294122024960","Wed Oct 19 05:37:04 +0000 2011","#Google Google und Samsung stellen das Galaxy Nexus und Android Ice Cream Sandwich offiziell vor (mobiFlip.de): ... http://t.co/0cckAp5E" +"google","irrelevant","126532119278264320","Wed Oct 19 05:36:22 +0000 2011","@Vusala_Abidin xeyirli ugurlu olsun.amma deyen ciddi yenilik yoxdu. #google #nexus #android" +"google","irrelevant","126532046360289280","Wed Oct 19 05:36:05 +0000 2011","Le #GalaxyNexus fait bien envie... Mais bizarrement ça me gêne qu'il y soit marqué #Google au dos." +"google","irrelevant","126531989846241280","Wed Oct 19 05:35:51 +0000 2011","نتائج مؤتمر قوقل وسامسونج: جهاز سامسونج جالكسي نكسوس 1.2دول كور وشاشة 4.65 إتش دي، نظام تشغيل قوقل أندرويد 4.0ØŒ #Samsung #Google #Android" +"google","irrelevant","126531827073679360","Wed Oct 19 05:35:13 +0000 2011","Crea #Google #librero virtual infinito : El sitio WebGL Bookcase despliega el acervo de #libros .. http://t.co/Y6E1tuvi" +"google","irrelevant","126531797419950080","Wed Oct 19 05:35:06 +0000 2011","каталог Ñтатей http://t.co/aNPOt4Cn CÐµÑ€Ð²Ð¸Ñ Ñ€ÐµÐ³Ð¸Ñтрации в каталогах Ñтатей #seo #linkbuilding #backlinks #addurl #bookmarks #google" +"google","irrelevant","126531615181651968","Wed Oct 19 05:34:22 +0000 2011","Presentado Oficialmente Android 4.0 ICS ~ ANDROIDVZLA http://t.co/adQYCOdD #android #androidvzla #google #ICS" +"google","irrelevant","126531519824142337","Wed Oct 19 05:33:59 +0000 2011","RT @ishpconsult: Der langsame Tod von SEO?? #Google-Suche für eingeloggte User verschlüsselt - http://t.co/OlORx2Pc #marketing #search..." +"google","irrelevant","126531381152059392","Wed Oct 19 05:33:26 +0000 2011","Just googled what it means if your eyelid twitches. Answer: your tired. I think its time for bed...thanks #Google" +"google","irrelevant","126531281650589696","Wed Oct 19 05:33:03 +0000 2011","#google Jeux d’été 2012 robes du soir 57e empire: Jeux d’été 2012 robes Empire robe de soirée, robe d... http://t.co/zMD42jJJ #wikileaks" +"google","irrelevant","126531280253882368","Wed Oct 19 05:33:02 +0000 2011","#google JL – Fashion GmbH, Zollikon, femme essentielle: SHOPPING: SUISSE: par astramedia: JL – Mode G... http://t.co/rZKIDJv6 #wikileaks" +"google","irrelevant","126531278685216768","Wed Oct 19 05:33:02 +0000 2011","#google Tutoriel: Comment faire une fermeture à glissière de fleurs: Ce que vous avez besoin: l’aigui... http://t.co/nMmedAlW #wikileaks" +"google","irrelevant","126531098279804928","Wed Oct 19 05:32:19 +0000 2011","Non mais WTF !? RT @Twikito Google passe en HTTPS, un drame pour les statistiques http://t.co/gQIhDcEs #analytics #Google via @Zorgloob" +"google","irrelevant","126530329820397568","Wed Oct 19 05:29:16 +0000 2011","ИнтереÑно, в #google видели как gmail for mobile выглÑдит на Ñкране обычного Ñмартфона? Это же пиздец ходÑчий - кнопки в четверть Ñкрана!" 
+"google","irrelevant","126530251684720641","Wed Oct 19 05:28:57 +0000 2011","Samsung и Google предÑтавили новый Ñмартфон Galaxy Nexus http://t.co/ou1aoGK6 #google #nexus #android" +"google","irrelevant","126530242612432898","Wed Oct 19 05:28:55 +0000 2011","Irgendwie dann doch enttäuschend, das neue #Google #Galaxy #Nexus" +"google","irrelevant","126530163029704705","Wed Oct 19 05:28:36 +0000 2011","Mit einfachen Mittel gegen #Facebook & #Google+, so die Strategie von Costolo, CEO von #Twitter http://t.co/C1f5dAnO #SocialMedia" +"google","irrelevant","126530054023946240","Wed Oct 19 05:28:10 +0000 2011","Už vím, jaký bude můj příští mobil! http://t.co/UzI5aEXa #Google #Android #GalaxyNexus" +"google","irrelevant","126530000303292416","Wed Oct 19 05:27:57 +0000 2011","Para el que le interese saber las características del nuevo cel que me compraré http://t.co/4JjjCqdJ El #galaxynexus de #google" +"google","irrelevant","126529908850700289","Wed Oct 19 05:27:35 +0000 2011","#Google Ñтанет еще безопаÑнее http://t.co/rXRcc1MB" +"google","irrelevant","126529643829399553","Wed Oct 19 05:26:32 +0000 2011","Novedades #Google: resultados en inglés traducidos y SSL por defecto para los usuarios registrados http://t.co/LCbQk2fw golpe a #Analytics" +"google","irrelevant","126529610614718464","Wed Oct 19 05:26:24 +0000 2011","""Hay que darse prisa en conocer el fenómeno web... Acabará en tres ... - El… http://t.co/ynksEeUN #google" +"google","irrelevant","126529609771659264","Wed Oct 19 05:26:24 +0000 2011","VIDEO: Een online check-out in een webwinkel in ‘In Real Life’ #google #webshop http://t.co/kGw54MaC" +"google","irrelevant","126529491819433985","Wed Oct 19 05:25:56 +0000 2011","#Google kondigt #Android Ice Cream Sandwich aan http://t.co/3VT22WNY #ics" +"google","irrelevant","126529490582118400","Wed Oct 19 05:25:56 +0000 2011","Zapatero (#Google): ""Hay que darse prisa en conocer el fenómeno #web, acabará en 3 años"" http://t.co/L49qrDeR" +"google","irrelevant","126529267503861760","Wed Oct 19 05:25:02 +0000 2011","Der langsame Tod von SEO?? #Google -Suche für eingeloggte User verschlüsselt - http://t.co/czsosVGp #socialmedia #marketing #search #seo" +"google","irrelevant","126529265142472704","Wed Oct 19 05:25:02 +0000 2011","Der langsame Tod von SEO?? #Google -Suche für eingeloggte User verschlüsselt - http://t.co/1Gop6W4r #socialmedia #marketing #search #seo" +"google","irrelevant","126529171773067265","Wed Oct 19 05:24:40 +0000 2011","Galaxy Nexus: Google-Handy mit HD-Display und Android 4.0 - Heise Newsticker http://t.co/a4WBqmc9 #Google" +"google","irrelevant","126529003879272448","Wed Oct 19 05:23:59 +0000 2011","AsiaClassifiedToday: Parkson to list retail arm in S$200 mln Singapore IPO-source - Reuters: Parks... http://t.co/AadDaQA7 #asia #google" +"google","irrelevant","126529001866002432","Wed Oct 19 05:23:59 +0000 2011","AsiaClassifiedToday: Asia Family Offices to Triple, Lead Growth, Citigroup Says - BusinessWeek: As... http://t.co/71QApFD8 #asia #google" +"google","irrelevant","126528999554949120","Wed Oct 19 05:23:58 +0000 2011","AsiaClassifiedToday: SE Asia and Singapore lead Asia Pacific region in business travel - Forimmedi... http://t.co/Jdvas9rH #asia #google" +"google","irrelevant","126528997763989504","Wed Oct 19 05:23:58 +0000 2011","AsiaClassifiedToday: Parkway Novena Hospital to offer premier healthcare service in Asia - The Bor... 
http://t.co/50KmorZm #asia #google" +"google","irrelevant","126528993187999744","Wed Oct 19 05:23:57 +0000 2011","AsiaClassifiedToday: ITB Asia opens in Singapore - eTurboNews: eTurboNewsITB Asia opens in Singapo... http://t.co/vlL4NZc6 #asia #google" +"google","irrelevant","126528826279866369","Wed Oct 19 05:23:17 +0000 2011","@NattaliaAlba mejor consulta ana y mia en #google hahahha(:" +"google","irrelevant","126528658834853888","Wed Oct 19 05:22:37 +0000 2011","#Google начал ""маÑштабную зачиÑтку"" ÑервиÑов http://t.co/1vRAuo8S" +"google","irrelevant","126528444476555264","Wed Oct 19 05:21:46 +0000 2011","ПоиÑк от #Google Ñтанет еще безопаÑнее http://t.co/Hmo2qDKO" +"google","irrelevant","126528018469494784","Wed Oct 19 05:20:05 +0000 2011","Coole 2-minuten VIDEO: Een online check-out in een webwinkel in ‘In Real Life’ #google #webshop http://t.co/E4SnmZtL" +"google","irrelevant","126527955475251202","Wed Oct 19 05:19:50 +0000 2011","@studiodev aha :) et qui est ce qui va remonter dans les résultats #google ? :p" +"google","irrelevant","126527760746295296","Wed Oct 19 05:19:03 +0000 2011","Características Samsung Galaxy Prime con Android ICS 4.0 ~ ANDROIDVZLA http://t.co/LmbrSC4Y #android #androidvzla #google #ICS" +"google","irrelevant","126527053133656066","Wed Oct 19 05:16:14 +0000 2011","#google Google anuncia Galaxy Nexus com Android 4.0 http://t.co/WplTBVS3 http://t.co/WsfevVd4" +"google","irrelevant","126527051292356609","Wed Oct 19 05:16:14 +0000 2011","http://t.co/RqjtztR3 // muy buen experimento de #google, la cago :D, #webgl la lleva" +"google","irrelevant","126526946946457601","Wed Oct 19 05:15:49 +0000 2011","#Google revela o Anddroi 4.0 Ice Cream Sandwich: O Google revelou hoje durante um evento em… http://t.co/pfzUy1i2" +"google","irrelevant","126526850477469696","Wed Oct 19 05:15:26 +0000 2011","#Samsung y #Google contraatacan a #Apple con el nuevo Nexus Prime http://t.co/xHvncznB vía @cincodiascom #android #tecnología" +"google","irrelevant","126526815010426880","Wed Oct 19 05:15:18 +0000 2011","Impatient de voir arriver Ice Cream Sandwich sur mon #SGS2 :) #ics #google #samsung" +"google","irrelevant","126526781019787264","Wed Oct 19 05:15:10 +0000 2011","#Digital : Google en Twitter komen niet tot een overeenkomst voor realtime-zoekdiensten - ... #de #google #en http://t.co/cFmZOejW" +"google","irrelevant","126526765995802624","Wed Oct 19 05:15:06 +0000 2011","#Tyfusverspreiding zichtbaar op #Google #Earth http://t.co/49jIucI9 #nuandroid" +"google","irrelevant","126526602686369792","Wed Oct 19 05:14:27 +0000 2011","Видеообзор Galaxy Nexus. http://t.co/VnnKrFs2 #Ð¡Ð¾Ð±Ñ‹Ñ‚Ð¸Ñ #Android #GalaxyNexus #Google #Samsung" +"google","irrelevant","126526113131413504","Wed Oct 19 05:12:30 +0000 2011","Android fanatlarinin gozu aydin olsun :) new Android 4.0 Ice Cream Sandwich cixdi :) #android #samsung #google" +"google","irrelevant","126526068835368961","Wed Oct 19 05:12:20 +0000 2011","Samsung Galaxy Nexus officieel aangekondigd door #google en #samsung!" 
+"google","irrelevant","126526020923834368","Wed Oct 19 05:12:08 +0000 2011","HANBELL SCREW COMPRESSORS MAINTENANCE MANUAL pdf ebook: http://t.co/tKHMMY5Y #google" +"google","irrelevant","126526019602628608","Wed Oct 19 05:12:08 +0000 2011","Usagi Yojimbo RPG Core Rules pdf ebook: http://t.co/iWaJ1aGa #google" +"google","irrelevant","126526019208351744","Wed Oct 19 05:12:08 +0000 2011","HANBELL SCREW COMPRESSORS MAINTENANCE MANUAL pdf ebook: http://t.co/m4T1kxiz #google" +"google","irrelevant","126526017660653568","Wed Oct 19 05:12:08 +0000 2011","Usagi Yojimbo RPG Core Rules pdf ebook: http://t.co/UVYJxGsV #google" +"google","irrelevant","126525994348711937","Wed Oct 19 05:12:02 +0000 2011","#samsung и #google предÑтавили новый Ñмартфон galaxy nexus #autofollow" +"google","irrelevant","126525853738860544","Wed Oct 19 05:11:28 +0000 2011","#google - @pc_insecurities- another rise in my website results part 6 http://t.co/OESQu4Q5 #salisbury" +"google","irrelevant","126525817713991680","Wed Oct 19 05:11:20 +0000 2011","sabia yo q #google no me hiba a fallar... resumen encontrado... solo falta transcribirlo" +"google","irrelevant","126525815084158976","Wed Oct 19 05:11:19 +0000 2011","Kurz und knapp das wichtigste: #Google stellt neues Smartphone und Betriebssystem vor http://t.co/EeQJFL0P" +"google","irrelevant","126525130426957824","Wed Oct 19 05:08:36 +0000 2011","ICS je pro mÄ› naprosto zanedbatelný OS pro mobil. Promiň ;( #fail #Google #samsung" +"google","irrelevant","126524896665796608","Wed Oct 19 05:07:40 +0000 2011","التقرير الكامل لجهاز Galaxy Nexus الخاص بـ Google +http://t.co/MvlI71vK #SE4m #Nexus #Google #android http://t.co/8AfOMTso" +"google","irrelevant","126524657875697664","Wed Oct 19 05:06:43 +0000 2011","RT @twandroid: Pogo Pin sur le côté pour le connecter sur un dock #google #nexusprime" +"google","irrelevant","126524361065758720","Wed Oct 19 05:05:33 +0000 2011","#android Google confirma que todos los dispositivos con gingerbread tendrán Ice Cream Sandwich: Y desde ... http://t.co/bUcSI4xy #google" +"google","irrelevant","126524217889001472","Wed Oct 19 05:04:58 +0000 2011","#Google Trademark Poker Full Size Texas Hold’em 83 x 44-Inch Poker Table (Green Felt): Trademark Poke... http://t.co/jICU3KON #wikileaks" +"google","irrelevant","126524216412618752","Wed Oct 19 05:04:58 +0000 2011","#Google How to Play a Teenpatti Tournament: Junglee is fired up to provide you the 1st ever multi-pla... http://t.co/96GaNUx2 #wikileaks" +"google","irrelevant","126524213401096192","Wed Oct 19 05:04:57 +0000 2011","#Google New Effortless-to-Use Texas Holdem Strategy Kit Has the Tools You Require To Turn into a 100%... http://t.co/KkswpBtW #wikileaks" +"google","irrelevant","126523942314840064","Wed Oct 19 05:03:53 +0000 2011","#Google Google bestätigt Android 4.0 Ice Cream Sandwich für das Nexus S (mobiFlip.de): Nachdem Google und Samsun... http://t.co/CpmTEUjG" +"google","irrelevant","126523941035577344","Wed Oct 19 05:03:52 +0000 2011","#Google Endlich: Alle offiziellen Details zum Google Galaxy Nexus und zu Android Ice Cream Sandwich (mit Videos)... http://t.co/V4IbwFpP" +"google","irrelevant","126523916817674240","Wed Oct 19 05:03:47 +0000 2011","yeah! #fringe RT @TrendingTopicMN #csportsmn #android #ios #pulse #google #galaxy #fringe #ajildavna #nexus #ics" +"google","irrelevant","126523574470189057","Wed Oct 19 05:02:25 +0000 2011","#Juick #Google #Buzz #+ #! Срочно нужна Ð¸Ð½Ñ‚ÐµÐ³Ñ€Ð°Ñ†Ð¸Ñ Ð–ÑƒÐ¹ÐºÐ° Ñ Ð“ÑƒÐ³Ð»Ð¾Ð¿Ð»ÑŽÑом, ибо Буз закрывают! + +Рецепты еÑть? 
http://t.co/QaGboWhc" +"google","irrelevant","126523560096313344","Wed Oct 19 05:02:22 +0000 2011","“@rianru: #Google и #Samsung предÑтавили новую верÑию операционной ÑиÑтемы - Android 4.0 Ice Cream Sandwich http://t.co/DhLHoexZ†+#Vfacebook" +"google","irrelevant","126523556958961664","Wed Oct 19 05:02:21 +0000 2011","#Google образова онлайн потребителите http://t.co/9XMBZbUk #защитаналичнитеданни" +"google","irrelevant","126523548914290688","Wed Oct 19 05:02:19 +0000 2011","US-Steuerbehörde leitet Untersuchung gegen #Google ein http://t.co/wfjxlKhe via @zdnet_de" +"google","irrelevant","126523356773232641","Wed Oct 19 05:01:33 +0000 2011","Finalmente llegó el Galaxy Nexus: #Google y #Samsung acaban de hacer el tan esperado… http://t.co/2Eerkm1E #tech" +"google","irrelevant","126523270420905984","Wed Oct 19 05:01:13 +0000 2011","Ik lees steeds meer enthousiasme voor #google+ en aantal gebruikers groeit spectaculair. Ik merk daar niets van!" +"google","irrelevant","126523229400600576","Wed Oct 19 05:01:03 +0000 2011","サムスン電å­ã®ã‚¹ãƒžãƒ¼ãƒˆãƒ•ォン新機種「ギャラクシー・ãƒã‚¯ã‚µã‚¹ã€ã€ã‚°ãƒ¼ã‚°ãƒ«ã®åŸºæœ¬ã‚½ãƒ•ãƒˆï¼ˆï¼¯ï¼³ï¼‰ã€Œã‚¢ãƒ³ãƒ‰ãƒ­ã‚¤ãƒ‰ã€æœ€æ–°ç‰ˆã‚’æ­è¼‰ã€‚ã€Œã‚¯ãƒ©ã‚¦ãƒ‰ã€æ´»ç”¨ã€éŸ³å£°èªè­˜ã‚„ã‚«ãƒ¡ãƒ©ã®æ©Ÿèƒ½ã‚‚å‘上ã•ã›ãŸæˆ¦ç•¥ãƒ¢ãƒ‡ãƒ« #keizai #sumaho #google  http://t.co/h822eCym" +"google","irrelevant","126523147091578880","Wed Oct 19 05:00:43 +0000 2011","RT @twandroid: Galaxy Nexus annoncé : #nexusprime #google http://t.co/IrMPjoiS" +"google","irrelevant","126523008524365825","Wed Oct 19 05:00:10 +0000 2011","#Google mejora las presentaciones de #GoogleDocs http://t.co/SjiMOtov" +"google","irrelevant","126522989633212417","Wed Oct 19 05:00:06 +0000 2011","#csportsmn #android #ios #pulse #google #galaxy #fringe #ajildavna #nexus #ics" +"google","irrelevant","126522783650955264","Wed Oct 19 04:59:16 +0000 2011","#Google y #samsung tienen todo para romperla con #AndroidIceCreamSandwich #smartphoneNFC #GalaxyNexus" +"google","irrelevant","126522732195233792","Wed Oct 19 04:59:04 +0000 2011","AsiaClassifiedToday Asia Stocks, Ringgit Gain on European Outlook - Bloomberg: Asia Stocks, R... http://t.co/biHKwSTW #asia #google #biz" +"google","irrelevant","126522730001600512","Wed Oct 19 04:59:04 +0000 2011","AsiaClassifiedToday Asia Currencies Strengthen as Growth Outlook Outweighs Europe-Debt Concer... http://t.co/qm8zpFgE #asia #google #biz" +"google","irrelevant","126522728101584897","Wed Oct 19 04:59:03 +0000 2011","AsiaClassifiedToday Asia Family Offices to Triple, Lead Growth, Citigroup Says - BusinessWeek... http://t.co/93f16E44 #asia #google #biz" +"google","irrelevant","126522726239313920","Wed Oct 19 04:59:03 +0000 2011","AsiaClassifiedToday Indian shares to start higher; Hero MotoCorp eyed - Reuters: Indian share... http://t.co/Zo0Fm1Ya #asia #google #biz" +"google","irrelevant","126522723441721345","Wed Oct 19 04:59:02 +0000 2011","AsiaClassifiedToday Thailand - Market factors to watch - Oct 19 - Reuters: Thailand - Market ... http://t.co/nNsDZgWI #asia #google #biz" +"google","irrelevant","126522646149087233","Wed Oct 19 04:58:44 +0000 2011","RT @laurentalbaret: [2.0] La recherche cryptée (sur #Google) étendue aux utilisateurs connectés à un service Google http://t.co/K8oalek2" +"google","irrelevant","126522540834304000","Wed Oct 19 04:58:19 +0000 2011","7 Lessons Learned Running an SEO Agency: Posted by neilpatelThis post was originally in YOUmoz, and... 
http://t.co/AU0k5sWY #seo #google" +"google","irrelevant","126522492700471297","Wed Oct 19 04:58:07 +0000 2011","#google #yahoo #bing 7 Lessons Learned Running an SEO Agency http://t.co/d7b2G1rW #seo #serps" +"google","irrelevant","126522273963319298","Wed Oct 19 04:57:15 +0000 2011","#Samsung Galaxy #Nexus / #Google #Android ICS å‘表会é‡ç‚¹æ•´ç† http://t.co/u7ViMWl6 #galaxynexus #galaxynexus" +"google","irrelevant","126522127775047680","Wed Oct 19 04:56:40 +0000 2011","Ice Cream Sandwich: Android 4.0 gepresenteerd http://t.co/gscupQUV via @TabletGuide_NL #android #tablet #google" +"google","irrelevant","126521734076698625","Wed Oct 19 04:55:06 +0000 2011","Bon. Reste plus qu'à voir ce que ça va donner IRL. #Google #NexusPrime #Android #ICS" +"google","irrelevant","126521694583132160","Wed Oct 19 04:54:57 +0000 2011","Ðовите #Google Presentations http://t.co/lJ1PaxvA #googledocs" +"google","irrelevant","126521635464425472","Wed Oct 19 04:54:43 +0000 2011","Todavía existe #Google+ ?" +"google","irrelevant","126521523220652032","Wed Oct 19 04:54:16 +0000 2011","ДоÑтупна Ð¾Ñ„Ð¸Ñ†Ð¸Ð°Ð»ÑŒÐ½Ð°Ñ Google-Ñтраница Galaxy Nexus http://t.co/99TAk4lC #nexus #google #galaxy" +"google","irrelevant","126521505097068544","Wed Oct 19 04:54:12 +0000 2011","#GalaxyNexus | @xataka http://t.co/p2Tu2kBm <-El nuevo teléfono de #Samsung y #Google, EL perfecto rival para el #iPhone4S con su #Android 4" +"google","irrelevant","126521233603960832","Wed Oct 19 04:53:07 +0000 2011","How an email #hacker ruined my life and then tried to sell it back to me http://t.co/ixquAT2l via @guardian #privacy #fb #gmail #google" +"google","irrelevant","126520774025678848","Wed Oct 19 04:51:17 +0000 2011","Anschauen! Genial! http://t.co/YLVJAzxz #Google #Nexus #Vorstellung" +"google","irrelevant","126520080543649792","Wed Oct 19 04:48:32 +0000 2011","“@rianru: #Google и #Samsung предÑтавили новую верÑию операционной ÑиÑтемы - Android 4.0 Ice Cream Sandwich http://t.co/4XkmSFs0†Еще борщ!" +"google","irrelevant","126519943234732032","Wed Oct 19 04:47:59 +0000 2011","#Google eBook: The way we shop is changing and marketing strategies are simply not keeping pace. http://t.co/vJBBjWhE" +"google","irrelevant","126519837173358592","Wed Oct 19 04:47:34 +0000 2011","Para todos los androides, he aquí la nueva bestia de #Google! El #GalaxyNexus http://t.co/oUTER1n4" +"google","irrelevant","126519715085549568","Wed Oct 19 04:47:05 +0000 2011","Google y Samsung presentan oficialmente el Galaxy Nexus http://t.co/NKoXMfOi vía @xatakamovil #Google #Samsung #Galaxy #Android" +"google","irrelevant","126519472172445696","Wed Oct 19 04:46:07 +0000 2011","ドコモã€Android 4.0採用ã®ã€ŒGALAXY Nexusã€ã‚’11月発売 - AV Watch http://t.co/MzTgzn1u #google" +"google","irrelevant","126519390245109760","Wed Oct 19 04:45:47 +0000 2011","Lucha de Titanes: Las tres mejores alternativas a #Google Earth http://t.co/5X5pVRHr #Linux via @gabriel_j_gomez:" +"google","irrelevant","126519359714766848","Wed Oct 19 04:45:40 +0000 2011","RT @wireditalia: #Twitter 250milioni di cinguettii al giorno e un valore di 8miliardi di $. Tempi duri per #Facebook e #Google+? http:// ..." 
+"google","irrelevant","126518917635125248","Wed Oct 19 04:43:55 +0000 2011","#Google revela o Anddroi 4.0 Ice Cream Sandwich: O Google revelou hoje durante um evento em Hong Kong a nova versão…" +"google","irrelevant","126518845983830016","Wed Oct 19 04:43:38 +0000 2011","#Google и #Samsung предÑтавили новую верÑию операционной ÑиÑтемы - Android 4.0 Ice Cream Sandwich http://t.co/XhbCkQ9t" +"google","irrelevant","126518328373153792","Wed Oct 19 04:41:34 +0000 2011","RT @funkfish_2: so. ja. Ich will das galaxy nexus. sofort! #google #samsung" +"google","irrelevant","126517747894075392","Wed Oct 19 04:39:16 +0000 2011","Lucha de Titanes: Las tres mejores alternativas a #Google Earth http://t.co/Jy0Ipy9A #Linux" +"google","irrelevant","126517575336214529","Wed Oct 19 04:38:35 +0000 2011","AsiaClassifiedToday FOREX-Euro recovers after Spain but downside eyed - Reuters: Globe and Ma... http://t.co/o78ADSYW #asia #google #biz" +"google","irrelevant","126517570806358018","Wed Oct 19 04:38:34 +0000 2011","AsiaClassifiedToday Apple blames iPhone rumours for share dive - TVNZ: Globe and MailApple bl... http://t.co/hlxJVMUr #asia #google #biz" +"google","irrelevant","126517567694180352","Wed Oct 19 04:38:33 +0000 2011","AsiaClassifiedToday Temasek to raise up to $800 million with bond offer - Straits Times: Tele... http://t.co/IaTj4kb9 #asia #google #biz" +"google","irrelevant","126517492049915904","Wed Oct 19 04:38:15 +0000 2011","#Mango shows #Microsoft still has the taste for smartphone success #Apple #Google http://t.co/C4fy2ffj" +"google","irrelevant","126517413788401664","Wed Oct 19 04:37:56 +0000 2011","RT @bytegadget: Video oficial del #GalaxyNexus y especificaciones – El nuevo #smartphone insignia de #Google y… http://t.co/Rvn0oFWy" +"google","irrelevant","126517183139430400","Wed Oct 19 04:37:01 +0000 2011","Galaxy Nexus http://t.co/xDxcwSAR #android #google No sé yo semejante pantalla..." +"google","irrelevant","126516892029558784","Wed Oct 19 04:35:52 +0000 2011","El nuevo Samsung Galaxy NEXUS, tiene Desbloqueo del teléfono por reconocimiento facial!!!! | Impresionante #IceCreamSandwich #google" +"google","irrelevant","126516806360899584","Wed Oct 19 04:35:31 +0000 2011","AsiaClassifiedToday: Canada Wheat Monopoly, Software Piracy, Swaps Rule: Compliance - Bloombe... http://t.co/2PylmiMs #asia #google #ads" +"google","irrelevant","126516804108562433","Wed Oct 19 04:35:31 +0000 2011","AsiaClassifiedToday: Adopting solar leasing programmes in Singapore, by Rachel Wong - eco-bus... http://t.co/l1V1gUIG #asia #google #ads" +"google","irrelevant","126516802011402240","Wed Oct 19 04:35:30 +0000 2011","AsiaClassifiedToday: Temasek to raise up to $800 million with bond offer - Straits Times: Tel... http://t.co/bdBPJDh2 #asia #google #ads" +"google","irrelevant","126516602316406784","Wed Oct 19 04:34:43 +0000 2011","@SERGIOapple incluso si usas #Google #Chrome hay una App. Y al igual es mas practica pero es para el navegador" +"google","irrelevant","126516523408949248","Wed Oct 19 04:34:24 +0000 2011","#SEO Video:Specifying an image's license using RDFa. 
http://t.co/aO99FTLV #google #seo #images #rdfa" +"google","irrelevant","126516376566366208","Wed Oct 19 04:33:49 +0000 2011","solo #google puede ponerle a un OS #IceCreamSandwich" +"google","irrelevant","126516048961863681","Wed Oct 19 04:32:31 +0000 2011","Las mejores aplicaciones para hablar gratis por el móvil - http://t.co/LN0pChe3 http://t.co/iUS8431m #google" +"google","irrelevant","126516048626335744","Wed Oct 19 04:32:31 +0000 2011","Wordt er al een beetje gebruik gemaakt van #google+ #durtevragen" +"google","irrelevant","126515979059609603","Wed Oct 19 04:32:14 +0000 2011","RT @rianru: #Google и #Samsung предÑтавили новую верÑию операционной ÑиÑтемы - Android 4.0 Ice Cream Sandwich http://t.co/5baK0OdJ" +"google","irrelevant","126515953516298241","Wed Oct 19 04:32:08 +0000 2011","so. ja. Ich will das galaxy nexus. sofort! #google #samsung" +"google","irrelevant","126515696497725440","Wed Oct 19 04:31:07 +0000 2011","Lecture Notes in Financial Economics pdf ebook: http://t.co/Ef8j77uj #google" +"google","irrelevant","126515695352688640","Wed Oct 19 04:31:06 +0000 2011","Surfactants pdf ebook: http://t.co/mnPp2jjm #google" +"google","irrelevant","126515693993738240","Wed Oct 19 04:31:06 +0000 2011","Surfactants pdf ebook: http://t.co/JlzvsR9f #google" +"google","irrelevant","126515640889655296","Wed Oct 19 04:30:53 +0000 2011","Newt Gingrich: #NewtGingrich #Newt #Gingrich http://t.co/bZQS2pV3 #google #youtube" +"google","irrelevant","126515639362920449","Wed Oct 19 04:30:53 +0000 2011","Bob Harper: #BobHarper #Bob #Harper http://t.co/hnAMTNzh #google #youtube" +"google","irrelevant","126515633314729984","Wed Oct 19 04:30:52 +0000 2011","Rocksmith: #Rocksmith #Rocksmith http://t.co/JNS1oEZN #google #youtube" +"google","irrelevant","126515631980937216","Wed Oct 19 04:30:51 +0000 2011","Mukesh Ambani: #MukeshAmbani #Mukesh #Ambani http://t.co/uMBGXvQI #google #youtube" +"google","irrelevant","126515608887115776","Wed Oct 19 04:30:46 +0000 2011","ملخص مؤتمر سامسونج وقوقل عن نظام اندرويد 4 الجديد http://t.co/enpWtGb4 #android #android_ar #google #samsung" +"google","irrelevant","126515498467860480","Wed Oct 19 04:30:20 +0000 2011","Galaxy Nexus anunciado oficialmente: pantalla de 4,65 pulgadas y Ice Cream Sandwich http://t.co/gDETlxsN #galaxy_nexus #GalaxyNexus #google" +"google","irrelevant","126515353110065152","Wed Oct 19 04:29:45 +0000 2011","RT @AL_HASHEMY: يبدو ان Ø·ÙØ±Ø© الاجهزة الالكترونية القادمة ستكون بقيادة موتورولا ،، لاسيم بعد استحواذ قوقل عليها. + +#google #motorola #moto" +"google","irrelevant","126515159995912193","Wed Oct 19 04:28:59 +0000 2011","اعر٠الكثير عن نظام ايسكريم ساندويتش http://t.co/Fzjd2Zx1 #android #google" +"google","irrelevant","126514888951595008","Wed Oct 19 04:27:54 +0000 2011","Уже 14 верÑÐ¸Ñ Ð¿Ð»Ð°Ð³Ð¸Ð½Ð° #Android Ð´Ð»Ñ #Eclipse вышла, а подгружать апдейты Ð´Ð»Ñ sdk в фоне они так и не научилиÑÑŒ. #Google вперде!!!" +"google","irrelevant","126514866059100160","Wed Oct 19 04:27:49 +0000 2011","RT @RaheemHBBTI: RT @capoKenn: i like big glutus & i cannot lie +#GOOGLE<sandra levels lol>" +"google","irrelevant","126514704347693056","Wed Oct 19 04:27:10 +0000 2011","Mientras que hoy #RIM publicó su 'fabuloso' #BBX, #Google presentó #Android 4.0 y todos se fueron al diablo :D." 
+"google","irrelevant","126514555743518720","Wed Oct 19 04:26:35 +0000 2011","#Publicidad Galaxy Nexus es la próxima generación de dispositivos Nexus co-desarrollado por #Samsung y #Google l http://t.co/yVWrw98h" +"google","irrelevant","126514131544178688","Wed Oct 19 04:24:54 +0000 2011","Prenez un Smartphone Galaxy, ajouter y du software a la sauce Google et vous avez un tout beau Galaxy Nexus. #google#iphone" +"google","irrelevant","126513961553244160","Wed Oct 19 04:24:13 +0000 2011","Fine tune your #SEO strategy with the #Google Panda 2.5.2 update. Find out how it can affect your business rankings http://t.co/TeRUK0bp" +"google","irrelevant","126513706912841729","Wed Oct 19 04:23:12 +0000 2011","RT @esmandau: BREAKING NEWS: Samsung Filtra Accidentalmente la foto del Galaxy Nexus http://t.co/vEgkjIfQ #Android #GalaxyNexus #Google" +"google","irrelevant","126513429409312768","Wed Oct 19 04:22:06 +0000 2011","#Samsung y #Google presentan el Samsung Galaxy #Nexus, oficialmente. http://t.co/DwQyyu5v #gadgets #smartphones" +"google","irrelevant","126513410128089088","Wed Oct 19 04:22:02 +0000 2011","#Video oficial del #Samsung Galaxy Nexus http://t.co/IMid6Bem #gadgets #smartphones #galaxynexus #google #promocion" +"google","irrelevant","126513333191979008","Wed Oct 19 04:21:43 +0000 2011","Desbloquear el telefono con reconocimiento facial vs la habilidad de interactuar con tu voz #Apple #vs #Google" +"google","irrelevant","126512924385738752","Wed Oct 19 04:20:06 +0000 2011","Cupom Google AdWords + +Promo Starter Kit da LogosBr = Divulgação Eficaz e de Graça! +Confira: +http://t.co/kf9supYG +#Google #AdWords #LogosBr" +"google","irrelevant","126512627110252544","Wed Oct 19 04:18:55 +0000 2011","#Google Google stellt das Galaxy Nexus vor (GoogleWatchBlog): Heute Nacht hat Google aus Hongkong das nächste Ne... http://t.co/vkyYY2RO" +"google","irrelevant","126512625713545216","Wed Oct 19 04:18:55 +0000 2011","#Google Google: Galaxy Nexus Seite ist online + Teaser-Video (Caschys Blog): 05:30 Uhr und die Galaxy Nexus Seit... http://t.co/kn18pgCD" +"google","irrelevant","126512053660827648","Wed Oct 19 04:16:38 +0000 2011","グーグルã¨éŸ“国ã®ã‚µãƒ ã‚¹ãƒ³é›»å­ã€ã‚¹ãƒžãƒ¼ãƒˆãƒ•ã‚©ãƒ³ã®æ–°æ©Ÿç¨®ã€Œã‚®ãƒ£ãƒ©ã‚¯ã‚·ãƒ¼ãƒ»ãƒã‚¯ã‚µã‚¹ã€ã‚’11月ã«ä¸–界ã§é †æ¬¡ç™ºå£²ã™ã‚‹ã¨ç™ºè¡¨ã€‚ï¼”ãƒ»ï¼–ï¼•åž‹ã®æœ‰æ©Ÿï¼¥ï¼¬ç”»é¢ã‚’採用。日本ã§ã¯ï¼®ï¼´ï¼´ãƒ‰ã‚³ãƒ¢ãŒç™ºå£²ã€€#keizai #sumaho #google  http://t.co/h822eCym" +"google","irrelevant","126511257170886656","Wed Oct 19 04:13:28 +0000 2011","Nexus S รอà¸à¸´à¸™à¹„อศà¸à¸£à¸µà¸¡à¹à¸‹à¸™à¸§à¸´à¸Š #google #android #ics" +"google","irrelevant","126510558764736513","Wed Oct 19 04:10:42 +0000 2011","Para los que tenemos un #GalaxySII, no se preocupen... está 99% confirmado que tendremos #Android 4.0 en este GRANDIOSO terminal. 
#Google" +"google","irrelevant","126510399884496897","Wed Oct 19 04:10:04 +0000 2011","Galaxy Nexus: El nuevo teléfono de Google y Samsung #Android #Samsung #Google http://t.co/ak9xKDXT #followme" +"google","irrelevant","126509931657564160","Wed Oct 19 04:08:12 +0000 2011","Uiiii Yo Quisiera Ser Tuu #GOOGLE" +"google","irrelevant","126509298279907328","Wed Oct 19 04:05:41 +0000 2011","RT @capoKenn: i like big glutus & i cannot lie +#GOOGLE<sandra levels lol>" +"google","irrelevant","126509273994891264","Wed Oct 19 04:05:36 +0000 2011","Conoce al nuevo #Samsung #GalaxyNexus con #Android 4.0 #IceCreamSandwich - http://t.co/PdGz1l51 #Google #Smartphones" +"google","irrelevant","126509203148902400","Wed Oct 19 04:05:19 +0000 2011","Auch Google gefällt das Panda 2.5 Update nicht – Analyse eines Rollbacks « Searchmetrics SEO Blog http://t.co/2LvpBK0J #google #panda #seo" +"google","irrelevant","126508947980029952","Wed Oct 19 04:04:18 +0000 2011","Für alle Agenturen, die #Google #AdWords für ihre Kunden betreuen ... #Google #Engage - die Agenturen-Plattform: http://t.co/7RGu5aSl" +"google","irrelevant","126508667007803392","Wed Oct 19 04:03:11 +0000 2011","#Google+ llega a 40 millones de usuarios aunque su problema sigue siendo la poca permanencia en esta #RedSocial http://t.co/ntS7GXqV" +"google","irrelevant","126508103368835072","Wed Oct 19 04:00:56 +0000 2011","Como es posible que Motorola siga bloqueando los bootloaders de sus telefono. Merecen quebrar, y con ellos #google. #MotoFAIL" +"google","irrelevant","126507946707398656","Wed Oct 19 04:00:19 +0000 2011","Android 4.0 Ice Cream Sandwich ya es oficial http://t.co/uZ7qqU1J #android #google #androidve #tecnologia #vendroid #usoandroid" +"google","irrelevant","126507876469583872","Wed Oct 19 04:00:02 +0000 2011","GALAXY NEXUS SC-04D | NTTドコモ 2011-2012冬春モデル http://t.co/hOqa4eRq #android #google #docomo" +"google","irrelevant","126507802251370497","Wed Oct 19 03:59:45 +0000 2011","@julioandres26 sí, es como el #google de la piratería! jajaja" +"google","irrelevant","126507794777128964","Wed Oct 19 03:59:43 +0000 2011","看見 #Android 4.0 é…åˆ #NFC 加入的 Android #Beam 功能,令我想起åå¹´å‰ä½¿ç”¨ #Palm ç¶“å¸¸è·Ÿæœ‹å‹ Beam 交æ›è³‡æ–™çš„æ—¥å­ã€‚#Google #Samsung #Galaxy #Nexus" +"google","irrelevant","126507720546328576","Wed Oct 19 03:59:25 +0000 2011","Aparece nuevo video del Galaxy Nexus http://t.co/mhOcZuIm #android #google #androidve #tecnologia #vendroid #usoandroid" +"google","irrelevant","126507456649101314","Wed Oct 19 03:58:22 +0000 2011","i like big glutus & i cannot lie +#GOOGLE" +"google","irrelevant","126507441985826816","Wed Oct 19 03:58:19 +0000 2011","#Google и #Samsung предÑтавили новый Ñмартфон #Galaxy #Nexus на базе новой ОС #Android Ice Cream Sandwich 4.0 http://t.co/yNg45IDC" +"google","irrelevant","126507086522748929","Wed Oct 19 03:56:54 +0000 2011","#Google sorprende de nuevo http://t.co/G7iLQLME #basedenegocios #business" +"google","irrelevant","126507016368816129","Wed Oct 19 03:56:37 +0000 2011","Soy FAN de #Google http://t.co/bGBhZROX http://t.co/iLNouVGK http://t.co/RfFixdi4 http://t.co/pdRns1pK" +"google","irrelevant","126506951063511041","Wed Oct 19 03:56:22 +0000 2011","Google Fr #Google #earth #pro: Avez-vous déjà essayé Google Earth Pro? 
Google Earth dispose… http://t.co/Ezqlmq4T" +"google","irrelevant","126506683131379713","Wed Oct 19 03:55:18 +0000 2011","ابرز مميزات Android 4.0 +موجوده بهذا Ø§Ù„ÙØ¯ÙŠÙˆ +http://t.co/ZE4BZfph +#google #android @Abdullah347" +"google","irrelevant","126506375886016513","Wed Oct 19 03:54:05 +0000 2011","نظام جديد .. Ùˆ جهاز جديد شكراً جزيلاً #samsung #google #android" +"google","irrelevant","126506159938088961","Wed Oct 19 03:53:13 +0000 2011","#AndroidBeam: Comparte con otro #Android lo que ves solo con ponerlo juntos. (Por ahora sólo en el #NexusPrime). #NFC #Google" +"google","irrelevant","126506135082631168","Wed Oct 19 03:53:07 +0000 2011","siento mas interesante el #google+ que el #face xD" +"google","irrelevant","126505911584948225","Wed Oct 19 03:52:14 +0000 2011","ايسكريم ساندويش، عسل، زنجبيل .. مشكلة من كثر المسميات احسهم مسوين مقادير مب انظمة !! 😠+ +#google" +"google","irrelevant","126505668181098496","Wed Oct 19 03:51:16 +0000 2011","Ça fait des gato : Android Ice Cream Sandwich enfin dévoilé ! http://t.co/ernNQ40w #Android #Galaxy_Nexus #Google" +"google","irrelevant","126505497888161792","Wed Oct 19 03:50:35 +0000 2011","продвижение Ñайта ÑтатьÑми http://t.co/ytNbL8sF CÐµÑ€Ð²Ð¸Ñ Ñ€ÐµÐ³Ð¸Ñтрации в каталогах Ñтатей #seo #linkbuilding #backlinks #addurl #google" +"google","irrelevant","126505070098518016","Wed Oct 19 03:48:53 +0000 2011","#Google #Android os4.0発表会内容!! http://t.co/qseGK2Zy" +"google","irrelevant","126504896307539968","Wed Oct 19 03:48:12 +0000 2011","Welchen Vorteil bietet G+ für #Facebook? | basic http://t.co/DLvTboeK #Google" +"google","irrelevant","126504863369670657","Wed Oct 19 03:48:04 +0000 2011","#android 4.0 нужен #google чтобы объединить телефонную и планшетную верÑии в одну." +"google","irrelevant","126504793039572992","Wed Oct 19 03:47:47 +0000 2011","Los fabricantes de móviles y su integración con las redes sociales: Sony Ericson apunta a #Facebook, Apple a #Twitter y Samsung a #Google+" +"google","irrelevant","126504734864576512","Wed Oct 19 03:47:33 +0000 2011","Andy Rubin de #Google develó oficialmente el Samsung #GalaxyNexus, 1er smartphone con #Android4.0 Ice Cream Sandwich http://t.co/dKGCvjsG""" +"google","irrelevant","126504701142376448","Wed Oct 19 03:47:25 +0000 2011","RT @Eleconomistanet: Impulsor de #Facebook Sean #Parker resta importancia a la amenaza de #Google http://t.co/EnQtXsfv" +"google","irrelevant","126504580145094656","Wed Oct 19 03:46:56 +0000 2011","RT @DanyFajardo: @DanielaS04 :) eso!!! #Google+" +"google","irrelevant","126504516479754241","Wed Oct 19 03:46:41 +0000 2011","RT @centroandroid: #Google presenta #Android Beam una nueva forma de compartir por NFC http://t.co/lofb8HCJ" +"google","irrelevant","126504512960724992","Wed Oct 19 03:46:40 +0000 2011","@DanielaS04 :) eso!!! #Google+" +"google","irrelevant","126504398925996032","Wed Oct 19 03:46:13 +0000 2011","@DanyFajardo Listo Dany gracias =) ... #Google+" +"google","irrelevant","126504335734607873","Wed Oct 19 03:45:58 +0000 2011","RT @laiswhisky: @Peedro_Guiih gato intenda a nat é a metamoforse sorridente. . . . . .ela nao intende ela joga tudo no #google" +"google","irrelevant","126504297763581952","Wed Oct 19 03:45:49 +0000 2011","#TweetDeck y #Google+ no están hechos para convivir en armonía.. Las notificaciones aparecen del mismo lado.. =/ Es ver esas o las de face.." 
+"google","irrelevant","126504285436514304","Wed Oct 19 03:45:46 +0000 2011","RT @takashiruru: android beam ã£ã¦ä½•ã ã‚ã†ã€€ #Android4.0 #Google" +"google","irrelevant","126504090053259265","Wed Oct 19 03:45:00 +0000 2011","@Peedro_Guiih gato intenda a nat é a metamoforse sorridente. . . . . .ela nao intende ela joga tudo no #google" +"google","irrelevant","126504049393672192","Wed Oct 19 03:44:50 +0000 2011","#Google y #Samsung lanzan oficialmente el #GalaxyNexus con #Android #IceCreamSandwich. http://t.co/N0yIucDw" +"google","irrelevant","126503517149069313","Wed Oct 19 03:42:43 +0000 2011","「docomo NEXT series GALAXY NEXUS SC-04Dã€ã‚’開発 +http://t.co/B3t2y3vm +#android #google #docomo #samsung" +"google","irrelevant","126503460836343810","Wed Oct 19 03:42:30 +0000 2011","#Google presenta #Android Beam una nueva forma de compartir por NFC http://t.co/lofb8HCJ" +"google","irrelevant","126503277117452288","Wed Oct 19 03:41:46 +0000 2011","@ViviStephiee buscalo en #google..persona qe se comen a los conejos D:" +"google","irrelevant","126502927337660416","Wed Oct 19 03:40:22 +0000 2011","Le Galaxy Nexus officialisé #Samsung #Google #GalaxyNexus #Icecream - http://t.co/YT2RWbgn via @Julbut @TweeqFr @Thibaudd" +"google","irrelevant","126502611657564160","Wed Oct 19 03:39:07 +0000 2011","Camarada @michelenlared, mándale a @mvdan los enlaces de los dólares de #Google que financian a la #FSF porque no quiere creerte... xD" +"google","irrelevant","126502150472871937","Wed Oct 19 03:37:17 +0000 2011","RT @tecnoclips: #Google lanza #Android 4.0 http://t.co/NZ494v4h también conocido como Ice Cream Sandwich y Samsung lanza el Galaxy #Nexus" +"google","irrelevant","126502129060950016","Wed Oct 19 03:37:12 +0000 2011","RT @dulmandakh: #Google буузаа түүхийрүүлчихÑÑн, аврах аргагүй учир удахгүй хаана. би Ñ‚ÑрнÑÑÑ Ð½ÑŒ өмнө хаачихлаа." +"google","irrelevant","126501952317165568","Wed Oct 19 03:36:30 +0000 2011","продвижение ÑтатьÑми http://t.co/zZ8PjWwX CÐµÑ€Ð²Ð¸Ñ Ñ€ÐµÐ³Ð¸Ñтрации в каталогах Ñтатей #seo #linkbuilding #backlinks #addurl #bookmarks #google" +"google","irrelevant","126501402397782016","Wed Oct 19 03:34:19 +0000 2011","閉鎖ã˜ã‚ƒãªãã¦ã€ãã£ãりストリームã«å映ã§ãるよã†ã«ã—ã¦ãれるã¨ã‚りãŒãŸã„ã‚“ã ã‘ã©ãªã€œã€‚>> グーグルã€ã€ŒGoogle Buzzã€ã‚’閉鎖--「Google+ã€ã¸ã®å®Œå…¨ http://t.co/mOfsDfFq #google #buzz" +"google","irrelevant","126501224148242432","Wed Oct 19 03:33:36 +0000 2011","@Natalysanchezs según se un sentimiento #google" +"google","irrelevant","126501185321566208","Wed Oct 19 03:33:27 +0000 2011","@alanlepo..IBM Connections light years ahead of other vendors in #analytics http://t.co/jg24duIa #socialbiz #Google+" +"google","irrelevant","126501161502126080","Wed Oct 19 03:33:21 +0000 2011","@iPhonologo iOS exclusivo, desde que no logre pasar a la segunda ronda de examenes para trabajar en #Google me cambie al mundo Mac" +"google","irrelevant","126501085794930688","Wed Oct 19 03:33:03 +0000 2011","@JosialBass Dios mio !!! #Android no tiene #Limites!! RT@JosialBass: El nuevo #Android de #Google: #GalaxyNexus 4.0... http://t.co/fxqXDQb8" +"google","irrelevant","126500586681143296","Wed Oct 19 03:31:04 +0000 2011","A espera acabou. #Samsung, parceria com o #Google, o novo GalaxyNexus nesta terça-feira (19) em HongKong. O Android 4.0 Ice Cream Sandwich" +"google","irrelevant","126500532671086592","Wed Oct 19 03:30:51 +0000 2011","Hoy #Foursquare y #Google+ me patearon las pelotas!" 
+"google","irrelevant","126500515872915457","Wed Oct 19 03:30:47 +0000 2011","@iPhonologo si, vi el evento, pero si hay algo en lo que #google es bueno es en reportes .... analytics es genial" +"google","irrelevant","126500195570683905","Wed Oct 19 03:29:31 +0000 2011","@CarlosTolosa10 jajaja buscalo en #google y ve como esta jajaja" +"google","irrelevant","126500127354535937","Wed Oct 19 03:29:15 +0000 2011","#Google No se que aria sin ti :$" +"google","irrelevant","126500080659341315","Wed Oct 19 03:29:04 +0000 2011","#google you do have a journ degree correct? RT @SZimms: Who know how to format an annotated bibliography in APA style?" +"google","irrelevant","126500021863579648","Wed Oct 19 03:28:50 +0000 2011","Que mierda le pasa a #FOTA que no me quiere traer #IceCreamSandwich? #Google #Android @SamsungArg #GalaxyS2 ;-)" +"google","irrelevant","126500018000633858","Wed Oct 19 03:28:49 +0000 2011","#Samsung #NexusPrime esta wwwaaooooo y con #IceCreamSandwich superior @samsungpty buena presentación #Android #Google" +"google","irrelevant","126499879638925312","Wed Oct 19 03:28:16 +0000 2011","#Google lanza #Android 4.0 http://t.co/NZ494v4h también conocido como Ice Cream Sandwich y Samsung lanza el Galaxy #Nexus" +"google","irrelevant","126499873821437952","Wed Oct 19 03:28:14 +0000 2011","6 positive Learnings aus dem Panda-Update ... http://t.co/JEgkwrDR #panda #seo #google" +"google","irrelevant","126499835481292801","Wed Oct 19 03:28:05 +0000 2011","El nuevo #Android de #Google: #GalaxyNexus con #IceCreamSandwich 4.0... http://t.co/FITenr0v" +"google","irrelevant","126499741176561664","Wed Oct 19 03:27:43 +0000 2011","#android El nuevo Android de Google: Galaxy Nexus con Ice Cream Sandwich 4.0: Menuda madrugada del 19 de... http://t.co/wjCcDIAb #google" +"google","irrelevant","126499498133426176","Wed Oct 19 03:26:45 +0000 2011","RT @rianru: #Google предÑтавит новую верÑию #Android 4.0 http://t.co/mf8XE5Aw" +"google","irrelevant","126499369905160192","Wed Oct 19 03:26:14 +0000 2011","Galaxy Nexus: El nuevo teléfono de Google y Samsung http://t.co/laKDByOT #nexus #google #android" +"google","irrelevant","126499359528468480","Wed Oct 19 03:26:12 +0000 2011","Pearson и Google запуÑкают открытую ÑиÑтему диÑтанционного Ð¾Ð±ÑƒÑ‡ÐµÐ½Ð¸Ñ http://t.co/YglYrzWj #google" +"google","irrelevant","126499266851119104","Wed Oct 19 03:25:50 +0000 2011","TIPP: Kostenlose Google +1 und FB-Likes - Free Google +1 and FB-Likes - http://t.co/eNfReVk3 #GooglePlus #Google+1 #Facebook #Likes" +"google","irrelevant","126499256503762944","Wed Oct 19 03:25:47 +0000 2011","Cómo funciona #Google AdWords http://t.co/8kuFtEVY vía @JuanPittau" +"google","irrelevant","126499194247725056","Wed Oct 19 03:25:32 +0000 2011","ICS plutôt bien dans l'ensemble, perso j'aime beaucoup l'interface des contact // epic fail reconnaissance faciale ! #google #android" +"google","irrelevant","126499065423859712","Wed Oct 19 03:25:02 +0000 2011","Que es #Android Beam? Una nueva forma que presenta #Google para compartir contenido, solo juntar los teléfonos y pasar la información :)" +"google","irrelevant","126498881142927360","Wed Oct 19 03:24:18 +0000 2011","@iley_little we'lll figure it out tm in gym #google" +"google","irrelevant","126498880048214016","Wed Oct 19 03:24:17 +0000 2011","RT @QiiBO: #GalaxyNexus es presentado oficialmente. 
http://t.co/dwUWIYWC | Conoce los detalles del nuevo teléfono de #Google" +"google","irrelevant","126498875405115392","Wed Oct 19 03:24:16 +0000 2011","#Google La recherche cryptée étendue à tous les utilisateurs connectés à un service Google via @commentcamarche http://t.co/5d4YG4pa" +"google","irrelevant","126498743993376768","Wed Oct 19 03:23:45 +0000 2011","RT @FTTank: Voici un très bon résumé de l'annonce du #GalaxyNexus de #Google http://t.co/O0bh5nBp" +"google","irrelevant","126498725471334401","Wed Oct 19 03:23:41 +0000 2011","@nestorrom77 majeeee yo cuando reuna bastante pisto me voy a comprar los que usa @davidguetta puuuuuta son tan vergones buscalos en #google" +"google","irrelevant","126498584110710784","Wed Oct 19 03:23:07 +0000 2011","#GalaxyNexus es presentado oficialmente. http://t.co/dwUWIYWC | Conoce los detalles del nuevo teléfono de #Google" +"google","irrelevant","126498451289673728","Wed Oct 19 03:22:35 +0000 2011","@ksidd94 lol it's #tocomplicated to explain on here lol, #lookitup #google #twitterrookie #lmao" +"google","irrelevant","126498295219621888","Wed Oct 19 03:21:58 +0000 2011","RT vers @FuturKing > @FTTank Voici un très bon résumé de l'annonce du #GalaxyNexus de #Google http://t.co/ueCURyDX" +"google","irrelevant","126498266799013888","Wed Oct 19 03:21:51 +0000 2011","@.brown45235: vibro http://t.co/uoIfnklJ #gnu #gome #google #hadopi #health ... http://t.co/V690lfLS" +"google","irrelevant","126497733262585856","Wed Oct 19 03:19:44 +0000 2011","Voici un très bon résumé de l'annonce du #GalaxyNexus de #Google http://t.co/O0bh5nBp" +"google","irrelevant","126497725633150977","Wed Oct 19 03:19:42 +0000 2011","#NuevaEscuela #InternetHighSchoolOfEducation#001InThisWorld con el Director #Google y los mejores profesores #SanWikipedia y #Yahoo!Answers" +"google","irrelevant","126497618258964480","Wed Oct 19 03:19:17 +0000 2011","'Did #Jews...' on a #Google Search: http://t.co/PyoG47fV <<< ""Kill Jesus, Cause 9/11, Vote for Hitler""" +"google","irrelevant","126497409340686336","Wed Oct 19 03:18:27 +0000 2011","Samsung Galaxy Nexus - el nuevo smartphone oficial Google con Android 4.0 ""Ice-Cream Sandwich"" http://t.co/JXyHhfNV #Google #Android" +"google","irrelevant","126497063742603264","Wed Oct 19 03:17:04 +0000 2011","หน้าตาของระบบปฎิบัติà¸à¸²à¸£ Android 4.0 Ice Cream Sandwich ของ #google à¸à¹‡à¸”ูดีเหมือนà¸à¸±à¸™à¸™à¸°" +"google","irrelevant","126497027273129984","Wed Oct 19 03:16:56 +0000 2011","хочу виджет Ð´Ð»Ñ #Google+ на #Android как Ð´Ð»Ñ Ñ‚Ð²Ð¸Ñ‚Ñ‚ÐµÑ€Ð°, чтобы поÑты были видны в виджете." +"google","irrelevant","126496716789792768","Wed Oct 19 03:15:42 +0000 2011","#google เริ่มเปิดต้วระบบปฎิบัติà¸à¸²à¸£ Android 4.0 Ice Cream Sandwich à¹à¸¥à¹‰à¸§" +"google","irrelevant","126496706257895424","Wed Oct 19 03:15:39 +0000 2011","#google è¯ï¼Œä¸«çš„åªæœ‰ä½  #apple 会抄么,俺一样会抄。天下一大抄。" +"google","irrelevant","126496681717014528","Wed Oct 19 03:15:33 +0000 2011","#Google anuncia melhorias ao #PowerPoint do seu Docs http://t.co/4pLGTDgz" +"google","irrelevant","126496237879959553","Wed Oct 19 03:13:47 +0000 2011","necesito encontrar tantas respuestas que no las da ni #google ni #wikipedia entonces donde?" 
+"google","irrelevant","126496216052801536","Wed Oct 19 03:13:42 +0000 2011","Parece que Google Labs se acaba :( m.fastcompany.com/technology/57403/ #google #fail" +"google","irrelevant","126496203822211072","Wed Oct 19 03:13:39 +0000 2011","Virage Success Story - Harvard Business School pdf ebook: http://t.co/JemEYNDb #google" +"google","irrelevant","126496202693939200","Wed Oct 19 03:13:39 +0000 2011","Virage Success Story - Harvard Business School pdf ebook: http://t.co/dNpFlXxB #google" +"google","irrelevant","126496202509389824","Wed Oct 19 03:13:39 +0000 2011","Putting Leadership Back into Strategy pdf ebook: http://t.co/fGc3ufed #google" +"google","irrelevant","126496200668098561","Wed Oct 19 03:13:39 +0000 2011","Putting Leadership Back into Strategy pdf ebook: http://t.co/Rcu7X2lh #google" +"google","irrelevant","126495762568851456","Wed Oct 19 03:11:54 +0000 2011","android beam ã£ã¦ä½•ã ã‚ã†ã€€ #Android4.0 #Google" +"google","irrelevant","126495739663757313","Wed Oct 19 03:11:49 +0000 2011","i-nexus Named as One of The Telegraph's Top 1000 - Free PR Site (press release) http://t.co/J4AHo3Aa #nexus #one #google" +"google","irrelevant","126495620025417729","Wed Oct 19 03:11:20 +0000 2011","الجهاز الجديد عجيب + +#DriodRazr +#google #motorola #moto" +"google","irrelevant","126495346569392129","Wed Oct 19 03:10:15 +0000 2011","@oriondesarrollo en todas las #redessociales ahora también en #google+ http://t.co/z5oXYc0y" +"google","irrelevant","126495269201264640","Wed Oct 19 03:09:57 +0000 2011","Я немного потрÑÑен :) #google #android" +"google","irrelevant","126495253535531008","Wed Oct 19 03:09:53 +0000 2011","RT @google_topic: Samsungã€Android 4.0æ­è¼‰ã®Googleブランド端末「GALAXY Nexusã€ã‚’発表 - ITmedia http://t.co/t0CrKsoo #google" +"google","irrelevant","126495253321613312","Wed Oct 19 03:09:53 +0000 2011","no dijeron si #ICS estara para el galaxy s1 T_T RT @AndrotekRD: #Pregunta...Pro y Contra del evento de #Google?" +"google","irrelevant","126495251757150208","Wed Oct 19 03:09:52 +0000 2011","#Google y su distorsión de la ##accesibilidad http://t.co/LE6lzlus #fb #android #chromeos #chromevox" +"google","irrelevant","126495232417218560","Wed Oct 19 03:09:48 +0000 2011","Brilhante a apresentação do Android ICS. O Google simplesmente abusou da tecnologia!!! Sambou na cara das agendas !!! #ICS #google #Android" +"google","irrelevant","126495208505479168","Wed Oct 19 03:09:42 +0000 2011","今日発表ã ã£ãŸï¼¾ï¾›ï¼¾ã€€ #Android4.0 #Google" +"google","irrelevant","126495101131309056","Wed Oct 19 03:09:16 +0000 2011","Con algunos ""hiccups"" durante la presetnación de #Android 4.0 pero todo pinta genial. Muy bien ahí #Google #Samsung #GalaxyNexus" +"google","irrelevant","126495097180262400","Wed Oct 19 03:09:15 +0000 2011","Le Galaxy Nexus a l'air bien sympa et #ICS bien repensé. Reste à voir les tests ... #google #android" +"google","irrelevant","126495028548874241","Wed Oct 19 03:08:59 +0000 2011","يبدو ان Ø·ÙØ±Ø© الاجهزة الالكترونية القادمة ستكون بقيادة موتورولا ،، لاسيم بعد استحواذ قوقل عليها. + +#google #motorola #moto" +"google","irrelevant","126495020156063744","Wed Oct 19 03:08:57 +0000 2011","Samsungã€Android 4.0æ­è¼‰ã®Googleブランド端末「GALAXY Nexusã€ã‚’発表 - ITmedia http://t.co/t0CrKsoo #google" +"google","irrelevant","126494978561146881","Wed Oct 19 03:08:47 +0000 2011","RT @interfase: Andy Rubin de #Google develó oficialmente el Samsung #GalaxyNexus, el 1er smartphone con #Android4.0 Ice Cream Sandwich h ..." 
+"google","irrelevant","126494884726190080","Wed Oct 19 03:08:25 +0000 2011","Se jodió #iOS5 con #Android 4.0 (Ice Cream Sandwich). Bendito seas #Google !!!" +"google","irrelevant","126494808847040513","Wed Oct 19 03:08:07 +0000 2011","@donchele vaya trabaje con #Google mejor jajajajajajajaj" +"google","irrelevant","126494774428565504","Wed Oct 19 03:07:59 +0000 2011","RT @Osama11: RT @SE4mCom: استجابة الشاشة هي 0.1 وهو رقم أكثر من Ù…Ø±ØªÙØ¹ ÙÙŠ هات٠محمول +http://t.co/N2IEuzEN #SE4m #Google #Nexus" +"google","irrelevant","126494752517537792","Wed Oct 19 03:07:53 +0000 2011","@Ladycamia @JewelSantini check out that link sis - some good stuff on #mindfulness #medidtation techniques on #google - jewels swears by it!" +"google","irrelevant","126494681617010689","Wed Oct 19 03:07:36 +0000 2011","definitivamente sigo sin entender un carajo de #Google+" +"google","irrelevant","126494434887090177","Wed Oct 19 03:06:38 +0000 2011","#Android #IceCreamSandwich 4.0 y el Galaxy Nexus, no necesitas nada más... #samsung #google" +"google","irrelevant","126494286316445696","Wed Oct 19 03:06:02 +0000 2011","En estooos ultimooos dias Mi página mas visitadaaa en internet es #Google" +"google","irrelevant","126494260269821952","Wed Oct 19 03:05:56 +0000 2011","Galaxy Nexus: El nuevo teléfono de Google y Samsung - Gizmología http://t.co/yUYG3KF6 #google" +"google","irrelevant","126494247082934272","Wed Oct 19 03:05:53 +0000 2011","Queridos, até amanhã! Até mais! #Google #FAIL #ICS #FAIL #NEXUSPRIME #MAOMENO" +"google","irrelevant","126494176551514112","Wed Oct 19 03:05:36 +0000 2011","@rmeneghelo: ""#Google ainda te ama apesar de tudo IUEHAIUAHEIUHAEIUH =P"" correção: ainda te AMO..." +"google","irrelevant","126494166145437696","Wed Oct 19 03:05:34 +0000 2011","#android #ice #cream #Sandwich #google +текÑÑ‚-Фото на английÑком +http://t.co/UK3A7JWk" +"google","irrelevant","126494156368523267","Wed Oct 19 03:05:31 +0000 2011","Hinweis: Linkbuilding Kurs kostenlos http://t.co/pwldYXVE #google" +"google","irrelevant","126494152375533568","Wed Oct 19 03:05:30 +0000 2011","RT @DominicanDroids: Cuando 2 grandes se unen, #Samsung y #Google... Estas cosas son las que pasan... #GalaxyNexus" +"google","irrelevant","126494104187183104","Wed Oct 19 03:05:19 +0000 2011","#Google ainda te ama apesar de tudo IUEHAIUAHEIUHAEIUH =P" +"google","irrelevant","126494033882259458","Wed Oct 19 03:05:02 +0000 2011","me encanta sacarme dudas existenciales con respecto a la diferencia entre objetivamente y subjetivamente. GRACIAS #GOOGLE !" +"google","irrelevant","126493930794659840","Wed Oct 19 03:04:37 +0000 2011","é¡”èªè­˜ãƒ­ãƒƒã‚¯è§£é™¤å¤±æ•—ã—ã¦ã‚‹ãƒ»ãƒ»ãƒ»ãƒ‡ãƒ¢ã§å¤±æ•—ã—ã¡ã‚ƒã£ã¦ã„ã„ã®ã‹ï¼Ÿ #google" +"google","irrelevant","126493860804308992","Wed Oct 19 03:04:21 +0000 2011","ESA propaganda de #ICSNexus ya me aburrio q me dan ganas de decir q lo unico q aprecio de android es el browser q trae para abrir #Google" +"google","irrelevant","126493833608441856","Wed Oct 19 03:04:14 +0000 2011","RT @juank3946: #IceCreamSandwich presenta #AndroidBeam, una herramienta para transferir archivos con solo juntar 2 teléfonos #Android #G ..." +"google","irrelevant","126493751215525889","Wed Oct 19 03:03:55 +0000 2011","Με συγχισες #google σιχτιÏια! Για αυτο κ ποτε δν θα δεχτω να δουλεψω σε σενα που να χτυπιεσαι :Ρ" +"google","irrelevant","126493741354713088","Wed Oct 19 03:03:52 +0000 2011","interface a voir en réel pour juger, sinon ice cream sandwich est simplement monstrueux !! 
#nexusprime #google #ICS" +"google","irrelevant","126493715933052928","Wed Oct 19 03:03:46 +0000 2011","RT @gattenist: サムスンã€Googleã¨å…±åŒã§Googleリファレンス機「Galaxy Nexusã€ã‚’æ­£å¼ç™ºè¡¨ã€11月ã«ä¸–界å„地ã§ç™ºå£²ã€€http://t.co/zwSUFuez #androidjp #samsung #google #galaxynexus" +"google","irrelevant","126493543249358848","Wed Oct 19 03:03:05 +0000 2011","#Google When you perform texas hold em, do you have to use the 2 cards you are dealt? go through on?:... http://t.co/IR9F1IAb #wikileaks" +"google","irrelevant","126493540053303296","Wed Oct 19 03:03:04 +0000 2011","#Google Sit and Go Center Method For Profitable Sit-N-Go’s on PokerStars: http://t.co/6PwI0rjZ -Co... http://t.co/JCqPi1hS #wikileaks" +"google","irrelevant","126493537540907008","Wed Oct 19 03:03:04 +0000 2011","#Google Newest Texas Holdem Zynga Poker Hack Cheat Update 18 October 2011.flv: Download Hyperlink: ur... http://t.co/oEXPDnoT #wikileaks" +"google","irrelevant","126493534336458752","Wed Oct 19 03:03:03 +0000 2011","#Google The Annie Duke Texas Hold-Em Poker Tournament to Gain Homeless Youth And Domestic Violence Vi... http://t.co/wSGk5WAj #wikileaks" +"google","irrelevant","126493525578747905","Wed Oct 19 03:03:01 +0000 2011","#Google How to Play Texas Holdem Poker for Newbies : Texas Hold’em Poker: The River: Discover poker t... http://t.co/9E7WLKvG #wikileaks" +"google","irrelevant","126493517622149121","Wed Oct 19 03:02:59 +0000 2011","iOS kan slänga sig i väggen. #ICS levererar! #Google" +"google","irrelevant","126493505047629824","Wed Oct 19 03:02:56 +0000 2011","So viel zum Thema Retina Display :D Respekt an #Google und #Samsung, Super Amoled HD in 1280x720 auf einem Phone ist krass :D #GalaxyNexus" +"google","irrelevant","126493282355261440","Wed Oct 19 03:02:03 +0000 2011","#Galaxy #Nexus líbí, i novinky v #ICS. Najít truhlu se zlaťáky, je můj :D Snad jeÅ¡tÄ› nezapomenou na Nexus One. #Google" +"google","irrelevant","126493154219266048","Wed Oct 19 03:01:32 +0000 2011","#Google Ab jetzt Live von der Google und Samsung Pressekonferenz zu Android 4.0 und dem Galaxy Nexus (NewGadgets... http://t.co/tLMxcv6L" +"google","irrelevant","126493144824020993","Wed Oct 19 03:01:30 +0000 2011","La presentazione di Ice Cream Sandwich è appena finita, la scimmia ha già cominciato a urlare, lo voglio! :D #Google #ICS #GalaxyNexus" +"google","irrelevant","126493116554424320","Wed Oct 19 03:01:23 +0000 2011","Hé bien c'était cool d'attendre pour voir la conférence :-), j'ai envie du #nexus #prime de #google mnt, et jouer avec #ICM #android 4.0" +"google","irrelevant","126493008244912128","Wed Oct 19 03:00:57 +0000 2011","#Pregunta...Pro y Contra del evento de #Google?" +"google","irrelevant","126492972735922177","Wed Oct 19 03:00:49 +0000 2011","楽ã—ã‹ã£ãŸï½žã€€#android #google" +"google","irrelevant","126492905476067328","Wed Oct 19 03:00:33 +0000 2011","RT @joselopes_2001: Parabéns, #Google. Vocês humilharam o #iOS! 
#Android 4.0" +"google","irrelevant","126492820130373632","Wed Oct 19 03:00:13 +0000 2011","Ñкромненько, быÑтренько без пафоÑа прошла Ð¿Ñ€ÐµÐ·ÐµÐ½Ñ‚Ð°Ñ†Ð¸Ñ #google и #samsung #android" +"google","irrelevant","126492770348171264","Wed Oct 19 03:00:01 +0000 2011","「益若ã¤ã°ã•ã€ã€Œå¡©éº¹ã€ã€Œãƒžãƒ„コ富士ã€ãªã©ã€æœ€æ–°ã®Google急上昇ワードã¨ãれã«é–¢é€£ã™ã‚‹Amazon商å“ã¯ã“ã¡ã‚‰ã§ãƒã‚§ãƒƒã‚¯ï¼â†’ http://t.co/pjCyxd9J #google #amazon" +"google","irrelevant","126492723673960448","Wed Oct 19 02:59:50 +0000 2011","Parabens #google gostei bastante do novo Android #android" +"google","irrelevant","126492704707321856","Wed Oct 19 02:59:45 +0000 2011","RT @twandroid SDK dispo aujourd'hui #google #nexusprime" +"google","irrelevant","126492543146926080","Wed Oct 19 02:59:07 +0000 2011","_('~')_ ã‚りãŒã¨ã†ã”ã–ã„ã¾ã™ã£ï¼ï¼Facebookページ「Google Appsã£ã¦ã„ã„ã­ï¼ã€ã‚‚å¾ã€…ã«ãƒ•ァンãŒå¢—ãˆã¦ã¾ã™ã£ï¼ï¼å¬‰ã—ã„ã§ã™ã£ï¼ï¼ http://t.co/PBi9Hog8 #google #googleapps" +"google","irrelevant","126492533860728832","Wed Oct 19 02:59:04 +0000 2011","Y termina el evento. #Android #Google #Samsung" +"google","irrelevant","126492487111028736","Wed Oct 19 02:58:53 +0000 2011","#ThingsWeAllHate people who think they know it all! hello...your name isn't #Google" +"google","irrelevant","126492457276940288","Wed Oct 19 02:58:46 +0000 2011","RT @twandroid: SDK dispo aujourd'hui #google #nexusprime" +"google","irrelevant","126492452990369792","Wed Oct 19 02:58:45 +0000 2011","RT @twandroid Android Beam : partager du contenu entre devices Android via le NFC #google #nexusprime" +"google","irrelevant","126492366776446976","Wed Oct 19 02:58:25 +0000 2011","GOGOGo desenvolvedores! developer.android.com #icecreamsandwich #galaxynexus #google #samsung" +"google","irrelevant","126492339559608320","Wed Oct 19 02:58:18 +0000 2011","RT @BeautyNda_Beast don’t waste your time on a guy that isn’t willing to waste his time on you #Google" +"google","irrelevant","126492333519802368","Wed Oct 19 02:58:17 +0000 2011","RT @twandroid Messagerie vocale directement dans ICS #google #nexusprime" +"google","irrelevant","126492258504683520","Wed Oct 19 02:57:59 +0000 2011","#ICS et #Google une communauté à votre service !" +"google","irrelevant","126492256868900865","Wed Oct 19 02:57:58 +0000 2011","Il est dispo sur developer.android.com #google #nexusprime" +"google","irrelevant","126492248547405825","Wed Oct 19 02:57:56 +0000 2011","Pqp! Icecream Sandwich tá muito foda! Hail #google #ICS #android 4.0" +"google","irrelevant","126492186060656640","Wed Oct 19 02:57:41 +0000 2011","Parabéns, #Google. Vocês humilharam o #iOS! #Android 4.0" +"google","irrelevant","126492179525931009","Wed Oct 19 02:57:40 +0000 2011","SDK dispo aujourd'hui #google #nexusprime" +"google","irrelevant","126492130154774528","Wed Oct 19 02:57:28 +0000 2011","#Android Beam 好åƒå¾ˆå޲害ï¼#Google #ICS" +"google","irrelevant","126492053248020481","Wed Oct 19 02:57:10 +0000 2011","Grave c'est même plus une demi-molle là! 
RT @g123k #kikitoutdur devant ICS #google #nexusprime" +"google","irrelevant","126492052371406848","Wed Oct 19 02:57:10 +0000 2011","@EgyDroid ""Android Beam هو نظام تبادل البيانات الخاص بأندرويد لتناقل البيانات عن طريق خاصية NFS"" very strong feature amazing #Google #Nexus" +"google","irrelevant","126492019009916928","Wed Oct 19 02:57:02 +0000 2011","android beam スゲー #google #android" +"google","irrelevant","126492010864574464","Wed Oct 19 02:57:00 +0000 2011","Android Beam: Compartilhamento de aplicativos, links, multimidia e contatos via NFC #icecreamsandwich #galaxynexus #google #samsung" +"google","irrelevant","126491986927685632","Wed Oct 19 02:56:54 +0000 2011","RT @g123k: Google a pas innové avec un tamagochi, ils ont tout compris #google #nexusprime" +"google","irrelevant","126491961271136256","Wed Oct 19 02:56:48 +0000 2011","RT @twandroid: Partage d'apps en NFC #google #nexusprime" +"google","irrelevant","126491942077992961","Wed Oct 19 02:56:43 +0000 2011","Les annonces se suivent et sont toutes super pratiques #ics #android #google" +"google","irrelevant","126491928320688128","Wed Oct 19 02:56:40 +0000 2011","Partage d'apps en NFC #google #nexusprime" +"google","irrelevant","126491895512838144","Wed Oct 19 02:56:32 +0000 2011","Shoutout to our New Twitter Friends & #WelcomeToTwitter! Thanks to everyone following @xrvolume - #myspace #wordpress #Google+" +"google","irrelevant","126491807205965825","Wed Oct 19 02:56:11 +0000 2011","Alguien dejara de trabajar en #Samsung o #Google" +"google","irrelevant","126491801723994114","Wed Oct 19 02:56:10 +0000 2011","Tipp: kostenlose SEO-Infos http://t.co/1ayEKV6F #google" +"google","irrelevant","126491743301537792","Wed Oct 19 02:55:56 +0000 2011","To gostando do Android 4.0 #google #android" +"google","irrelevant","126491727673569280","Wed Oct 19 02:55:52 +0000 2011","le nfc intégré aussi dans #nexus #google #android '-'" +"google","irrelevant","126491723353427968","Wed Oct 19 02:55:51 +0000 2011","Google a pas innové avec un tamagochi, ils ont tout compris #google #nexusprime" +"google","irrelevant","126491662879965184","Wed Oct 19 02:55:37 +0000 2011","Android Beam: Compartilhamentovia NFC #icecreamsandwich #galaxynexus #google #samsung" +"google","irrelevant","126491659008610304","Wed Oct 19 02:55:36 +0000 2011","RT @twandroid: Android Beam : partager du contenu entre devices Android via le NFC #google #nexusprime" +"google","irrelevant","126491616641953792","Wed Oct 19 02:55:26 +0000 2011","Android 4.0 endast för enheter med 720p-skärm? http://t.co/BqpiYl6W #android #google #ics" +"google","irrelevant","126491544558632960","Wed Oct 19 02:55:08 +0000 2011","Android Beam : partager du contenu entre devices Android via le NFC #google #nexusprime" +"google","irrelevant","126491450035814400","Wed Oct 19 02:54:46 +0000 2011","Et de 3 #fails, mais au moins c'est du vrai direct #google #nexusprime" +"google","irrelevant","126491409871155200","Wed Oct 19 02:54:36 +0000 2011","BUGOU asoeijoaijoriaejr #icecreamsandwich #galaxynexus #google #samsung" +"google","irrelevant","126491356481859585","Wed Oct 19 02:54:24 +0000 2011","RT @Formateate: Tendremos nueva interfaz de #google #calendar" +"google","irrelevant","126491323774672896","Wed Oct 19 02:54:16 +0000 2011","viva #Microsoft o #google para e copiar a #apple e copia o #wp7 de tao bom que é. 
Incrivel como a interface metro esta ali," +"google","irrelevant","126491290627080194","Wed Oct 19 02:54:08 +0000 2011","RT @twandroid: Messagerie vocale directement dans ICS #google #nexusprime" +"google","irrelevant","126491272088260609","Wed Oct 19 02:54:04 +0000 2011","joga meu nome no #google pra ver no que vai dar ;) ♪" +"google","irrelevant","126491237720141825","Wed Oct 19 02:53:55 +0000 2011","Messagerie vocale directement dans ICS #google #nexusprime" +"google","irrelevant","126491078797950976","Wed Oct 19 02:53:17 +0000 2011","Pas mieux \o/ RT @g123k: #kikitoutdur devant ICS #google #nexusprime" +"google","irrelevant","126491075870343168","Wed Oct 19 02:53:17 +0000 2011","RT @twandroid: Possibilité de voir les mises à jour (un peu comme Sony Ericsson avec l'intégration Facebook) #google #nexusprime" +"google","irrelevant","126490998816768001","Wed Oct 19 02:52:58 +0000 2011","Παει κ η #google ! Αποθανε!? ""The server encountered an error and could not complete your request""" +"google","irrelevant","126490976565985281","Wed Oct 19 02:52:53 +0000 2011","RT @g123k: #kikitoutdur devant ICS #google #nexusprime" +"google","irrelevant","126490918885920768","Wed Oct 19 02:52:39 +0000 2011","#kikitoutdur devant ICS #google #nexusprime" +"google","irrelevant","126490858735407104","Wed Oct 19 02:52:25 +0000 2011","å‘现 #google 特æ„的故æ„çš„ä¸æ #facebook ,哈哈哈。" +"google","irrelevant","126490790150144000","Wed Oct 19 02:52:09 +0000 2011","O android 4.0 Ice Cream Sandwich agenda de contato esta parecendo o windows phone 7 #google #android" +"google","irrelevant","126490759766618112","Wed Oct 19 02:52:01 +0000 2011","RT @twandroid Nouvelle application Contacts : les connections Linkedin, Twitter.. sont enfin intégrées ! #google #nexusprime" +"google","irrelevant","126490644616200192","Wed Oct 19 02:51:34 +0000 2011","Possibilité de voir les mises à jour (un peu comme Sony Ericsson avec l'intégration Facebook) #google #nexusprime" +"google","irrelevant","126490589125550080","Wed Oct 19 02:51:21 +0000 2011","サムスンã€Googleã¨å…±åŒã§Googleリファレンス機「Galaxy Nexusã€ã‚’æ­£å¼ç™ºè¡¨ã€11月ã«ä¸–界å„地ã§ç™ºå£²ã€€http://t.co/zwSUFuez #androidjp #samsung #google #galaxynexus" +"google","irrelevant","126490558230302721","Wed Oct 19 02:51:13 +0000 2011","@nandokanarski PUTZ, que bela MERDA... #Google #Android #fail" +"google","irrelevant","126490549367738368","Wed Oct 19 02:51:11 +0000 2011","Chrome Experiment – WebGL Bookcase, biblioteca infinita de Google Books http://t.co/HrLLVlIK #Google #Chrome #GoogleChrome #Software" +"google","irrelevant","126490540794576896","Wed Oct 19 02:51:09 +0000 2011","RT @twandroid: Nouvelle application Contacts : les connections Linkedin, Twitter.. sont enfin intégrées ! #google #nexusprime" +"google","irrelevant","126490516186595328","Wed Oct 19 02:51:03 +0000 2011","SO Hinweis: kostenlose SEO Videos http://t.co/pwldYXVE #google" +"google","irrelevant","126490479859736576","Wed Oct 19 02:50:55 +0000 2011","Nouvelle application Contacts : les connections Linkedin, Twitter.. sont enfin intégrées ! #google #nexusprime" +"google","irrelevant","126490448436015104","Wed Oct 19 02:50:47 +0000 2011","A diagramação das fotos está bem parecida com a do windows mango #icecreamsandwich #galaxynexus #google #samsung" +"google","irrelevant","126490435836325888","Wed Oct 19 02:50:44 +0000 2011","O Ice Cream Sandwich praticamente dispensa apps de terceiros! 
#android #google Galaxy Nexus" +"google","irrelevant","126490420808126464","Wed Oct 19 02:50:41 +0000 2011","#Google #Panda Où est parti le 30 % de trafic perdu? http://t.co/QONIToAg" +"google","irrelevant","126490021493616640","Wed Oct 19 02:49:05 +0000 2011","@VinnyMartiins colokei no #Google pra saber !" +"google","irrelevant","126489969677176832","Wed Oct 19 02:48:53 +0000 2011","Excelente. Proximo cel? RT @Giuliano3G: Presentanción #Android 4.0 y Samsung Nexus Prime - http://t.co/8l9vzqgn #Google. (Se jodió #iOS5)" +"google","irrelevant","126489957538873344","Wed Oct 19 02:48:50 +0000 2011","RT @twandroid: C'est quand même une conférence, où on n'attend pas 1h30 pour avoir une nouveauté.... #google #nexusprime" +"google","irrelevant","126489935313256449","Wed Oct 19 02:48:45 +0000 2011","Android #ICS predice cuál será tu gasto de datos y te advierte o bloquea para que no gastes más dinero! #tecnologia #ciencia #google" +"google","irrelevant","126489934088511490","Wed Oct 19 02:48:45 +0000 2011","RT @twandroid: Le panorama est très sympa et peut être coupé à tout moment #google #nexusprime" +"google","irrelevant","126489915042168833","Wed Oct 19 02:48:40 +0000 2011","C'est quand même une conférence, où on n'attend pas 1h30 pour avoir une nouveauté.... #google #nexusprime" +"google","irrelevant","126489907349831680","Wed Oct 19 02:48:38 +0000 2011","直擊ï¼ã€@Phone雜誌 / @å¾®åšé€šè¨Šç¤¾ 19日訊】谷歌+三星智能手機發布會http://t.co/SFBLWqjg @LK154: #Google è·Ÿ #Samsung æ­£å¼ç™¼ä½ˆ #Galaxy #Nexus… æ‰‹æ©Ÿè¦æ ¼å¦‚下:http://t.co/MyIcSKXW #ICS" +"google","irrelevant","126489892678144000","Wed Oct 19 02:48:35 +0000 2011","Panoramique implémenté :o #android #google #nexusprime <3 j'ai envie du tel mnt" +"google","irrelevant","126489830677942272","Wed Oct 19 02:48:20 +0000 2011","caramba a galera da #google mandou muito bem no novo #android novidades na app da camera estão d+ #soudev \o/ quero o novo Google Nexus :D" +"google","irrelevant","126489823103041537","Wed Oct 19 02:48:18 +0000 2011","Sistema de fotos panorâmicas na camera do icecream sandwich #icecreamsandwich #galaxynexus #google #samsung" +"google","irrelevant","126489808913694720","Wed Oct 19 02:48:15 +0000 2011","#google #motorola Motorola-Übernahme durch Google: Justizministerium fordert Informationen: Mitte August wurde b... http://t.co/C5nZWALj" +"google","irrelevant","126489703418568705","Wed Oct 19 02:47:50 +0000 2011","Le panorama est très sympa et peut être coupé à tout moment #google #nexusprime" +"google","irrelevant","126489580835848193","Wed Oct 19 02:47:20 +0000 2011","RT @twandroid: Panoramas sur ICS ! #google #nexusprime" +"google","irrelevant","126489542436995072","Wed Oct 19 02:47:11 +0000 2011","RT @twandroid: Possibilité de taguer les personnes #google #nexusprime" +"google","irrelevant","126489540906070018","Wed Oct 19 02:47:11 +0000 2011","Panoramas sur ICS ! 
#google #nexusprime" +"google","irrelevant","126489462648745984","Wed Oct 19 02:46:52 +0000 2011","Possibilité de taguer les personnes #google #nexusprime" +"google","irrelevant","126489460551585792","Wed Oct 19 02:46:52 +0000 2011","RT @twandroid: Nouveau layout pour la Galerie qui est en forme de magazine #google #nexusprime" +"google","irrelevant","126489430830743552","Wed Oct 19 02:46:45 +0000 2011","RT @twandroid Prise de photo quasi instantannée #ouaaaaaaahhhhh #google #nexusprime" +"google","irrelevant","126489415248920576","Wed Oct 19 02:46:41 +0000 2011","Nouveau layout pour la Galerie qui est en forme de magazine #google #nexusprime" +"google","irrelevant","126489326786850816","Wed Oct 19 02:46:20 +0000 2011","RT @twandroid: Editeur de photo maintenant : redimensionner, .... (ce qu'on avait déjà vu) #google #nexusprime" +"google","irrelevant","126489254535774208","Wed Oct 19 02:46:02 +0000 2011","Editeur de photo maintenant : redimensionner, .... (ce qu'on avait déjà vu) #google #nexusprime" +"google","irrelevant","126489247581609986","Wed Oct 19 02:46:01 +0000 2011","Presentanción #Android 4.0 y Samsung Nexus Prime - http://t.co/Iy3VNaFF #Google. (Se jodió #iOS5)" +"google","irrelevant","126489192866910208","Wed Oct 19 02:45:48 +0000 2011","Muito rapida essa velocidade de captura de imagens da camera do Galaxy Nexus #icecreamsandwich #galaxynexus #google #samsung" +"google","irrelevant","126489107143737344","Wed Oct 19 02:45:27 +0000 2011","RT @twandroid: Prise de photo quasi instantannée #ouaaaaaaahhhhh #google #nexusprime" +"google","irrelevant","126489099858214914","Wed Oct 19 02:45:26 +0000 2011","Hinweis: kostenlose SEO-Infos http://t.co/d3WrYCUT #google" +"google","irrelevant","126489084003762176","Wed Oct 19 02:45:22 +0000 2011","Prise de photo quasi instantannée #ouaaaaaaahhhhh #google #nexusprime" +"google","irrelevant","126489023928741890","Wed Oct 19 02:45:08 +0000 2011","Bien repensé l'app photo #nexusprime #google #android avec l'aperçu direct" +"google","irrelevant","126489012423770113","Wed Oct 19 02:45:05 +0000 2011","Parisian Love: How to impress a French? http://t.co/Nh81Cbqf #google" +"google","irrelevant","126488974595334144","Wed Oct 19 02:44:56 +0000 2011","Cool!! #google #win RT: @a1arte: Google y Samsung lanzan oficialmente el Galaxy Nexus con Android Ice Cream Sandwich http://t.co/kE0dFNQL" +"google","irrelevant","126488887026655232","Wed Oct 19 02:44:35 +0000 2011","L'affichage de l'Intent Share est cool sur l'appareil photo : #google #nexusprime" +"google","irrelevant","126488858815774721","Wed Oct 19 02:44:28 +0000 2011","Pagaliau oficialiai pristatytas Samsung Galaxy Nexus http://t.co/BHetRH4B #android_4 #galaxy #google" +"google","irrelevant","126488807922085889","Wed Oct 19 02:44:16 +0000 2011","Meu Bom Jesus que velocidade de captura é essa ? #icecreamsandwich #galaxynexus #google #samsung" +"google","irrelevant","126488559795453954","Wed Oct 19 02:43:17 +0000 2011","RT @twandroid: Possibilité de ne sélectionner qu'une période et savoir pour chaque app combien de data a été utilisé #google #nexusprime" +"google","irrelevant","126488553352994817","Wed Oct 19 02:43:15 +0000 2011","Pô. Gostei do novo sistema de reconhecimento facial do Android. Tão forte que não liberou nem o dono do smartphone. 
#Google #ICS" +"google","irrelevant","126488503428190208","Wed Oct 19 02:43:03 +0000 2011","Possibilité de ne sélectionner qu'une période et savoir pour chaque app combien de data a été utilisé #google #nexusprime" +"google","irrelevant","126488427779727360","Wed Oct 19 02:42:45 +0000 2011","デフォã§ãƒ‡ãƒ¼ã‚¿é€šä¿¡åˆ¶å¾¡ï¼Ÿã€€#android #google" +"google","irrelevant","126488387531194369","Wed Oct 19 02:42:36 +0000 2011","RT @twandroid: Consommation data affichée dans les paramètres, avec même une approximation de la consommation future !#google #nexusprime" +"google","irrelevant","126488315041030144","Wed Oct 19 02:42:19 +0000 2011","Data Usage: Nada mais de aplicativos para monitorar tráfego de Dados #icecreamsandwich #galaxynexus #google #samsung" +"google","irrelevant","126488313187143681","Wed Oct 19 02:42:18 +0000 2011","RT @twandroid: Deux lignes sont affichées sur la première page de GMail pour chaque mail : un aperçu en quelque sorte #google #nexusprime" +"google","irrelevant","126488305859698688","Wed Oct 19 02:42:16 +0000 2011","Consommation data affichée dans les paramètres, avec même une approximation de la consommation future !#google #nexusprime" +"google","irrelevant","126488293717192704","Wed Oct 19 02:42:13 +0000 2011","#1 #Google ranked funniest #FUNNY #tshirts on the net #Sushi #BarExam http://t.co/jSrVRFgK #lawschool #lawyers #SALE #humor #lawyers" +"google","irrelevant","126488289988452352","Wed Oct 19 02:42:13 +0000 2011","#1 #Google ranked funniest #FUNNY #tshirts on the net #Sushi #BarExam http://t.co/fXJyOZeV #lawschool #lawyers #SALE #humor #lawyers" +"google","irrelevant","126488289157976064","Wed Oct 19 02:42:12 +0000 2011","RT @twandroid: Nouvelle app Calendrier : plus épurée #google #nexusprime" +"google","irrelevant","126488234359406593","Wed Oct 19 02:41:59 +0000 2011","Olha o tamanho desse CRÂNIO! xD #Google #Android" +"google","irrelevant","126488066977308673","Wed Oct 19 02:41:19 +0000 2011","RT @twandroid: Intégration de la reconnaissance vocale #google #nexusprime" +"google","irrelevant","126488061369532417","Wed Oct 19 02:41:18 +0000 2011","Très plaisante cette mise à jour d'Android ! En espérant que cela stimulera Apple à nous pondre du nouveau. #Google #icecreamsandwich" +"google","irrelevant","126488048962772992","Wed Oct 19 02:41:15 +0000 2011","RT @twandroid: Le clavier semble avoir peu bougé depuis Gingerbread #google #nexusprime" +"google","irrelevant","126487986425696259","Wed Oct 19 02:41:00 +0000 2011","Pinch to zoom sur le calendrier (comme sur Honeycomb) #google #nexusprime" +"google","irrelevant","126487974975242240","Wed Oct 19 02:40:57 +0000 2011","RT @twandroid: La prise de captures d'écran est officialisée !!!!!! #google #nexusprime" +"google","irrelevant","126487943509581824","Wed Oct 19 02:40:50 +0000 2011","RT @twandroid: Possibilité de mettre des contacts dans les dossiers ! #google #nexusprime" +"google","irrelevant","126487924698128384","Wed Oct 19 02:40:45 +0000 2011","Tendremos nueva interfaz de #google #calendar" +"google","irrelevant","126487924249329664","Wed Oct 19 02:40:45 +0000 2011","RT @twandroid: Widgets redimensionnables aussi #google #nexusprime" +"google","irrelevant","126487911465095169","Wed Oct 19 02:40:42 +0000 2011","RT @twandroid: Widgets scrollables comme sur Honeycomb #google #nexusprime" +"google","irrelevant","126487879139590144","Wed Oct 19 02:40:35 +0000 2011","RT @twandroid: Des boutons virtuels... 
#google #nexusprime" +"google","irrelevant","126487855362088961","Wed Oct 19 02:40:29 +0000 2011","Nouvelle app Calendrier : plus épurée #google #nexusprime" +"google","irrelevant","126487852824526848","Wed Oct 19 02:40:28 +0000 2011","RT @twandroid: Nouveau lockscreen (écran de déverrouillage) à la Honeycomb #google #nexusprime" +"google","irrelevant","126487831572004864","Wed Oct 19 02:40:23 +0000 2011","RT @twandroid: Nouvelle police (font) pour android : Roboto #google #nexusprime http://t.co/NvBPe6fm" +"google","irrelevant","126487783077449728","Wed Oct 19 02:40:12 +0000 2011","RT @twandroid: Le baromètre est inclus dans le Galaxy Nexus #google #nexusprime" +"google","irrelevant","126487738294861824","Wed Oct 19 02:40:01 +0000 2011","Okay its officially a #sausagefest here @thursdays it really makes no sense based on the feedback from #google?? Moving on..." +"google","irrelevant","126487624444674048","Wed Oct 19 02:39:34 +0000 2011","Deux lignes sont affichées sur la première page de GMail pour chaque mail : un aperçu en quelque sorte #google #nexusprime" +"google","irrelevant","126487457381359617","Wed Oct 19 02:38:54 +0000 2011","The ARM Cortex-A9 Processors pdf ebook: http://t.co/F8mSgjgF #google" +"google","irrelevant","126487454797668353","Wed Oct 19 02:38:53 +0000 2011","Settlement with Multiple Plaintiffs: The Role of Insolvency pdf ebook: http://t.co/YEl8Pq4O #google" +"google","irrelevant","126487422249861120","Wed Oct 19 02:38:46 +0000 2011","Reconnaissance Faciale de ouf là ! :p #google #android" +"google","irrelevant","126487408500940800","Wed Oct 19 02:38:42 +0000 2011","Le agregan face unlock: bloquear y desbloquear el telefono con tu rostro. ESTA DEMASIADO INTERESANTE LA CONFERENCIA DE #GOOGLE JUSTO AHORA." +"google","irrelevant","126487406131150848","Wed Oct 19 02:38:42 +0000 2011","Ya tendremos browser con tabs, y nuestros bookmarks se sincronizarán con Chrome! #ics #google" +"google","irrelevant","126487403220320258","Wed Oct 19 02:38:41 +0000 2011","vendo os significadosdos sonhos aki no #Google pqe meu livro eu emprestei enfiaram no cú pqe até hj nunca me devolveram =X" +"google","irrelevant","126487385461633024","Wed Oct 19 02:38:37 +0000 2011","Ishi o #faceunlock do #icecreamsandwich #android não funcionou no seu lançamento #google" +"google","irrelevant","126487165969510400","Wed Oct 19 02:37:45 +0000 2011","RT @SE4mCom: المؤتمر الخاص بجوجل وسامسونج يبدأ خلال الدقائق القادمة +http://t.co/WUF65DTr #SE4m #Google #SAMSUNG #follow_up http://t.co/ ..." 
+"google","irrelevant","126487155886407680","Wed Oct 19 02:37:42 +0000 2011","RT @twandroid: ""Request desktop site"" : fonction très sympa #google #nexusprime" +"google","irrelevant","126487143555153920","Wed Oct 19 02:37:39 +0000 2011","RT @twandroid: Possibilité de sauvegarder pour visionner hors-ligne #google #nexusprime" +"google","irrelevant","126487140317147136","Wed Oct 19 02:37:38 +0000 2011","ããŸãƒ¼ãƒ¼ãƒ¼ã£ï¼ï¼Galaxy Nexusï¼ï¼ï¼ã€€http://t.co/7IgbD9U0 #android #galaxy #nexus #google" +"google","irrelevant","126486968682037248","Wed Oct 19 02:36:58 +0000 2011","RT @twandroid Nouvelle gestion des onglets sur le navigateur #google #nexusprime" +"google","irrelevant","126486956149448704","Wed Oct 19 02:36:55 +0000 2011","""Request desktop site"" : fonction très sympa #google #nexusprime" +"google","irrelevant","126486814520381440","Wed Oct 19 02:36:21 +0000 2011","Reconnaissance faciale poir debloquer ton phone :-) #google #android" +"google","irrelevant","126486790818373632","Wed Oct 19 02:36:15 +0000 2011","Nouvelle gestion des onglets sur le navigateur #google #nexusprime" +"google","irrelevant","126486753598119936","Wed Oct 19 02:36:06 +0000 2011","RT @twandroid: Hugo Barra sur scène #google #nexusprime" +"google","irrelevant","126486526631743488","Wed Oct 19 02:35:12 +0000 2011","A los interesados pueden seguir la presentación stream de #google y #samsung sobre lo nuevo de #android" +"google","irrelevant","126486211824058368","Wed Oct 19 02:33:57 +0000 2011","Use Caution When Sharing on Social Networks http://t.co/SXNoBASn via @LarryMagid #youth #facebook #google+" +"google","irrelevant","126486125874384896","Wed Oct 19 02:33:37 +0000 2011","Intégration de la reconnaissance vocale #google #nexusprime" +"google","irrelevant","126485882265018368","Wed Oct 19 02:32:38 +0000 2011","RT @twandroid: Les notifications sont visibles depuis le lockscreen #google #nexusprime" +"google","irrelevant","126485702056751105","Wed Oct 19 02:31:56 +0000 2011","RT @twandroid: Possibilité de supprimer les notifications en les swipant (comme sur Cyanogen) #google #nexusprime" +"google","irrelevant","126485474016628736","Wed Oct 19 02:31:01 +0000 2011","En train de suivre la keynote Google - Ice Cream Sandwich.. #Google #icecreamsandwich" +"google","irrelevant","126484568239906817","Wed Oct 19 02:27:25 +0000 2011","Deep Linking Plugin - Black... http://t.co/RnF3cNu8 #seo #search #google #hack #sem #it #business #web #marketing #online #yes #hot #winning" +"google","irrelevant","126484213737340928","Wed Oct 19 02:26:01 +0000 2011","#galaxy #nexus #android #google / Ñкраааааан / -4,7'' / -super HD amoled / -1280x720 / -16:9 / / камера / 5м... http://t.co/3cbUMlg3" +"google","irrelevant","126484000075292672","Wed Oct 19 02:25:10 +0000 2011","RT @twandroid Le baromètre est inclus dans le Galaxy Nexus #google #nexusprime" +"microsoft","positive","126803641486163969","Wed Oct 19 23:35:18 +0000 2011","Just participated in another #Microsoft #SQLServer certification test with #TSQL typing instead of multiple choices. Really cool." +"microsoft","positive","126792129832951808","Wed Oct 19 22:49:34 +0000 2011","#Microsoft #Opens The Door To Two #Spanish #Entrepreneurs With #Medical #Project ! http://t.co/c97sNizT" +"microsoft","positive","126788430679113728","Wed Oct 19 22:34:52 +0000 2011","#Buffalo, #Intel, #Microsoft, #IBM are the best pavilions at #SMAU" +"microsoft","positive","126780006964805632","Wed Oct 19 22:01:23 +0000 2011","Windows Phone 7.5 Mango Update Process Ahead of Schedule. 
http://t.co/p1hwo73x #Microsoft #WindowsPhone #Mango" +"microsoft","positive","126779403605770241","Wed Oct 19 21:58:59 +0000 2011","WP7 is getting there: #Microsoft Claws Back to Smartphone Relevance: Rich Jaroslovsky - Bloomberg http://t.co/JIMML4RC via @BloombergNews" +"microsoft","positive","126779217911349248","Wed Oct 19 21:58:15 +0000 2011","On #Microsoft Excel making functions like it's my job #Productive" +"microsoft","positive","126774092274741248","Wed Oct 19 21:37:53 +0000 2011","RT @Logic2020: Why I love #Microsoft > Microsoft hosting Kids TechFest to encourage careers in tech http://t.co/fmzFWgYv #babygeeks ..." +"microsoft","positive","126768366345138176","Wed Oct 19 21:15:08 +0000 2011","#Microsoft word works on my computer again! Yessss" +"microsoft","positive","126767508253454336","Wed Oct 19 21:11:43 +0000 2011","In terms of unified communications giants, really surprised to see how underrated #Microsoft is" +"microsoft","positive","126762337087655936","Wed Oct 19 20:51:10 +0000 2011","#Microsoft dishing out free first-gen WP7 handsets at its stores http://t.co/cvfWMvmO" +"microsoft","positive","126755785391869954","Wed Oct 19 20:25:08 +0000 2011","Will watch this one with interest! RT @RossMistry: #Microsoft Codename ""Data Explorer"" CTP Coming ... http://t.co/kvJSerCj via @scottyd99999" +"microsoft","positive","126755232393867264","Wed Oct 19 20:22:57 +0000 2011","Lunch today: 19.00 Eating it at #Microsoft while attending #VSLive? Priceless." +"microsoft","positive","126754593714606085","Wed Oct 19 20:20:24 +0000 2011","Will watch this one with interest! RT @RossMistry: #Microsoft Codename ""Data Explorer"" CTP Coming Next Month http://t.co/vpQOyXWa" +"microsoft","positive","126754500278104064","Wed Oct 19 20:20:02 +0000 2011","""#Microsoft details Search improvements in Windows 8 Start Screen"" http://t.co/Agv49znJ" +"microsoft","positive","126750973329817601","Wed Oct 19 20:06:01 +0000 2011","Mango shows #Microsoft still has the taste for smartphone success http://t.co/AFm8VSMm #wp7 #mango" +"microsoft","positive","126749591956762624","Wed Oct 19 20:00:32 +0000 2011","RT @bmann Awesome! @TommyLee is moving to #Vancouver as Dev Evangelist for #Microsoft - we finally get a local" +"microsoft","positive","126748497096622080","Wed Oct 19 19:56:11 +0000 2011","Awesome! @TommyLee is moving to #Vancouver as Dev Evangelist for #Microsoft - we finally get a local" +"microsoft","positive","126748156003221504","Wed Oct 19 19:54:49 +0000 2011","Microsoft Stores offer up free Windows Phone 7 devices #microsoft #microsoftstores: Microsoft's retail stores ar... 
http://t.co/r2RndN6Y" +"microsoft","positive","126748155021762561","Wed Oct 19 19:54:49 +0000 2011","Microsoft Stores offer up free Windows Phone 7 devices #microsoft #microsoftstores http://t.co/xoLRaueY #microsoft" +"microsoft","positive","126748153952206849","Wed Oct 19 19:54:49 +0000 2011","Microsoft Stores offer up free Windows Phone 7 devices #microsoft #microsoftstores http://t.co/F4HwAN6Q" +"microsoft","positive","126747965393084416","Wed Oct 19 19:54:04 +0000 2011","Microsoft Stores offer up free Windows Phone 7 devices #microsoft #microsoftstores http://t.co/7YvJMHnQ" +"microsoft","positive","126747962817781760","Wed Oct 19 19:54:03 +0000 2011","Microsoft Stores offer up free Windows Phone 7 devices #microsoft #microsoftstores http://t.co/hJ8qlOC9 #neowin" +"microsoft","positive","126747960900984832","Wed Oct 19 19:54:03 +0000 2011","Microsoft Stores offer up free Windows Phone 7 devices #microsoft #microsoftstores http://t.co/BpFKyd1O" +"microsoft","positive","126744770713362432","Wed Oct 19 19:41:22 +0000 2011","#Microsoft store here I come to spend my hard earned cash. #vslive" +"microsoft","positive","126743288320491521","Wed Oct 19 19:35:29 +0000 2011","If you didn't know - Free #Microsoft #HTML5 @Webcamps on West Coast MountainView Redmond Portland check them out http://t.co/L86OWQRE" +"microsoft","positive","126742063961214976","Wed Oct 19 19:30:37 +0000 2011","Hey parents!Did you know about the free tools from #Microsoft that help keep your kids safe online?Live Family Safety http://t.co/5pk1et6r" +"microsoft","positive","126741671965769728","Wed Oct 19 19:29:04 +0000 2011","#Microsoft #Cloud Microsoft offers students free access to its technologies to improve employability: Dubbe... http://t.co/rGBHHUQP #TCN" +"microsoft","positive","126738939594813440","Wed Oct 19 19:18:12 +0000 2011","Awesome! #Microsoft #holodeck bit.ly/qpGxle" +"microsoft","positive","126736431929507840","Wed Oct 19 19:08:14 +0000 2011","""#Microsoft details Windows 8 search improvements"" - via @SlashGear http://t.co/JCawWRzQ" +"microsoft","positive","126735438948995072","Wed Oct 19 19:04:17 +0000 2011","Can't wait until I can visit the holodeck! Thanks for sharing. RT @karljuhlke Beam me up #Microsoft! Microsoft has... 
http://t.co/ZGbk3xnK" +"microsoft","positive","126734527551913984","Wed Oct 19 19:00:40 +0000 2011","@SmartKeitai Yeah I saw it, @Google is taking seriously what #Microsoft has done with #Metro UI but yeah looks good Android 4.0" +"microsoft","positive","126732577301217280","Wed Oct 19 18:52:55 +0000 2011","Why I love #Microsoft > Microsoft hosting Kids TechFest to encourage careers in tech http://t.co/fmzFWgYv #babygeeks via @Techflash" +"microsoft","positive","126732240368570369","Wed Oct 19 18:51:35 +0000 2011","Microsoft explains new improvements in Windows 8 Start screen search: http://t.co/cxovk5WW #buildingwindows8 @buildwindows8 #tech #microsoft" +"microsoft","positive","126732148144209920","Wed Oct 19 18:51:13 +0000 2011","Microsoft explains new improvements in Windows 8 Start screen search: http://t.co/pq67GRra #buildingwindows8 @buildwindows8 #tech #microsoft" +"microsoft","positive","126731873517965313","Wed Oct 19 18:50:07 +0000 2011","enjoy #Microsoft #Search Idea Could Let You Search Like Miley http://t.co/GWOkOCEg great" +"microsoft","positive","126730153454870529","Wed Oct 19 18:43:17 +0000 2011","#Microsoft Bing is The New King of Search, Tipped #Google Search http://t.co/61OgLIKA" +"microsoft","positive","126729713568849920","Wed Oct 19 18:41:32 +0000 2011","#Microsoft #PowerPivot gives users the power to create compelling self-service #BI solutions. #infinitywebinar" +"microsoft","positive","126725535677157376","Wed Oct 19 18:24:56 +0000 2011","The Future of #Information: Innovators from #Google, #Microsoft &#Twitter Nov. 14-18 Info & registration at http://t.co/kJKZ77Zy #UMD" +"microsoft","positive","126725332031127552","Wed Oct 19 18:24:08 +0000 2011","Another Gem from #Microsoft - Curate your personal history with ""Project Greenwich"". Available later this month. http://t.co/KgNrbVgp" +"microsoft","positive","126721324042305536","Wed Oct 19 18:08:12 +0000 2011","Beam me up #Microsoft! Microsoft has #holoDesk research project: http://t.co/KlMgkh5V" +"microsoft","positive","126716103123673088","Wed Oct 19 17:47:27 +0000 2011","Thanks to Steffen Krause #Microsoft for a great SQL Server Session @EuropeanSP." +"microsoft","positive","126715314007314434","Wed Oct 19 17:44:19 +0000 2011","This is how charity works these days. http://t.co/839klkFH #billgates #philanthropy #microsoft" +"microsoft","positive","126714241427312641","Wed Oct 19 17:40:04 +0000 2011","Ballmer Thinks You Have To Be A Computer Scientist To Use Android http://t.co/VKV2TBcj #tech #microsoft (i agree to some extent)" +"microsoft","positive","126709513947594753","Wed Oct 19 17:21:16 +0000 2011","We have had a great time at #TechEdAfrica thanks to all the sponsors, delegates, #Microsoft and #mimecastsa." +"microsoft","positive","126705831126384640","Wed Oct 19 17:06:38 +0000 2011","Validated that #Microsoft #iSCSI target in Win Server 8 works fine with #VMware #vSphere5 / ESXi5, no errors - more: http://t.co/6qnLWFt7" +"microsoft","positive","126695555685560320","Wed Oct 19 16:25:49 +0000 2011","wow - #Microsoft tech turns your body into a touchscreen http://t.co/5hVBC5Bm via @PSFK" +"microsoft","positive","126693834846515200","Wed Oct 19 16:18:58 +0000 2011","Love the #Microsoft campus! Love the feeling of the mixer building. Thanks #vslive bringing the conference here!" +"microsoft","positive","126692533869871106","Wed Oct 19 16:13:48 +0000 2011","#microsoft #research shows #Holodesk. Awesome!!! The holodeck is one step closer! 
bit.ly/pCMJON #kinect /via @dvroegop" +"microsoft","positive","126691809417113600","Wed Oct 19 16:10:55 +0000 2011","RT @ralphbin: #Microsoft Research shows #Holodesk - looks like science fiction, but is science fact http://t.co/w5OCrbpm Cool but no sound" +"microsoft","positive","126689578886246400","Wed Oct 19 16:02:04 +0000 2011","#Microsoft Research shows #Holodesk - looks like science fiction, but is science fact http://t.co/vv6ltkYp" +"microsoft","positive","126689007512993792","Wed Oct 19 15:59:47 +0000 2011","Zune Music arrives in Canada! http://t.co/CJJ12v4I #microsoft #music #news" +"microsoft","positive","126688284343672832","Wed Oct 19 15:56:55 +0000 2011","#Kinect Makes Learning Playful with Help from #SesameStreet and #NationalGeographic #education #microsoft http://t.co/E5uhka8F" +"microsoft","positive","126685198531297281","Wed Oct 19 15:44:39 +0000 2011","HÃ¥per #microsoft lager ny releasepartyguide for #mango. #greatsuccess" +"microsoft","positive","126677986522054657","Wed Oct 19 15:16:00 +0000 2011","Hey parents! Did you know about the free tools from #Microsoft that help keep your kids safe online? Live Family Safety http://t.co/muuWbGPn" +"microsoft","positive","126674853230149632","Wed Oct 19 15:03:33 +0000 2011","“@sinanaral: Check out the new #Microsoft #faculty fellows here: http://t.co/2oeNsVG8 poised to change the world!†Go @jure !" +"microsoft","positive","126670032951443456","Wed Oct 19 14:44:23 +0000 2011","Maybe #apple could use some of it's billions to do some good in the world?...oh wait, that's what #microsoft does." +"microsoft","positive","126669652469350401","Wed Oct 19 14:42:53 +0000 2011","#Microsoft @ ISB... watching the new windows 8 in action...pretty impressive! Finally a contender to mac os.. Interesting battle in store" +"microsoft","positive","126668529046007808","Wed Oct 19 14:38:25 +0000 2011","#Microsoft tries to combine the #Xbox #WindowsPhone and #PC and all of them will share the same neat looking interface. http://t.co/J332zfpO" +"microsoft","positive","126665091381854208","Wed Oct 19 14:24:45 +0000 2011","Check out the new #Microsoft #faculty fellows here: http://t.co/GCicPEX4 poised to change the world!" +"microsoft","positive","126663720075141121","Wed Oct 19 14:19:18 +0000 2011","I became a #Citizenship #Member at @Microsoft ...Thanks #GOD :) #MSPEgypt #Microsoft" +"microsoft","positive","126659125751971840","Wed Oct 19 14:01:03 +0000 2011","Blog Post: Cool Tool : Microsoft Mouse Without Borders. http://t.co/uM860jFh #Tools #Utilities #microsoft" +"microsoft","positive","126658961263951873","Wed Oct 19 14:00:24 +0000 2011","Forget Siri, there is still no beating speech commands on @windowsphone: http://t.co/19GAjSGi #wp7 #mango #tellme #microsoft #siri" +"microsoft","positive","126658937155108866","Wed Oct 19 14:00:18 +0000 2011","#Microsoft tests and proves #AppSense enterprise scalability! 78K users on one personalization DB - http://t.co/xtZMPSMT #enterprise" +"microsoft","positive","126658528965439488","Wed Oct 19 13:58:41 +0000 2011","@RAD_Software Yes good points about #ERP but unfair assessment in saying ""#SAGE, #SAP and #Microsoft Dynamics, are not delivering""" +"microsoft","positive","126650473322262529","Wed Oct 19 13:26:40 +0000 2011","Good dev opportunities at #Microsoft UX Tour, Helsinki Finland http://t.co/hCKJXz4K" +"microsoft","positive","126638821948403712","Wed Oct 19 12:40:22 +0000 2011","@edbott you have sugar-coated the FSF secure boot reaction. This is blatant anti-#Microsoft propaganda." 
+"microsoft","positive","126637126010929152","Wed Oct 19 12:33:38 +0000 2011","V. impressed w/ Image Composite Editor from @MSFTresearch for creating panoramas from individual images. http://t.co/SO7W7spn Tx #Microsoft!" +"microsoft","positive","126633708315873280","Wed Oct 19 12:20:03 +0000 2011","RT @fran_mac_ I have been blown away by technological innovations.... #microsoft should do more #marketing!!" +"microsoft","positive","126629320948060161","Wed Oct 19 12:02:37 +0000 2011","I have been blown away by technological innovations.... #microsoft should do more #marketing!!" +"microsoft","positive","126611718376919041","Wed Oct 19 10:52:40 +0000 2011","RT @asherCFO: #Yahoo up 4 sale - again! Yrs back #Microsoft almost bought it & now glad it didn't. This could b deal of the year that do ..." +"microsoft","positive","126610651916410881","Wed Oct 19 10:48:26 +0000 2011","#Yahoo up 4 sale - again! Yrs back #Microsoft almost bought it & now glad it didn't. This could b deal of the year that dosent happen!" +"microsoft","positive","126608712407322624","Wed Oct 19 10:40:43 +0000 2011","#Microsoft presents #Omnitouch - http://t.co/ui0aD1FJ - Very impressive and futuristic technology!" +"microsoft","positive","126605340270788608","Wed Oct 19 10:27:19 +0000 2011","Is #Google Down? http://t.co/wFrzKYsN via @Ejunkie Good for #Bing, #Microsoft ’s investments paying off" +"microsoft","positive","126603756971360256","Wed Oct 19 10:21:02 +0000 2011","RT @bestpantsTen: http://t.co/P4m8xvaz #iPads less desired than #Windows tablets, says study #Apple #Microsoft #MS" +"microsoft","positive","126599751402668032","Wed Oct 19 10:05:07 +0000 2011","RT @DariuszPorowski: RT Me too! @rem8: On my way home from 1st day of #MTS11. Had great time with @alead @DariuszPorowski and others #mv ..." +"microsoft","positive","126596256138137600","Wed Oct 19 09:51:14 +0000 2011","#mango shows #microsoft still has the taste for smartphone success http://t.co/mAOQaplH" +"microsoft","positive","126583935139454976","Wed Oct 19 09:02:17 +0000 2011","#Microsoft details Search improvements in #Windows 8 Start Screen http://t.co/f6AztiaD" +"microsoft","positive","126581165636333568","Wed Oct 19 08:51:18 +0000 2011","one more picture of the services provided in the #cloud. I am in love with #Microsoft http://t.co/Q0QLeokZ" +"microsoft","positive","126579574921371648","Wed Oct 19 08:44:57 +0000 2011","Windows Runtime and .NET: Better Together http://t.co/z7BBJ1xU #dotnet #dev #WinRT #microsoft" +"microsoft","positive","126578340902617088","Wed Oct 19 08:40:02 +0000 2011","Nice talk by @renatmin #KenyaOpenDoor #microsoft #community" +"microsoft","positive","126570919513686018","Wed Oct 19 08:10:33 +0000 2011","OMG :) :) #Microsoft #Sharepoint is actually working! Thanks to my excellent colleague William" +"microsoft","positive","126559269603647488","Wed Oct 19 07:24:15 +0000 2011","#windowsphone #wp7 #Microsoft really @google ? http://t.co/vI0htJRr #icecreamsandwichfail innovation by imitation sad sad" +"microsoft","positive","126555956975910912","Wed Oct 19 07:11:06 +0000 2011","The more i use Office 2010 the more i fall in love it with. #honest #genius #microsoft" +"microsoft","positive","126553559847288832","Wed Oct 19 07:01:34 +0000 2011","@guardiantech I love the ""And now it is looking to China:"" Apple funds sweatshops there while Gates funds a foundation #apple #microsoft" +"microsoft","positive","126552645497405440","Wed Oct 19 06:57:56 +0000 2011","That was a good one(commercial). 
#microsoft" +"microsoft","positive","126532025552347136","Wed Oct 19 05:36:00 +0000 2011","Welcome Skype to the #Microsoft family. We can do amazing things together. http://t.co/YKCHtu8h #welcomeskype" +"microsoft","positive","126529895923843072","Wed Oct 19 05:27:32 +0000 2011","<3 u #microsoft!!!" +"microsoft","positive","126499160995282944","Wed Oct 19 03:25:24 +0000 2011","Absolutely loving the ARC mouse, courtesy of the one and only @gregulator0. #inspired #microsoft" +"microsoft","positive","126494986983325696","Wed Oct 19 03:08:49 +0000 2011","Not a #Microsoft fan but this is cool. Video: Turn any surface into a #touchscreen http://t.co/yy1hNrbQ" +"microsoft","positive","126492789939765248","Wed Oct 19 03:00:05 +0000 2011","Wow #android #ics, lots of uh uh uh uh talk going on... forsure not a #microsoft #mango launch were people have public speaking skills" +"microsoft","positive","126479912407273472","Wed Oct 19 02:08:55 +0000 2011","Jus updated my computer to Windows 7 .....I'm on thanks to #microsoft" +"microsoft","negative","126802977813037057","Wed Oct 19 23:32:40 +0000 2011","ICS #android is going to kill both Mango #wmp #microsoft and #rimm. it already killed webOS and Nokia OS..." +"microsoft","negative","126795554079510528","Wed Oct 19 23:03:10 +0000 2011","Only people with names beginning with 'A' are getting mail this week (a la #Microsoft)" +"microsoft","negative","126793207613894659","Wed Oct 19 22:53:51 +0000 2011","#Microsoft #Outlook #2011 for #Mac #sucks… #hate" +"microsoft","negative","126790861831938048","Wed Oct 19 22:44:31 +0000 2011","#Microsoft freezes some #Xbox accounts after hack reports - http://t.co/w051BfS0 via @CanadaIT" +"microsoft","negative","126789739180326913","Wed Oct 19 22:40:04 +0000 2011","#Update #Microsoft MS11-078 - Critical : Vulnerability in .NET Framework and Microsoft Silverlight Could Allow R... http://t.co/izO0WUNt" +"microsoft","negative","126788223409197058","Wed Oct 19 22:34:02 +0000 2011","#Microsoft #Windows Media Centre GUI #Fail #MajorFail" +"microsoft","negative","126786271921184768","Wed Oct 19 22:26:17 +0000 2011","How #Apple eclipsed #Microsoft. http://t.co/WU9bFI1h" +"microsoft","negative","126782820709441536","Wed Oct 19 22:12:34 +0000 2011","@theresamarosy haha k, thought everybody knew #Microsoft sucks." +"microsoft","negative","126781543879421952","Wed Oct 19 22:07:30 +0000 2011","@theresamarosy bc #IE , like everything #Microsoft makes is junk and super prone to crashing." +"microsoft","negative","126778114385772548","Wed Oct 19 21:53:52 +0000 2011","When #Microsoft ""violated U.S. antitrust laws""—leaving #Netscape out of business & #Novell wounded—it played ""dumb"". http://t.co/0WHHmAK4" +"microsoft","negative","126771575772020736","Wed Oct 19 21:27:53 +0000 2011","#Microsoft Lync crash issue on #Mac OS X 10.7.2 [Fixed] - http://t.co/YZhCdgFP" +"microsoft","negative","126771575654596608","Wed Oct 19 21:27:53 +0000 2011","#Microsoft Lync crash issue on #Mac OS X 10.7.2 [Fixed] - http://t.co/rzucFejc" +"microsoft","negative","126771575591665665","Wed Oct 19 21:27:53 +0000 2011","#Microsoft Lync crash issue on #Mac OS X 10.7.2 [Fixed] - http://t.co/Gjiu2zz1" +"microsoft","negative","126766935634485249","Wed Oct 19 21:09:27 +0000 2011","RT @schestowitz: When #Microsoft broke the law it played ""victim"". Now it engages in racketeering, calls victims ""outlaws"" (""respect our ..." +"microsoft","negative","126763744557662209","Wed Oct 19 20:56:46 +0000 2011","#Nokia workers ask, is chief executive a #Microsoft mole? 
http://t.co/lWJP9GiU" +"microsoft","negative","126763108080427009","Wed Oct 19 20:54:14 +0000 2011","RT @idtheftprotect: #Microsoft has frozen some #Xbox Live (XBL) accounts after online gamers report they have been hacked. http://t.co/M ..." +"microsoft","negative","126761988608098304","Wed Oct 19 20:49:47 +0000 2011","I gave a try to Windows 8 Dev. Preview and it was good but the UI is way too messy right now, waiting for the Beta #microsoft #windows8" +"microsoft","negative","126759080860725248","Wed Oct 19 20:38:14 +0000 2011","Six PowerPoint Nightmares (and How to Fix Them) http://t.co/JsQ96NuQ #powerpoint #microsoft #presentations" +"microsoft","negative","126758070176059392","Wed Oct 19 20:34:13 +0000 2011","How #Apple eclipsed #Microsoft http://t.co/1fH0dFGj from @guardian" +"microsoft","negative","126754989409452032","Wed Oct 19 20:21:59 +0000 2011","This is kind of really creepy: Use your search engine the same way as @MileyCyrus would http://t.co/a050TG4L . . . #Microsoft" +"microsoft","negative","126754753697943552","Wed Oct 19 20:21:02 +0000 2011","Great time to be a family? #Microsoft #advertising enrages me. Again. http://t.co/pYT7vKRP" +"microsoft","negative","126753683630333952","Wed Oct 19 20:16:47 +0000 2011","With #Windows 8, #Microsoft can't forget past antitrust issues http://t.co/z4jWLQVu" +"microsoft","negative","126750481069510656","Wed Oct 19 20:04:04 +0000 2011","#Microsoft is currently paying to produce #swpats propaganda to lobby politicians with -- to make racketeering seem ""acceptable""" +"microsoft","negative","126748219261726721","Wed Oct 19 19:55:05 +0000 2011","There is nothing like sitting in the workshop all day talking to #microsoft and having to talk to them again tomorrow + waiting for parts." +"microsoft","negative","126748091083788288","Wed Oct 19 19:54:34 +0000 2011","RT @InformationWeek: Reader 'Tronman' compares #SteveBallmer to an albatross around #Microsoft's neck. Do you agree? Join the discu..." +"microsoft","negative","126747859960856576","Wed Oct 19 19:53:39 +0000 2011","When #Microsoft broke the law it played ""victim"". Now it engages in racketeering, calls victims ""outlaws"" (""respect our IP"")." +"microsoft","negative","126747247563112448","Wed Oct 19 19:51:13 +0000 2011","I would be a lot happier if #Microsoft Word didn't freeze every 5 minutes." +"microsoft","negative","126746946093330434","Wed Oct 19 19:50:01 +0000 2011","@Anas_Monaffal LOL , i do both .. and for sure #Microsoft #DotNet is perfect, simple o ay 7aja ;) .. but sorry I HATE WINDOWS PHONES :P" +"microsoft","negative","126744670465306624","Wed Oct 19 19:40:58 +0000 2011","#Microsoft took 6 months to recruit Everson and 9 months to lose her to #Faceboook via Adweek http://t.co/l5mTm6Ig" +"microsoft","negative","126744386137624576","Wed Oct 19 19:39:51 +0000 2011","Comp crash #Microsoft :/ Wouldn't get this with an iMac -__-" +"microsoft","negative","126744132763910144","Wed Oct 19 19:38:50 +0000 2011","Reader 'Tronman' compares #SteveBallmer to an albatross around #Microsoft's neck. Do you agree? Join the discussion! http://t.co/Mn39R2vc" +"microsoft","negative","126744130784198656","Wed Oct 19 19:38:50 +0000 2011","Reader 'Tronman' compares #SteveBallmer to an albatross around #Microsoft's neck. Do you agree? Join the discussion! http://t.co/2iJK0VB3" +"microsoft","negative","126744129135845377","Wed Oct 19 19:38:49 +0000 2011","Reader 'Tronman' compares #SteveBallmer to an albatross around #Microsoft's neck. Do you agree? Join the discussion! 
http://t.co/DFFkvnSJ" +"microsoft","negative","126743725677346816","Wed Oct 19 19:37:13 +0000 2011","#WindowsPhone7 would get a lot more blog and review love if #Microsoft provided a tool to grab screenshots. It's nearly impossible #wpt" +"microsoft","negative","126740373883191296","Wed Oct 19 19:23:54 +0000 2011","RT @Saptha: When u dine with a customer u shoukd never explain abt solutions unless he ask , M$ never lean the concept #microsoft #fail" +"microsoft","negative","126738867087884288","Wed Oct 19 19:17:55 +0000 2011","I hate #Microsoft PowerPoint!" +"microsoft","negative","126730748882460672","Wed Oct 19 18:45:39 +0000 2011","Is #Microsoft just another evil corporation? Beware when it comes to buying a #Windows8 PC http://t.co/QbgXXJMm" +"microsoft","negative","126730580934135809","Wed Oct 19 18:44:59 +0000 2011","#skype is now part of stupid #microsoft it seems" +"microsoft","negative","126730265551843329","Wed Oct 19 18:43:44 +0000 2011","#Skype is underwhelming: I use skype. It was recently bought by #Microsoft for $8.5 billion. As such it has a v... http://t.co/Xli2bR42" +"microsoft","negative","126727758108823553","Wed Oct 19 18:33:46 +0000 2011","Apple + Microsoft...The Difference: How #Apple Eclipsed #Microsoft @PSFK: http://t.co/Rmi8oSMi #brand #brands #marketing #innovation" +"microsoft","negative","126726935572262912","Wed Oct 19 18:30:30 +0000 2011","#Microsoft knows how to kick off an opening, but I don't quite see the #edge for Windows Phone or their tablets #iSorry http://t.co/9XDdXN1w" +"microsoft","negative","126726458776358913","Wed Oct 19 18:28:36 +0000 2011","RT @amarsanghera The fact that #Microsoft are using a #QRCode alongside one of their colour code shows the format is failing, opinions…" +"microsoft","negative","126724991495569408","Wed Oct 19 18:22:47 +0000 2011","If #Apple came out with gaming console (ala Xbox 360) that blew the current players out of water, #Microsoft and #Sony would be in trouble" +"microsoft","negative","126722961381134336","Wed Oct 19 18:14:43 +0000 2011","The fact that #Microsoft are using a #QRCode alongside one of their colour code shows the format is failing, opinions? http://t.co/BlhHGOpo" +"microsoft","negative","126722095743893504","Wed Oct 19 18:11:16 +0000 2011","If #Microsoft had more interoperability between their products (Xbox 360, Windows 7, Windows Phone 7, Zune) I'd be a Windows Phone user." +"microsoft","negative","126720736848117760","Wed Oct 19 18:05:52 +0000 2011","#Microsoft - We put the ""backwards"" into backwards compatibility. #instantfollowback" +"microsoft","negative","126719328035942400","Wed Oct 19 18:00:16 +0000 2011","Is #Nokia chief executive a #Microsoft mole? http://t.co/jkOTcrMb +Stephen Elop accused of destroying Nokia so Microsoft can buy it cheap." +"microsoft","negative","126718833493938176","Wed Oct 19 17:58:18 +0000 2011","#Microsoft #Dynamics #NAV is a cesspit. Extreme consultingware. To be avoided..." +"microsoft","negative","126716806688804865","Wed Oct 19 17:50:15 +0000 2011","Wow. Lots of negative comments about #windows8 on the @BuildWindows8 blog. I sure hope #Microsoft is listening #keepthedesktop" +"microsoft","negative","126714471543619584","Wed Oct 19 17:40:58 +0000 2011","When u dine with a customer u shoukd never explain abt solutions unless he ask , M$ never lean the concept #microsoft #fail" +"microsoft","negative","126712110095925250","Wed Oct 19 17:31:35 +0000 2011","Distrust and angst amongst #Nokia employees against #Selop #Microsoft fixation. 
http://t.co/jkOTcrMb" +"microsoft","negative","126705293055889408","Wed Oct 19 17:04:30 +0000 2011","I am quickly falling in love with #Apple #iCloud. Finally a web-based solution to wave my old #Microsoft Outlook bye-bye" +"microsoft","negative","126704033779023872","Wed Oct 19 16:59:30 +0000 2011","""Yeh behatreen samay hay parivar honeka"" says the #microsoft #ad in hindi. Preaching family values to indians? Get a local ad agency guys" +"microsoft","negative","126700315507572736","Wed Oct 19 16:44:43 +0000 2011","#BOFH excuse #447: According to #Microsoft, it's by design | #IFollowBack #Instantly" +"microsoft","negative","126700014385897472","Wed Oct 19 16:43:32 +0000 2011","I sware to go, #windows and #microsoft will be the death of me...I don't know why I keep giving windows (cont) http://t.co/3rkOchzr" +"microsoft","negative","126692062757269505","Wed Oct 19 16:11:56 +0000 2011","RT @EA_Jorge: I'm on day 80-something of my Xbox LIVE acct being hacked. This is worse than PSN hack. Will I ever get it back? #Microsof ..." +"microsoft","negative","126691815394000896","Wed Oct 19 16:10:57 +0000 2011","Thanks again #microsoft for wasting more of my time. Apple>windows http://t.co/GNy9G07B" +"microsoft","negative","126690175656001538","Wed Oct 19 16:04:26 +0000 2011","Dear #Microsoft, I should only have to hit the Windows update button *once* when manually running updates, not 4 or 5 times. #sysadmin" +"microsoft","negative","126689051960033280","Wed Oct 19 15:59:58 +0000 2011","Totally unproductive day with no access to #Microsoft Excel or Word, aaargh! Will be working late tonight after hubby fixes. #fingerscrossed" +"microsoft","negative","126686283769712640","Wed Oct 19 15:48:58 +0000 2011","RT @ManelFMartinez: How #Apple eclipsed #Microsoft http://t.co/IJ6XyDp3 #sun rise #in via @guardian" +"microsoft","negative","126686075287642112","Wed Oct 19 15:48:08 +0000 2011","How #Apple eclipsed #Microsoft http://t.co/IJ6XyDp3 #sun rise #in via @guardian" +"microsoft","negative","126682137972834305","Wed Oct 19 15:32:29 +0000 2011","I don't understand y @Google mail can work seamlessly in all browsers in all platforms and #Microsoft Exchg sucks n says you need IE6." +"microsoft","negative","126681641388216320","Wed Oct 19 15:30:31 +0000 2011","#Microsoft has frozen some #Xbox Live (XBL) accounts after online gamers report they have been hacked. http://t.co/MdQhb6x3" +"microsoft","negative","126681389570596865","Wed Oct 19 15:29:31 +0000 2011","@nicholi57 @yuvalz @MobileBurn lets's wait and see if it interests ppl, #MeeGo does... No one likes #Microsoft, we are just forced fed WPs" +"microsoft","negative","126680630087008257","Wed Oct 19 15:26:30 +0000 2011","I'm on day 80-something of my Xbox LIVE acct being hacked. This is worse than PSN hack. Will I ever get it back? #Microsoft #Xbox #Fail" +"microsoft","negative","126679134817624066","Wed Oct 19 15:20:33 +0000 2011","Interesting read - How #Apple eclipsed #Microsoft http://t.co/hFWpbMY6 via @guardian (cc: @elcheicon)" +"microsoft","negative","126677032837971968","Wed Oct 19 15:12:12 +0000 2011","So let me get this straight, #microsoft makes windows phone, microsoft owns skype but there's no #skype app for windows phone." +"microsoft","negative","126675041353076738","Wed Oct 19 15:04:18 +0000 2011","#microsoft update is so slow :(( http://t.co/37IGhGic" +"microsoft","negative","126672651459633152","Wed Oct 19 14:54:48 +0000 2011","Fuck #microsoft i love them to death but that red ring and no free xboxlive is killing me. 
#Sony all day :-\" +"microsoft","negative","126671856404144128","Wed Oct 19 14:51:38 +0000 2011","Fixing a pc that thinks it is 1601. I thought #Microsoft had the Y1.6K issue fixed already. #Techsupport" +"microsoft","negative","126668133405696000","Wed Oct 19 14:36:51 +0000 2011","http://t.co/PUJYV9c0 With friends like #Billgates and #steveballmet at #microsoft, who needs enemies ;-)" +"microsoft","negative","126666768541421571","Wed Oct 19 14:31:25 +0000 2011","#Microsoft 's latest offering leaves me praying for the Blue Screen of Death. http://t.co/gTqdvfxo #advertising #OhNoNotAgain" +"microsoft","negative","126666110237032448","Wed Oct 19 14:28:48 +0000 2011","I did the best of everyone who failed - Bill Gate #Microsoft" +"microsoft","negative","126662601139695616","Wed Oct 19 14:14:52 +0000 2011","Which planet is he living on? #Microsoft #CEO #Fail #palmface http://t.co/3zcq60SF" +"microsoft","negative","126647829656641536","Wed Oct 19 13:16:10 +0000 2011","I've seen a couple of iPhone 4S's already, but still no Windows Phone 7's ... ever! #Microsoft not so hot." +"microsoft","negative","126647697800310784","Wed Oct 19 13:15:38 +0000 2011","#Microsoft licensing process is annoying !!!" +"microsoft","negative","126647264943943682","Wed Oct 19 13:13:55 +0000 2011","It baffles me that Steve Ballmer can be a CEO of a company, let alone a giant like #Microsoft - The guys a moron." +"microsoft","negative","126646630702260224","Wed Oct 19 13:11:24 +0000 2011","#Microsoft arrogance again - Ballmer says you need to be a computer scientist to use #Android http://t.co/6PHh3Jb6" +"microsoft","negative","126645939208327168","Wed Oct 19 13:08:39 +0000 2011","#Microsoft arrogance again - Ballmer says you need to be a computer scientist to use #Android http://t.co/pltitgeI" +"microsoft","negative","126644987474608128","Wed Oct 19 13:04:52 +0000 2011","Is the sound of compliments that sound +more like obituaries #microsoft" +"microsoft","negative","126643061567668224","Wed Oct 19 12:57:13 +0000 2011","#Nokia co-operation with #Microsoft listed as the 4th riskiest business move of the year: http://t.co/IYo86gVj" +"microsoft","negative","126642517310570497","Wed Oct 19 12:55:03 +0000 2011","[WebProNews Finance] #Yahoo Revenue Drop Attributed to #Microsoft Deal http://t.co/EOrOSstX" +"microsoft","negative","126642328466243584","Wed Oct 19 12:54:18 +0000 2011","#microsoft still didn't managed access to #metro apps on ""older"" netbooks. #win8 #resolution" +"microsoft","negative","126642036572041216","Wed Oct 19 12:53:09 +0000 2011","#Yahoo Revenue Drop Attributed to #Microsoft Deal http://t.co/yRcKQeIs" +"microsoft","negative","126640632478445568","Wed Oct 19 12:47:34 +0000 2011","[WebProNews Finance] #Yahoo Revenue Drop Attributed to #Microsoft Deal http://t.co/ItdkLRry" +"microsoft","negative","126637450008346624","Wed Oct 19 12:34:55 +0000 2011","Nokia employees still worried that Elop is a Microsoft mole http://t.co/n4Cpu3vj #microsoft #windows" +"microsoft","negative","126636886977556480","Wed Oct 19 12:32:41 +0000 2011","I so wish that #SteveBallmer from #Microsoft would have to swallow his words about #Yahoo: http://t.co/XSJGIfop" +"microsoft","negative","126636535813636096","Wed Oct 19 12:31:17 +0000 2011","just upgraded to win 8 on my pc only for it to tell me it failed to install and they dont know what happened thats #microsoft for you!" 
+"microsoft","negative","126635317108289536","Wed Oct 19 12:26:27 +0000 2011","Really hate the #Microsoft #mcp site" +"microsoft","negative","126628406258450432","Wed Oct 19 11:58:59 +0000 2011","Screened by RepRisk – #Microsoft will undergo trial for its alleged anti-competitive practices @Microsoft" +"microsoft","negative","126622538557177856","Wed Oct 19 11:35:40 +0000 2011","I have concluded, having used it since March, that Windows 7 Ultimate is a complete catastrophe. Mr Gates, you have lost the plot #Microsoft" +"microsoft","negative","126618374972248064","Wed Oct 19 11:19:07 +0000 2011","How #Apple eclipsed #Microsoft http://t.co/dL9CRCZy" +"microsoft","negative","126614764339212288","Wed Oct 19 11:04:46 +0000 2011","Oh how much do I loathe #microsoft??? An hour to 'configure updates' which it is then unable to do so another 20 mins 'reverting changes'..." +"microsoft","negative","126603861933817856","Wed Oct 19 10:21:27 +0000 2011","http://t.co/zBLtZ5MW German Federal #Trojan's got a BigBrother. #Gov spying on its people, #Microsoft's 32bit/64bit #Windows is target" +"microsoft","negative","126602276872794112","Wed Oct 19 10:15:09 +0000 2011","#Microsoft, #Adobe lose $13.5bn to piracy: Report http://t.co/x4yWfNSV" +"microsoft","negative","126602276805685248","Wed Oct 19 10:15:09 +0000 2011","#Microsoft, #Adobe lose $13.5bn to piracy: Report http://t.co/wXBC1UUw" +"microsoft","negative","126599337361932288","Wed Oct 19 10:03:28 +0000 2011","System.IO.File.Delete - You lie! You didn't delete my file, and you didn't throw an exception. WTF!? #microsoft #net" +"microsoft","negative","126595347198902272","Wed Oct 19 09:47:37 +0000 2011","Wilth #Accessibility mode on in #SharePoint 2010, it seems that #Workflows sometimes do not start automatically #SP2010 #Workflow #Microsoft" +"microsoft","negative","126594799062102016","Wed Oct 19 09:45:26 +0000 2011","Internet Explorer 9 is the biggest pile of shit ever seriously #Microsoft #IE9" +"microsoft","negative","126586083470360576","Wed Oct 19 09:10:48 +0000 2011","I have been seriously slacking with my #mandarin studies these past few months. Blame #Microsoft and #Cisco!! #StudyingWhilstMotivated" +"microsoft","negative","126583473929588736","Wed Oct 19 09:00:26 +0000 2011","sledgehammer maybe! RT @SkandalasGP Office blues *drink?* RT @wuppy: #microsoft fail #notresponding for fcucks sake http://t.co/lCXa9q8P" +"microsoft","negative","126579470424473600","Wed Oct 19 08:44:32 +0000 2011","#skype is ruining the friendship with your aggressive #microsoft type selling. If i need something i'll ask. Thank you..." +"microsoft","negative","126579121303207936","Wed Oct 19 08:43:08 +0000 2011","Office blues *drink?* RT @wuppy: #microsoft fail #notresponding for fcucks sake http://t.co/VtYbqI91" +"microsoft","negative","126577595553824770","Wed Oct 19 08:37:05 +0000 2011","@joannejacobs I'd like to criticise #Microsoft's Steve Ballmer face-to-face for all the pain & suffering their products cause. 
#justsaying" +"microsoft","negative","126574432159408129","Wed Oct 19 08:24:30 +0000 2011","#microsoft fail #notresponding for fcucks sake http://t.co/lCXa9q8P" +"microsoft","negative","126573688941318144","Wed Oct 19 08:21:33 +0000 2011","How #apple eclipsed #microsoft http://t.co/4bWXJaDg by @charlesarthur #business @guardian" +"microsoft","negative","126573680665964544","Wed Oct 19 08:21:31 +0000 2011","RT @guardiantech: How #Apple eclipsed #Microsoft http://t.co/GK6YjTFY <- Microsoft's future is in the corporate world, not consumer" +"microsoft","negative","126572762411171840","Wed Oct 19 08:17:52 +0000 2011","â™» @waynp: Not the MS hate bit tho. < Waddya mean? #microsoft sucks bonobo monkeys' asses... #Linux roolz. For the extremists, there's #BSD" +"microsoft","negative","126572362316513280","Wed Oct 19 08:16:17 +0000 2011","#Microsoft, #Adobe lose $13.5bn to piracy: Report - The Times of India http://t.co/5IbOBNrH" +"microsoft","negative","126570851389800448","Wed Oct 19 08:10:17 +0000 2011","#Skype often crashing: #microsoft, what are you doing?" +"microsoft","negative","126570732430966785","Wed Oct 19 08:09:48 +0000 2011","#Microsoft's #SteveBallmer attacks #Android phones ..... well #windows7mobile is gay, you metro! http://t.co/sQnE1iLb via @Telegraph" +"microsoft","negative","126570180070481920","Wed Oct 19 08:07:37 +0000 2011","â™» @mariospr: Stand up for your freedom to install free software. Please sign up! http://t.co/YOOTfKTM #gnu #against #microsoft" +"microsoft","negative","126567507350913024","Wed Oct 19 07:56:59 +0000 2011","#Microsoft just bought #Skype officially. So how's the future of #Skype on any other devices/operating systems? :(" +"microsoft","negative","126566106252062720","Wed Oct 19 07:51:25 +0000 2011","#Google go went gone..server issues! first time since ages...now my faith in Tech. from #RIM to #Microsoft has been completely lifted" +"microsoft","negative","126565636703924225","Wed Oct 19 07:49:33 +0000 2011","RT @sander_koers: #Google Apps vs. #Microsoft #Office 365: ""it is clear that Google Apps should be the top choice"" http://t.co/OySZmjVi #in" +"microsoft","negative","126564844211154944","Wed Oct 19 07:46:24 +0000 2011","Ouch!!! #Microsoft #Azure #fail http://t.co/LJpGVZJT" +"microsoft","negative","126560723794010112","Wed Oct 19 07:30:02 +0000 2011","#Google Apps vs. #Microsoft #Office 365: ""it is clear that Google Apps should be the top choice"" http://t.co/KDTD4zRe" +"microsoft","negative","126558437017530368","Wed Oct 19 07:20:57 +0000 2011","#microsoft #SBS 2011 disabling third party software #updates via #gpo" +"microsoft","negative","126546665342640128","Wed Oct 19 06:34:10 +0000 2011","How #Apple eclipsed #Microsoft http://t.co/KxboqsKj" +"microsoft","negative","126546043193147394","Wed Oct 19 06:31:42 +0000 2011","Unmindful Ballmer, collects half a billion, doesn't say thank you. http://t.co/lwtpXejo #android #google #microsoft" +"microsoft","negative","126543225501650944","Wed Oct 19 06:20:30 +0000 2011","#Apple eclipsed #Microsoft http://t.co/wEdPbcPx" +"microsoft","negative","126525932625334272","Wed Oct 19 05:11:47 +0000 2011","http://t.co/5oK4XcOw Microsoft !! i hate you again !! #microsoft" +"microsoft","negative","126519630868119552","Wed Oct 19 04:46:45 +0000 2011","Open Letter: Steve Ballmer, Please Retire - Forbes - Please retweet! #MSFT needs a techie leader. 
http://t.co/E8uMRmro #microsoft #ballmer" +"microsoft","negative","126518620095393792","Wed Oct 19 04:42:44 +0000 2011","@MatthewGPhy One Operating System to rule them all and in the darkness bind them. #Microsoft" +"microsoft","negative","126517164416049152","Wed Oct 19 04:36:57 +0000 2011","I wonder if I tweet Bill Gates he'll fix my laptop... + +#Microsoft +#vista +#fml" +"microsoft","negative","126515911321591808","Wed Oct 19 04:31:58 +0000 2011","RT @anup_pillai: How #Apple eclipsed #Microsoft | http://t.co/r6hNEtTu" +"microsoft","negative","126513363529383937","Wed Oct 19 04:21:51 +0000 2011","After I don't know how many years, you still can't copy a #microsoft #word document headings to #powerpoint. #justsaying" +"microsoft","negative","126508179281547264","Wed Oct 19 04:01:16 +0000 2011","#microsoft #careers site is giving errors for any thing i search!! #weird +http://t.co/vJ2JpKtM http://t.co/02dYONXz" +"microsoft","negative","126504435227701248","Wed Oct 19 03:46:22 +0000 2011","Installing .NET framework, this might take very many years. Come back when your unborn child is in college. #microsoft" +"microsoft","negative","126488915996721153","Wed Oct 19 02:44:42 +0000 2011","Watching podcast of Steve Jobs & Bill Gates at 07 D5. Gates sucks at tech predictions! All talk #Apple #Microsoft" +"microsoft","negative","126485347935862784","Wed Oct 19 02:30:31 +0000 2011","My#Dell comp on shitty #Microsoft vista blue screened i swear this is steve jobs ghost coming back to finish what he started#appledomination" +"microsoft","negative","126484955047006209","Wed Oct 19 02:28:57 +0000 2011","#WTF #Microsoft #sbs2008 #rww only works in #IE. #browserwars" +"microsoft","neutral","126809228194217984","Wed Oct 19 23:57:30 +0000 2011","My last day of #Microsoft #SharePoint training: 10175A/Microsoft SharePoint 2010, Application Development - http://t.co/pKMBGsNr" +"microsoft","neutral","126808500356644864","Wed Oct 19 23:54:37 +0000 2011","Following dear #Microsoft CEO's logic, 550,000 #Android devices activated each day NEED to belong to computer scientists. I am enlightened!!" +"microsoft","neutral","126808393892634625","Wed Oct 19 23:54:11 +0000 2011","#EEDAR report: half of HD console owners buying #DLC http://t.co/5tO4bS31 #business #microsoft #pc #playstation" +"microsoft","neutral","126805369619087360","Wed Oct 19 23:42:10 +0000 2011","Congrats to #Microsoft's Monica Diaz for being part of @HispanicBizMag 100 Most Influential Hispanics! http://t.co/qj9QDBhA" +"microsoft","neutral","126805239209803777","Wed Oct 19 23:41:39 +0000 2011","Windows Phone Mango Update Released to 'Nearly Everyone' http://t.co/SX1NQJJ1 #microsoft #Technology" +"microsoft","neutral","126804966491955201","Wed Oct 19 23:40:34 +0000 2011","RT @WinBetaDotOrg: Microsoft releases developer preview of ""Project Roslyn"" compiler http://t.co/lpb9n510 #Microsoft #beta" +"microsoft","neutral","126804937438003200","Wed Oct 19 23:40:27 +0000 2011","#Microsoft We support you. Retweet if u support us or want to." +"microsoft","neutral","126804497493266433","Wed Oct 19 23:38:42 +0000 2011","@MSPartners can join: Small Business Specialist Community (SBCS) http://t.co/IlQPKEz0! #Microsoft #MPN" +"microsoft","neutral","126803763603312640","Wed Oct 19 23:35:47 +0000 2011","RT @SAPWorkSmarter: Tomorrow at #SAPTechEd Bangalore: #SAP & #Microsoft Building Apps for Both Worlds w #DuetEnterprise & #SAPGateway. h ..." 
+"microsoft","neutral","126803594216341506","Wed Oct 19 23:35:07 +0000 2011","#Microsoft CEO Steve #Ballmer on Not Buying #Yahoo: “Sometimes, You’re Lucky†http://t.co/7TEtDPP9" +"microsoft","neutral","126803513190789121","Wed Oct 19 23:34:48 +0000 2011","RT @KumarSachi: Another Gem from #Microsoft - Curate your personal history with ""Project Greenwich"". Available later this month. http: ..." +"microsoft","neutral","126803044426985472","Wed Oct 19 23:32:56 +0000 2011","RT @NeowinFeed: Microsoft opens the spigot, Windows Phone Mango available to all #microsoft #windowsphone http://t.co/nwMbseVm #neowin" +"microsoft","neutral","126802586178293760","Wed Oct 19 23:31:07 +0000 2011","#microsoft @diggita Microsoft - Macro in Word 2007 (gerardopaolillo) http://t.co/O3M07opC" +"microsoft","neutral","126802366996561920","Wed Oct 19 23:30:14 +0000 2011","#Microsoft CEO Steve Ballmer on Not Buying Yahoo: “Sometimes, You’re Lucky†<http://t.co/rcml9XQR>" +"microsoft","neutral","126800882477174784","Wed Oct 19 23:24:20 +0000 2011","#Microsoft: #Mango Now Available to Nearly Everyone - http://t.co/vQkJ84AC #windowsphone #wp7" +"microsoft","neutral","126800407585501184","Wed Oct 19 23:22:27 +0000 2011","If #Microsoft earnings disappoint, blame consumer PC sales - http://t.co/edXe0xBd via @cnet" +"microsoft","neutral","126800313029099520","Wed Oct 19 23:22:05 +0000 2011","Microsoft bitchslaps VMware in new paper - #Microsoft calls out #VMware for bogus benchmark numbers - http://t.co/dBsMa1Pb" +"microsoft","neutral","126798908335734785","Wed Oct 19 23:16:30 +0000 2011","Bill Gates to testify in Novel v. #Microsoft antitrust laws#UI t http://t.co/H0GscVZR" +"microsoft","neutral","126798906129526784","Wed Oct 19 23:16:29 +0000 2011","If #Microsoft earnings disappoint, blame consumer PC sales http://t.co/xEKwTlUg" +"microsoft","neutral","126797598425223169","Wed Oct 19 23:11:17 +0000 2011","RT @MSAnyWhere: Microsoft opens the spigot, Windows Phone Mango available to all #microsoft #windowsphone http://t.co/Fpv2yDof #microsoft" +"microsoft","neutral","126797364823457792","Wed Oct 19 23:10:22 +0000 2011","Microsoft opens the spigot, Windows Phone Mango available to all #microsoft #windowsphone: Microsoft has said th... http://t.co/uEpATRzx" +"microsoft","neutral","126797364433387520","Wed Oct 19 23:10:22 +0000 2011","Microsoft opens the spigot, Windows Phone Mango available to all #microsoft #windowsphone http://t.co/GbKV9pbQ #microsoft" +"microsoft","neutral","126797363418370048","Wed Oct 19 23:10:21 +0000 2011","Microsoft opens the spigot, Windows Phone Mango available to all #microsoft #windowsphone http://t.co/S54leHKi" +"microsoft","neutral","126797339758305280","Wed Oct 19 23:10:16 +0000 2011","Should you upgrade to #Microsoft Office 2010. 
http://t.co/gYGuYr64 #tech #Office2010" +"microsoft","neutral","126797209244151808","Wed Oct 19 23:09:45 +0000 2011","Microsoft opens the spigot, Windows Phone Mango available to all #microsoft #windowsphone http://t.co/vyNtU8xW" +"microsoft","neutral","126797207159582720","Wed Oct 19 23:09:44 +0000 2011","Microsoft opens the spigot, Windows Phone Mango available to all #microsoft #windowsphone http://t.co/nwMbseVm #neowin" +"microsoft","neutral","126797206232645635","Wed Oct 19 23:09:44 +0000 2011","Microsoft opens the spigot, Windows Phone Mango available to all #microsoft #windowsphone http://t.co/e4LLmmyg" +"microsoft","neutral","126796929366638592","Wed Oct 19 23:08:38 +0000 2011","#Microsoft #Cloud GSA Loses $2.5 Billion Cloud Contract Fight: Google recently dropped a case against the D... http://t.co/cObJRw66 #TCN" +"microsoft","neutral","126795614649466880","Wed Oct 19 23:03:24 +0000 2011","""@MuraineR: #Microsoft launching the OmniTouch, how funny I though that was an @ALUEnterprise solution! #copyright http://t.co/k1kDqz04" +"microsoft","neutral","126795256225210368","Wed Oct 19 23:01:59 +0000 2011","Love the template name #Microsoft... Everyone that does #SharePoint, go to your site's /_layouts/xsl/internal.xsl for a laugh." +"microsoft","neutral","126795026771607553","Wed Oct 19 23:01:04 +0000 2011","#Microsoft ’s new “great to be a family†#adverts begin to air, including #Office2010 #Windows7 and #WP7. +http://t.co/L5J2UEEQ +@winrumors" +"microsoft","neutral","126795013752504320","Wed Oct 19 23:01:01 +0000 2011","Club CALUMO was FUN! See the vidio CALUMO Blog #Denali #Microsoft #SQLServer http://t.co/8THqSRBd via @calumo" +"microsoft","neutral","126795009986002946","Wed Oct 19 23:01:00 +0000 2011","Club CALUMO was FUN! See the vidio CALUMO Blog #Denali #Microsoft #SQLServer http://t.co/Hfzf2cJe via @calumo" +"microsoft","neutral","126794825998663680","Wed Oct 19 23:00:16 +0000 2011","Tomorrow at #SAPTechEd Bangalore: #SAP & #Microsoft Building Apps for Both Worlds w #DuetEnterprise & #SAPGateway. http://t.co/te4NQmxN" +"microsoft","neutral","126793516398546945","Wed Oct 19 22:55:04 +0000 2011","TechNet Blogs- Technical Overview of #DuetEnterprise for #Microsoft #SharePoint and #SAP http://t.co/o9V1J49v" +"microsoft","neutral","126792687138508801","Wed Oct 19 22:51:46 +0000 2011","#Microsoft researchers want to turn your hand into a touchscreen +http://t.co/HV2oi81b" +"microsoft","neutral","126792112313352192","Wed Oct 19 22:49:29 +0000 2011","http://t.co/hrkWT2VA #Microsoft #Opens The Door To Two #Spanish #Entrepreneurs With #Medical #Project" +"microsoft","neutral","126791726559010816","Wed Oct 19 22:47:57 +0000 2011","Just installed Windows 7 Ultimate Service Pack 1 with IE9. It looks like IE9 is trying to look like Chrome. 
#windows7 #ie9 #microsoft #ie8" +"microsoft","neutral","126791479925555200","Wed Oct 19 22:46:59 +0000 2011","Gentle Ballmer meekly suggests the competition is doomed - #Microsoft is winning, winning, winning!: http://t.co/7ntP5MS1 via @thetecheye" +"microsoft","neutral","126790637243740160","Wed Oct 19 22:43:38 +0000 2011","sponsors and supporters at the #ascribeconference include #microsoft #ubisys #zetes #healthinformation #connectingforhealth #emis" +"microsoft","neutral","126789918826565632","Wed Oct 19 22:40:46 +0000 2011","Ballmer confirms Nokia phone debut; you can quote him on that - http://t.co/Oe9Imdob - #microsoft #SteveBallmer" +"microsoft","neutral","126789806905769984","Wed Oct 19 22:40:20 +0000 2011","Microsoft releases developer preview of ""Project Roslyn"" compiler http://t.co/lpb9n510 #Microsoft #beta" +"microsoft","neutral","126789806356303872","Wed Oct 19 22:40:20 +0000 2011","Microsoft releases developer preview of ""Project Roslyn"" compiler http://t.co/igGyeT9S #Microsoft #beta" +"microsoft","neutral","126789710705213440","Wed Oct 19 22:39:57 +0000 2011","I wish there was #Microsoft #PowerPoint for #iPad." +"microsoft","neutral","126789040015020033","Wed Oct 19 22:37:17 +0000 2011","Watching this week's episode of Hawaii Five-O. Great use of Windows Phone 7 & SkyDrive. Oh yeah a Dell Inspiron Duo was there too #microsoft" +"microsoft","neutral","126788856476471296","Wed Oct 19 22:36:33 +0000 2011","@WSPSanDiego Kickoff tomorrow 10/20 at San Diego #Microsoft Store at 6:00pm @wspwest ping me w/questions #in" +"microsoft","neutral","126788488183037953","Wed Oct 19 22:35:05 +0000 2011","On the blog we discuss #Microsoft embracing of #Apache #Hadoop for big data - + +http://t.co/q3YNLQck #opensource #bigdata #IT" +"microsoft","neutral","126786825913245698","Wed Oct 19 22:28:29 +0000 2011","#Microsoft Works Suite 2003 Review http://t.co/Yie1rSSz #windowsxphomesoftware #officesuites #family" +"microsoft","neutral","126786517531242496","Wed Oct 19 22:27:16 +0000 2011","Did a quick video call test between two diff. #Android tablets using #Skype & it worked great! #Microsoft hasn't broken it yet." +"microsoft","neutral","126786021340884992","Wed Oct 19 22:25:17 +0000 2011","What was the best device in 90s ? #Apple #Sony #Samsung #Microsoft or other Companies..:)" +"microsoft","neutral","126785035599749121","Wed Oct 19 22:21:22 +0000 2011","Microsoft CEO Steve Ballmer Looks Ahead at Social, Mobile Plans | Adweek http://t.co/jEENZQDw #tech #apple #microsoft" +"microsoft","neutral","126784430185521154","Wed Oct 19 22:18:58 +0000 2011","#EvolutionOfIT #cloud event @BenchmarkLearn can any #microsoft or @MSLearning folks help create us a windows azure pass ex. BLAZURE ??" +"microsoft","neutral","126782184748097536","Wed Oct 19 22:10:03 +0000 2011","Creating #Pareto charts using #Microsoft #Excel http://t.co/OFXuNqhY" +"microsoft","neutral","126781699811061760","Wed Oct 19 22:08:07 +0000 2011","#microsoft #volume 1 #microsoft powerpoint #procedural syntax http://t.co/InQbcG68 GO! 
with Microsoft PowerPoint 2007 Volume 1" +"microsoft","neutral","126781384600727552","Wed Oct 19 22:06:52 +0000 2011","#Microsoft Pushing out #Mango to more Windows Phones http://t.co/xPIaZ8Xp #wp7 #orange #samsung #lg #optimus7 #htc #dell #msft" +"microsoft","neutral","126781241423962112","Wed Oct 19 22:02:34 +0000 2011","#Microsoft drops developer preview of its ‘Roslyn’ compiler http://t.co/XgUrXNPa #uncategorized" +"microsoft","neutral","126780592036646913","Wed Oct 19 22:03:43 +0000 2011","More Colorful Photos Of Nokia's First Windows Phone Just Leaked!! http://t.co/sU4l744v #nokia #wp7 #microsoft #windowsphone7" +"microsoft","neutral","126779798986047488","Wed Oct 19 22:00:34 +0000 2011","#Microsoft shows 'touch screen' for any surface http://t.co/X7Z87xzt" +"microsoft","neutral","126779783760723968","Wed Oct 19 22:00:30 +0000 2011","#Microsoft learning all day long-For FREE-SpecTECHular San Antonio. Register?uick. November 10 - http://t.co/GinBVlSK" +"microsoft","neutral","126779778090008576","Wed Oct 19 22:00:29 +0000 2011","#Microsoft learning all day long-For FREE-SpecTECHular San Antonio. Register?uick. November 10 - http://t.co/bVKP5Caq" +"microsoft","neutral","126779040282587138","Wed Oct 19 21:57:33 +0000 2011","#SIGSource #Microsoft talent source program boasts a procure-to-pay model to manage temp workforce, concept works for VMS #Beeline" +"microsoft","neutral","126778210783473665","Wed Oct 19 21:54:15 +0000 2011","@grumbledook still poor central management from Apple then? #microsoft" +"microsoft","neutral","126776771159916546","Wed Oct 19 21:48:32 +0000 2011","RT @WinRumors: If you missed this earlier, Steve #Ballmer bashes #Android phones [video] http://t.co/NvD7MQVk #microsoft #windows" +"microsoft","neutral","126775752610942976","Wed Oct 19 21:44:29 +0000 2011","#Nokia 800 Renders Revealed http://t.co/K3txY6JZ #mobilenews #android #gingerbread #microsoft #nokia800 #nokian9" +"microsoft","neutral","126774185577021445","Wed Oct 19 21:38:15 +0000 2011","#Microsoft Boxes Up Two New Xbox 360 Holiday Bundles: With the holiday season just around the… http://t.co/cHyXxdI2" +"microsoft","neutral","126773807875751936","Wed Oct 19 21:36:45 +0000 2011","RT @KentisG: Agree ! RT @MuraineR: #Microsoft launching the OmniTouch, how funny I though that was an @ALUEnterprise soluti… (cont) http ..." +"microsoft","neutral","126773769262997504","Wed Oct 19 21:36:36 +0000 2011","@JackDavidson95 ill be manager after a week! Yeh joint ownership of #microsoft ahhhhh" +"microsoft","neutral","126773680196947968","Wed Oct 19 21:36:15 +0000 2011","RT @Microsoft_EDU: Quality education is vital. Attend #Microsoft & #Desire2Learn session at #EDUCAUSE http://t.co/hlRxf2dF #EDU11" +"microsoft","neutral","126773504644349954","Wed Oct 19 21:35:33 +0000 2011","#Microsoft To Launch Its Own Version Of #Facebook Timeline Like Web Service Called Project #Greenwich http://t.co/MmS14pDW" +"microsoft","neutral","126773458519605248","Wed Oct 19 21:35:22 +0000 2011","@lukerussell1 #iWantAnAutomaticPromotion and we'll be running #Microsoft before you know it." 
+"microsoft","neutral","126772660024774656","Wed Oct 19 21:32:12 +0000 2011","#Microsoft drops developer preview of its ‘Roslyn’ compiler http://t.co/niSKSmL4" +"microsoft","neutral","126772118879879169","Wed Oct 19 21:30:03 +0000 2011","Creating #Pareto charts using #Microsoft #Excel http://t.co/B6rYS5gX" +"microsoft","neutral","126771594470232064","Wed Oct 19 21:27:58 +0000 2011","Developers: Let Microsoft Market Your App for Windows Phone http://t.co/QZUqhCxx #wp7 #wp7dev #windowsphone #Microsoft #Marketplace" +"microsoft","neutral","126769762452770819","Wed Oct 19 21:20:41 +0000 2011","@ScottShipman1 Glad to hear! Hey @Mckellip thank you for the #Microsoft #Lync video participation #onthebigscreen :) @CoreBTS #CoreAppTech" +"microsoft","neutral","126768953027608576","Wed Oct 19 21:17:28 +0000 2011","RT @NeowinFeed: Microsoft Stores offer up free Windows Phone 7 devices #microsoft #microsoftstores http://t.co/hJ8qlOC9 #neowin" +"microsoft","neutral","126768739692720128","Wed Oct 19 21:16:37 +0000 2011","Google Docs presentation makes PowerPoint weep, beg for mercy http://t.co/9rVHs6jk via @engadget #google #powerpoint #microsoft #haha" +"microsoft","neutral","126768259734315008","Wed Oct 19 21:14:43 +0000 2011","RT @Flipbooks #Microsoft Researchers Want to Turn Your Hand Into a Touchscreen http://t.co/NDpyFvpK rt @TferThomas #Technology #Tech" +"microsoft","neutral","126768125386571776","Wed Oct 19 21:14:10 +0000 2011","RT @Flipbooks: #Microsoft Researchers Want to Turn Your Hand Into a Touchscreen http://t.co/HIbjxHVD rt @TferThomas #Technology #Tech" +"microsoft","neutral","126768055769513984","Wed Oct 19 21:13:54 +0000 2011","#Microsoft Researchers Want to Turn Your Hand Into a Touchscreen http://t.co/HIbjxHVD rt @TferThomas #Technology #Tech" +"microsoft","neutral","126768000949948416","Wed Oct 19 21:13:41 +0000 2011","Nice comparison w #Microsoft :) RT @google: Watch the live stream of Vic Gundotra & Sergey Brin at #w2s http://t.co/vPxANJtz" +"microsoft","neutral","126767824386531329","Wed Oct 19 21:12:59 +0000 2011",".. downloading CTP3 of #Microsoft #SQL2012 to play around ...http://www.microsoft.com/sqlserver/en/us/default.aspx" +"microsoft","neutral","126767467077976065","Wed Oct 19 21:11:34 +0000 2011","DPR Client: @EvolveTech partnering with #Microsoft on #Office 365 webinar Tuesday, Nov. 1, at 10 a.m. ET. Sign up! http://t.co/hnrzdFk5" +"microsoft","neutral","126767049312714752","Wed Oct 19 21:09:54 +0000 2011","Because you are what you search: Now everyone wants to customize search #Microsoft as well as #MCHammer #WiredDoo http://t.co/HlicKxPV" +"microsoft","neutral","126766670109872128","Wed Oct 19 21:08:24 +0000 2011","RT @techinciter: Suppose Microsoft Had Bought Siri? - Forbes http://t.co/ZRYNdhDi #apple #microsoft #siri #msft" +"microsoft","neutral","126766092629712896","Wed Oct 19 21:06:06 +0000 2011","Windows Phone firmware updates due next week to enable Internet Sharing http://t.co/yPF60YEe #microsoft #windows" +"microsoft","neutral","126765064299614209","Wed Oct 19 21:02:01 +0000 2011","RT @MSFTDynamicsERP: Time to learn, inspire and innovate at the #Microsoft Dynamics #AX2012 EMEA Tech Conference http://t.co/bzX4UE6K # ..." +"microsoft","neutral","126764662510452736","Wed Oct 19 21:00:25 +0000 2011","@TheZeroOfTime #microsoft" +"microsoft","neutral","126764604218028033","Wed Oct 19 21:00:11 +0000 2011","1st public test release for the Roslyn project. http://t.co/0j9ly9Y8 giving APIs to the C# compiler (and visual basic but ..;). 
#microsoft" +"microsoft","neutral","126762652402860033","Wed Oct 19 20:52:26 +0000 2011","Updates: Windows Phone 7.5 now available more broadly http://t.co/miMgwgwo #microsoft" +"microsoft","neutral","126762576557248512","Wed Oct 19 20:52:08 +0000 2011","RT @SAP_MICROSOFT: Follow @whymicrosoft for insight on #Microsoft solutions which drive productivity for people and their organizations" +"microsoft","neutral","126761994098442241","Wed Oct 19 20:49:49 +0000 2011","#Microsoft builds a functioning 'holodesk' to interact with digital items - http://t.co/ougSB108 via @TheNextWeb" +"microsoft","neutral","126761880801910784","Wed Oct 19 20:49:22 +0000 2011","@nokia Is #Nokia going to release their new #WindowsPhone in Norway this year? #WP7 #Microsoft" +"microsoft","neutral","126761759041265664","Wed Oct 19 20:48:53 +0000 2011","Ballmer On Not Buying Yahoo: +http://t.co/ogoaKRBf + +#Microsoft #Yahoo #Skype #Xbox #Google #Bing #Ballmer http://t.co/LwUYs0uP" +"microsoft","neutral","126761312016547840","Wed Oct 19 20:47:06 +0000 2011","#Microsoft delivers #developer preview of Roslyn compiler as a service http://t.co/nM30ugQQ" +"microsoft","neutral","126761103677071362","Wed Oct 19 20:46:16 +0000 2011","RT @ScottRockHill: #MIcrosoft Launches Reseller RewardHub for Resllers http://t.co/scPUl9Yz" +"microsoft","neutral","126761034341040128","Wed Oct 19 20:46:00 +0000 2011","#GA-repost: RT @whymicrosoft Are you making the move from #GoogleApps to #Microsoft? >> #Metalogix can migrate y... http://t.co/z3Z3X9pA" +"microsoft","neutral","126758826337771520","Wed Oct 19 20:37:13 +0000 2011","RT @whymicrosoft Are you making the move from #GoogleApps to #Microsoft? >> #Metalogix can migrate you from #GoogleApps to #SharePoint" +"microsoft","neutral","126758715981434881","Wed Oct 19 20:36:47 +0000 2011","#Microsoft builds a functioning ‘holodesk’ to interact with digital items http://t.co/BgrZnNvs #uncategorized" +"microsoft","neutral","126756834257285120","Wed Oct 19 20:29:18 +0000 2011","#seguridadparatodos is using #Microsoft #Security #Essentials and you? (thx @msftsecresponse) - Our analysis [Spanish] http://t.co/hUt3FQyA" +"microsoft","neutral","126755580604981249","Wed Oct 19 20:24:20 +0000 2011","RT @RossMistry: #Microsoft Codename ""Data Explorer"" CTP Coming Next Month http://t.co/NkhuoM7U #SQLServer #SQL2012 #SQLAzure" +"microsoft","neutral","126755433108082688","Wed Oct 19 20:23:44 +0000 2011","Bill Gates jumps over chairs during interviews http://t.co/dmVtFsVg #BillGates #microsoft #parkour #cheatedabit" +"microsoft","neutral","126755073664618496","Wed Oct 19 20:22:19 +0000 2011","Minority Report-esque interface developed by #Microsoft Research and Carnegie Mellon University http://t.co/0yqEKDzg via @Digg" +"microsoft","neutral","126754216160145409","Wed Oct 19 20:18:54 +0000 2011","I can't c it as an important invention ... Siri is an incomparable alternative! 
-- PocketTouch http://t.co/4DPIoyqT #MicroSoft #Siri" +"microsoft","neutral","126753468160540672","Wed Oct 19 20:15:56 +0000 2011","Get rid of that tangled mess behind your computer desk with this #Microsoft Wireless Mouse and Keyboard: http://t.co/n4LewRAy" +"microsoft","neutral","126752950654746624","Wed Oct 19 20:13:53 +0000 2011","#Microsoft delivers developer preview of Roslyn compiler as a service http://t.co/5yup4iMK" +"microsoft","neutral","126752714385391616","Wed Oct 19 20:12:56 +0000 2011","RT @nedraallmond: @WSPSanDiego Kickoff tomorrow 10/20 at San Diego #Microsoft Store at 6:00pm @wspwest http://t.co/iuaa4yuE" +"microsoft","neutral","126752712300826624","Wed Oct 19 20:12:56 +0000 2011","RT @nedraallmond: @WSPSanDiego Kickoff tomorrow 10/20 at San Diego #Microsoft Store at 6:00pm @wspwest http://t.co/GP8tIRXN" +"microsoft","neutral","126752262281371649","Wed Oct 19 20:11:08 +0000 2011","non conductive #surface become #multitouch. #touch #gestures: #microsoft #windows 7 @ #nui #apps http://t.co/Ygpnusfv" +"microsoft","neutral","126751213692792832","Wed Oct 19 20:06:58 +0000 2011","@WesleyBackelant Indeed I survived #sqlpass #sqlkilt and a couple of vendor evenings :-) I spared #sqlkaraoke my great voice :-) #microsoft" +"microsoft","neutral","126751180557778945","Wed Oct 19 20:06:51 +0000 2011","BUILD Windows #Samsung #Slate PC in US #Microsoft Store http://t.co/bm4NeNiT #msdn #bldwin" +"microsoft","neutral","126750292212584449","Wed Oct 19 20:03:19 +0000 2011","CNET » #Microsoft #OmniTouch allows multitouch input on arbitrary, everyday surfaces (via @andretelles) » http://t.co/fEB4uIMr" +"microsoft","neutral","126749618229878784","Wed Oct 19 20:00:38 +0000 2011","Microsoft Roslyn CTP is finally out...Check it out http://t.co/cSTAnsLF #roslyn #visualstudio #compiler #csharp #net #microsoft" +"microsoft","neutral","126749567428468736","Wed Oct 19 20:00:26 +0000 2011","Vertical Solutions is giving away a #microsoft office pro pack at the #2011solutions conference! Stop by their booth for a chance to win!" +"microsoft","neutral","126749486545506304","Wed Oct 19 20:00:07 +0000 2011","#Microsoft Pre-order Call of Duty: Modern Warfare 3 and get $20 off Xbox Live 12M Card, $10 bounce back co.. http://t.co/3ov35Z3R" +"microsoft","neutral","126749360745758722","Wed Oct 19 19:59:37 +0000 2011","Reporting in #Microsoft Dynamics #AX2012 http://t.co/Q4oClJfS" +"microsoft","neutral","126749318198730752","Wed Oct 19 19:59:27 +0000 2011","#Microsoft Codename ""Data Explorer"" CTP Coming Next Month http://t.co/NkhuoM7U #SQLServer #SQL2012 #SQLAzure" +"microsoft","neutral","126748764869378048","Wed Oct 19 19:57:15 +0000 2011","RT @aislyngreene: #Microsoft wants to transform your hand into a touchscreen: http://t.co/an3bmArc via @AddThis" +"microsoft","neutral","126748146582818816","Wed Oct 19 19:54:47 +0000 2011","http://t.co/gAPEyL5N #Harmonix #Microsoft" +"microsoft","neutral","126747712082292736","Wed Oct 19 19:53:04 +0000 2011","RT @techinciter: Suppose Microsoft Had Bought Siri? - Forbes http://t.co/S19qrDjU #apple #microsoft #siri" +"microsoft","neutral","126747117120274432","Wed Oct 19 19:50:42 +0000 2011","#Microsoft Hints at Windows Phone #Apps from Indian Publishers and Services http://t.co/VYA3ed6h #wp7 #windowsphone #msft" +"microsoft","neutral","126746786806247424","Wed Oct 19 19:49:23 +0000 2011","RT @techinciter: Open Letter: Steve Ballmer, Please Retire - Forbes - Please retweet! #MSFT needs a techie leader. http://t.co/E8uMRmro ..." 
+"microsoft","neutral","126746562062848001","Wed Oct 19 19:48:29 +0000 2011","When are they going to drop the charade and list Steve Ballmer as one of the producers of Hawaii Five-O? #productplacement #microsoft #ftl" +"microsoft","neutral","126746140791144448","Wed Oct 19 19:46:49 +0000 2011","RT @dansaap: At the #VMworld party, looks good but I wonder who are the better dancers; #VMware or #Microsoft employees! ;-)" +"microsoft","neutral","126745920967680000","Wed Oct 19 19:45:57 +0000 2011","I'm nowhere near to be a scientst but thks to Steve Balmer 4 declarng me a scientst. Been using #Android frm last 3yrs. #Microsoft #Google" +"microsoft","neutral","126745707414691840","Wed Oct 19 19:45:06 +0000 2011","That Huge AOL, Yahoo, And Microsoft Ad-Selling Deal Isn't Done Yet (AOL, MSFT, YHOO) http://t.co/Vj678HK3 #apple #microsoft" +"microsoft","neutral","126745457232846849","Wed Oct 19 19:44:06 +0000 2011","#microsoft announces SP3 for #office2007 and #sharepoint2007." +"microsoft","neutral","126745293378166784","Wed Oct 19 19:43:27 +0000 2011","#Microsoft wants to transform your hand into a touchscreen: http://t.co/J8FgWWgS via @AddThis" +"microsoft","neutral","126745275686600705","Wed Oct 19 19:43:23 +0000 2011","@WSPSanDiego Kickoff tomorrow 10/20 at San Diego #Microsoft Store at 6:00pm @wspwest http://t.co/5FiP2vVg or ping me #in" +"microsoft","neutral","126745248121618434","Wed Oct 19 19:43:16 +0000 2011","Steve #Bullmer says it's hard for him to get excited about #android phones...even though #Microsoft makes loads of money off of their sales." +"microsoft","neutral","126744229912379392","Wed Oct 19 19:39:13 +0000 2011","Suppose Microsoft Had Bought Siri? - Forbes http://t.co/HoJx73Hh #apple #microsoft #siri" +"microsoft","neutral","126743926903283713","Wed Oct 19 19:38:01 +0000 2011","Suppose Microsoft Had Bought Siri? - Forbes http://t.co/ZRYNdhDi #apple #microsoft #siri #msft" +"microsoft","neutral","126743901968150528","Wed Oct 19 19:37:55 +0000 2011","@WSPSanDiego Kickoff tomorrow 10/20 at San Diego #Microsoft Store at 6:00pm @wspwest http://t.co/5FiP2vVg or ping me" +"microsoft","neutral","126743843348561920","Wed Oct 19 19:37:41 +0000 2011","#Microsoft to Slash its #Water Impact in Quincy, Washington - http://t.co/0R5LMHZj" +"microsoft","neutral","126743100809945090","Wed Oct 19 19:34:44 +0000 2011","Good summary of planned updates to #Microsoft #Cloud services in Q4 - #SharePoint Online, #Office365, #CRM, #Azure. http://t.co/o35vv1wt" +"microsoft","neutral","126743096896655362","Wed Oct 19 19:34:43 +0000 2011","Good summary of planned updates to #Microsoft #Cloud services in Q4 - #SharePoint Online, #Office365, #CRM, #Azure. http://t.co/3bL6rh5v" +"microsoft","neutral","126742966890004481","Wed Oct 19 19:34:12 +0000 2011","@VSLive What is up with #ATT at #Microsoft? Full bars in one building and no service in another only 1000 or so feet away? 
#vslive" +"microsoft","neutral","126742062635810817","Wed Oct 19 19:30:37 +0000 2011","#Microsoft researchers want to turn your hand into a touchscreen http://t.co/byYIcRpS" +"microsoft","neutral","126741965915168771","Wed Oct 19 19:30:14 +0000 2011","RT @sb_greenbiz: #Microsoft's supplier #sustainability movement & the effect it could have on #plastics companies http://t.co/m0otPgEm" +"microsoft","neutral","126741817365504000","Wed Oct 19 19:29:38 +0000 2011","Watch new technology under development of #Microsoft http://t.co/fQlhyAat" +"microsoft","neutral","126741679024783360","Wed Oct 19 19:29:05 +0000 2011","#Microsoft #Cloud Microsoft, Infosys Partner to Bring Cloud to Enterprises: Speaking to CMSWire before the ... http://t.co/GV1edcvp #TCN" +"microsoft","neutral","126741473726177280","Wed Oct 19 19:28:16 +0000 2011","#Microsoft Eyes A New Approach to Social #Search http://t.co/lh6pRoAM" +"microsoft","neutral","126740991087611904","Wed Oct 19 19:26:21 +0000 2011","Microsoft wants to transform your hand into a touchscreen: http://t.co/Gwi35vtP @TechFlash #microsoft #omnitouch" +"microsoft","neutral","126740408926609409","Wed Oct 19 19:24:02 +0000 2011","Microsoft CEO Looks Ahead at Social and Mobile Plans http://t.co/KFwQ8r4T #Technology #Microsoft #Nokia" +"microsoft","neutral","126740066138722305","Wed Oct 19 19:22:41 +0000 2011","Ufff... after #Microsoft #MTS11 and #MVP meeting. Was great! #MVPBuzz" +"microsoft","neutral","126739377232687105","Wed Oct 19 19:19:56 +0000 2011","Handling Multi-touch and Mouse Input in All Browsers http://t.co/duK6ze2G #microsoft #ie" +"microsoft","neutral","126737802657402880","Wed Oct 19 19:13:41 +0000 2011","Not even the Durban rain can stop the part. Prime Circle rocking #Microsoft #techedafrica" +"microsoft","neutral","126737400658538496","Wed Oct 19 19:12:05 +0000 2011","#Linux Licensing in Conflict with Secure Boot Support http://t.co/vAyDRedn #opensource #uefi #microsoft" +"microsoft","neutral","126737368001683457","Wed Oct 19 19:11:57 +0000 2011","#Microsoft CEO Looks Ahead at Social and Mobile Plans http://t.co/wYmFplCs #marketing" +"microsoft","neutral","126737116049833984","Wed Oct 19 19:10:57 +0000 2011","i see the #microsoft #lync 64-bit installer still tries to install to C:\Program Files (x86)" +"microsoft","neutral","126736393987817472","Wed Oct 19 19:08:05 +0000 2011","Listening to Doug Leland #Microsoft big data guru. Social media impact huge not just data wise, but within tools to manage data #gartnerSYM" +"microsoft","neutral","126735639508037632","Wed Oct 19 19:05:05 +0000 2011","Follow @whymicrosoft for insight on #Microsoft solutions which drive productivity for people and their organizations" +"microsoft","neutral","126735624299487232","Wed Oct 19 19:05:02 +0000 2011","Doesn't #Microsoft Research's new touch technology resemble a lot the one developed by Pranav Mistry in ~2009? http://t.co/yZ9xGQGM" +"microsoft","neutral","126735411811848192","Wed Oct 19 19:04:11 +0000 2011","#Microsoft researchers want to turn your hand into a touchscreen: http://t.co/UoPSyLTO | #Institute #University #accuracy #applications" +"microsoft","neutral","126735013667545088","Wed Oct 19 19:02:36 +0000 2011",".@allie_hooker #Microsoft has the edge in its cloud integration across its different platforms. Tablets still definitely lag behind though." 
+"microsoft","neutral","126734632120102912","Wed Oct 19 19:01:05 +0000 2011","#Microsoft Released Camera Codec Pack For Supporting RAW File #Windows" +"microsoft","neutral","126734120058490881","Wed Oct 19 18:59:03 +0000 2011","RT @ObinnaOsobalu: How to resolve incorrect file associations in #Microsoft #Office2010 http://t.co/FgbcXyTP" +"microsoft","neutral","126733687944515584","Wed Oct 19 18:57:20 +0000 2011","Undecided how i feel about new #microsoft #searchengine idea, but could be cool minus the whole celebrity impersonation http://t.co/BdfKJGkz" +"microsoft","neutral","126733589328048128","Wed Oct 19 18:56:57 +0000 2011","#apple #facebook @facebook #microsoft hahahahaha http://t.co/XeNSzCjh" +"microsoft","neutral","126733404870942720","Wed Oct 19 18:56:12 +0000 2011","RT @UMDNews: The Future of #Information: Innovators from #Google, #Microsoft &#Twitter Nov. 14-18 Info & registration at http://t.co/k ..." +"microsoft","neutral","126732739083898881","Wed Oct 19 18:53:34 +0000 2011","@bitwizards I'm Chillin in #Microsoft Commons at #CIO Summit http://t.co/kj890KmF" +"microsoft","neutral","126732384602296320","Wed Oct 19 18:52:09 +0000 2011","RT @drkhan: Are you going to #RSNA? Visit #Microsoft at booth 1411 http://t.co/wdHFz7hp #RSNA2011 #RSNA11 #radiology #Amalga #HealthVault" +"microsoft","neutral","126732343254851584","Wed Oct 19 18:51:59 +0000 2011","At the #VMworld party, looks good but I wonder who are the better dancers; #VMware or #Microsoft employees! ;-)" +"microsoft","neutral","126732316000264192","Wed Oct 19 18:51:53 +0000 2011","@Brittjje #MICROSOFT" +"microsoft","neutral","126731883659804672","Wed Oct 19 18:50:10 +0000 2011","#Microsoft #Search Idea Could Let You Search Like Miley http://t.co/JoMucrMb" +"microsoft","neutral","126731759646810113","Wed Oct 19 18:49:40 +0000 2011","#Microsoft bringing #touchscreens to walls, clothes, and more http://t.co/bmQf5wEx #tech" +"microsoft","neutral","126729624217600001","Wed Oct 19 18:41:11 +0000 2011","#HPVirtualSystem 4 #Microsoft is the latest solution from HP & Microsoft’s $250 million I2A initiative http://t.co/4VCIUZGL" +"microsoft","neutral","126729179994656769","Wed Oct 19 18:39:25 +0000 2011","How to install Windows 7 – What to do before start [Part 2]: http://t.co/vvaslUmA via @Pureinfotech #win7 #tech #software #backup #microsoft" +"microsoft","neutral","126728328358014980","Wed Oct 19 18:36:02 +0000 2011","#Microsoft Excel Tips: How To Alternate the Color Between Rows #tips +http://t.co/WJWREFjJ" +"microsoft","neutral","126728277896347649","Wed Oct 19 18:35:50 +0000 2011","#pardus SecureBoot'a Karşı Duruyoruz!: http://t.co/Uiwn6fgQ #fsf #secureboot #linux #microsoft" +"microsoft","neutral","126727087770963968","Wed Oct 19 18:31:06 +0000 2011","#Microsoft #Office: A #Shortcut To #Find Your #Microsoft #Office #Documents http://t.co/QojWxB50 via @ibad.info" +"microsoft","neutral","126726863811903488","Wed Oct 19 18:30:13 +0000 2011","Are you going to #RSNA? Visit #Microsoft at booth 1411 http://t.co/wdHFz7hp #RSNA2011 #RSNA11 #radiology #Amalga #HealthVault" +"microsoft","neutral","126726826923012096","Wed Oct 19 18:30:04 +0000 2011","Today's #bargain? Check #Microsoft Bluetooth Mobile Keyboard 6000 http://t.co/sXpNubdH #tech #deals" +"microsoft","neutral","126726199123771392","Wed Oct 19 18:27:34 +0000 2011","Agree ! 
RT @MuraineR: #Microsoft launching the OmniTouch, how funny I though that was an @ALUEnterprise soluti… (cont) http://t.co/xyWfXsDL" +"microsoft","neutral","126725918629695489","Wed Oct 19 18:26:28 +0000 2011","RT @E2EVC: #Microsoft #SCVMM #2012- Session by @virtualfat - all u need to know about the new 2012 version. #e2evc Register http://t.co/ ..." +"microsoft","neutral","126725691059351552","Wed Oct 19 18:25:33 +0000 2011","http://t.co/cBCEFILN - Many-To-Many Control for #Microsoft #LightSwitch" +"microsoft","neutral","126725006729281536","Wed Oct 19 18:22:50 +0000 2011","MS Access: a database, reporting tool, programmable forms w/ VBA, part of MS Office #Microsoft should add web front end admin console/util" +"microsoft","neutral","126724433791549440","Wed Oct 19 18:20:34 +0000 2011","RT @thomasgoubin: RT @cmaneu: #stage #Toulouse #Microsoft #net #SuperBoîte #Win8 #geeks = http://t.co/kWfptH4P (Please RT ;)" +"microsoft","neutral","126724174378045440","Wed Oct 19 18:19:32 +0000 2011","iCloud Control Panel for Windows: http://t.co/8nJ4VHNL #win7 #windows7 #apple #ipad #iphone #ipod #iphone4s #microsoft #icloud" +"microsoft","neutral","126723741651714048","Wed Oct 19 18:17:49 +0000 2011","#Microsoft opens a temp #Kinect / Windows Phone Store http://t.co/txnKgFzl #wp7 #windowsphone #xbox360 #wp75 #msft" +"microsoft","neutral","126723269326938112","Wed Oct 19 18:15:56 +0000 2011","Hanging out at #Microsoft Commons with @VinceMayfield" +"microsoft","neutral","126723018989907968","Wed Oct 19 18:14:56 +0000 2011","RT @AndroidTopNews: Steve Ballmer: Android is for computer science geeks http://t.co/o7vJl4Tl #News #android #google #microsoft #steve_b ..." +"microsoft","neutral","126722729444515840","Wed Oct 19 18:13:47 +0000 2011","Windows 8 on a #Tablet? Can #Microsoft put a bruise on the #ipad? - http://t.co/WGRMnyPi #productivity #office" +"microsoft","neutral","126722549584379905","Wed Oct 19 18:13:04 +0000 2011","Steve Ballmer: Android is for computer science geeks http://t.co/o7vJl4Tl #News #android #google #microsoft #steve_ballmer" +"microsoft","neutral","126722463265599488","Wed Oct 19 18:12:44 +0000 2011","@Rich021 Thank you my friend! Much appreciated and keep up the great work! I hope you clean house on #Microsoft #Rewards here! #Retail" +"microsoft","neutral","126722284776980480","Wed Oct 19 18:12:01 +0000 2011","Creating #Pareto charts using #Microsoft #Excel http://t.co/dXbKq8q8" +"microsoft","neutral","126722253579763712","Wed Oct 19 18:11:54 +0000 2011","Project Greenwich 2 be released by #Microsoft Research lets people curate their own personal history http://t.co/ocre5FwU" +"microsoft","neutral","126722158394228736","Wed Oct 19 18:11:31 +0000 2011","#Microsoft researchers want to turn your hand into a #touchscreen http://t.co/WJAB0tMQ via @arstechnica" +"microsoft","neutral","126722000549974016","Wed Oct 19 18:10:53 +0000 2011","Set up an environment where you get innovation. Share those expectations with your suppliers. #SIGSource #Microsoft" +"microsoft","neutral","126721061797629953","Wed Oct 19 18:07:10 +0000 2011","RT @Surface: PC Podcast 39 (#GITEX, #Microsoft #Surface) | http://t.co/RsccE99M http://t.co/EyLVACYn +Hear the latest with @Surface from ..." +"microsoft","neutral","126720971515248641","Wed Oct 19 18:06:48 +0000 2011","PC Podcast 39 (#GITEX, #Microsoft #Surface) | http://t.co/RsccE99M http://t.co/EyLVACYn +Hear the latest with @Surface from GITEX in Dubai." 
+"microsoft","neutral","126719738293391360","Wed Oct 19 18:01:54 +0000 2011","#Microsoft's plan for bringing its #BI tools to iOS, Android, and Windows 8 +devices +http://ww... Read more at http://t.co/bhhgh5QC" +"microsoft","neutral","126718237252648960","Wed Oct 19 17:55:56 +0000 2011","Join us Wednesday Nov. 2 for a webinar featuring #Microsoft #Lync deployments with #snom #UC. Register today at http://t.co/B1pr1kIX #VAR" +"microsoft","neutral","126717789829472256","Wed Oct 19 17:54:10 +0000 2011","RT @jilldyche: Just saw my bud @EdMaguire of Credit Agricole Securities interviewed by @TylerMatheison @CNBC, covering #Microsoft. You ..." +"microsoft","neutral","126717208557649920","Wed Oct 19 17:51:51 +0000 2011","RT @infamarketplace: Tool to allow users to apply #DataQuality rules directly from MS Excel. Learn more @infamarketplace. http://t.co/j ..." +"microsoft","neutral","126716851706277889","Wed Oct 19 17:50:26 +0000 2011","#Microsoft builds a functioning ‘holodesk’ to interact with digital items http://t.co/otvmQxJ2" +"microsoft","neutral","126716781510410240","Wed Oct 19 17:50:09 +0000 2011","Just saw my bud @EdMaguire of Credit Agricole Securities interviewed by @TylerMatheison @CNBC, covering #Microsoft. You rocked it, Ed! #fb" +"microsoft","neutral","126716221575004160","Wed Oct 19 17:47:56 +0000 2011","RT @robakk: Start your private cloud fast with the new HP VirtualSystem for Microsoft http://t.co/XEzz42Z2 #HP #Microsoft #HyperV #Syste ..." +"microsoft","neutral","126715906096238592","Wed Oct 19 17:46:40 +0000 2011","RT @The_Victor2 HoloDesk: #Microsoft unveils technology that's one step closer to human/digital interaction. http://t.co/oP379ycF #hardware" +"microsoft","neutral","126714453105446912","Wed Oct 19 17:40:54 +0000 2011","Start your private cloud fast with the new HP VirtualSystem for Microsoft http://t.co/XEzz42Z2 #HP #Microsoft #HyperV #SystemCenter" +"microsoft","neutral","126713704099221504","Wed Oct 19 17:37:55 +0000 2011","RT @sharepoinTony: @hersheytech @kofax @collabware @avepoint @nintex engaged w >40 customers #SharePoint ECM Summit at the #Microsoft ..." +"microsoft","neutral","126713363689517056","Wed Oct 19 17:36:34 +0000 2011","RT @TheCloudNetwork: #Microsoft #Cloud Steve Ballmer At Web 2.0: Microsoft CEO Disses Android, iPhone: According to GeekWire, Ba... http ..." +"microsoft","neutral","126713356739547136","Wed Oct 19 17:36:33 +0000 2011","@hersheytech @kofax @collabware @avepoint @nintex engaged w >40 customers #SharePoint ECM Summit at the #Microsoft Tech Center Irvine" +"microsoft","neutral","126713203240607744","Wed Oct 19 17:35:56 +0000 2011","RT @DaveBakerD2L: RT @goVHS: #Microsoft has just released a case study featuring @goVHS and @Desire2Learn. 
http://t.co/iG4Qm3Sn #goVHS #D2L" +"microsoft","neutral","126712975309537280","Wed Oct 19 17:35:02 +0000 2011","New #HPVirtualSystem 4 #Microsoft enhances development, deployment & mgmnt of IT http://t.co/Q3cbxvKb" +"microsoft","neutral","126711721376235520","Wed Oct 19 17:30:03 +0000 2011","By 4:30pm: For E-Commerce Times, Erika Morphy seeks experts re: #Microsoft ""Playful Learning"": pros/cons for kids: http://t.co/WPO9l63f" +"microsoft","neutral","126710782615494657","Wed Oct 19 17:26:19 +0000 2011","Steve Ballmer At Web 2.0: #Microsoft CEO Disses Android, #iPhone http://t.co/JivkVbAh #sem #mobile" +"microsoft","neutral","126710485604237313","Wed Oct 19 17:25:08 +0000 2011","#Microsoft CEO on #Android: You Have to Be a #Computer #Scientist to Use It - huffington post http://t.co/7PDBvauM #tech #mobile" +"microsoft","neutral","126709729924874241","Wed Oct 19 17:22:08 +0000 2011","#microsoft" +"microsoft","neutral","126709714565349376","Wed Oct 19 17:22:04 +0000 2011","With #Holodesk from #Microsoft research, you can manipulate 3-D, virtual images with your hands http://t.co/lTws9xvx" +"microsoft","neutral","126708211108683776","Wed Oct 19 17:16:06 +0000 2011","Spend $499 at a Microsoft Store and Get a Free Windows Phone http://t.co/1zRhXnVd #wp7 #wp7dev #windowsphone #Microsoft" +"microsoft","neutral","126708014081257472","Wed Oct 19 17:15:19 +0000 2011","RT @WinBetaDotOrg: Microsoft giving away free Windows Phones with a $499 purchase http://t.co/K2qYY9ex #Microsoft #Windows_Phone" +"microsoft","neutral","126707947744145409","Wed Oct 19 17:15:03 +0000 2011","What’s new in the #Microsoft world after #bldwin? Find out at #DevReach in Europe. Register at" +"microsoft","neutral","126707946955616256","Wed Oct 19 17:15:03 +0000 2011","RT @jblank23: #Microsoft Turns Your Body Into A Touchscreen @PSFK: http://t.co/GMT9sLfN via @AddThis" +"microsoft","neutral","126707601504346112","Wed Oct 19 17:13:40 +0000 2011","#Microsoft, Steve Ballmer, speaks at Web 2.0 Summit in San Francisco: http://t.co/awtf2xJW #Bing #Android #Google #Apple #Amazon" +"microsoft","neutral","126707418536218624","Wed Oct 19 17:12:57 +0000 2011","#Microsoft Turns Your Body Into A Touchscreen @PSFK: http://t.co/GMT9sLfN via @AddThis" +"microsoft","neutral","126707389142544384","Wed Oct 19 17:12:50 +0000 2011","#Microsoft's Ballmer Swipes at #Google http://t.co/Ve3LMoTn" +"microsoft","neutral","126706701968736256","Wed Oct 19 17:10:06 +0000 2011","Microsoft giving away free Windows Phones with a $499 purchase http://t.co/K2qYY9ex #Microsoft #Windows_Phone" +"microsoft","neutral","126706701306052609","Wed Oct 19 17:10:06 +0000 2011","Microsoft giving away free Windows Phones with a $499 purchase http://t.co/GHPcGCpX #Microsoft #Windows_Phone" +"microsoft","neutral","126706255413772288","Wed Oct 19 17:08:20 +0000 2011","#SIGSource Dawn Evans interviewing #Microsoft's Tim McBride in SIG's Inside Source exec video series. In a new role in financial operations" +"microsoft","neutral","126705857206562816","Wed Oct 19 17:06:45 +0000 2011","#Microsoft #Cloud Steve Ballmer At Web 2.0: Microsoft CEO Disses Android, iPhone: According to GeekWire, Ba... http://t.co/5L3vWLZJ #TCN" +"microsoft","neutral","126705440640860161","Wed Oct 19 17:05:05 +0000 2011","$4.5B #Nortel #IP Deal Still Under #DOJ Review: #Microsoft Exec (via @IPLaw360) http://t.co/DGIkwyoi #in" +"microsoft","neutral","126705187942449152","Wed Oct 19 17:04:05 +0000 2011","first steps to holographic user interfaces? 
http://t.co/XkavtOm9 #microsoft #research" +"microsoft","neutral","126705130681794560","Wed Oct 19 17:03:51 +0000 2011","New post: Touch Everywhere http://t.co/KKDup8v1 #microsoft #research #touch" +"microsoft","neutral","126704776506380288","Wed Oct 19 17:02:27 +0000 2011","Two Extremes of Touch Interaction #Microsoft #future #fb http://t.co/Di9QkGa5" +"microsoft","neutral","126704323542523904","Wed Oct 19 17:00:39 +0000 2011","@MSPartners can join: BizSpark http://t.co/YTyl8Wm0 or WebsiteSpark http://t.co/10AzXP9q! #Microsoft #MPN" +"microsoft","neutral","126704037738459136","Wed Oct 19 16:59:31 +0000 2011","Ballmer's vision for #Microsoft http://t.co/MWlPnWtM" +"microsoft","neutral","126703842732683265","Wed Oct 19 16:58:44 +0000 2011","RT @imason_inc: Search Governance in #SharePoint 2010 http://t.co/dPlbPQHO #microsoft" +"microsoft","neutral","126703823837335552","Wed Oct 19 16:58:40 +0000 2011","CNET » #Microsoft #OmniTouch allows multitouch input on arbitrary, everyday surfaces » http://t.co/PbwdSTmb http://t.co/0D4xV6D3" +"microsoft","neutral","126703372207271936","Wed Oct 19 16:56:52 +0000 2011","RT @calvinnieh: Learn how to keep Microsoft #Exchange costs low today at 11am Pacific. Win with #Microsoft and #NetApp Webcast @ http:/ ..." +"microsoft","neutral","126702051001516032","Wed Oct 19 16:51:37 +0000 2011","Search Governance in #SharePoint 2010 http://t.co/xtqv4DW5 #microsoft" +"microsoft","neutral","126701413907701760","Wed Oct 19 16:49:05 +0000 2011","RT @HersheyTech: @tomcastiglia and @sharepointony now presenting on #SharePoint upgrade planning at our #SharePoint ECM Summit at the #M ..." +"microsoft","neutral","126701318978019328","Wed Oct 19 16:48:43 +0000 2011","New #70-680 #Exam #Questions are available @techxams http://t.co/S0VxsRuU #Microsoft" +"microsoft","neutral","126701312447496192","Wed Oct 19 16:48:41 +0000 2011","#Microsoft applies for celebrity search #patent. #bing-o http://t.co/vaDvkMSk #HermanCain.co directs you to...#RonPaul http://t.co/8Qgp5J48" +"microsoft","neutral","126701230331412480","Wed Oct 19 16:48:21 +0000 2011","RT @iphone4S_nerd: #Microsoft’s Ballmer: Android phones are boring, overcomplicated http://t.co/xyM8fxD0 via @BGR" +"microsoft","neutral","126700724389281792","Wed Oct 19 16:46:21 +0000 2011","Nokia to reveal multiple Windows Phone devices next http://t.co/0CIUtecX #mobile #nokia #microsoft" +"microsoft","neutral","126700451163942912","Wed Oct 19 16:45:16 +0000 2011","#Microsoft headquarters tomorrow! #Blogalicious @beblogalicious" +"microsoft","neutral","126699934694129664","Wed Oct 19 16:43:13 +0000 2011","Microsoft's CEO, Steve Ballmer critics the user friendliness of Android. #Android #Google #Microsoft #WP7 http://t.co/rbEv95eE" +"microsoft","neutral","126699730028855296","Wed Oct 19 16:42:24 +0000 2011","Learn how to keep Microsoft #Exchange costs low today at 11am Pacific. Win with #Microsoft and #NetApp Webcast @ http://t.co/3NJAayMp" +"microsoft","neutral","126699437711040512","Wed Oct 19 16:41:14 +0000 2011","#Microsoft launching the OmniTouch, how funny I though that was an @ALUEnterprise solution! 
#copyright http://t.co/5ExkV0L7 via @l_evenement" +"microsoft","neutral","126699406765465601","Wed Oct 19 16:41:07 +0000 2011","Check out this great Blog w/ Tips from #YouSendIt - what 2 Consider When Migrating 2 #Microsoft Exchange 2010 http://t.co/jw1UbODI @YSI" +"microsoft","neutral","126699405356171264","Wed Oct 19 16:41:06 +0000 2011","Check out this great Blog w/ Tips from #YouSendIt - what 2 Consider When Migrating 2 #Microsoft Exchange 2010 http://t.co/eeF3780B @YSI" +"microsoft","neutral","126699162879275008","Wed Oct 19 16:40:09 +0000 2011","Survivable Branch Appliances & Basic Hybrid Gateways for #Microsoft #Lync and Office Communications Server http://t.co/MqUojAAf #Audiocodes" +"microsoft","neutral","126698863447916544","Wed Oct 19 16:38:57 +0000 2011","Why do Linux fanatics want to make Windows 8 less secure? http://t.co/OGkaYZO7 #linux #microsoft" +"microsoft","neutral","126698757503991808","Wed Oct 19 16:38:32 +0000 2011","Microsoft applies for celebrity search patent #microsoft #bing: Microsoft has applied for a new patent that take... http://t.co/4qoQqDCY" +"microsoft","neutral","126698756845481984","Wed Oct 19 16:38:32 +0000 2011","Microsoft applies for celebrity search patent #microsoft #bing http://t.co/5KfqKfgo #microsoft" +"microsoft","neutral","126698755884978177","Wed Oct 19 16:38:32 +0000 2011","Microsoft applies for celebrity search patent #microsoft #bing http://t.co/WABa8K9B" +"microsoft","neutral","126698717930717184","Wed Oct 19 16:38:22 +0000 2011","RT @TechZader: Microsoft applies for celebrity search patent #microsoft #bing http://t.co/5LbarN3n" +"microsoft","neutral","126698707252027392","Wed Oct 19 16:38:20 +0000 2011","Microsoft applies for celebrity search patent #microsoft #bing http://t.co/7kAorRKZ" +"microsoft","neutral","126698704290852865","Wed Oct 19 16:38:19 +0000 2011","Microsoft applies for celebrity search patent #microsoft #bing http://t.co/mfgQuZKU #neowin" +"microsoft","neutral","126698703410040834","Wed Oct 19 16:38:19 +0000 2011","Microsoft applies for celebrity search patent #microsoft #bing http://t.co/5LbarN3n" +"microsoft","neutral","126698504910413827","Wed Oct 19 16:37:32 +0000 2011","Securing #Microsoft Exchange 2010 with #SSL Certificate http://t.co/DAwnAbvX #Exchange2010 #ITSecurity" +"microsoft","neutral","126698182141947904","Wed Oct 19 16:36:15 +0000 2011","Need info - #Microsoft #CRM - then check this out - http://t.co/seRMv1o7" +"microsoft","neutral","126697870689710080","Wed Oct 19 16:35:00 +0000 2011","Not sure how I feel about the Black Keys playing at the U-Village Microsoft store. Guess they *are* getting paid, that's good. #Microsoft" +"microsoft","neutral","126697419953012737","Wed Oct 19 16:33:13 +0000 2011","http://t.co/J3p3KYHf +#Apple #Google #Microsoft #Blackberry #Nokia #HTC #Samsung #Motorola #WindowsPhone7 #iPhone #Android #Droid #Tablets" +"microsoft","neutral","126697038187474944","Wed Oct 19 16:31:42 +0000 2011","will be at the London #Microsoft Partner Business Briefing tomorrow - see some of you there :)" +"microsoft","neutral","126696856959983616","Wed Oct 19 16:30:59 +0000 2011","Microsoft turns your body into a touchscreen http://t.co/8LbJuq4y #Microsoft" +"microsoft","neutral","126696843299135488","Wed Oct 19 16:30:56 +0000 2011","@DaniWeb MS business intelligence development studio. Where u create SSRS reports and SSIS packages. 
Wonderful world of #microsoft :)" +"microsoft","neutral","126696759077511168","Wed Oct 19 16:30:35 +0000 2011","Microsoft turns your body into a touchscreen http://t.co/j1T6kHgP #Microsoft" +"microsoft","neutral","126696757294927874","Wed Oct 19 16:30:35 +0000 2011","Microsoft turns your body into a touchscreen http://t.co/YE0NZy82 #Microsoft" +"microsoft","neutral","126696688046968832","Wed Oct 19 16:30:19 +0000 2011","Join Brainware in ORLANDO 11/2-discuss financial best practices, automation tech w/speakers from #disney and #microsoft http://t.co/gLYzOXMc" +"microsoft","neutral","126696671894704129","Wed Oct 19 16:30:15 +0000 2011","Did you know there is a #Microsoft Learning facebook page where you can find deals and course info #MCTSummitNA" +"microsoft","neutral","126696627955171328","Wed Oct 19 16:30:04 +0000 2011","Tool to allow users to apply #DataQuality rules directly from MS Excel. Learn more @infamarketplace. http://t.co/jkZqrnO6 #Excel #Microsoft" +"microsoft","neutral","126696501849227264","Wed Oct 19 16:29:34 +0000 2011","@tomcastiglia and @sharepointony now presenting on #SharePoint upgrade planning at our #SharePoint ECM Summit at the #Microsoft Tech Center" +"microsoft","neutral","126696344881594368","Wed Oct 19 16:28:57 +0000 2011","#Microsoft are patenting applying personas of celebrities to #Bing results http://t.co/JBmsaU6O" +"microsoft","neutral","126695550975356928","Wed Oct 19 16:25:47 +0000 2011","Check out this Interesting Blog on Tips from #YouSendIt 4 wht 2 Consider When Migrating 2 #Microsoft Exchange 2010 http://t.co/yLF30h0h" +"microsoft","neutral","126695264487604224","Wed Oct 19 16:24:39 +0000 2011","RT @sholliman: #SIGSource #Microsoft's Tami Reller says get on the bus and if you don't want to be on the bus, get off the road." +"microsoft","neutral","126694893841158145","Wed Oct 19 16:23:11 +0000 2011","Learn how you can now upload SRS reports to #Microsoft CRM 2011 Online. #CRM2011 http://t.co/RrcF9W2V" +"microsoft","neutral","126694267560271872","Wed Oct 19 16:20:41 +0000 2011","#SIGSource #Microsoft's Tami Reller says get on the bus and if you don't want to be on the bus, get off the road." +"microsoft","neutral","126693945123147776","Wed Oct 19 16:19:25 +0000 2011","Yahoo! Just received welcome letter to join #TFS Preview. Thanks #Microsoft" +"microsoft","neutral","126693187346632704","Wed Oct 19 16:16:24 +0000 2011","Does anyone out there have some good online training or tips on using InforPath 2010 to build forms with rules? #Microsoft #SharePoint" +"microsoft","neutral","126693021365452800","Wed Oct 19 16:15:44 +0000 2011","Check out our newly revamped Testimonials page for more information on how #Microsoft #Dynamics #CRM can work fo... http://t.co/nnvy8NP2" +"microsoft","neutral","126692888741548032","Wed Oct 19 16:15:13 +0000 2011","Print Ads Dead ? 
Not with #Microsoft #TAG on your ad learn more here - http://t.co/vfZRTkaa" +"microsoft","neutral","126692854981595136","Wed Oct 19 16:15:05 +0000 2011","#Microsoft Is Getting More Serious About Daily (#Bing) #Deals http://t.co/yt8AW0gK" +"microsoft","neutral","126692811859963904","Wed Oct 19 16:14:54 +0000 2011","Nokia 800 “Searay†Windows Phone press shots leak http://t.co/HwZzIEKk #microsoft #windows" +"microsoft","neutral","126692357277102080","Wed Oct 19 16:13:06 +0000 2011","A Child’s-Eye View Of #Microsoft’s #Kinect For Kids: http://t.co/xW1ACTtj | #solutions #Kasper" +"microsoft","neutral","126691842736668673","Wed Oct 19 16:11:03 +0000 2011","Microsoft CEO Steve Ballmer confirms Nokia Windows Phone devices due next week http://t.co/rDTzumtZ #Microsoft #Nokia" +"microsoft","neutral","126691830774505472","Wed Oct 19 16:11:00 +0000 2011","#microsoft 1st ever global ad campaign:great full circle focus on integrated product connectivity and global relevance http://t.co/ZCevlB1a" +"microsoft","neutral","126691604839927808","Wed Oct 19 16:10:07 +0000 2011","#Breakfast at #Microsoft with #PMs in SF 10/25: http://t.co/JXpUVecS" +"microsoft","neutral","126691578361298945","Wed Oct 19 16:10:00 +0000 2011","Check out our newly revamped Testimonials page for more information on how #Microsoft #Dynamics #CRM can work for you! http://t.co/hmT51bdf" +"microsoft","neutral","126691206976643072","Wed Oct 19 16:08:32 +0000 2011","#HTC Arrive #Windows Phone (#Sprint) Review http://t.co/CdkPyjZV #windowsphone7 #xboxlive #mobile #cdma #microsoft" +"microsoft","neutral","126691146209558528","Wed Oct 19 16:08:17 +0000 2011","What to Consider When Migrating to #Microsoft Exchange 2010: http://t.co/jmyt6myL #yousendit booth 624 #gartnersym" +"microsoft","neutral","126691006207893505","Wed Oct 19 16:07:44 +0000 2011","RT @NeowinFeed: Steve Ballmer slams Android #microsoft #steveballmer http://t.co/GQqyQbaG #neowin" +"microsoft","neutral","126690823306870784","Wed Oct 19 16:07:00 +0000 2011","#Microsoft Explains and Refines the #Windows 8 Start Screen - http://t.co/IIlXENKI #ux #ixd" +"microsoft","neutral","126690446872297473","Wed Oct 19 16:05:30 +0000 2011","Steve Ballmer slams Android #microsoft #steveballmer: Microsoft CEO Steve Ballmer was highly critical of Google'... 
http://t.co/dgNBSbuJ" +"microsoft","neutral","126690445119070210","Wed Oct 19 16:05:30 +0000 2011","Steve Ballmer slams Android #microsoft #steveballmer http://t.co/iYTWIZzv #microsoft" +"microsoft","neutral","126690443588153345","Wed Oct 19 16:05:30 +0000 2011","Steve Ballmer slams Android #microsoft #steveballmer http://t.co/fD1m8vN1" +"microsoft","neutral","126690396720988160","Wed Oct 19 16:05:19 +0000 2011","Steve Ballmer slams Android #microsoft #steveballmer http://t.co/rVTTHD7P" +"microsoft","neutral","126690392568639488","Wed Oct 19 16:05:18 +0000 2011","Steve Ballmer slams Android #microsoft #steveballmer http://t.co/GQqyQbaG #neowin" +"microsoft","neutral","126690357042884608","Wed Oct 19 16:05:09 +0000 2011","Check out our podacst regarding Sugar's Edge In Tems Of Database Structure http://t.co/Qv4DAw7A #Microsoft CRM" +"microsoft","neutral","126690074946568192","Wed Oct 19 16:04:02 +0000 2011","Steve Ballmer slams Android #microsoft #steveballmer http://t.co/wfSSexks" +"microsoft","neutral","126689592832294913","Wed Oct 19 16:02:07 +0000 2011","#Microsoft Stores offering a free Windows Phone with a $499 purchase http://t.co/nfpvegDK #wp7 #windowsphone #wp75 #mango #msft" +"microsoft","neutral","126689002114924544","Wed Oct 19 15:59:46 +0000 2011","The Great Tech War Of 2012 : http://t.co/RbEvrtfz #Tech #Mobile #2012 #Wireless #Apple #Google #Microsoft" +"microsoft","neutral","126688055691190274","Wed Oct 19 15:56:00 +0000 2011","New #snom Webinar Nov. 2: Accelerate #Microsoft #Lync Deployments with snom #UC. Register here http://t.co/FYyIIMIX #VARs #phones" +"microsoft","neutral","126687847762771970","Wed Oct 19 15:55:11 +0000 2011","#Europe Approves #Microsoft Takeover of #Skype -http://mrkt.ms/r9CGEO #UK #London" +"microsoft","neutral","126686806585520128","Wed Oct 19 15:51:03 +0000 2011","Isn't this a little too much? +http://t.co/ILWPs3f9 #xbox #microsoft #technology" +"microsoft","neutral","126686450019344384","Wed Oct 19 15:49:38 +0000 2011","@allyfish87 leaning that way for battery. Have #Microsoft xbox, media pc and zune so would a Droid fit better? #idk thanks tho :)" +"microsoft","neutral","126686367479631872","Wed Oct 19 15:49:18 +0000 2011","RT @Microsoft_EDU: Quality #education is vital. Attend #Microsoft & #Desire2Learn session at #EDUCAUSE http://t.co/Gym6pUUx #EDU11" +"microsoft","neutral","126686111656448000","Wed Oct 19 15:48:17 +0000 2011","#Microsoft should follow suit with #Bing and #Windows Live http://t.co/9s0f4SNn" +"microsoft","neutral","126686012884787200","Wed Oct 19 15:47:53 +0000 2011","Esse prof acha que ninguém sabe mexer no #microsoft word" +"microsoft","neutral","126685318899449856","Wed Oct 19 15:45:08 +0000 2011","#TonyBates on #Microsoft Acquisition of #Skype via Microsoft - http://t.co/GtoJxLhH" +"microsoft","neutral","126685194278289408","Wed Oct 19 15:44:38 +0000 2011","Check this video out -- Steve Ballmer: We were lucky not to buy Yahoo! ALALUYA http://t.co/V9rmRAUh #microsoft #yahoo #SteveBallmer" +"microsoft","neutral","126685077823422466","Wed Oct 19 15:44:10 +0000 2011","Ballmer says you need to be a computer scientist to use Android http://t.co/TlKWhOsd #android #android #microsoft" +"microsoft","neutral","126684141306650625","Wed Oct 19 15:40:27 +0000 2011","Geek Fact for October 19th - 1998: U.S. Department of Justice antitrust case against #Microsoft began. 
#geekfact" +"microsoft","neutral","126683940764401665","Wed Oct 19 15:39:39 +0000 2011","A Child’s-Eye View Of #Microsoft’s #Kinect For #Kids http://t.co/mnamu0xF" +"microsoft","neutral","126683909722357760","Wed Oct 19 15:39:32 +0000 2011","A Child’s-Eye View Of #Microsoft’s #Kinect For #Kids http://t.co/nzhbzzW3 via @TechCrunch" +"microsoft","neutral","126683288239751169","Wed Oct 19 15:37:04 +0000 2011","Nokia 800 aka Nokia Searay renders http://t.co/NwI63GhB #wp7 #nokia #microsoft #mango" +"microsoft","neutral","126683225123852288","Wed Oct 19 15:36:49 +0000 2011","As mainly a #Microsoft #business user is it OK to like #IBM #ibmconnections ?" +"microsoft","neutral","126681783222808578","Wed Oct 19 15:31:05 +0000 2011","RT @FierceWireless: #Microsoft's Ballmer promises #Nokia Windows phones next week, slams #Android. http://t.co/OH8Btu2e #WP7 #Window..." +"microsoft","neutral","126680710403719168","Wed Oct 19 15:26:49 +0000 2011","#Microsoft’s Ballmer: Android phones are boring, overcomplicated http://t.co/xyM8fxD0 via @BGR" +"microsoft","neutral","126680178196877312","Wed Oct 19 15:24:42 +0000 2011","@gsmarena_com Talk, talk, talk... release the phone already, before they are completely obsolete #Nokia #Microsoft" +"microsoft","neutral","126680085959942144","Wed Oct 19 15:24:20 +0000 2011","#Microsoft's Ballmer promises #Nokia Windows phones next week, slams #Android. http://t.co/5DOmls5e #WP7 #WindowsPhone" +"microsoft","neutral","126679050742800385","Wed Oct 19 15:20:13 +0000 2011","#Microsoft is working with local Thai partner to show how to apply a location-based IT system to address flood crisis (Bangkok Post)" +"microsoft","neutral","126678934237626368","Wed Oct 19 15:19:46 +0000 2011","""Metro (design language)"" http://t.co/r7RdW1PH Microsoft took inspiration from King County Metro! #microsoft #metro #windows8 #bus" +"microsoft","neutral","126678725491294209","Wed Oct 19 15:18:56 +0000 2011","Microsoft's Ballmer Trashes Android, Touts Skype http://t.co/VkFnwmTJ #microsoft #skype #android" +"microsoft","neutral","126678690334638080","Wed Oct 19 15:18:48 +0000 2011","RT @gkumaran: #EMC White paper on Automated Perf Optimization for #Microsoft Applications with EMC #VMAX, FAST VP and #HyperV #SCVMM htt ..." +"microsoft","neutral","126678683393081344","Wed Oct 19 15:18:46 +0000 2011","The NEW #Microsoft Action Pack gives @MSPartners access to software, development tools, training and market visibility!" +"microsoft","neutral","126678622449831937","Wed Oct 19 15:18:31 +0000 2011","RT @PacktEnterprise: Book Published! #Microsoft #SharePoint 2010 Power User Cookbook: http://t.co/A8ZqMpYP @spmentor http://t.co/v0mO0VkQ" +"microsoft","neutral","126678454639927297","Wed Oct 19 15:17:51 +0000 2011","VIDEO: #Microsoft CEO Steve Ballmer talks at #Web2.0Summit about #Yahoo, Bing, and more. http://t.co/YEAXVP1V" +"microsoft","neutral","126677913058820097","Wed Oct 19 15:15:42 +0000 2011","Microsoft's Steve Ballmer attacks Android phones http://t.co/bkgep6zm #Android #Microsoft #SteveBallmer #Google" +"microsoft","neutral","126677721647554561","Wed Oct 19 15:14:57 +0000 2011","Blessing in disguise. Ballmer feels lucky #Microsoft didn't buy #Yahoo in 2008 http://t.co/2Acd5GOz #business" +"microsoft","neutral","126677403480231937","Wed Oct 19 15:13:41 +0000 2011","Are you an digital expert? Or an online amateur? 
- http://t.co/oizGNXGT #Microsoft" +"microsoft","neutral","126676455936622593","Wed Oct 19 15:09:55 +0000 2011","A Web service developed by #Microsoft Research lets people curate their own personal history | http://t.co/TYAdzHQx" +"microsoft","neutral","126676302873899008","Wed Oct 19 15:09:18 +0000 2011","@Cyruss1989 @wpcentral I think they're referring to the obvious partnership between #Microsoft and #Nokia aka #Bing and #Navteq." +"microsoft","neutral","126675994378637312","Wed Oct 19 15:08:05 +0000 2011","RT @SymonPerriman: Sign up for online #Microsoft #HyperV #Virtualization 70-659 training on Nov. 2. It is $99, has a $150 exam voucher! ..." +"microsoft","neutral","126675231896117248","Wed Oct 19 15:05:03 +0000 2011","#Ballmer: We're Lucky #Microsoft Didn't Buy #Yahoo: http://t.co/oHbevXdP | #CEO #John #Steve #Web #acquisition #comment #conference" +"microsoft","neutral","126674948671545344","Wed Oct 19 15:03:55 +0000 2011","""You don't have to be a computer scientist to use a Windows Phone,"" said #Microsoft 's #SteveBallmer, ""but you do to use an Android phone.""" +"microsoft","neutral","126674919969923072","Wed Oct 19 15:03:49 +0000 2011","RT @Startpack_en: Guys from #Microsoft tell #Startpack developers about how to integrates with #SystemCenter via web services. http://t. ..." +"microsoft","neutral","126674748674547712","Wed Oct 19 15:03:08 +0000 2011","Sign up for online #Microsoft #HyperV #Virtualization 70-659 training on Nov. 2. It is $99, has a $150 exam voucher! +http://t.co/zcHV6TaC" +"microsoft","neutral","126673949907107840","Wed Oct 19 14:59:58 +0000 2011","Guys from #Microsoft tell #Startpack developers about how to integrates with #SystemCenter via web services. http://t.co/yziwv1wd" +"microsoft","neutral","126673753680773122","Wed Oct 19 14:59:11 +0000 2011","#Microsoft quarterly earnings out tomorrow, here's what to expect: http://t.co/YpTCBeON" +"microsoft","neutral","126673364231270400","Wed Oct 19 14:57:38 +0000 2011","#Windows8 #Microsoft please implement a UAC prompt when a non elevated process drops on elevated process instead of failing silently." +"microsoft","neutral","126672797731790848","Wed Oct 19 14:55:23 +0000 2011","#Microsoft #Cloud Microsoft launches Virtual Academy in Kenya: Microsoft East and Southern Africa launched ... http://t.co/GllUQiTJ #TCN" +"microsoft","neutral","126671949320556544","Wed Oct 19 14:52:00 +0000 2011","#MICROSOFT MULTIMEDIA STRAVINSKY : THE RITE OF SPRING (CD-ROM) [VERSION 1.0] Review: MICROSOFT MULTIMEDIA STRAVI... http://t.co/iX4FEcM2" +"microsoft","neutral","126671882211692545","Wed Oct 19 14:51:44 +0000 2011","#Microsoft Service Pack installer want's my notebook to be plugged in. Why? Plugged it in to start installation and plugged it out. Worked." +"microsoft","neutral","126671654540677122","Wed Oct 19 14:50:50 +0000 2011","#Microsoft's Ballmer Trashes Android, Touts Skype + http://t.co/96BMOxv4" +"microsoft","neutral","126671006302617600","Wed Oct 19 14:48:15 +0000 2011","RT @TheStreet: Ballmer: #Microsoft Beating #Google in the Cloud - http://t.co/rfiaNud0 via @gigaom" +"microsoft","neutral","126669870090829824","Wed Oct 19 14:43:45 +0000 2011","Listening to this, how much do #Microsoft miss Bill Gates? http://t.co/H4OuzdOk He is a genius, as was Jobs. Bill saw it, but Steve made it." 
+"microsoft","neutral","126669451369263106","Wed Oct 19 14:42:05 +0000 2011","White Paper: EMC Automated Performance Optimization for #Microsoft Applications http://t.co/aIK3EjWT #EMC #powerlink credential needed" +"microsoft","neutral","126668947503325184","Wed Oct 19 14:40:05 +0000 2011","Ballmer: #Microsoft Beating #Google in the Cloud - http://t.co/rfiaNud0 via @gigaom" +"microsoft","neutral","126668764199665664","Wed Oct 19 14:39:21 +0000 2011","High Tech Director of Business Development *selling consulting services* #Oracle #Microsoft #SAP #SanFrancisco #jobs http://t.co/OWf25yJB" +"microsoft","neutral","126668119354781696","Wed Oct 19 14:36:47 +0000 2011","What the world needs is an ""IT/Programmers"" channel/web feed 24 hours a day - tune in to watch people coding/discussing IT #Microsoft" +"microsoft","neutral","126667815116750848","Wed Oct 19 14:35:35 +0000 2011","#Microsoft HOY What’s Next Stream: Windows PC http://t.co/2MQ0I0Cm ➨ @MicrosoftLatam" +"microsoft","neutral","126667274080894976","Wed Oct 19 14:33:26 +0000 2011","MindTree Introduces 'mPromo' Trade Promotion Solution on #Microsoft #Azure for the Consumer Packaged Goods industry. http://t.co/H1A3V7zv" +"microsoft","neutral","126666679785766914","Wed Oct 19 14:31:04 +0000 2011","@markcullum1 - Because it's #microsoft....." +"microsoft","neutral","126666425715798016","Wed Oct 19 14:30:03 +0000 2011","#Microsoft's First Global Multi-Product Holiday Campaign http://t.co/6nUFdiBN" +"microsoft","neutral","126665951172243456","Wed Oct 19 14:28:10 +0000 2011","Take time to sign this and also please pass it on. Thank You : http://t.co/euW1RSnb R.I.P Jeremey Latiola. #steve jobs #microsoft" +"microsoft","neutral","126665581519835136","Wed Oct 19 14:26:42 +0000 2011","First time a technology company is in the running for marketer of the year. Vote David: http://t.co/xT2SWSxg #Microsoft @getitdone" +"microsoft","neutral","126664658651332608","Wed Oct 19 14:23:02 +0000 2011","RT @KnowYourMobile Steve Ballmer says new Windows Phones will launch at Nokia World event | http://t.co/uQghSamj #WP7 #microsoft" +"microsoft","neutral","126664524156764160","Wed Oct 19 14:22:30 +0000 2011","Dear #Microsoft - seize the moment - focus on developing Windows7 phones for enterprise solutions. Succeed where #RIM is failing." +"microsoft","neutral","126664410029764608","Wed Oct 19 14:22:03 +0000 2011","#Infosys kicks off its #cloud drive with #Microsoft partnership" +"microsoft","neutral","126664404891746304","Wed Oct 19 14:22:02 +0000 2011","#Microsoft CEO Steve Ballmer on Not Buying Yahoo: “Sometimes, You’re Lucky†» http://t.co/pxHqdSav" +"microsoft","neutral","126663746767699969","Wed Oct 19 14:19:25 +0000 2011","RT @MobileTechWorld: Jet Car Stunts Xbox Live game for Windows Phone released http://t.co/O3hYxBxh #wp7 #xbox #microsoft" +"microsoft","neutral","126662968434900992","Wed Oct 19 14:16:19 +0000 2011","Ballmer On Not Buying Yahoo: +http://t.co/ypM4GXf9 + +#Microsoft #Yahoo #Skype #Xbox #Google #Bing #Ballmer" +"microsoft","neutral","126662905084129280","Wed Oct 19 14:16:04 +0000 2011","#Google ##Mobile Boss Andy Rubin on #Apple, #Microsoft and #Tablets http://t.co/rSnSG5nt #techgadgets #android" +"microsoft","neutral","126661971566264320","Wed Oct 19 14:12:21 +0000 2011","You are welcome to join our LIVE free webinar right now. It's about #Citrix monitoring through #Microsoft #SCOM: http://t.co/Sr7pIP9B" +"microsoft","neutral","126660704496402433","Wed Oct 19 14:07:19 +0000 2011","#Occupy #Microsoft nyc! 
windows phone camp" +"microsoft","neutral","126659682076082176","Wed Oct 19 14:03:16 +0000 2011","RT @mashable: #Microsoft CEO Steve Ballmer on Not Buying #Yahoo: ""Sometimes, You're Lucky"" - http://t.co/afWazWOy" +"microsoft","neutral","126659577033928704","Wed Oct 19 14:02:51 +0000 2011","RT @TechEd_NA: Join #Microsoft Experts in 1 hr for a live technical chat on #WindowsPhone #Mango Live Tiles http://t.co/YPzLTs50 #wpdev ..." +"microsoft","neutral","126659465985536000","Wed Oct 19 14:02:24 +0000 2011","Join #Microsoft Experts in 1 hr for a live technical chat on #WindowsPhone #Mango Live Tiles http://t.co/YPzLTs50 #wpdev #wp7 #msteched" +"microsoft","neutral","126659266315698177","Wed Oct 19 14:01:36 +0000 2011","Microsoft's Kinect brings 'playful learning' to the Xbox 360 http://t.co/AoOj8CaI #microsoft #kinect #xbox #socialmedia" +"microsoft","neutral","126658871363239936","Wed Oct 19 14:00:02 +0000 2011","Skype and Microsoft Team Up - A match made in heaven? - http://t.co/lsFTp8RL #microsoft #skype # videoconferencing #socialmedia" +"microsoft","neutral","126658110084489216","Wed Oct 19 13:57:01 +0000 2011","#itasum11 - Gadgeteer http://t.co/ecWX3Ajl - The New Way to make inspire and stimulate learning #edugeek #kinext #microsoft" +"microsoft","neutral","126657343965507584","Wed Oct 19 13:53:59 +0000 2011","Book Published! #Microsoft #SharePoint 2010 Power User Cookbook: http://t.co/A8ZqMpYP @spmentor http://t.co/v0mO0VkQ" +"microsoft","neutral","126656580715429888","Wed Oct 19 13:50:56 +0000 2011","Gartner BI 2011: Microsoft and Oracle! http://t.co/ujN5j8dW #Gartner #BI # Oracle #Microsoft" +"microsoft","neutral","126656236157538304","Wed Oct 19 13:49:34 +0000 2011","#Microsoft Zune - Digital player - HDD 30 GB - WMA, AAC, #MP3 - video playback - display: 3… http://t.co/qgzHlLpv" +"microsoft","neutral","126656052971323393","Wed Oct 19 13:48:50 +0000 2011","Early cloud #Microsoft partners will be rewarded w/ a Cloud Accelerate badge, & additional internal use rights! http://t.co/pjkyf0gf" +"microsoft","neutral","126655887740903424","Wed Oct 19 13:48:11 +0000 2011","RT @Newsweek: The 30 Greenest Tech Companies in the World http://t.co/0xzsUR0K #GreenRankings #IBM #Intel #Cisco #Microsoft #Apple #Goog ..." +"microsoft","neutral","126655887178870784","Wed Oct 19 13:48:11 +0000 2011","FreeSoftwareFoundation publicly warns of danger of implementing #Microsoft #Restrictedboot w/o #UEFI http://t.co/NASQbmaT #anonymous #USDOR" +"microsoft","neutral","126655373015924736","Wed Oct 19 13:46:08 +0000 2011","New #snom Webinar Nov. 2: Accelerate #Microsoft #Lync Deployments with snom #UC. Register here http://t.co/B1pr1kIX #VARs #phones" +"microsoft","neutral","126654466735878144","Wed Oct 19 13:42:32 +0000 2011","Why Ice Cream Sandwich Might Make Things Worse - '#Google needs to look to #Microsoft as an example' http://t.co/7UXWvH6h via @pcmag" +"microsoft","neutral","126653861371973632","Wed Oct 19 13:40:08 +0000 2011","TODAY! 
Come along to #Microsoft Office & #ECDL Open Evening at #Pitman #Training #Cheltenham from 4pm to 7.30pm - http://t.co/fBoej2vd" +"microsoft","neutral","126653619327082496","Wed Oct 19 13:39:10 +0000 2011","MS Outlook Quick Access Toolbar - How to customize - http://t.co/VzkEdZaC #tips #microsoft" +"microsoft","neutral","126653015754149888","Wed Oct 19 13:36:46 +0000 2011","Beware phone #scam calls pretending to be #Microsoft who claim your machine has a #virus - http://t.co/5F2p4XcB" +"microsoft","neutral","126652714804461569","Wed Oct 19 13:35:34 +0000 2011","#EMC White paper on Automated Perf Optimization for #Microsoft Applications with EMC #VMAX, FAST VP and #HyperV #SCVMM http://t.co/kAHnFla7" +"microsoft","neutral","126652661251571712","Wed Oct 19 13:35:22 +0000 2011","The Cloud Offers the Ultimate Music Choice <http://t.co/wd05MHPU> #Microsoft #Emp #CloudComputing" +"microsoft","neutral","126652256404779008","Wed Oct 19 13:33:45 +0000 2011","Exciting news: #Microsoft Names @Metalogix a Managed Partner in the High Potential ISV Group http://t.co/rHkqYXo2 #sharepoint" +"microsoft","neutral","126651931287498752","Wed Oct 19 13:32:28 +0000 2011","New innotive IT program coming to #Dubai featuring @comptia and #Microsoft qualifications http://t.co/a6fLlkgE RT please" +"microsoft","neutral","126651497718095873","Wed Oct 19 13:30:44 +0000 2011","CE-Oh no he didn't: Steve Ballmer lays into Android http://t.co/uTmHyIFD #microsoft #ballmer #android" +"microsoft","neutral","126651446589526016","Wed Oct 19 13:30:32 +0000 2011","RT @cmaneu: #stage #Toulouse #Microsoft #net #SuperBoîte #Win8 #geeks = http://t.co/7UHSLQII (Please RT ;)" +"microsoft","neutral","126651412443693057","Wed Oct 19 13:30:24 +0000 2011","Read how SunGard #IntelliMatch achieved 53% ^ in performance (50 mill transactions in <25 mins) #Intel #HP #Microsoft http://t.co/2i1k64wx" +"microsoft","neutral","126651402549346304","Wed Oct 19 13:30:22 +0000 2011","#Microsoft #Cloud windows+server - RT @WesleyBackelant: Announcing the Microsoft ...: WindowsAzure : @Wesle... http://t.co/NaPViOT8 #TCN" +"microsoft","neutral","126651401010028544","Wed Oct 19 13:30:21 +0000 2011","#Microsoft #Cloud Oracle, Windows Garner Different Views over Multitenant Cloud ...: Microsoft's cloud appr... http://t.co/lVzJNRPs #TCN" +"microsoft","neutral","126651399730769922","Wed Oct 19 13:30:21 +0000 2011","#Microsoft #Cloud Getting Started with Microsoft's Windows Azure Cloud: The Lay of ...: Good tools integrat... 
http://t.co/2luPCfho #TCN" +"microsoft","neutral","126651360602112000","Wed Oct 19 13:30:12 +0000 2011","How to use search folders to organise your emails in #Microsoft #Outlook - http://t.co/NuMm5N3U" +"microsoft","neutral","126650859290501121","Wed Oct 19 13:28:12 +0000 2011","RT @MobileTechWorld: Jet Car Stunts Xbox Live game for Windows Phone released http://t.co/csUC7hVH #wp7 #xbox #microsoft" +"microsoft","neutral","126650839279476736","Wed Oct 19 13:28:07 +0000 2011","Jet Car Stunts Xbox Live game for Windows Phone released http://t.co/O3hYxBxh #wp7 #xbox #microsoft" +"microsoft","neutral","126650651370467328","Wed Oct 19 13:27:22 +0000 2011","#stage #Toulouse #Microsoft #net #SuperBoîte #Win8 #geeks = http://t.co/7UHSLQII (Please RT ;)" +"microsoft","neutral","126649108202143744","Wed Oct 19 13:21:15 +0000 2011","#Silver &#GOLD #Apple tumbles no matter how many sold #Microsoft buys them 4 a nice price so get in there show #justsayin #Droid rules inend" +"microsoft","neutral","126647567877541889","Wed Oct 19 13:15:07 +0000 2011","#Microsoft shows 'touch screen' for any surface http://t.co/sNaPW5tl" +"microsoft","neutral","126647558469722112","Wed Oct 19 13:15:05 +0000 2011","#Microsoft Community Blogs The 7/365 Review - The Cloud's Impact on Business http://t.co/gPB2MjFW" +"microsoft","neutral","126646015863427072","Wed Oct 19 13:08:57 +0000 2011","@nand_krish007 #Microsoft arrogance again - Ballmer says you need to be a computer scientist to use #Android http://t.co/pltitgeI" +"microsoft","neutral","126645991947513857","Wed Oct 19 13:08:52 +0000 2011","I gave @RyannosaurusRex +K about Microsoft Windows on @klout http://t.co/Y33v8wVr #klout #microsoft #windows" +"microsoft","neutral","126645459568705536","Wed Oct 19 13:06:45 +0000 2011","#Microsoft's Steve Ballmer attacks #Android phones: http://t.co/x2YmWESs" +"microsoft","neutral","126644332177530880","Wed Oct 19 13:02:16 +0000 2011","See how you can save 40% on dozens of #Microsoft #certification practice exams http://t.co/KojQrE1q via @EricLigman" +"microsoft","neutral","126644191299252224","Wed Oct 19 13:01:42 +0000 2011","RT @elearnnet: #microsoft #edu Microsoft's Kinect brings 'playful learning' to the Xbox 360 - The Guardian http://t.co/MW22MJOG" +"microsoft","neutral","126643947190759425","Wed Oct 19 13:00:44 +0000 2011","Changing the past and organizing the future with #Microsoft http://t.co/bYYFhtpZ" +"microsoft","neutral","126642779064504320","Wed Oct 19 12:56:06 +0000 2011","RT @HealthVault: Learn how to create and use your emergency profile on #Microsoft #HealthVault - #PHR #healthit - Step by step #Docs htt ..." 
+"microsoft","neutral","126642612412227585","Wed Oct 19 12:55:26 +0000 2011","#microsoft #edu Microsoft's Kinect brings 'playful learning' to the Xbox 360 - The Guardian http://t.co/MW22MJOG" +"microsoft","neutral","126642611598540801","Wed Oct 19 12:55:26 +0000 2011","#microsoft #edu UD School Board seats up for grabs - Delaware County Daily Times http://t.co/YCXCo5kK" +"microsoft","neutral","126642532863049728","Wed Oct 19 12:55:07 +0000 2011","#OmniTouch Projector (which is supported by #Microsoft ) Turns Anything Into a Touchscreen http://t.co/9yt5Y9Ab aracılığıyla @geekosystem" +"microsoft","neutral","126642137424076800","Wed Oct 19 12:53:33 +0000 2011","2 hours south of Richmond, VA --> #Microsoft Builds Two-Headed Data Center in Feds’ Backyard http://t.co/aWwVVoDR #cloud #tech" +"microsoft","neutral","126641727254695937","Wed Oct 19 12:51:55 +0000 2011","#SteveBallmer just called me a ""Computer Scientist"" :) http://t.co/u1zmU9Ih #Microsoft #Android" +"microsoft","neutral","126641223229386752","Wed Oct 19 12:49:55 +0000 2011","RT @AppSense: “@Simon_townsend: #Microsoft tests and proves #AppSense enterprise scalability! 78K users on one personalization DB - http ..." +"microsoft","neutral","126640775730700288","Wed Oct 19 12:48:08 +0000 2011","RT @AppSenseTechie: #Microsoft tests and proves #AppSense enterprise scalability! 78K users on one personalization DB - http://t.co/fCXdU7mA" +"microsoft","neutral","126640203828969472","Wed Oct 19 12:45:52 +0000 2011","Can you add multiple queues to the workspace in MS CRM Online 2011? ..and how? #microsoft #crm #queues #customerservice" +"microsoft","neutral","126638865971806209","Wed Oct 19 12:40:33 +0000 2011","Infosys, has tied up with Microsoft to provide cloud-based offerings to clients... http://t.co/zwqwpdQ4 #cloud #infosys #microsoft #technews" +"microsoft","neutral","126638751307931648","Wed Oct 19 12:40:05 +0000 2011","#Microsoft details #Windows8's new Start Screen Search http://t.co/Z6X5rE5q" +"microsoft","neutral","126638261421613056","Wed Oct 19 12:38:09 +0000 2011","#Google #Mobile Boss Andy Rubin on #Apple, #Microsoft and Tablets, Live at AsiaD - Ina Fried - #Mobile - AllThingsD http://t.co/S0jkqPwn" +"microsoft","neutral","126637865550618624","Wed Oct 19 12:36:34 +0000 2011","RT @MichaelKroker: Steve Ballmer: #Nokia to reveal multiple #Windows Phone devices next week, #Microsoft may make their own - http://t.c ..." +"microsoft","neutral","126637677335429121","Wed Oct 19 12:35:49 +0000 2011","Steve Ballmer: #Nokia to reveal multiple #Windows Phone devices next week, #Microsoft may make their own - http://t.co/KnmpXSW0" +"microsoft","neutral","126637437953900546","Wed Oct 19 12:34:52 +0000 2011","“@Simon_townsend: #Microsoft tests and proves #AppSense enterprise scalability! 78K users on one personalization DB - http://t.co/gyGE5SCaâ€" +"microsoft","neutral","126637411114553344","Wed Oct 19 12:34:46 +0000 2011","“@AppSenseTechie: #Microsoft tests and proves #AppSense enterprise scalability! 78K users on one personalization DB - http://t.co/jXhFHPTJâ€" +"microsoft","neutral","126636379479343106","Wed Oct 19 12:30:40 +0000 2011","#Microsoft's Ballmer: We're Building Windows Server 8 Around Cloud - well I guess thats makes sense!" +"microsoft","neutral","126636379324170240","Wed Oct 19 12:30:40 +0000 2011","RT @Simon_Townsend: #Microsoft tests and proves #AppSense enterprise scalability! 
78K users on one personalization DB - http://t.co/LwFLXASL" +"microsoft","neutral","126636194653147136","Wed Oct 19 12:29:56 +0000 2011","New #microsoft #WindowsLiveBook V2 Mobile Video + #GraphicsWorkstation http://t.co/LcUvLvAd" +"microsoft","neutral","126635954520854528","Wed Oct 19 12:28:58 +0000 2011","Apple’ cash balance = $75.9 billion, surpassing the total operating balance of the U.S. government #rich #apple #google #microsoft #money" +"microsoft","neutral","126635633245564928","Wed Oct 19 12:27:42 +0000 2011","Learned a new word ""BLOB"" regarding to #Microsoft #SharePoint moving .docs from the database to a fileserver. #vmworld #VMworld2011" +"microsoft","neutral","126635573921316864","Wed Oct 19 12:27:28 +0000 2011","hmm, that should have read 'the milky white film of busyness' :D #vs2010 #microsoft" +"microsoft","neutral","126635080100757504","Wed Oct 19 12:25:30 +0000 2011","RT @technabob: Using a #Touchscreen Through Your Pocket Made Possible by #Microsoft #PocketTouch: Have you… http://t.co/PJpFeyTF" +"microsoft","neutral","126635053139763200","Wed Oct 19 12:25:24 +0000 2011","“@Simon_townsend: #Microsoft tests and proves #AppSense enterprise scalability! 78K users on one personalization DB - http://t.co/xtZMPSMTâ€" +"microsoft","neutral","126634784326828032","Wed Oct 19 12:24:20 +0000 2011","#Microsoft tests and proves #AppSense enterprise scalability! 78K users on one personalization DB - http://t.co/fCXdU7mA" +"microsoft","neutral","126634780954595328","Wed Oct 19 12:24:19 +0000 2011","#Microsoft tests and proves #AppSense enterprise scalability! 78K users on one personalization DB - http://t.co/LwFLXASL" +"microsoft","neutral","126634396773130240","Wed Oct 19 12:22:47 +0000 2011","does anyone else get the 'milky white film of business' with Visual Studio? #vs2010 #microsoft" +"microsoft","neutral","126632917643427841","Wed Oct 19 12:16:54 +0000 2011","Using a #Touchscreen Through Your Pocket Made Possible by #Microsoft #PocketTouch: Have you… http://t.co/PJpFeyTF" +"microsoft","neutral","126632860386988033","Wed Oct 19 12:16:41 +0000 2011","After years of failures, #Microsoft is finally making money in the handheld market -- but not in the way you’d think" +"microsoft","neutral","126632280985845760","Wed Oct 19 12:14:23 +0000 2011","#Microsoft #Dynamics GP soon to offer 64-bit Hyper-V images only : http://t.co/82CFFcHZ" +"microsoft","neutral","126631882149474305","Wed Oct 19 12:12:48 +0000 2011","Annie: has just had a look at our downloaded #Windows8 op sys. Where #AppleMacs go, #Microsoft follows!!" +"microsoft","neutral","126631805288849408","Wed Oct 19 12:12:29 +0000 2011","Seeking a Manager of IT for Immediate Hire in Sterling, VA! Send resumes to julia.snik@rht.com for consideration! #dcjobs #cisco #microsoft" +"microsoft","neutral","126631188394811392","Wed Oct 19 12:10:02 +0000 2011","Ballmer: We’re beating Google in the cloud http://t.co/YFAmguRF #ballmer #google #microsoft #cloud" +"microsoft","neutral","126631077468049408","Wed Oct 19 12:09:36 +0000 2011","Good luck to #CPG team RT @MindTree_PR: #MindTree Introduces mPromo #TradePromotion Solution on #Microsoft #Azure http://t.co/aTNnITtt #CGT" +"microsoft","neutral","126630800413310976","Wed Oct 19 12:08:30 +0000 2011","#MindTree Introduces mPromo #TradePromotion Solution on #Microsoft #Azure http://t.co/LkjHqrnR #CPG #CGT" +"microsoft","neutral","126630728690704384","Wed Oct 19 12:08:13 +0000 2011","Open Virtualization Format adopted by #VMware #Microsoft and #Citrix. 
How come no mention for open source leader #RedHat? #vmworld #SEC1802" +"microsoft","neutral","126628957566156800","Wed Oct 19 12:01:11 +0000 2011","Database sizing #Microsoft #SharePoint 2010 #VMware #VMworld http://t.co/ZcdTjyrp" +"microsoft","neutral","126628527155052544","Wed Oct 19 11:59:28 +0000 2011","RT @VentureBeat: #Ballmer: “We’ve picked our play†in #social with #Skype deal http://t.co/03IHBOgj #microsoft #xbox #in" +"microsoft","neutral","126626502937812992","Wed Oct 19 11:51:25 +0000 2011","#JaccuseDZ RT @zeggar-@amaboura the entreprise version of #Microsoft #office365 is not available yet in #Algeria... http://t.co/BuWiL76A" +"microsoft","neutral","126626166261022720","Wed Oct 19 11:50:05 +0000 2011","M10135 Microsoft Course. RRP £1640 + VAT. Our Price = £1148 + VAT http://t.co/vOTLSnfX #ittraining #microsoft" +"microsoft","neutral","126625578567729152","Wed Oct 19 11:47:45 +0000 2011","CEO Steve Ballmer says that #Microsoft dodged a bullet when it failed to acquire #Yahoo! in 2008 http://t.co/nlCxoCUz" +"microsoft","neutral","126625418118832128","Wed Oct 19 11:47:06 +0000 2011","Technology 4 StartUp Britain Week: Attracting Customers With SEO and PPC http://t.co/o7tHRXPr #microsoft" +"microsoft","neutral","126625193786490881","Wed Oct 19 11:46:13 +0000 2011","@amaboura the entreprise version of #Microsoft #office365 is not available yet in #Algeria i guess" +"microsoft","neutral","126622411587190784","Wed Oct 19 11:35:10 +0000 2011","#Microsoft Takes a Shot at the #Wii in Their Newest #DanceCentral 2 Ad [Video] http://t.co/A8tlwr3o #Kinect #games" +"microsoft","neutral","126622356817981440","Wed Oct 19 11:34:57 +0000 2011","@WonderLaura, Please cld u send me steps on how to add a custom .master files through SPD 2010. i just downloaded some files from #Microsoft" +"microsoft","neutral","126622297917374464","Wed Oct 19 11:34:43 +0000 2011","#VisualStudio 11 Developer Preview Training Kit released: start learning - http://t.co/d6meZBZv #Microsoft" +"microsoft","neutral","126621573271650304","Wed Oct 19 11:31:50 +0000 2011","#Microsoft news: #SQLServer licensing will be different from december2011 | #Office365 and #Azure will be available in #Algeria in 2013" +"microsoft","neutral","126621144177577984","Wed Oct 19 11:30:07 +0000 2011","How to oranise and sort your emails with the categories feature in #Microsoft #Outlook - http://t.co/NSpqbzFZ" +"microsoft","neutral","126621142743138305","Wed Oct 19 11:30:07 +0000 2011","#Microsoft Announces New “Playful Learning†Titles - Wired News : http://t.co/nN8oietT #av" +"microsoft","neutral","126619580096462848","Wed Oct 19 11:23:55 +0000 2011","Ballmer Feels Lucky Microsoft Didn't Buy Yahoo in 2008 http://t.co/MK8pv0TN #gdapps #ballmer #microsoft #yahoo" +"microsoft","neutral","126619293977812992","Wed Oct 19 11:22:46 +0000 2011","@cloudnewsindia The War of #Email #Cloud Computing: #Google vs. #Microsoft http://t.co/zu9pSwSG" +"microsoft","neutral","126619258099744768","Wed Oct 19 11:22:38 +0000 2011","#Microsoft chief admits lucky escape from buying Yahoo! 
http://t.co/xOqL8Wqh" +"microsoft","neutral","126619256015167488","Wed Oct 19 11:22:37 +0000 2011","#Microsoft's Kinect brings 'playful learning' to the Xbox 360 http://t.co/1KAi853k" +"microsoft","neutral","126617626435780608","Wed Oct 19 11:16:09 +0000 2011","#HEUTE - #Microsoft #Office #2010 Home & Student Product Key Card [1 User] - statt 149€ nur 89,99€ - http://t.co/3QXy2Zo4" +"microsoft","neutral","126617142761230337","Wed Oct 19 11:14:13 +0000 2011","RT @mattimooz: I'm at #microsoft and been served #cake for lunch, ok not just cake but its definitely involved. You live a good life @bennuk" +"microsoft","neutral","126616809079193600","Wed Oct 19 11:12:54 +0000 2011","@MrRobinson ""hard for"" balms ""to get excited over Android phones"" because #Microsoft isn't collecting enough royalties yet. #isaidit" +"microsoft","neutral","126616636621996032","Wed Oct 19 11:12:13 +0000 2011","“@GeeCloud: Congratulations to Softline Pastel (@SageGroupZA) for winning #ISV of the Year at #Microsoft #partnersummit†@SoftlinePastel" +"microsoft","neutral","126615565673570305","Wed Oct 19 11:07:57 +0000 2011","#HEUTE - #Microsoft #Office #2010 Home & Student Product Key Card [1 User] - statt 149€ nur 89,99€ - http://t.co/YLlITFAt" +"microsoft","neutral","126615350916816896","Wed Oct 19 11:07:06 +0000 2011","#Microsoft #Cloud Getting Physical with Virtual Servers: Q. Is it important that Windows Server and Microso... http://t.co/n8ky79pQ #TCN" +"microsoft","neutral","126614792092909568","Wed Oct 19 11:04:53 +0000 2011","Build once, scale to many: #Microsoft allows creators to extend experiences to multiple touchpoints like xBox, Windows Mobile. #ULforthewin" +"microsoft","neutral","126614365280542720","Wed Oct 19 11:03:11 +0000 2011","@cloudnewsindia New Version of #Cloud InTune is Available: Posted on #Windows 7 News...#Microsoft has just..http://bit.ly/pY0hlZ" +"microsoft","neutral","126614116927422464","Wed Oct 19 11:02:12 +0000 2011","@cloudnewsindia New Version of #Cloud InTune is Available: Posted on #Windows 7 News...#Microsoft has just released..http://bit.ly/pY0hlZ" +"microsoft","neutral","126614037608935424","Wed Oct 19 11:01:53 +0000 2011","#Ballmer On Not Buying #Yahoo: “Sometimes You’re Lucky†via @techcrunch http://t.co/MrjkOXnO #Microsoft" +"microsoft","neutral","126613919623159808","Wed Oct 19 11:01:25 +0000 2011","Microsoft's Steve Ballmer attacks Android phones http://t.co/ExByurKL #windows #android #microsoft #google" +"microsoft","neutral","126613753499353088","Wed Oct 19 11:00:45 +0000 2011","#Microsoft Windows Server 2008 R2 Enterprise OEM #preisvergleich http://t.co/0jWsfMQn Bewertung softline24 #test #Microsoft" +"microsoft","neutral","126613640278315008","Wed Oct 19 11:00:18 +0000 2011","#Microsoft announces official SQL Server driver for #Linux - http://t.co/lgTx40UM #SqlServer" +"microsoft","neutral","126613630551719936","Wed Oct 19 11:00:16 +0000 2011","There is a chance? #Microsoft and #Mobile http://t.co/p4ChgGwg" +"microsoft","neutral","126613275331919872","Wed Oct 19 10:58:51 +0000 2011","#Job #ICT System Administrator Windows: GSI Consultants, Zug http://t.co/96Vi7GIu #Microsoft #IT" +"microsoft","neutral","126611886979555328","Wed Oct 19 10:53:20 +0000 2011","Steve Ballmer: Bunch of new Nokia Windows Phones coming next week http://t.co/8F2FVygU via @esphoneblog #wp7 #nokia #microsoft" +"microsoft","neutral","126611785666134016","Wed Oct 19 10:52:56 +0000 2011","RT @qmatteoq: Leaving for #smau : I'll be at the #Microsoft stand as a trainer for the #wpdev labs. 
Meet me there!" +"microsoft","neutral","126611604925194240","Wed Oct 19 10:52:13 +0000 2011","#microsoft ‘People-Centric’ Windows Phone 7.5 Update Released http://t.co/oImNAkVf" +"microsoft","neutral","126611557370183681","Wed Oct 19 10:52:02 +0000 2011","Microsoft Stores giving away free Windows Phone devices with $499 spend #microsoft #wp7 #free #msftstore http://t.co/QbYUm2Ce" +"microsoft","neutral","126611372552355841","Wed Oct 19 10:51:18 +0000 2011","Leaving for #smau : I'll be at the #Microsoft stand as a trainer for the #wpdev labs. Meet me there!" +"microsoft","neutral","126610800382181376","Wed Oct 19 10:49:01 +0000 2011","Just walked in the door of the #Microsoft #kenyaopendoor conference..." +"microsoft","neutral","126610131671715840","Wed Oct 19 10:46:22 +0000 2011","A New Landscape for Retail Banking: New report from #Efma with #Microsoft http://t.co/2XNz3F9C" +"microsoft","neutral","126610091855192064","Wed Oct 19 10:46:12 +0000 2011","Windows 7 Roadmap = #Windows #8 Roadmap: M3 final 6.1.6801 (7) and 6.2.8102 (8). #Beta at #CES? :O #Microsoft" +"microsoft","neutral","126610035655704577","Wed Oct 19 10:45:59 +0000 2011","Microsoft Stores giving away free Windows Phone devices with $499 spend http://t.co/YEVwfL4N #microsoft #windows" +"microsoft","neutral","126609904298491904","Wed Oct 19 10:45:28 +0000 2011","Big Data and the Cloud - More Hype or a Real Workload? http://t.co/WA0bP7Mx #azure #hpc #microsoft" +"microsoft","neutral","126609815278592000","Wed Oct 19 10:45:06 +0000 2011","#Microsoft acquisition of #Skype has closed. http://t.co/Vbr0CPKr Will it be integrated this with #technology like #Exchange and #Outlook?" +"microsoft","neutral","126608763456200704","Wed Oct 19 10:40:56 +0000 2011","Novidade #Microsoft: RT @Cibereconomia: This New Microsoft Technology Turns Anything Into A Touchscreen (MSFT) http://t.co/yqh20Z0u" +"microsoft","neutral","126607853640364032","Wed Oct 19 10:37:19 +0000 2011","#Infosys, #Microsoft team up to offer #cloud #services - http://t.co/9dEVf9Tz" +"microsoft","neutral","126607793280126976","Wed Oct 19 10:37:04 +0000 2011","#Microsoft India Webcast LIVE on Cloud Management with System Center App Controller Codename “Concero†http://t.co/TSYXWdKh #Cloud" +"microsoft","neutral","126607106953580546","Wed Oct 19 10:34:21 +0000 2011","#Microsoft's Steve Ballmer attacks #Android phones http://t.co/cuoW78du" +"microsoft","neutral","126606671874228225","Wed Oct 19 10:32:37 +0000 2011","Microsoft Excel: How To Alternate the Color Between Rows - http://t.co/x7TZk9sG #microsoft" +"microsoft","neutral","126606546556829696","Wed Oct 19 10:32:07 +0000 2011","RT @guardiantech: #Microsoft Kinect brings 'playful learning' to the Xbox 360 http://t.co/ys79quRD #Gaming" +"microsoft","neutral","126606023174791168","Wed Oct 19 10:30:02 +0000 2011","#OmniTouch #Microsoft http://t.co/qwk0YUrH via @youtube" +"microsoft","neutral","126604925881954305","Wed Oct 19 10:25:41 +0000 2011","Get #Microsoft #http://t.co/TMUrucvW certified now!! http://t.co/3Qr7XoCy" +"microsoft","neutral","126604903408869378","Wed Oct 19 10:25:35 +0000 2011","Early cloud #Microsoft partners can qualify for Microsoft Cloud Accelerate! http://t.co/pjkyf0gf" +"microsoft","neutral","126604828150480896","Wed Oct 19 10:25:17 +0000 2011","RT @MahimaKurra: @CloudNewsIndia #Microsoft Announces #Open Source based #Cloud Service http://t.co/MtK3XZ2l" +"microsoft","neutral","126604631894790144","Wed Oct 19 10:24:31 +0000 2011","Great, now #Skype is a part of #Microsoft stack http://t.co/vZfUv9OI." 
+"microsoft","neutral","126604425832837120","Wed Oct 19 10:23:41 +0000 2011","#Microsoft CEO #Steve_Ballmer on Not Buying #Yahoo: “Sometimes, You’re Lucky†http://t.co/YgISO7xt" +"microsoft","neutral","126604425635692545","Wed Oct 19 10:23:41 +0000 2011","#Microsoft CEO #Steve_Ballmer on Not Buying #Yahoo: “Sometimes, You’re Lucky†http://t.co/D0u2o9nZ" +"microsoft","neutral","126604152536178688","Wed Oct 19 10:22:36 +0000 2011","#Microsoft idea: Search the Internet like a celebrity (Todd Bishop/GeekWire) http://t.co/WhROX8KA" +"microsoft","neutral","126602212511203328","Wed Oct 19 10:14:54 +0000 2011","â–º #Job #IT #Projekt #Management #Microsoft | Projekt für Freelancer: Programm Manager (m/w) in Berlin » http://t.co/MO4jtpTL" +"microsoft","neutral","126601735417499648","Wed Oct 19 10:13:00 +0000 2011","thinking thinking thinking thinking #dotnet #asp #microsoft" +"microsoft","neutral","126600989150158849","Wed Oct 19 10:10:02 +0000 2011","Creating #Pareto charts using #Microsoft #Excel http://t.co/B59lPyCS" +"microsoft","neutral","126600074825445376","Wed Oct 19 10:06:24 +0000 2011","#Microsoft Changes The Yet Known# Design of #Windows Eight http://t.co/EGa3wrr4" +"microsoft","neutral","126599980847869952","Wed Oct 19 10:06:02 +0000 2011","#Google sets alternative to #Microsoft http://t.co/EqAzzbcz" +"microsoft","neutral","126599671400497153","Wed Oct 19 10:04:48 +0000 2011","Watch short video on #Microsoft #Lync End User Adoption. A practical solution for users http://t.co/Qpk5DHEn #ITPro #UCOMS #OCS #WhyMSFT" +"microsoft","neutral","126599087389806592","Wed Oct 19 10:02:29 +0000 2011","#HEUTE - #Microsoft #Office #2010 Home & Student Product Key Card [1 User] - statt 149€ nur 89,99€ - http://t.co/tdMFe0nI" +"microsoft","neutral","126598871760642048","Wed Oct 19 10:01:37 +0000 2011","Funny #BillGates #Aprons w/Google #1 ranked #LTCartoon “Got #Microsoft“? http://t.co/CStd8cPM #humor #foodie #microsoft" +"microsoft","neutral","126598867847348224","Wed Oct 19 10:01:36 +0000 2011","Funny #BillGates #Aprons w/Google #1 ranked #LTCartoon “Got #Microsoft“? http://t.co/DRvKSVF6 #humor #foodie #microsoft" +"microsoft","neutral","126598515248992257","Wed Oct 19 10:00:12 +0000 2011","#Kinect Makes Learning Playful with Help from Sesame Street and National Geographic http://t.co/xQXP2uWx Genius idea ;) #Microsoft" +"microsoft","neutral","126597883477762048","Wed Oct 19 09:57:42 +0000 2011","Windows Phone Tango and Apollo rumors suggest new screen specs +#microsoft #wp7" +"microsoft","neutral","126597538794057728","Wed Oct 19 09:56:19 +0000 2011","Steve Ballmer, #Microsoft's chief executive believes that #Google's #Android mobile phones for being too complicated. What do you..." 
+"microsoft","neutral","126596825875611648","Wed Oct 19 09:53:30 +0000 2011","#Kinect -projector makes all surfaces touchscreen #ar #touch #microsoft #xbox360 #gaming #games http://t.co/67CMvCki" +"microsoft","neutral","126596045575684096","Wed Oct 19 09:50:23 +0000 2011","Ballmer On Not Buying Yahoo: “Sometimes You’re Luckyâ€: Speaking today at Web 2.0, #Microsoft… http://t.co/KOt83DxF" +"microsoft","neutral","126595447455367168","Wed Oct 19 09:48:01 +0000 2011","This coming from a guy who is #Microsoft PC, Mug, pen, everything RT @GiuseppePietta: @1Ngare both fail phones.windows phone 7 to the rescue" +"microsoft","neutral","126595014846459904","Wed Oct 19 09:46:18 +0000 2011","Steve Ballmer people need to be computer scientists to figure out how to use #Android phones | @Telegraph http://t.co/B5X0SDgp #Microsoft" +"microsoft","neutral","126593877321842690","Wed Oct 19 09:41:47 +0000 2011","Windows Phone Tango and Apollo rumors suggest new screen specs http://t.co/8X30jQAH #microsoft #windows" +"microsoft","neutral","126593450253623297","Wed Oct 19 09:40:05 +0000 2011","#Microsoft to Require yearly #Sustainability Reporting by Vendors http://t.co/71Rm0QOT via @CaelusGreenRoom #CSR #GMIC" +"microsoft","neutral","126592300070608896","Wed Oct 19 09:35:30 +0000 2011","CLOUD NEWS - Infosys boosts its cloud solution with Microsoft partnership - http://t.co/RNHzqjGR - @Infosys #cloud #microsoft #azure" +"microsoft","neutral","126592158802264064","Wed Oct 19 09:34:57 +0000 2011","#Microsoft discontinued product named #ENSEMBLE STUDIOS on October 14, 2011. Review and comment: http://t.co/eJ8a2Msm" +"microsoft","neutral","126592000395984896","Wed Oct 19 09:34:19 +0000 2011","#Microsoft #Cloud Windows Intune: Microsoft Offers Cloud Partners New Incentive ...: ... calendar year, whi... http://t.co/ufI0YlL7 #TCN" +"microsoft","neutral","126591072884359168","Wed Oct 19 09:30:38 +0000 2011","#Microsoft #SCVMM #2012- Session by @virtualfat - all u need to know about the new 2012 version. #e2evc Register http://t.co/eZBtVm9Z" +"microsoft","neutral","126591029993406464","Wed Oct 19 09:30:28 +0000 2011","This New Microsoft Technology Turns Anything Into A Touchscreen (MSFT) http://t.co/Qcih8fjg #apple #microsoft" +"microsoft","neutral","126590959290032128","Wed Oct 19 09:30:11 +0000 2011","... and we were just told what a #microsoft minute is: it is 95 sconds + 1 second which is from 1 to 10 minutes. :)" +"microsoft","neutral","126590231137886209","Wed Oct 19 09:27:17 +0000 2011","#Microsoft and #Nokia Announce suite of ‘Microsoft Apps’ for Nokia’s Symbian Belle Platform http://t.co/c1YryIFh #WP7 #WP75" +"microsoft","neutral","126588750628257792","Wed Oct 19 09:21:24 +0000 2011","SeNews : Ballmer: 70% Of The Time, Google & Bing Are The Same, So Try Bing! #Microsoft #Bing : http://t.co/6RmJ9Bgn" +"microsoft","neutral","126588693577338881","Wed Oct 19 09:21:11 +0000 2011","SeNews : Microsoft extends revenue guarantees to Yahoo #Yahoo #Microsoft : http://t.co/bR3MHHVg" +"microsoft","neutral","126588643782574080","Wed Oct 19 09:20:59 +0000 2011","SeNews : Ballmer on not buying Yahoo: 'Sometimes you're lucky' #Microsoft #Yahoo : http://t.co/E0kvSTj4" +"microsoft","neutral","126588145549574144","Wed Oct 19 09:19:00 +0000 2011","RT @InternetBH: #Microsoft shows 'touch screen' for any surface + +http://t.co/rT9g3pUH" +"microsoft","neutral","126586994536091648","Wed Oct 19 09:14:26 +0000 2011","Financial Services track at partner summit, 9 people in the room. 
And yet FS is such a lucrative channel #Microsoft #PartnerSummit" +"microsoft","neutral","126586306464727041","Wed Oct 19 09:11:41 +0000 2011","Find & give information about software bugs & help improve #Microsoft products. Apply to become a #Beta Tester http://t.co/OSWEfixz #ITPro" +"microsoft","neutral","126585952394166272","Wed Oct 19 09:10:17 +0000 2011","#Teamfollowback -Microsoft expands Windows Phone Marketplace to India #microsoft #windowsphone #india" +"microsoft","neutral","126585097297199104","Wed Oct 19 09:06:53 +0000 2011","Check this video out -- Sesame Street and Nat Geo TV coming to Kinect http://t.co/iw7kjQ81 #xbox #kinect #microsoft" +"microsoft","neutral","126584640453611521","Wed Oct 19 09:05:04 +0000 2011","Cerulean is the most advanced ERP cloud for hosted dynamics #microsoft #dynamics - http://t.co/4J53yJwd" +"microsoft","neutral","126583805229285376","Wed Oct 19 09:01:45 +0000 2011","RT @Pocketlint: Ballmer on Nokia: Really looking forward to bunch of new devices next week http://t.co/xjUwssZs #wp7 #nokia #Microsoft" +"microsoft","neutral","126582210420674560","Wed Oct 19 08:55:25 +0000 2011","#Microsoft announces two new Xbox 360 holiday bundles http://t.co/GgLwTSlT" +"microsoft","neutral","126582173787619328","Wed Oct 19 08:55:16 +0000 2011","Omni Touch Screen System: going beyond the ""touch screens"" http://t.co/aROkfnOS #Featured #Tech&Gadgets #Microsoft #Omnitouch" +"microsoft","neutral","126581768529788929","Wed Oct 19 08:53:40 +0000 2011","Rumor: New screen sizes, specs coming to Windows Phones #windowsphone #microsoft http://t.co/kHTDQSHo" +"microsoft","neutral","126581659612094464","Wed Oct 19 08:53:14 +0000 2011","#Teamfollowback #Microsoft investigating Windows Phone Mango Zune freezing problems" +"microsoft","neutral","126581308158779392","Wed Oct 19 08:51:50 +0000 2011","Check this video out -- OmniTouch - Demo Video #Microsoft demonstrates touch screen for any surface http://t.co/NL9E08Q7" +"microsoft","neutral","126581270435201024","Wed Oct 19 08:51:41 +0000 2011","OmniTouch from #Microsoft makes every surface a #Touchscreen | #gadgets http://t.co/sSUrPnve" +"microsoft","neutral","126580539636449281","Wed Oct 19 08:48:47 +0000 2011","we are authorized test center PJ, Selangor, for IT #exams (#Cisco #CCNA, #Microsoft #MCSE, Red Hat #RHCE, etc.) http://t.co/16LPfctH" +"microsoft","neutral","126580273965047810","Wed Oct 19 08:47:45 +0000 2011","ToWhat technological extensivity #Microsoft willProvide the #CloudComputing ? Thanks #MS for Bein aPart ofMy generation http://t.co/bsnbAhBp" +"microsoft","neutral","126580014819983360","Wed Oct 19 08:46:41 +0000 2011","#Microsoft anuncia SQL Server ODBC Driver para Linux! http://t.co/kDbNjYyB #MS #Linux #Opensource #SQL #Softwarelibre" +"microsoft","neutral","126579602524082176","Wed Oct 19 08:45:03 +0000 2011","#Teamfollowback Microsoft Q1 2012 earnings results due on Thursday #microsoft" +"microsoft","neutral","126579169646751744","Wed Oct 19 08:43:20 +0000 2011","Congratulations to Softline Pastel (@SageGroupZA) for winning #ISV of the Year at #Microsoft #partnersummit" +"microsoft","neutral","126579166589108224","Wed Oct 19 08:43:19 +0000 2011","Vincent Mugambi #Microsoft takes us through #WP app" +"microsoft","neutral","126577845421096960","Wed Oct 19 08:38:04 +0000 2011","#Microsoft #Cloud Infosys kicks off its cloud drive with Microsoft partnership: Infosys has accelerated its... 
http://t.co/utU4mwjK #TCN" +"microsoft","neutral","126577204078456832","Wed Oct 19 08:35:31 +0000 2011","#Microsoft #Wireless #Keyboard 3000 Review http://t.co/C4GOy04V #wirelesskeyboardandmouse #mousecombos" +"microsoft","neutral","126577114005782528","Wed Oct 19 08:35:10 +0000 2011","#Free Genuine Copy #Microsoft #Windows 8 Pre-Beta #OS: I don’t know how long it’s been out… http://t.co/EY8GOIrk" +"microsoft","neutral","126577085081849856","Wed Oct 19 08:35:03 +0000 2011","http://t.co/VgMYdlDS found article on http://t.co/UAWHbOLz Fake #Microsoft scammers lure users into #granting remote access to PCs" +"microsoft","neutral","126576350067818497","Wed Oct 19 08:32:08 +0000 2011","Omni Touch Screen System: going beyond the ""touch screens"" http://t.co/SH2fCwuS #Featured #Tech&Gadgets #Microsoft #Omnitouch" +"microsoft","neutral","126576121784442880","Wed Oct 19 08:31:13 +0000 2011","Pinball FX2 offers free 'Paranormal' table starting next Wednesday http://t.co/vYXqUbJg #microsoft #pinball_fx_2" +"microsoft","neutral","126573645421228032","Wed Oct 19 08:21:23 +0000 2011","RT @NeowinFeed: Rumor: New screen sizes, specs coming to Windows Phones #windowsphone #microsoft http://t.co/Ja9sEesh #neowin" +"microsoft","neutral","126573265178202112","Wed Oct 19 08:19:52 +0000 2011","RT @TCMagazine: Microsoft talks about the Start menu search in Windows 8 http://t.co/P8Go0gkS #microsoft #search #start #windows8" +"microsoft","neutral","126573186774089728","Wed Oct 19 08:19:34 +0000 2011","Rumor: New screen sizes, specs coming to Windows Phones #windowsphone #microsoft http://t.co/Ja9sEesh #neowin" +"microsoft","neutral","126572513483436032","Wed Oct 19 08:16:53 +0000 2011","RT @kalezo: we strongly recommend students that are following us to sign up for the #imaginecup 2012 contest! http://t.co/ytn5OHym #micr ..." +"microsoft","neutral","126572384126894080","Wed Oct 19 08:16:22 +0000 2011","we strongly recommend students that are following us to sign up for the #imaginecup 2012 contest! http://t.co/ytn5OHym #microsoft" +"microsoft","neutral","126572306230288385","Wed Oct 19 08:16:04 +0000 2011","#Microsoft's Steve Ballmer attacks #Android phones - #Telegraph http://t.co/EvkkeA8l via @Telegraph" +"microsoft","neutral","126572275876106240","Wed Oct 19 08:15:56 +0000 2011","Rumor: New screen sizes, specs coming to Windows Phones #windowsphone #microsoft: A rumor has surfaced that sugg... http://t.co/wHDrAQO4" +"microsoft","neutral","126572275016286208","Wed Oct 19 08:15:56 +0000 2011","Rumor: New screen sizes, specs coming to Windows Phones #windowsphone #microsoft http://t.co/gOQgJvvb #microsoft" +"microsoft","neutral","126570339105914881","Wed Oct 19 08:08:15 +0000 2011","RT @FierceCIO: What happens to #Skype now that it is officially #Microsoft's? http://t.co/9FzSH0RY $MSFT" +"microsoft","neutral","126569564963217408","Wed Oct 19 08:05:10 +0000 2011","Any #Microsoft trainers in Oxfordshire who offer free training to local charities? Unfortunately out of my catchment area @Makethatlink" +"microsoft","neutral","126569115082166273","Wed Oct 19 08:03:23 +0000 2011","If you missed it : #Microsoft working on ‘PocketTouch’ device input through clothing http://t.co/eqz4HkNk" +"microsoft","neutral","126568785921572864","Wed Oct 19 08:02:04 +0000 2011","Windows Phone Tango and Apollo to have new screen resolutions? 
http://t.co/KJwugoM3 #WP7 #Microsoft" +"microsoft","neutral","126568308584628224","Wed Oct 19 08:00:10 +0000 2011","wasn't impressed with the #iPhone4S and now not impressed with the #GalaxyNexus, so let's see what #Nokia - #Microsoft have to offer..." +"microsoft","neutral","126568122261061632","Wed Oct 19 07:59:26 +0000 2011","RT @rinconindia: #OpenText #Alchemy Client programs are now compatible with #Microsoft #Office #2007." +"microsoft","neutral","126567546165014528","Wed Oct 19 07:57:09 +0000 2011","#Microsoft CEO Steve Ballmer on Not Buying #Yahoo : “Sometimes, You’re Lucky†http://t.co/WMCXWysh" +"microsoft","neutral","126566918911041536","Wed Oct 19 07:54:39 +0000 2011","#Microsoft excludes tablets from its holiday advertising push http://t.co/kPslZjFC" +"microsoft","neutral","126566540010192896","Wed Oct 19 07:53:09 +0000 2011","The debate is, am I allowed to use my BB pen at #Microsoft?" +"microsoft","neutral","126566417276469248","Wed Oct 19 07:52:40 +0000 2011","been using #microsoft @office for years? well here are some tips to be more efficient that even you might not know! http://t.co/HnXvwypp" +"microsoft","neutral","126566305716387840","Wed Oct 19 07:52:13 +0000 2011","#Microsoft #LifeCam NX-3000 #Webcam (Gray) Review http://t.co/zJMHBPH7 #nx3000 #laptopwebcam #notebooks #laptop" +"microsoft","neutral","126565140702298112","Wed Oct 19 07:47:35 +0000 2011","The Scribble SharePoint Blog: SharePoint 2013 Workflows. Maybe Microsoft can buy Nintex? #Microsoft #SharePoint #SP210 #Workflow #Nintex" +"microsoft","neutral","126564015873867777","Wed Oct 19 07:43:07 +0000 2011","@siobhancoda is your anti virus isn't just a 90 day trial, could leave you exposed in the near future #free #microsoft #antivirus #software" +"microsoft","neutral","126564004612161536","Wed Oct 19 07:43:04 +0000 2011","Steve Ballmer Says ""Luck"" Kept Microsoft From Buying Yahoo In 2008 http://t.co/9sDNmC0Y #Microsoft #Yahoo" +"microsoft","neutral","126563375189725184","Wed Oct 19 07:40:34 +0000 2011","Microsoft CEO says company got ""lucky"" when Yahoo deal fell apart #microsoft #steveballmer: Microsoft CEO Steve ... http://t.co/Zyetras0" +"microsoft","neutral","126563374384422912","Wed Oct 19 07:40:34 +0000 2011","Microsoft CEO says company got ""lucky"" when Yahoo deal fell apart #microsoft #steveballmer http://t.co/8fxt1ef8 #microsoft" +"microsoft","neutral","126563373230997504","Wed Oct 19 07:40:34 +0000 2011","Microsoft CEO says company got ""lucky"" when Yahoo deal fell apart #microsoft #steveballmer http://t.co/qGxn8d2l" +"microsoft","neutral","126563286392123392","Wed Oct 19 07:40:13 +0000 2011","Now I that i know god exists, realisation dawns on me that, he hates me +#Microsoft #ICPC" +"microsoft","neutral","126562736242040833","Wed Oct 19 07:38:02 +0000 2011","#Microsoft excludes #tablets from its holiday advertising push http://t.co/QlArpUaT" +"microsoft","neutral","126562457375350785","Wed Oct 19 07:36:55 +0000 2011","#microsoft says #android is too complex. Interesting - from a user angle contrast #windows with #macos for a comparison.http://goo.gl/Wu3Zm" +"microsoft","neutral","126561633978945536","Wed Oct 19 07:33:39 +0000 2011","#Intervate wins content management partner of the year 2011. 
#Microsoft #techedafrica" +"microsoft","neutral","126561530258001920","Wed Oct 19 07:33:14 +0000 2011","#Intervate wins the portals and collaboration partner of the year 2011 #Microsoft #techedafrica" +"microsoft","neutral","126561428319649793","Wed Oct 19 07:32:50 +0000 2011","#Microsoft's Steve Ballmer attacks #Android phones (Telegraph) - http://t.co/9wbIy1Er" +"microsoft","neutral","126559900036894720","Wed Oct 19 07:26:46 +0000 2011","RT @marcfletcher: #Intervate wins the portals and collaboration partner of the year 2011 #Microsoft #techedafrica" +"microsoft","neutral","126559880860545024","Wed Oct 19 07:26:41 +0000 2011","RT @InternetBH: Microsoft finalises Skype deal - IT Business - News & Features - http://t.co/F9b14RY3: http://t.co/275l0zay #skype #Micr ..." +"microsoft","neutral","126557628531875840","Wed Oct 19 07:17:44 +0000 2011","RT @NeowinFeed: Microsoft CEO says company got ""lucky"" when Yahoo deal fell apart #microsoft #steveballmer http://t.co/l5uNInSW #neowin" +"microsoft","neutral","126557171742810112","Wed Oct 19 07:15:55 +0000 2011","Microsoft talks about the Start menu search in Windows 8 http://t.co/P8Go0gkS #microsoft #search #start #windows8" +"microsoft","neutral","126555866332798976","Wed Oct 19 07:10:44 +0000 2011","Microsoft CEO says company got ""lucky"" when Yahoo deal fell apart #microsoft #steveballmer http://t.co/l5uNInSW #neowin" +"microsoft","neutral","126554531713650688","Wed Oct 19 07:05:26 +0000 2011","RT @sanjeevgadre: #Ballmer says that #Microsoft dodged a bullet when it failed to acquire #Yahoo in 2008 | Wish 1 of my ex-employers was ..." +"microsoft","neutral","126553189511208960","Wed Oct 19 07:00:06 +0000 2011","#Ballmer says that #Microsoft dodged a bullet when it failed to acquire #Yahoo in 2008 | Wish 1 of my ex-employers was lucky like that 2" +"microsoft","neutral","126552864876269568","Wed Oct 19 06:58:48 +0000 2011","Ballmer Feels Lucky Microsoft Didn't Buy Yahoo in 2008, http://t.co/mPZoilRs #microsoft" +"microsoft","neutral","126552665063829504","Wed Oct 19 06:58:01 +0000 2011","#HP VirtualSystem for #Microsoft is the latest solution from HP & Microsoft. http://t.co/w8IXiboT #hpci" +"microsoft","neutral","126549688857010177","Wed Oct 19 06:46:11 +0000 2011","#Microsoft employees reveal they’re working on next-generation #Xbox http://t.co/XE7RL7a8" +"microsoft","neutral","126548837958557696","Wed Oct 19 06:42:48 +0000 2011","RT @ShawnWildermuth: OMFG: http://t.co/Rit4WJq3 #gates #ballmer #nightAtTheRoxbury #microsoft" +"microsoft","neutral","126545396913815552","Wed Oct 19 06:29:08 +0000 2011","@mdhughes I have the strangest feeling we've done this exact thing before. #arial #microsoft" +"microsoft","neutral","126542911654477824","Wed Oct 19 06:19:15 +0000 2011","Microsoft #Cloud Services, ...federal government appears to have resulted in a decision favoring #Microsoft http://t.co/bz0uabpP" +"microsoft","neutral","126541046615244801","Wed Oct 19 06:11:51 +0000 2011","Any surface touch http://t.co/ymNtCIKG #Microsoft #Technology" +"microsoft","neutral","126538706944401409","Wed Oct 19 06:02:33 +0000 2011","#microsoft #3Q (or 1Q12) #conferencecall 10/20/11 http://t.co/zm9Dye63" +"microsoft","neutral","126537896999124992","Wed Oct 19 05:59:20 +0000 2011","Free Microsoft adCenter Credits http://t.co/OBUxExci #webhostingcoupon #adcenter #credits #free #microsoft" +"microsoft","neutral","126536228538564608","Wed Oct 19 05:52:42 +0000 2011","RT @SQLServer: #Microsoft made many announcements at #SQLPass Summit. 
Read the blog post for a recap: http://t.co/KSgglnDC #sqlserver #s ..." +"microsoft","neutral","126536172867559424","Wed Oct 19 05:52:29 +0000 2011","#Microsoft excludes tablets from its holiday advertising #push: Fresh details of Microsoft’s… http://t.co/SXFPd9Xj" +"microsoft","neutral","126535951764819968","Wed Oct 19 05:51:36 +0000 2011","Hm... Can't decide if I should like it or not... #Microsoft #OmniTouch http://t.co/k074N0z2" +"microsoft","neutral","126534265205829632","Wed Oct 19 05:44:54 +0000 2011","Amazing that at #microsoft #TechEdAfrica everyone is running iOS phones !" +"microsoft","neutral","126534195777519616","Wed Oct 19 05:44:37 +0000 2011","RT @WesleyBackelant: Marketeer of the Year @ http://t.co/O07MOLI4 Please support @davidmerzel #microsoft" +"microsoft","neutral","126532191831343105","Wed Oct 19 05:36:40 +0000 2011","#Microsoft extends revenue guarantees to Yahoo: Yahoo disclosed today that Microsoft extended… http://t.co/NWLalDdd" +"microsoft","neutral","126528995394199552","Wed Oct 19 05:23:57 +0000 2011","Ballmer On Not Buying #Yahoo: “Sometimes You’re Lucky†http://t.co/7ButjnVO > most interesting part = nascent #Microsoft cloud leadership" +"microsoft","neutral","126528296354725890","Wed Oct 19 05:21:11 +0000 2011","New Microsoft Security Essentials v2.1 Released with updated Antimalware Engine http://t.co/2etgYjEZ #Microsoft #SecurityEssentials #Windows" +"microsoft","neutral","126528197750829056","Wed Oct 19 05:20:47 +0000 2011","#MIcrosoft Launches Reseller RewardHub for Resllers http://t.co/scPUl9Yz" +"microsoft","neutral","126526992609849344","Wed Oct 19 05:16:00 +0000 2011","RT @Maven_Infosoft: #Patent Wars: #Google vs. #Samsung vs. #Apple vs. #Motorola vs. #Microsoft vs. #Oracle - http://t.co/PjxURwrp" +"microsoft","neutral","126525976925585408","Wed Oct 19 05:11:58 +0000 2011","Cyclone II 1.0.2 http://t.co/n6LR23KJ #c++ #microsoft #music" +"microsoft","neutral","126524240324337664","Wed Oct 19 05:05:04 +0000 2011","Zero-day exploits are low in number, but pose big threat, experts say: http://t.co/Tk7i6fw4 #Microsoft #ITsecurity" +"microsoft","neutral","126523034906529793","Wed Oct 19 05:00:16 +0000 2011","Coursey, Forbes - Open Letter: *Steve Ballmer, Please Retire*; http://t.co/1uYbXD0Q #OpenLetter #SteveBallmer #Microsoft" +"microsoft","neutral","126522766261366784","Wed Oct 19 04:59:12 +0000 2011","#Patent Wars: #Google vs. #Samsung vs. #Apple vs. #Motorola vs. #Microsoft vs. #Oracle - http://t.co/PjxURwrp" +"microsoft","neutral","126522655892447233","Wed Oct 19 04:58:46 +0000 2011","#Microsoft #Cloud Microsoft - Capture the Cloud: ... through the Rookie, Veteran, and All-Star tiers provid... http://t.co/tnxYUxvA #TCN" +"microsoft","neutral","126519858035822594","Wed Oct 19 04:47:39 +0000 2011","#Microsoft Natural #Ergonomic #Keyboard 4000 Review http://t.co/blz7dtsy" +"microsoft","neutral","126519595682119681","Wed Oct 19 04:46:36 +0000 2011","@ParadoxicalMike yes... our is under lock and key... we are guarded by the mighty #Microsoft.... unlike #google with it's androids" +"microsoft","neutral","126514963924787201","Wed Oct 19 04:28:12 +0000 2011","The way you can understand +all of the social media +is as the creation of +a new kind of public space. #MICROSOFT" +"microsoft","neutral","126514187647201280","Wed Oct 19 04:25:07 +0000 2011","Should #Microsoft and #Google design their software for mobile devices with two screens?" +"microsoft","neutral","126508567053340672","Wed Oct 19 04:02:47 +0000 2011","RT @neon_ness: Yahoo Profit Drops 26 Per Cent! 
http://t.co/Jd3X8SGM #yahoo #microsoft" +"microsoft","neutral","126508512930050048","Wed Oct 19 04:02:34 +0000 2011","Save ink by printing certain areas of your doc. Highlight the text, click Print, under Page Range tick ‘Selection’. #microsoft #word #office" +"microsoft","neutral","126507878382174208","Wed Oct 19 04:00:03 +0000 2011","@bsteelooper: I vouched for you in #programmer, #microsoft engineer, and #ethical hacker on Connect.Me http://t.co/HIJqPBso" +"microsoft","neutral","126507753484193792","Wed Oct 19 03:59:33 +0000 2011","Microsoft adCenter Promo Code – Free $25 credit http://t.co/XwC7xvK8 #webhostingcoupon #adcenter #code #credit #free #microsoft #promo" +"microsoft","neutral","126507677919617024","Wed Oct 19 03:59:15 +0000 2011","Yahoo Profit Drops 26 Per Cent! http://t.co/7AdG1Hss #yahoo #microsoft" +"microsoft","neutral","126507292777652224","Wed Oct 19 03:57:43 +0000 2011","Yahoo Profit Drops 26 Per Cent! http://t.co/Jd3X8SGM #yahoo #microsoft" +"microsoft","neutral","126506577946615808","Wed Oct 19 03:54:53 +0000 2011","Testing Windows 8 Developer Preview #microsoft" +"microsoft","neutral","126500912578564097","Wed Oct 19 03:32:22 +0000 2011","U-Village store fits Microsoft consumer strategy http://t.co/OyGo65p1 #microsoft #Technology" +"microsoft","neutral","126500486047207425","Wed Oct 19 03:30:40 +0000 2011","Microsoft Announces Interactive TV For Children’s Games & Story Books #Xbox #Microsoft #family #fb http://t.co/BOCzCmbg" +"microsoft","neutral","126500411522809856","Wed Oct 19 03:30:23 +0000 2011","Microsoft CEO on not http://t.co/veCmSAHQ #Buying #Lucky #Microsoft" +"microsoft","neutral","126499066229170176","Wed Oct 19 03:25:02 +0000 2011","#Reviora can host #Microsoft #Dynamics #SL #MSDYNERP - http://t.co/WFoopX1Y" +"microsoft","neutral","126498215204892672","Wed Oct 19 03:21:39 +0000 2011","New post: Microsoft Windows 7 Home Premium http://t.co/FkruCQIs #Home #Microsoft #Premium" +"microsoft","neutral","126496197220380672","Wed Oct 19 03:13:38 +0000 2011","CEO Ballmer's (non)answer on whether Microsoft will build its own Windows Phone #Ballmer #Microsoft #fb http://t.co/9GuMpF8V" +"microsoft","neutral","126494059987603456","Wed Oct 19 03:05:08 +0000 2011","Congrats to the 5 big #winners of the ""Get certified-Get happy"" #Sweepstakes! EZ log-in>Contests>Winners: http://t.co/396AtoHF #Microsoft" +"microsoft","neutral","126493220279558144","Wed Oct 19 03:01:48 +0000 2011","RT @HPC_Guru: LINQ to #HPC: #Microsoft closes in on delivering its own #Hadoop competitor http://t.co/rsRmJPob #dryad #BigData via @mary ..." +"microsoft","neutral","126488619002236928","Wed Oct 19 02:43:31 +0000 2011","228MB for what is basically a glorified chat client. Oh, Microsoft. #lync #microsoft http://t.co/oktrAysu" +"microsoft","neutral","126487011849809920","Wed Oct 19 02:37:08 +0000 2011","#Job #ICT System Engineer (m/w): planova human capital ag, Luzern http://t.co/2PJo4Dqr #Microsoft #IT" +"microsoft","neutral","126485244651126784","Wed Oct 19 02:30:06 +0000 2011","#Apple vs #Microsoft: Two Opposite Approaches to Building an OS http://t.co/ZAMoDZ9O #Windows8 #OSX #iOS5" +"microsoft","neutral","126484490049687552","Wed Oct 19 02:27:07 +0000 2011","RT @bcopher: Anyone know where I can get a free trial of #microsoft #frontpage for #mac?" 
+"microsoft","neutral","126483887923793920","Wed Oct 19 02:24:43 +0000 2011","#microsoft makes every surface a touch screen http://t.co/n2GC2gK4 #touch #gesture" +"microsoft","neutral","126483490911952896","Wed Oct 19 02:23:08 +0000 2011","#Microsoft CEO Steve Ballmer on Not #Buying #Yahoo: “Sometimes, You’re Lucky” http://t.co/uyDNZ4r0 #mcommerce #business" +"microsoft","neutral","126483128163373057","Wed Oct 19 02:21:42 +0000 2011","RT @Pocketlint: Ballmer on Nokia: Really looking forward to bunch of new devices next week #nokia #microsoft #wp7 http://t.co/vOH8VM8A" +"microsoft","neutral","126482131412189184","Wed Oct 19 02:17:44 +0000 2011","#Microsoft CEO Steve Ballmer on Not Buying #Yahoo: “Sometimes, You’re Lucky” http://t.co/RI8HUaLD #uncategorized" +"microsoft","neutral","126481856639143936","Wed Oct 19 02:16:39 +0000 2011","#Microsoft Office v. X for Mac [Old Version] Review: Computer Software Plus Education Computer Software Sales http://t.co/4GdWlnNM" +"microsoft","neutral","126481810803793921","Wed Oct 19 02:16:28 +0000 2011","Night at the Roxbury with Gates and Ballmer http://t.co/RSZAL40S #funny #video #gates #ballmer #microsoft" +"microsoft","neutral","126481496147111936","Wed Oct 19 02:15:13 +0000 2011","Mashable! - Microsoft CEO Steve Ballmer on Not Buying Yahoo: “Sometimes, You’re ... #yahoo #microsoft #ballmer http://t.co/tWIzjGBR" +"microsoft","neutral","126480696075227137","Wed Oct 19 02:12:02 +0000 2011","Everything Can Be A Digital Surface Now, Courtesy Microsoft, Carnegie Mellon http://t.co/6S7pKbjI #microsoft #carnegiemellon" +"microsoft","neutral","126479554217910272","Wed Oct 19 02:07:30 +0000 2011","@ShawnWildermuth #gates #ballmer #nightAtTheRoxbury #microsoft That is so wild!" +"microsoft","neutral","126479285702762496","Wed Oct 19 02:06:26 +0000 2011","Everything Can Be A Digital ... http://t.co/fG0wfyWl #microsoft #carnegiemellon" +"microsoft","neutral","126476071238508545","Wed Oct 19 01:53:39 +0000 2011","RT @GP_Jman852: Hello New Followers! Reminder: 1000 followers = a 4000 #Microsoft point #Giveaway! #Xbox360 #XBL #XBLA #MW3 #COD RETWEET!" 
+"microsoft","neutral","126474577411969024","Wed Oct 19 01:47:43 +0000 2011","Ballmer mocks Android, promises a “bunch of new†Nokia Windows Phones next week [video] http://t.co/euPPhD1w #microsoft #windows" +"microsoft","neutral","126470128589217795","Wed Oct 19 01:30:03 +0000 2011","#HPVirtualSystem for #Microsoft, latest from the $250 million I2A initiative #HPCI #cloudcomputing http://t.co/yx1R3WsX" +"microsoft","neutral","126467278144413696","Wed Oct 19 01:18:43 +0000 2011","#Microsoft shows 'touch screen' for any surface | Nanotech - The Circuits Blog - CNET News http://t.co/qwhoCf5T via @cnet" +"microsoft","irrelevant","126807138587783168","Wed Oct 19 23:49:12 +0000 2011","ZappTek iPDA v4.5.1 http://t.co/JBs0dUyq #ipad #microsoft #mp4" +"microsoft","irrelevant","126806596662726660","Wed Oct 19 23:47:03 +0000 2011","Saints Row: The Third and doughnuts for dinner http://t.co/tZm4fD4w #microsoft #pc" +"microsoft","irrelevant","126806460146524160","Wed Oct 19 23:46:30 +0000 2011","RT @slashdotjp: http://t.co/zK1dtFiS #マイクロソフト #microsoft Microsoftã€Skype ã®è²·åŽã‚’完了" +"microsoft","irrelevant","126801012852916224","Wed Oct 19 23:24:51 +0000 2011","#Noti #Bolivia : #Microsoft lanza sitio oficial de #Skype http://t.co/V1WeJ6z6" +"microsoft","irrelevant","126800980783271936","Wed Oct 19 23:24:44 +0000 2011","#Noticias : #Microsoft lanza sitio oficial de #Skype http://t.co/v5fAsqId" +"microsoft","irrelevant","126800969521577984","Wed Oct 19 23:24:41 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/arqlW58x desde #Bolivia" +"microsoft","irrelevant","126800228560019456","Wed Oct 19 23:21:44 +0000 2011","RT @BarbieGeek: #Wordperfect #Windows_95 #Microsoft Bill Gates llamado a declarar por un antiguo caso de monopolio contra Microsoft http ..." +"microsoft","irrelevant","126798811262763009","Wed Oct 19 23:16:07 +0000 2011","Fico impressionado como qualquer software #microsoft fode com meu #Mac" +"microsoft","irrelevant","126798048289488896","Wed Oct 19 23:13:05 +0000 2011","RT @codebit_org: Halo: Combat Evolved Anniversary #Microsoft explica cómo funcionarán el remake de #Halo y #Kinect http://t.co/moc3ofex" +"microsoft","irrelevant","126796467213058048","Wed Oct 19 23:06:48 +0000 2011","malisimo el intento de google maps de #microsoft" +"microsoft","irrelevant","126794558456610816","Wed Oct 19 22:59:13 +0000 2011","http://t.co/h1IH7FN6 Ù…Ø§ÙŠÙƒØ±ÙˆØ³ÙˆÙØª تقوم بتطوير تقنية تمكنك من استخدام يدك كهات٠+ +#Technology #Microsoft #NeW" +"microsoft","irrelevant","126794506266882048","Wed Oct 19 22:59:00 +0000 2011","Halo: Combat Evolved Anniversary #Microsoft explica cómo funcionarán el remake de #Halo y #Kinect http://t.co/moc3ofex" +"microsoft","irrelevant","126794057979670529","Wed Oct 19 22:57:13 +0000 2011","Los #DRM están construidos en el corazón de #Windows 7 » http://t.co/mBnHIHEk #microsoft #monopolio" +"microsoft","irrelevant","126792436176531456","Wed Oct 19 22:50:47 +0000 2011","#OmniTouch! me ha encantado #mobile #tactil #microsoft" +"microsoft","irrelevant","126792221730148353","Wed Oct 19 22:49:56 +0000 2011","Сижу Ñмотрю ""Живую Ñталь"", и тут внезапно... 
Ñ Ð²Ð¸Ð¶Ñƒ рекламу #microsoft http://t.co/vRfOSqtB" +"microsoft","irrelevant","126792156852666369","Wed Oct 19 22:49:40 +0000 2011","@juanda95 Así como creen que las computadoras solo ""funcionan"" con #Microsoft #Windows » http://t.co/6JQOz2C0" +"microsoft","irrelevant","126789413706543104","Wed Oct 19 22:38:46 +0000 2011","Cracking the Code: Using QR Codes to Engage Your Customers on Their Smartphones #smb #smallbiz http://t.co/IL1u0v8v #microsoft" +"microsoft","irrelevant","126788967692648448","Wed Oct 19 22:37:00 +0000 2011","Prototipo #Microsoft que convierte cualquier superficie en táctil.. asombroso... http://t.co/MltAwZA0" +"microsoft","irrelevant","126788442356064256","Wed Oct 19 22:34:54 +0000 2011","#Microsoft, war lang still um dich: Unified Extensible Firmware Interface #UEFI http://t.co/AdGfbm1V" +"microsoft","irrelevant","126787343704260608","Wed Oct 19 22:30:32 +0000 2011","Pelado con la tecnologia iria a pegarle una piña a #BillGates. Odio a #Microsoft." +"microsoft","irrelevant","126787220022640640","Wed Oct 19 22:30:03 +0000 2011","Analistas preven un buen inicio de año fiscal para #Microsoft - ABC.es : http://t.co/f5StriT6" +"microsoft","irrelevant","126787152708255744","Wed Oct 19 22:29:47 +0000 2011","rilasciata la CTP di Roslyn, l'API ""compiler as a service"" per C# e VB http://t.co/EfGDBBUW #microsoft #roslyn #ctp" +"microsoft","irrelevant","126786658866700289","Wed Oct 19 22:27:49 +0000 2011","Vous me demandiez comment obtenir une #certification #Virtualisation Serveur #Microsoft ? Si vous avez des questions - http://t.co/fC5LFiCL" +"microsoft","irrelevant","126786402837995521","Wed Oct 19 22:26:48 +0000 2011","Saints Row: The Third and doughnuts for dinner http://t.co/TYP67KqF #microsoft #pc #playstation #ps3" +"microsoft","irrelevant","126785814876274688","Wed Oct 19 22:24:28 +0000 2011","Equipo altamente #certificado por #Cisco y #Microsoft" +"microsoft","irrelevant","126785710920441858","Wed Oct 19 22:24:03 +0000 2011","RT @e2na: Ù…Ø§ÙŠÙƒØ±ÙˆØ³ÙˆÙØª غيرت العالم أكثر من أبل | عالم التقنية http://t.co/VXcV9cPQ +الإنصا٠ÙÙŠ هذا المقال جميل :) +#Microsoft #Apple" +"microsoft","irrelevant","126785377863340033","Wed Oct 19 22:22:44 +0000 2011","""Il colpo di genio è stato iTunes non l'iPod in sé"". Quello che #Google e #Samsung non possono capire, tanto meno #Microsoft #sapevatelo" +"microsoft","irrelevant","126785355310579712","Wed Oct 19 22:22:38 +0000 2011","#microsoft inizia a creare un ponte tra il digitale e la fisica. Incredibile. http://t.co/9yZ1vU5f" +"microsoft","irrelevant","126784813066756096","Wed Oct 19 22:20:29 +0000 2011","#Job #ICT IT-Supporter/-in 50%: Universität Zürich, Zürich http://t.co/IzFSsjgo #Microsoft #IT" +"microsoft","irrelevant","126783417152053248","Wed Oct 19 22:14:56 +0000 2011","Microsoft Security Essentialã®å®šç¾©ãƒ•ァイルãŒã‚¢ãƒƒãƒ—デートã—ã¾ã—ãŸï¼š1.115.131.0 > http://t.co/8K7uMmFZ #Microsoft" +"microsoft","irrelevant","126782732373196800","Wed Oct 19 22:12:13 +0000 2011","TypeBuilder be gone, Roslyn CTP is here! Creating types without TypeBuilder! http://t.co/cilZFP9B #Roslyn #Microsoft" +"microsoft","irrelevant","126781181466378240","Wed Oct 19 22:06:03 +0000 2011","#Microsoft: Una pantalla táctil al alcance de tu mano. http://t.co/kUAfBenM" +"microsoft","irrelevant","126780041857216512","Wed Oct 19 22:01:32 +0000 2011","Die #Apple Story heute Abend in #DOK auf #SF1 war ziemlich beeindruckend R.I.P. #Stevejobs - doch wo wäre Apple heute ohne #Microsoft ?" 
+"microsoft","irrelevant","126779541720010752","Wed Oct 19 21:59:32 +0000 2011","#sipcom supporting #arkadin at #ipexpo showing the advantages of #hosted #lync2010 for #enterprises #ucoms #MSFTnews #Microsoft #ucexpo #in" +"microsoft","irrelevant","126779286458871809","Wed Oct 19 21:58:31 +0000 2011","[HighTech] Microsoft PocketTouch - Multitouch Através de Tecido http://t.co/eLqI5VAB #touch #pockettouch #microsoft" +"microsoft","irrelevant","126777498716479488","Wed Oct 19 21:51:25 +0000 2011","#Skype devient officiellement une division de #Microsoft" +"microsoft","irrelevant","126775712525979648","Wed Oct 19 21:44:19 +0000 2011","#Microsoft Please check out or site and join please. http://t.co/dRR4oq1Z we need more ppl maybe u can help" +"microsoft","irrelevant","126775015021625346","Wed Oct 19 21:41:33 +0000 2011","Qué poco me gusta el botón ""Me Gusta"" http://t.co/TDZ8z1uu #web #microsoft #tecnologia #web" +"microsoft","irrelevant","126775003927678976","Wed Oct 19 21:41:30 +0000 2011","MAGIX ##Web Designer, edición visual de páginas Web http://t.co/ycafF43r #microsoft #tecnologia" +"microsoft","irrelevant","126775002082189312","Wed Oct 19 21:41:30 +0000 2011","Dropbox, el servicio de almacenamiento en la nube, por las nubes http://t.co/QJ73xjlE #web #microsoft #tecnologia" +"microsoft","irrelevant","126774982935187456","Wed Oct 19 21:41:25 +0000 2011","RT @raZcacielo: #Bill_Gates llamado a declarar por un antiguo caso de monopolio contra #Microsoft http://t.co/DZ9ZqCgX" +"microsoft","irrelevant","126774793746907137","Wed Oct 19 21:40:40 +0000 2011","Ford edge! Muy bonito 38k pero el sync ke usa es #microsoft para los cells! usara windows update? #panama http://t.co/HEJ5W0YK" +"microsoft","irrelevant","126774646715580416","Wed Oct 19 21:40:05 +0000 2011","Bill Gates llamado a declarar por un antiguo caso de... http://t.co/JReEpHNr #Wordperfect #Windows_95 #Microsoft #Novell #Bill_Gates" +"microsoft","irrelevant","126774646690414593","Wed Oct 19 21:40:05 +0000 2011","Bill Gates llamado a declarar por un antiguo caso de... http://t.co/wFrUi1nN #Wordperfect #Windows_95 #Microsoft #Novell #Bill_Gates" +"microsoft","irrelevant","126774646568783872","Wed Oct 19 21:40:05 +0000 2011","#Wordperfect #Windows_95 #Microsoft Bill Gates llamado a declarar por un antiguo caso de monopolio contra Microsoft http://t.co/dyOyrkYD" +"microsoft","irrelevant","126774645474070528","Wed Oct 19 21:40:05 +0000 2011","#Bill_Gates llamado a declarar por un antiguo caso de monopolio contra #Microsoft http://t.co/DZ9ZqCgX" +"microsoft","irrelevant","126774641044897793","Wed Oct 19 21:40:04 +0000 2011","#Bill_Gates llamado a declarar por un antiguo caso de monopolio contra #Microsoft http://t.co/JVPtsfzz" +"microsoft","irrelevant","126773569345699840","Wed Oct 19 21:35:48 +0000 2011","Nu har #Microsoft öppnat #Mango-kranen helt! http://t.co/0ZbZlMwo via @windowsblog" +"microsoft","irrelevant","126773055816085504","Wed Oct 19 21:33:46 +0000 2011","#Termine am 20. Oktober ++ #ADVA, #Nokia und #Microsoft öffnen ihre Bücher http://t.co/aoZoNUKf" +"microsoft","irrelevant","126771371954020353","Wed Oct 19 21:27:05 +0000 2011","@LorPan87 non per nulla l'ha comprato la #microsoft..." +"microsoft","irrelevant","126771131884638208","Wed Oct 19 21:26:07 +0000 2011","#Microsoft CEO Steve Ballmer anatusi Android!!!! 
aiii http://t.co/vO9mf1g5" +"microsoft","irrelevant","126770870709534720","Wed Oct 19 21:25:05 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/QVrWRIyd" +"microsoft","irrelevant","126770869254094849","Wed Oct 19 21:25:05 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/DdKFEWQb visto en #Bolivia" +"microsoft","irrelevant","126770866435530752","Wed Oct 19 21:25:04 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/zL951yca" +"microsoft","irrelevant","126769830568280065","Wed Oct 19 21:20:57 +0000 2011","Feliz por mi nombramiento #Microsoft Student Partner Nicaragua :D a trabajar por el desarrollo tecnológico de nuestra patria Nicaragua #fb" +"microsoft","irrelevant","126769633301774336","Wed Oct 19 21:20:10 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/FZeM9Na4" +"microsoft","irrelevant","126769627484274688","Wed Oct 19 21:20:09 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/sd1XqwSp" +"microsoft","irrelevant","126769625659748353","Wed Oct 19 21:20:08 +0000 2011","#Techno : #Microsoft lanza sitio oficial de #Skype http://t.co/ynJoTWPI" +"microsoft","irrelevant","126769625626198016","Wed Oct 19 21:20:08 +0000 2011","#Tecnologias : #Microsoft lanza sitio oficial de #Skype http://t.co/5Xmgk7Ul" +"microsoft","irrelevant","126769625420673024","Wed Oct 19 21:20:08 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/1WDCJldZ" +"microsoft","irrelevant","126769623591944192","Wed Oct 19 21:20:08 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/SioUrqUi desde #Tegnologia" +"microsoft","irrelevant","126769621394137088","Wed Oct 19 21:20:07 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/pl3kPDla" +"microsoft","irrelevant","126769618458120192","Wed Oct 19 21:20:06 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/07OjctNj" +"microsoft","irrelevant","126769617342435329","Wed Oct 19 21:20:06 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/Ra22ZJb7 desde #Tegnologia" +"microsoft","irrelevant","126769617057234944","Wed Oct 19 21:20:06 +0000 2011","#Tecnologias : #Microsoft lanza sitio oficial de #Skype http://t.co/3WNfMQNw" +"microsoft","irrelevant","126769617019486209","Wed Oct 19 21:20:06 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/fWDALlIe" +"microsoft","irrelevant","126769616826531840","Wed Oct 19 21:20:06 +0000 2011","#Techno : #Microsoft lanza sitio oficial de #Skype http://t.co/oQ2JbPS3" +"microsoft","irrelevant","126769452967669761","Wed Oct 19 21:19:27 +0000 2011","#Microsoft lanza sitio oficial de #Skype http://t.co/c3kGqpyR #JeryTecnologias #tecnología" +"microsoft","irrelevant","126768905216720896","Wed Oct 19 21:17:16 +0000 2011","RT @ShariPaul_Kofax: @tomcastiglia Great job today! Engaging presentations from @HersheyTech #Kofax and @Collabware at the #Microsoft T ..." +"microsoft","irrelevant","126768623472750592","Wed Oct 19 21:16:09 +0000 2011","#Microsoft Infografía: 340 millones de PC usan un explorador obsoleto y están en riesgo http://t.co/0ScN623o ➨ @MicrosoftLatam" +"microsoft","irrelevant","126768380945502208","Wed Oct 19 21:15:11 +0000 2011","#TaskCentre XML Webservice Tool LinkedIn discussion. http://t.co/3GWw2CXU #SAPB1 #SalesLogix #Microsoft" +"microsoft","irrelevant","126767886814543872","Wed Oct 19 21:13:14 +0000 2011","Zou #microsoft vrij van rancune zijn? Hier de reactie op de populaire Androidtel. 
http://t.co/TeTN3ULl" +"microsoft","irrelevant","126767288652271617","Wed Oct 19 21:10:51 +0000 2011","@tomcastiglia Great job today! Engaging presentations from @HersheyTech #Kofax and @Collabware at the #Microsoft Tech Center in Irvine." +"microsoft","irrelevant","126767180112076800","Wed Oct 19 21:10:25 +0000 2011","#microsoft Steve Ballmer, CEO da Microsoft, critica usabilidade do Android http://t.co/OzC9ncs1 #windows #tecnologia" +"microsoft","irrelevant","126765812534099969","Wed Oct 19 21:04:59 +0000 2011","Weer een klant geholpen met een Microsoft Lync implementatie voor 110 gebruikers #lync #ocs #microsoft" +"microsoft","irrelevant","126765752928829441","Wed Oct 19 21:04:45 +0000 2011","Lets play a game http://t.co/3qNIv3Nx #ie #microsoft #useragent #yourbrowsermatters @radiotux @webstandardat @heiseonline @ChrisZwitschert" +"microsoft","irrelevant","126765300904505344","Wed Oct 19 21:02:57 +0000 2011","http://t.co/ONI0JX8B +#Appleagagné #Apple versus #microsoft" +"microsoft","irrelevant","126764864197758976","Wed Oct 19 21:01:13 +0000 2011","RT @packard_bell: #SteveBallmer, CEO de #Microsoft, dice que se necesita ser un cientifico para manejar #Android http://t.co/W03j7hTA" +"microsoft","irrelevant","126764589835759616","Wed Oct 19 21:00:08 +0000 2011","Regístrate para asistir este 8 de noviembre al lanzamiento de #Microsoft Dynamics AX 2012 http://t.co/txHEoInE" +"microsoft","irrelevant","126764578695680000","Wed Oct 19 21:00:05 +0000 2011","#SteveBallmer, CEO de #Microsoft, dice que se necesita ser un cientifico para manejar #Android http://t.co/W03j7hTA" +"microsoft","irrelevant","126764341784625152","Wed Oct 19 20:59:08 +0000 2011","#Microsoft ¿Eres uno de los millones de usuarios en r#iesgo de sufrir un ataque de #malware de... http://t.co/eJkelj3l ➨ @MicrosoftLatam" +"microsoft","irrelevant","126763862409232384","Wed Oct 19 20:57:14 +0000 2011","Tem acompanhando o #MSPTechDay ?Então não perca no próx sáb 22/10 na sede da #Microsoft palestra sobre #WP7 Inscreva-se http://t.co/zNpyaD5F" +"microsoft","irrelevant","126763321541148672","Wed Oct 19 20:55:05 +0000 2011","RT @jtrezza: ¿Recuerdan que #Apple había comprado el dominio html5 .com? http://t.co/qb0pix09 pues ahora es de #Microsoft :O ¿Qué se tra ..." +"microsoft","irrelevant","126762153012236288","Wed Oct 19 20:50:27 +0000 2011","@slecluyse In iedere lesplaats komen er @bluescreens #Microsoft" +"microsoft","irrelevant","126762113980055552","Wed Oct 19 20:50:17 +0000 2011","¿Recuerdan que #Apple había comprado el dominio html5 .com? http://t.co/qb0pix09 pues ahora es de #Microsoft :O ¿Qué se traen estos?" +"microsoft","irrelevant","126761498885361664","Wed Oct 19 20:47:51 +0000 2011","RT @Brittjje: Morgen maak ik bekend wie een WindowsPhone7.5 krijgt, is ook de laatste dag dat je live mijn telefoon ziet detelefoonvanbr ..." +"microsoft","irrelevant","126761427343130624","Wed Oct 19 20:47:34 +0000 2011","@akirareiko ¿Viste que ya html5.com no es de #Apple sino de #Microsoft?" +"microsoft","irrelevant","126761080541290496","Wed Oct 19 20:46:11 +0000 2011","Ben ik de enige met serieuze bedenkingen? #Howest wordt proefschool (konijn) voor #Microsoft Office 365 http://t.co/s2GGPb0G" +"microsoft","irrelevant","126760617800515584","Wed Oct 19 20:44:21 +0000 2011","XBLA in Brief: Bejeweled 3 http://t.co/HLVuTyCa #bejeweled_3 #microsoft" +"microsoft","irrelevant","126759986780057600","Wed Oct 19 20:41:50 +0000 2011","RT @SociosMicrosoft: Director de #Microsoft admite que fue afortunado el escape de la compra de Yahoo! 
http://t.co/ZX3dLElI" +"microsoft","irrelevant","126759818718482432","Wed Oct 19 20:41:10 +0000 2011","#Microsoft adquiere #Skype por un monto $8500 millones - http://t.co/jZr2WTCE" +"microsoft","irrelevant","126759115488903168","Wed Oct 19 20:38:22 +0000 2011","RT @MichelEnLaRed: #linux / #ubuntu - #Windows / #microsoft » http://t.co/TteunwMj !!Que gran diferencia!! -/" +"microsoft","irrelevant","126758582631927809","Wed Oct 19 20:36:15 +0000 2011","realmente indispensável nos dias de hoje os serviços corporativos oferecidos pelo #SKYPE. #Fato #TI #Microsoft" +"microsoft","irrelevant","126757586174345216","Wed Oct 19 20:32:18 +0000 2011","Google adds default end-to-end encryption to search http://t.co/VdhjkydN via @regvulture. Ik zeg: goed bezig! Jammer dat #microsoft achter" +"microsoft","irrelevant","126756934937350144","Wed Oct 19 20:29:42 +0000 2011","#linux / #ubuntu - #Windows / #microsoft » http://t.co/TteunwMj !!Que gran diferencia!! -/" +"microsoft","irrelevant","126756701855682560","Wed Oct 19 20:28:47 +0000 2011","Ich glaube, ich habe mich gut auf die Prüfung morgen vorbereitet. #Microsoft #MCITP #HyperV" +"microsoft","irrelevant","126756084961652736","Wed Oct 19 20:26:20 +0000 2011","@uniquegens Ñ Ð¼ÐµÑ‡Ñ‚Ð°ÑŽ однажды такое Ñказать про #Microsoft" +"microsoft","irrelevant","126755997975977984","Wed Oct 19 20:25:59 +0000 2011","Warum steht bei @anked eigentlich ""ehemalige #Microsoft Managerin"" und nicht ""Zukünftige #Piraten Stadträtin""? #AnneWill" +"microsoft","irrelevant","126755662792364032","Wed Oct 19 20:24:39 +0000 2011","Steve Ballmer, CEO van #Microsoft haalt uit naar #Android. Ach, hij moet toch wat, niemand koopt zijn #phone7... http://t.co/mPpEi6ul" +"microsoft","irrelevant","126754698102439936","Wed Oct 19 20:20:49 +0000 2011","Vous rappelez vous l'époque du "" Gros Méchant #Microsoft ""? http://t.co/7bSGGCO0" +"microsoft","irrelevant","126754357671772160","Wed Oct 19 20:19:28 +0000 2011","Un pa$o más y @3devBC será el primer #Microsoft Partner Gold en Application Lifecycle Management #ALM del país :) #WIN" +"microsoft","irrelevant","126754317775552513","Wed Oct 19 20:19:18 +0000 2011","#Microsoft Engineer (VM Ware), 19/10/2011, #Vacatures, #Carrière. +http://t.co/Jct2ahOe + + +Bedrijfsprofiel: Microsoft Engineer + +Microsoft" +"microsoft","irrelevant","126753938098761728","Wed Oct 19 20:17:48 +0000 2011","#Tecnologia #Technology: La visión de #Microsoft Research sobre el control táctil de dispositivos +http://t.co/ZXwmaTxn" +"microsoft","irrelevant","126753907794919424","Wed Oct 19 20:17:41 +0000 2011","Steve Ballmer haalt uit naar #Android: http://t.co/jZWofwYc + #Microsoft haalt uit omdat zelf falen op mobiele markt?" +"microsoft","irrelevant","126752731380719616","Wed Oct 19 20:13:00 +0000 2011","#Home #Depot Gives #DIY Shoppers More Tools to Get the Job Done http://t.co/K4zeZSin #Distribution #VMI #homedepot #Microsoft #WindowsPhone" +"microsoft","irrelevant","126752590162698241","Wed Oct 19 20:12:27 +0000 2011","RT @DonChambitas: Busco más Desarrolladores C# con experiencia también en #Testing para proyecto en #Microsoft http://t.co/tNWl0OYu #net ..." +"microsoft","irrelevant","126752126880858112","Wed Oct 19 20:10:36 +0000 2011","RT @DonChambitas: Busco más Desarrolladores C# con experiencia también en #Testing para proyecto en #Microsoft http://t.co/IHKhqUQ8 #net ..." 
+"microsoft","irrelevant","126751890150137856","Wed Oct 19 20:09:40 +0000 2011","Busco más Desarrolladores C# con experiencia también en #Testing para proyecto en #Microsoft http://t.co/IHKhqUQ8 #net #RRHH #empleo" +"microsoft","irrelevant","126751116061974528","Wed Oct 19 20:06:35 +0000 2011","#yahoo #microsoft #Noticias Steve Ballmer da gracias al cielo porque Microsoft no compró Yahoo en 2008 http://t.co/j7hKabZB" +"microsoft","irrelevant","126749587133308928","Wed Oct 19 20:00:31 +0000 2011","Director de #Microsoft admite que fue afortunado el escape de la compra de Yahoo! http://t.co/ZX3dLElI" +"microsoft","irrelevant","126746850106675200","Wed Oct 19 19:49:38 +0000 2011","RT: RT @EvelineHorbach: Terug van evaluatie learning circles bij #microsoft onder bezielend leiderschap van… http://t.co/apo4UNhb" +"microsoft","irrelevant","126745825283031040","Wed Oct 19 19:45:34 +0000 2011","Quieres un buen antivirus para tu Compu? Aquí esta la solución: http://t.co/lNmMiWaZ Antivirus de #Microsoft" +"microsoft","irrelevant","126745438136176640","Wed Oct 19 19:44:01 +0000 2011","Temas más candentes de la semana pasada #BuzzworthyEnEspañol » http://t.co/Yn34NGed #Microsoft #BlackBerry #Skype #Starbucks" +"microsoft","irrelevant","126744626974564352","Wed Oct 19 19:40:48 +0000 2011","RT @kenbellezen: Vers 1e +grde accessibilité o dernier outil #IT. C la new orientation stratégique ke prend #Microsoft #Maroc http://t.c ..." +"microsoft","irrelevant","126743570689429504","Wed Oct 19 19:36:36 +0000 2011","RT @diegocambiaso: #Microsoft en el futuro: Kinect y su tecnología avanzada http://t.co/kjiGyuFr" +"microsoft","irrelevant","126743504675282944","Wed Oct 19 19:36:20 +0000 2011","Microsoft - Hardware, XBOX Produkte, Spiele und Software http://t.co/qWaNQyCF Profitieren Sie von den zahlreichen Vorte #Hardware #Microsoft" +"microsoft","irrelevant","126743035903094785","Wed Oct 19 19:34:29 +0000 2011","Vers 1e +grde accessibilité o dernier outil #IT. C la new orientation stratégique ke prend #Microsoft #Maroc http://t.co/C864evTj +#twittoma" +"microsoft","irrelevant","126742038048804866","Wed Oct 19 19:30:31 +0000 2011","RT @JohanBlok: #Smartsite prijsmodel voor #Microsoft #Azure cloud afgerond. Volgende week eerste site live in the cloud?" +"microsoft","irrelevant","126741924446076928","Wed Oct 19 19:30:04 +0000 2011","قال الرئيس التنÙيذي لشركة #Microsoft ستي٠بالمر اليوم ÙÙŠ مؤتمر Web 2.0 ان اجهزة الأندرويد اجهزه ممله واضا٠+= +#Android_ar #Tech_ar" +"microsoft","irrelevant","126741919500992512","Wed Oct 19 19:30:03 +0000 2011","#Microsoft en el futuro: Kinect y su tecnología avanzada http://t.co/kjiGyuFr" +"microsoft","irrelevant","126740492540055552","Wed Oct 19 19:24:22 +0000 2011","Bonsoir à toutes & Tous, voici le liien vers la rubrique #Microsoft du Paper d'aujourd'hui : Des Trucs Impressionnant : http://t.co/qHoFOxzD" +"microsoft","irrelevant","126738342900539392","Wed Oct 19 19:15:50 +0000 2011","RT @marcoagnoli: Domani a #SMAU con #Microsoft si parla di HTML5, #Kinect SDK, #WebMatrix, Private #Cloud e ci sono i lab su #Azure, htt ..." +"microsoft","irrelevant","126737787335606273","Wed Oct 19 19:13:37 +0000 2011","Ahora entiendo porque #microsoft te pide estar conectado a la luz con ServicePack, con lo que se tardan no hay batería que los aguante" +"microsoft","irrelevant","126736841872388096","Wed Oct 19 19:09:52 +0000 2011","RT @robertodesign: #SMAU Milano & #I9 domani un posto in prima fila!Vi aspetto 11:30 area sessioni tecniche #Microsoft stay tunef!" 
+"microsoft","irrelevant","126736717939093504","Wed Oct 19 19:09:22 +0000 2011","#Microsoft Ã¥tnjuter ju, trots allt, fortfarande en liten gnutta respekt i branschen. SÃ¥ varför är Steve Ballmer kvar?" +"microsoft","irrelevant","126736619742035968","Wed Oct 19 19:08:59 +0000 2011","RT @ZiOu333: Et Sony se fait encore pirater !! C'est la fête du slip chez eux. Faudrai peut-être acheter un pare-feu ?! #Sony Vive #Micr ..." +"microsoft","irrelevant","126736441379274752","Wed Oct 19 19:08:16 +0000 2011","http://t.co/sb8geEVB aÄŸzına sıçmaya geliyoruz #apple ve #microsoft" +"microsoft","irrelevant","126734707038756864","Wed Oct 19 19:01:23 +0000 2011","Necesito editar un archivo y no puedo, solución #Microsoft, no lo edites que estupides" +"microsoft","irrelevant","126734622263476224","Wed Oct 19 19:01:03 +0000 2011","Steve Balmer a beau être complètement barge, il a parfois des discours qui font mouche. Ici, sur le #Cloud : http://t.co/AHM1vZUg #Microsoft" +"microsoft","irrelevant","126734381309108224","Wed Oct 19 19:00:05 +0000 2011","Conector de redes sociales de #Microsoft #Outlook http://t.co/BPgPq45t" +"microsoft","irrelevant","126734290850557952","Wed Oct 19 18:59:44 +0000 2011","RT @ZekiBildirici: #pardus SecureBoot'a Karşı Duruyoruz!: http://t.co/Uiwn6fgQ #fsf #secureboot #linux #microsoft" +"microsoft","irrelevant","126733828000722944","Wed Oct 19 18:57:53 +0000 2011","RT @EvelineHorbach: Terug van evaluatie learning circles bij #microsoft onder bezielend leiderschap van @saskianijs. Veel geleerd van de ..." +"microsoft","irrelevant","126733528758095872","Wed Oct 19 18:56:42 +0000 2011","@mcastanedac ya quítale esa mugrera de #Microsoft OS. Vamos a dejarla limpia y andando, aprovecha el nuevo #Ubuntu que jala hasta en cels :)" +"microsoft","irrelevant","126733262822440960","Wed Oct 19 18:55:39 +0000 2011","@LucindaDouglas Ja mooi he Lucinda, dat doet #Microsoft goed. Blijk van #waardering en daarmee #verwachtingovertreffen" +"microsoft","irrelevant","126732894092791808","Wed Oct 19 18:54:11 +0000 2011","En full capacitación de soportes en #Microsoft Advertising, muy entretenida!!!" +"microsoft","irrelevant","126732424460767233","Wed Oct 19 18:52:19 +0000 2011","#Microsoft entdeckt den Verbraucher | @TechFieber | Smart Tech News. Hot Gadgets. http://t.co/CrQ86Fvu" +"microsoft","irrelevant","126731601584455682","Wed Oct 19 18:49:03 +0000 2011","#ALG Iniciamos formación #Microsoft M-6292. Éxitos a los participantes! El conocimiento crece cuando se comparte. www.alg.net.ve" +"microsoft","irrelevant","126731253490794496","Wed Oct 19 18:47:40 +0000 2011","#ALG Culminando formación en #Microsoft M-2778 y en #IndicadoresDeGestión. Gracias a nuestros clientes por su preferencia. www.alg.net.ve" +"microsoft","irrelevant","126731136318713856","Wed Oct 19 18:47:12 +0000 2011","RT @RoodProjecten: Leuk, ik ontving net een mooi boeket bloemen thuis van #Microsoft, als dank voor mijn bijdrage en waardevolle input. ..." +"microsoft","irrelevant","126730979485302784","Wed Oct 19 18:46:34 +0000 2011","Leuk, ik ontving net een mooi boeket bloemen thuis van #Microsoft, als dank voor mijn bijdrage en waardevolle input. #CareToShareKnowledge" +"microsoft","irrelevant","126726825505329152","Wed Oct 19 18:30:04 +0000 2011","#Microsoft verlängert Umsatzgarantien für Suchpartner #Yahoo - ZDNet.de : http://t.co/H3dseq5h" +"microsoft","irrelevant","126726459246129152","Wed Oct 19 18:28:37 +0000 2011","""@TwitGrap: #BREKEND: #Microsoft en #Linux nu beiden in de aanbieding! 
http://t.co/7sRtk4gy""" +"microsoft","irrelevant","126726358800936960","Wed Oct 19 18:28:13 +0000 2011","Une seconde démo pour Sonic Generations !: Bien que Sonic Generations ne soit plus très loin... http://t.co/to7a2Eih #Microsoft #Xbox360" +"microsoft","irrelevant","126726063484178432","Wed Oct 19 18:27:02 +0000 2011","Этому багу в #Word уже более 8 лет! Как не Ñтыдно, #Microsoft? http://t.co/hIgyYAvY" +"microsoft","irrelevant","126725627758915584","Wed Oct 19 18:25:18 +0000 2011","@Brittjje Als je met deze foon ook de hyvesspelletjes kunt, wil ik wel een winnaar zijn. #brit #microsoft" +"microsoft","irrelevant","126725465611304960","Wed Oct 19 18:24:40 +0000 2011","RT“@JohanBlok: #Smartsite prijsmodel voor #Microsoft #Azure cloud afgerond. Volgende week eerste site live in the cloud?†#in" +"microsoft","irrelevant","126724685315579904","Wed Oct 19 18:21:34 +0000 2011","RÄdÄs,ka #Android ir gÄjis #Microsoft pÄ“dÄs,veicot nelielu lÄ«kumu caur pakaļu." +"microsoft","irrelevant","126724454649839616","Wed Oct 19 18:20:39 +0000 2011","Morgen maak ik bekend wie een WindowsPhone7.5 krijgt, is ook de laatste dag dat je live mijn telefoon ziet detelefoonvanbritt.nl #MICROSOFT" +"microsoft","irrelevant","126724248256528385","Wed Oct 19 18:19:49 +0000 2011","EIndelijk, #Google Ice Cream Sandwich. Goodbye #Microsoft and #Apple!" +"microsoft","irrelevant","126723588324737024","Wed Oct 19 18:17:12 +0000 2011","RT @pimpjuicewow: 2pi ou pèdi onè ak respè ou so ou pap janm jwn li ankor kelkeswa app ou genyen an #Mac #Android e #Microsoft c menm ja ..." +"microsoft","irrelevant","126723323471204352","Wed Oct 19 18:16:09 +0000 2011","RT @cmaneu: #stage #Toulouse #Microsoft #net #SuperBoîte #Win8 #geeks = http://t.co/kWfptH4P (Please RT ;)" +"microsoft","irrelevant","126723066528153600","Wed Oct 19 18:15:08 +0000 2011","Terug van evaluatie learning circles bij #microsoft onder bezielend leiderschap van @saskianijs. Veel geleerd van deze co-creatie!" +"microsoft","irrelevant","126722796599521281","Wed Oct 19 18:14:03 +0000 2011","#Microsoft #OmniTouch: Convirtiendo cualquier superficie en una “pantalla táctil†http://t.co/7FvZ0IIq" +"microsoft","irrelevant","126722505128935424","Wed Oct 19 18:12:54 +0000 2011","2pi ou pèdi onè ak respè ou so ou pap janm jwn li ankor kelkeswa app ou genyen an #Mac #Android e #Microsoft c menm jan ak Vijinite'w #manzè" +"microsoft","irrelevant","126721828432519170","Wed Oct 19 18:10:12 +0000 2011","http://t.co/JVidt6U4 #Free #Software #Foundation (#FSF) #startet eine #Kampagne gegen die #Funktion #Windows8 #SecureBoot (#UEFI) #Microsoft" +"microsoft","irrelevant","126719767590604801","Wed Oct 19 18:02:01 +0000 2011","La visión de #Microsoft Research sobre el control táctil de dispositivos. http://t.co/zjlsx90P" +"microsoft","irrelevant","126719569179054080","Wed Oct 19 18:01:14 +0000 2011","#cuentaLaLeyenda que #microsoft llego a servir. Ah no! Eso nunca llego a ocurrir." +"microsoft","irrelevant","126719029921579008","Wed Oct 19 17:59:05 +0000 2011","#Smartsite prijsmodel voor #Microsoft #Azure cloud afgerond. Volgende week eerste site live in the cloud?" +"microsoft","irrelevant","126717715657396224","Wed Oct 19 17:53:52 +0000 2011","Domani a #SMAU con #Microsoft si parla di HTML5, #Kinect SDK, #WebMatrix, Private #Cloud e ci sono i lab su #Azure, http://t.co/Et5Lw9uF" +"microsoft","irrelevant","126717214211575808","Wed Oct 19 17:51:52 +0000 2011","#Microsoft expande su plataforma de datos con #SQLServer2012. Los clientes tendrás más flexibilidad y escalabilidad. 
http://t.co/kNRjOflf" +"microsoft","irrelevant","126715806565412864","Wed Oct 19 17:46:17 +0000 2011","Web 2.0 Summit: Ballmer habló sobre Windows Phone y sus competidores http://t.co/dCOh8vfp #eventos #microsoft" +"microsoft","irrelevant","126715672981016577","Wed Oct 19 17:45:45 +0000 2011","RT @ihaberplus: Dijitalde 4 ödül Türkiye'nin! #microsoft #bug #yahoo +http://t.co/xwm15RVQ" +"microsoft","irrelevant","126714068093509633","Wed Oct 19 17:39:22 +0000 2011","epic! #microsoft ich dachte echt, das kann doch nur fake sein! ^^ http://t.co/T4vXmNO7" +"microsoft","irrelevant","126713830184198144","Wed Oct 19 17:38:26 +0000 2011","RT @KbWorks: @gjdijkman gefelicteerd #microsoft #bizspark" +"microsoft","irrelevant","126713264582299648","Wed Oct 19 17:36:11 +0000 2011","viss jau bÅ«tu baigi labi,taÄu #Microsoft Bing bar mÅ«s kÄrtÄ“jo reizi ir apdalÄ«jis :(" +"microsoft","irrelevant","126710706354663424","Wed Oct 19 17:26:01 +0000 2011","Licentie vraag over #VMware #citrix +#Microsoft #TrendMicro #Symantec #quest #Oracle #mcafee #desktopvirtualisatie #AppSense +Ea @DataplusNL" +"microsoft","irrelevant","126710657948196864","Wed Oct 19 17:25:49 +0000 2011","RT @martijnkoster: Zeer leerzame dag vandaag #redhat en #microsoft. Morgenvroeg naar #tallgrass voor de nieuwste technieken op glasvezel ..." +"microsoft","irrelevant","126710063497887744","Wed Oct 19 17:23:27 +0000 2011","Zeer leerzame dag vandaag #redhat en #microsoft. Morgenvroeg naar #tallgrass voor de nieuwste technieken op glasvezel gebied. #" +"microsoft","irrelevant","126709780038434816","Wed Oct 19 17:22:20 +0000 2011","RT @GrantThorntonNL: #grantthornton geeft gouden tips #verkoop #onderneming. Ook workshops #philips #nobel #rabobank #microsoft #port4gr ..." +"microsoft","irrelevant","126709737222963200","Wed Oct 19 17:22:10 +0000 2011","Excelente articulo! RT @teresitacc: Interesting read - How #Apple eclipsed #Microsoft http://t.co/RhNU1VaR via @guardian (cc: @elcheicon)" +"microsoft","irrelevant","126708721328992257","Wed Oct 19 17:18:07 +0000 2011","RT @marcoagnoli: Belle le magliette che diamo a #SMAU in #Microsoft http://t.co/mU5iHlQi" +"microsoft","irrelevant","126708681239822336","Wed Oct 19 17:17:58 +0000 2011","According to a writer at @ExtremeTech #microsoft invented squared corners. Time for lawsuits! #moron" +"microsoft","irrelevant","126707839912771585","Wed Oct 19 17:14:37 +0000 2011","#SMAU Milano & #I9 domani un posto in prima fila!Vi aspetto 11:30 area sessioni tecniche #Microsoft stay tunef!" 
+"microsoft","irrelevant","126707460244373504","Wed Oct 19 17:13:07 +0000 2011","RT @Jadishmedia: La última campaña de Microsoft es una “sopa boba” que vende todo y nada a la vez http://t.co/xOmLCEgl #marketing #microsoft" +"microsoft","irrelevant","126705840521613312","Wed Oct 19 17:06:41 +0000 2011","#microsoft Microsoft cria sensor touchscreen através da roupa http://t.co/c4mop8OD #windows #tecnologia" +"microsoft","irrelevant","126705791527952384","Wed Oct 19 17:06:29 +0000 2011","RT “@microsoftfrance: Cette année encore, #Microsoft remporte le prix du meilleur Service Client 2012 http://t.co/fMRd5g3s”" +"microsoft","irrelevant","126705448438079488","Wed Oct 19 17:05:07 +0000 2011","Ballmer- #Microsoft fue afortunado de no haber adquirido Yahoo en 2008 previo al colapso financiero de aquel momento http://t.co/9yDSNrbx" +"microsoft","irrelevant","126705060708225024","Wed Oct 19 17:03:35 +0000 2011","(High-tech)- Demain sur LNT: ""#Microsoft s'implique dans le développement socioéconomique"" par @Joumany #twitoma #IT #blogoma" +"microsoft","irrelevant","126704896383787008","Wed Oct 19 17:02:56 +0000 2011","RT @AslanMarine: Vous voulez la une de LNT ? La nouvelle tribune de demain 20/10/11: #intikhabates, #PJD, #PPS, #MoroccoMall , #Boeing , ..." +"microsoft","irrelevant","126704346573455360","Wed Oct 19 17:00:44 +0000 2011","La última campaña de Microsoft es una “sopa boba” que vende todo y nada a la vez http://t.co/xOmLCEgl #marketing #microsoft" +"microsoft","irrelevant","126704316005351424","Wed Oct 19 17:00:37 +0000 2011","#Microsoft :88%di noi utilizza servizi #cloud senza saperlo. Rivelata release di #WindowsPhone e aggiornamenti #Hotmail http://t.co/vgUbrZYN" +"microsoft","irrelevant","126704236057739264","Wed Oct 19 17:00:18 +0000 2011",": #Microsoft renovará el Administrador de Tareas en #Windows8 http://t.co/6UhMnWdG vía @tuexperto" +"microsoft","irrelevant","126703003351785472","Wed Oct 19 16:55:24 +0000 2011","http://t.co/EMS9iU2E <-- en je moet nog technischer zijn om Windows stabiel te krijgen, Pikkie Balmer! #microsoft #dommemensen" +"microsoft","irrelevant","126702733834194944","Wed Oct 19 16:54:20 +0000 2011","На сайте #microsoft по ссылке http://t.co/C0MlLa07 вместо Facebook написано Facewbook" +"microsoft","irrelevant","126702673830481920","Wed Oct 19 16:54:06 +0000 2011","#microsoft @diggita Microsoft - Bill Gates in tribunale: sotto accusa Windows 95 (isarenas) http://t.co/nXDd3dCZ" +"microsoft","irrelevant","126702046350024704","Wed Oct 19 16:51:36 +0000 2011","Vous voulez la une de LNT ? La nouvelle tribune de demain 20/10/11: #intikhabates, #PJD, #PPS, #MoroccoMall , #Boeing , #Microsoft..." 
+"microsoft","irrelevant","126701862383661056","Wed Oct 19 16:50:52 +0000 2011","RT @codebit_org: #Microsoft convierte en táctil a cualquier superficie [ #VIDEO ] http://t.co/mKNOVBUS" +"microsoft","irrelevant","126701401425444864","Wed Oct 19 16:49:02 +0000 2011","#Microsoft convierte en táctil a cualquier superficie [ #VIDEO ] http://t.co/mKNOVBUS" +"microsoft","irrelevant","126699572490813441","Wed Oct 19 16:41:46 +0000 2011","Una porquería las páginas web hechas en .asp y pensar que muchas paginas del gobierno trabajan sobre esta tecnologia de #Microsoft" +"microsoft","irrelevant","126699294987259904","Wed Oct 19 16:40:40 +0000 2011","#Microsoft détaille la recherche au sein du Start Screen http://t.co/ZaElukM1" +"microsoft","irrelevant","126699236615127041","Wed Oct 19 16:40:26 +0000 2011","@joffrey c'est vrai que le message est pas très clair dans cette pub #Microsoft" +"microsoft","irrelevant","126698924709920768","Wed Oct 19 16:39:12 +0000 2011","#Microsoft #Werk Citrix-Microsoft Specialist - NB-Den Bosch, Zoek je een technisch uitdagende, high profile, groeipo... http://t.co/TsGcOIHX" +"microsoft","irrelevant","126698924621832192","Wed Oct 19 16:39:12 +0000 2011","#Microsoft #Werk Sharepoint specialist, Groningen - Het gaat om het verder uitbouwen van een portal en om SharePoint... http://t.co/Ytq1Kv5d" +"microsoft","irrelevant","126698507657678848","Wed Oct 19 16:37:32 +0000 2011","#Microsoft's #Ballmer touts #Bing; analysts still wary about its future - http://t.co/jvQWzHkd <- siehe mein Blog dazu: http://t.co/pLwVE1Iy" +"microsoft","irrelevant","126696228745523200","Wed Oct 19 16:28:29 +0000 2011","Uff mucha expectación con el Plan Nacional :-): Qué prioridades tenemos como Pais? Plan Nacional: http://t.co/rumOPWl0 #microsoft" +"microsoft","irrelevant","126695671163133952","Wed Oct 19 16:26:16 +0000 2011","Nueva tecnología táctil emula funcionalidad del monitor #Microsoft #Kinect http://t.co/QUAK6947" +"microsoft","irrelevant","126695665769250818","Wed Oct 19 16:26:15 +0000 2011","Belle le magliette che diamo a #SMAU in #Microsoft http://t.co/mU5iHlQi" +"microsoft","irrelevant","126694584322490368","Wed Oct 19 16:21:57 +0000 2011","@webespacio: CEO de #Microsoft: somos afortunados de no haber comprado Yahoo http://t.co/KFyLbmpJ #noticias" +"microsoft","irrelevant","126693348735057920","Wed Oct 19 16:17:02 +0000 2011","Bij #Microsoft zijn ze echt niet goed snik, ik was blij dat mijn mobiel gejat werd omdat er windows op zat.. #jaloezie http://t.co/fd0THHCv" +"microsoft","irrelevant","126691436744810496","Wed Oct 19 16:09:26 +0000 2011","#Polska #Apple #Torrent #Mac +Free Download!!! 
+#Microsoft Office 2011 Mac PL + +http://t.co/t2iC4z3F" +"microsoft","irrelevant","126690582893572096","Wed Oct 19 16:06:03 +0000 2011","#Microsoft ofrece un sistema de #codificación #MicrosoftTag http://t.co/jojh48Lg" +"microsoft","irrelevant","126690506976657408","Wed Oct 19 16:05:45 +0000 2011","RT @ricklennie: Un vistazo a Windows Server 8 http://t.co/BmcJLgDV #Microsoft" +"microsoft","irrelevant","126689129131028480","Wed Oct 19 16:00:16 +0000 2011","Un vistazo a Windows Server 8 http://t.co/BmcJLgDV #Microsoft" +"microsoft","irrelevant","126689124638932993","Wed Oct 19 16:00:15 +0000 2011","Un vistazo a Windows Server 8 http://t.co/Q0D7dJdN #Microsoft" +"microsoft","irrelevant","126689077700476929","Wed Oct 19 16:00:04 +0000 2011","Objetivo de las PC con #Windows de #Microsoft para estas vacaciones: “home honchos” y “media moderates” http://t.co/C1Nm8mHU" +"microsoft","irrelevant","126689077230698496","Wed Oct 19 16:00:04 +0000 2011","#Microsoft y Yahoo extienden su garantía de anuncios hasta el 2013 http://t.co/xAKls4gC" +"microsoft","irrelevant","126688740826550272","Wed Oct 19 15:58:44 +0000 2011","RT @dmolim: #Microsoft превратит любую поверхность в тачскрин http://t.co/vkXG7AZ5" +"microsoft","irrelevant","126688659868106752","Wed Oct 19 15:58:24 +0000 2011","RT @dmolim: #Microsoft расследует проблемы пропажи клавиатуры в #Windows #Phone #Mango http://t.co/1JqdUCjr" +"microsoft","irrelevant","126687780943306752","Wed Oct 19 15:54:55 +0000 2011","RT @CNNEE: #Skype ya es parte de #Microsoft http://t.co/f54UXNwi" +"microsoft","irrelevant","126687120071999490","Wed Oct 19 15:52:17 +0000 2011","RT @juank3946: Nueva #Consola #Xbox en curso: #LinkedIn http://t.co/VUzmZEzv #Microsoft #Xbox360 #Xbox720 #Twitter via @ZthaeDigital" +"microsoft","irrelevant","126687048647184384","Wed Oct 19 15:52:00 +0000 2011","Sogeti y #Microsoft crearán un centro de desarrollo de soluciones basadas en Azure: http://t.co/PHH2WmM1 via @AddThis" +"microsoft","irrelevant","126686733222944768","Wed Oct 19 15:50:45 +0000 2011","Hawaii Five.0 kijken. Ruim gesponsord door #microsoft 'I will upload this on skydrive' 'I just found that on #skydrive' #productplacement" +"microsoft","irrelevant","126686455752962048","Wed Oct 19 15:49:39 +0000 2011","Nueva #actualización de #Windows Intune http://t.co/2jPOvRpJ #microsoft #distribuciondeprogramasonline" +"microsoft","irrelevant","126686446357716992","Wed Oct 19 15:49:37 +0000 2011","#Microsoft convierte cualquier cosa en tactil! Mira el video! ... - http://t.co/SAeLwlBJ" +"microsoft","irrelevant","126686301780049920","Wed Oct 19 15:49:02 +0000 2011","Ha ha #rotfl #SteveBallmer haalt uit naar #android http://t.co/RQk2AnSM achterhoede gevecht v #microsoft" +"microsoft","irrelevant","126685380681547777","Wed Oct 19 15:45:23 +0000 2011","Steve Ballmer: ""Android är komplicerat. 
WP7 är lätt"" http://t.co/6Kn1Ocgg #ios #microsoft #wp7" +"microsoft","irrelevant","126685141174202369","Wed Oct 19 15:44:26 +0000 2011","#Oferta #Empleo Product Manager de Soluciones #IT http://t.co/xfZicBqO #trabajo #marketing #microsoft" +"microsoft","irrelevant","126684849934303233","Wed Oct 19 15:43:16 +0000 2011","CES 2012 Keynotes erneut mit erstklassiger Besetzung (http://t.co/0R0ULAPk) #ces #Daimler #Microsoft" +"microsoft","irrelevant","126684618605867008","Wed Oct 19 15:42:21 +0000 2011","Tja steve als het te #moeilijk voor je wordt kun je maar beter stoppen #microsoft #fail #enjekomttelaatmetjetoestel http://t.co/0IBnA2aV" +"microsoft","irrelevant","126682886756777984","Wed Oct 19 15:35:28 +0000 2011","Únase el Martes 8 de Noviembre, a las 11:00 am EST al evento de lanzamiento de #Microsoft #Dynamics #AX2012 para latinoamérica" +"microsoft","irrelevant","126682505033154560","Wed Oct 19 15:33:57 +0000 2011","No pierdan la oportunidad de registrarse al evento gratuito de Lanzamiento de #Microsoft #Dynamics #AX2012 regístrese: http://t.co/G2SNZEvE" +"microsoft","irrelevant","126681644223578113","Wed Oct 19 15:30:32 +0000 2011","#Microsoft lanza #OmniTouch - http://t.co/UkxNzpyQ" +"microsoft","irrelevant","126681070413418496","Wed Oct 19 15:28:15 +0000 2011","@diegofrancesco Grazie Diego! Contattaci pure quando vuoi per qualsiasi informazione sul mondo #Microsoft. ^AC" +"microsoft","irrelevant","126680181359378432","Wed Oct 19 15:24:43 +0000 2011","RT @SociosMicrosoft: #Microsoft anuncia nuevos títulos de “juegos educativos†http://t.co/etbab2UJ" +"microsoft","irrelevant","126680158508810240","Wed Oct 19 15:24:38 +0000 2011","Alguien puede explicarme si #Android tiene un rayo de esperanza con algun otro smartphone o tablet frente a #Microsoft" +"microsoft","irrelevant","126679552310251521","Wed Oct 19 15:22:13 +0000 2011","#Microsoft anuncia nuevos títulos de “juegos educativos†http://t.co/etbab2UJ" +"microsoft","irrelevant","126679463839801344","Wed Oct 19 15:21:52 +0000 2011","#Facebook facts: algunos de lis primeros usuarios corporativos incluía a #Microsoft y #Mac." +"microsoft","irrelevant","126679060431634432","Wed Oct 19 15:20:16 +0000 2011","RT @Institut2F: RT @newsgrape_de: 3 Tricks für mehr Kontrolle über #Microsoft #Windows 7 -http://j.mp/mSOETz #hack" +"microsoft","irrelevant","126679053347467264","Wed Oct 19 15:20:14 +0000 2011","Kat in het nauw #Microsoft #WindowsPhone : flink afgeven op je concurrent #Android http://t.co/tcDtTnIu" +"microsoft","irrelevant","126678520033325057","Wed Oct 19 15:18:07 +0000 2011","Mi sueño dorado es que #Microsoft se vea envuelto en un quiebre absoluto en sus productos y servicios" +"microsoft","irrelevant","126678376277749760","Wed Oct 19 15:17:33 +0000 2011","#Microsoft fulminate de este planeta!, como puede hacer eso a Android y a todos los desarrolladores y usuarios?" +"microsoft","irrelevant","126678301539446784","Wed Oct 19 15:17:15 +0000 2011","RT @windowsespana: Lo último y lo mejor de Windows 7, todo en un único lugar. http://t.co/RqnhMFaA #windows7 #microsoft" +"microsoft","irrelevant","126677890531201024","Wed Oct 19 15:15:37 +0000 2011","Lo último y lo mejor de Windows 7, todo en un único lugar. 
http://t.co/RqnhMFaA #windows7 #microsoft" +"microsoft","irrelevant","126677821492961280","Wed Oct 19 15:15:20 +0000 2011","#Microsoft #Research y la Universidad #Carnegie #Mellon desarrollan #OmniTouch basado en #Kinect http://t.co/y2QzUjM7 #fb" +"microsoft","irrelevant","126677668933533696","Wed Oct 19 15:14:44 +0000 2011","Directivos de #Google, #Microsoft o #IBM reflexionarán sobre cómo vender más utilizando las nuevas tecnologías http://t.co/5ogXhe52" +"microsoft","irrelevant","126677325008994306","Wed Oct 19 15:13:22 +0000 2011","Wyszukuj jak Anja Rubik (albo Colin Farrell) z #Microsoft http://t.co/1KHSfK8k" +"microsoft","irrelevant","126676840181022720","Wed Oct 19 15:11:26 +0000 2011","RT @newsgrape_de: 3 Tricks für mehr Kontrolle über #Microsoft #Windows 7 -http://j.mp/mSOETz #hack" +"microsoft","irrelevant","126676566154555395","Wed Oct 19 15:10:21 +0000 2011","Google резко раÑкритиковала дизайн Microsoft и Apple http://t.co/jUi4nYzG #google #microsoft #apple" +"microsoft","irrelevant","126676435988512768","Wed Oct 19 15:09:50 +0000 2011","RT @EurekaStartups Un teclado táctil sobre cualquier superficie > http://t.co/EakkXn0O #Microsoft #Tecnología" +"microsoft","irrelevant","126675755026493440","Wed Oct 19 15:07:08 +0000 2011","3 Tricks für mehr Kontrolle über #Microsoft #Windows 7 -http://j.mp/mSOETz #hack" +"microsoft","irrelevant","126675459353223168","Wed Oct 19 15:05:57 +0000 2011","Spotify lanza aplicación para BlackBerry http://t.co/Sgsy2bw0 #web #microsoft #tecnologia #web" +"microsoft","irrelevant","126675434065764352","Wed Oct 19 15:05:52 +0000 2011","Démo, création d'un cluster par @GaelDuhamel #VAD #Microsoft #VisualStudio #DataMining http://t.co/M5eqlqUA" +"microsoft","irrelevant","126675392663789569","Wed Oct 19 15:05:41 +0000 2011","RT @Startpack_ru: РебÑта из #Microsoft раÑÑказывают разработчикам #Стартпак, как можно интегрироватьÑÑ Ñ #SystemCenter через веб-ÑервиÑÑ‹ ..." +"microsoft","irrelevant","126674938076725248","Wed Oct 19 15:03:53 +0000 2011","#microsoft Grupo Confidence troca software livre por plataforma Microsoft http://t.co/DqCG83WJ #windows #tecnologia" +"microsoft","irrelevant","126673983235035138","Wed Oct 19 15:00:05 +0000 2011","Mi piace! Su Twitter è presente il servizio clienti di #Microsoft ! @MicrosoftAiuta" +"microsoft","irrelevant","126673920014299137","Wed Oct 19 14:59:51 +0000 2011","РебÑта из #Microsoft раÑÑказывают разработчикам #Стартпак, как можно интегрироватьÑÑ Ñ #SystemCenter через веб-ÑервиÑÑ‹. http://t.co/ZwDHUK2W" +"microsoft","irrelevant","126673684936146944","Wed Oct 19 14:58:54 +0000 2011","RT @IEMPO: En el Curso BMT en #microsoft #mexico ... Para trabajar el modelo de negocios de @Axentit en la Nube" +"microsoft","irrelevant","126673474751172608","Wed Oct 19 14:58:04 +0000 2011","En el Curso BMT en #microsoft #mexico ... Para trabajar el modelo de negocios de @Axentit en la Nube" +"microsoft","irrelevant","126673257175855106","Wed Oct 19 14:57:12 +0000 2011","Ooh for fuck's sake!! RT @gernijkamp Ah, Steve Ballmer..! Altijd goed voor een fijn lachmomentje: htl.li/72ad7 #microsoft #iphone" +"microsoft","irrelevant","126673062258147328","Wed Oct 19 14:56:26 +0000 2011","Hoe is het zover kunnen komen: #microsoft stuurt mij nieuwsbrieven waar outlook van vastloopt ^o) #newsletters" +"microsoft","irrelevant","126671792877223936","Wed Oct 19 14:51:23 +0000 2011","#Polska #Apple #Torrent #Mac + + +Free Download!!! 
+ +#Microsoft Office 2011 Mac PL + +http://t.co/t2iC4z3F" +"microsoft","irrelevant","126671141854134273","Wed Oct 19 14:48:48 +0000 2011","booooa tarde, sai da aula da #microsoft e agora to aqui na @Escola_Saga ; D" +"microsoft","irrelevant","126671001357529089","Wed Oct 19 14:48:14 +0000 2011","Microsoft Technology Summit 2011: Rusza druga edycja programu „Twoja firma, Twoja szansa na sukcesâ€: ... http://t.co/8veo83D9 #microsoft" +"microsoft","irrelevant","126671000736763904","Wed Oct 19 14:48:14 +0000 2011","http://t.co/7xjMqMv8 Dat het een lompe boer is : Dat wisten we al. Dat ie het echt niet snapt weten we nu ook. #fail #balmer #microsoft" +"microsoft","irrelevant","126670954112880640","Wed Oct 19 14:48:03 +0000 2011","RT @JoelCardozo: #Microsoft en el futuro: #Kinect y su tecnología avanzada http://t.co/JOVFeZRl" +"microsoft","irrelevant","126669765778485248","Wed Oct 19 14:43:20 +0000 2011","RT @ZonaBinaria: #Nokia mostrará varios teléfonos con Windows Phone la semana que viene http://t.co/qsLf5isx #noticias #microsoft" +"microsoft","irrelevant","126669192157073408","Wed Oct 19 14:41:03 +0000 2011","#Polska #Apple #Torrent #Mac + +Free Download!!! + +#Microsoft Office 2011 Mac PL + +http://t.co/t2iC4z3F" +"microsoft","irrelevant","126668863667572736","Wed Oct 19 14:39:45 +0000 2011","#Microsoft #Werk Senior IT Infrastructure Specialist Microsoft / Senior Systeembeheerder regio Amsterdam http://t.co/1erIcOhS" +"microsoft","irrelevant","126668329002872833","Wed Oct 19 14:37:37 +0000 2011","#Facebook prepara una aplicación para controlar el consumo de energía http://t.co/ritcdsD2 #Google y #Microsoft dejaron proyectos similares." +"microsoft","irrelevant","126668278386012160","Wed Oct 19 14:37:25 +0000 2011","Un nouvel accord signé entre Quanta Computer et #Microsoft + http://t.co/BHFJAtaT" +"microsoft","irrelevant","126666904764030977","Wed Oct 19 14:31:58 +0000 2011","RT @GroenewegBV: Steve Ballmer haalt uit naar #Android http://t.co/LlIxBzeV #nuandroid Dan ben ik ook een nerd, ja daaaaag, slecht zeg v ..." +"microsoft","irrelevant","126666281461096448","Wed Oct 19 14:29:29 +0000 2011","nette Führung durch die neuen Microsoft Österreich Büros gehabt; Danke an Cornelia König #dasneuearbeiten, #microsoft" +"microsoft","irrelevant","126666022517350400","Wed Oct 19 14:28:27 +0000 2011","Steve Ballmer haalt uit naar #Android http://t.co/LlIxBzeV #nuandroid Dan ben ik ook een nerd, ja daaaaag, slecht zeg van #microsoft" +"microsoft","irrelevant","126665539719409664","Wed Oct 19 14:26:32 +0000 2011","Steve (#Microsoft) haalt uit naar #Android http://t.co/nqMvktse" +"microsoft","irrelevant","126665086961065985","Wed Oct 19 14:24:44 +0000 2011","http://t.co/Ir6Ld23I ""Microsoft-Chef Ballmer lästert über Android"" #microsoft #yumad #derstandard" +"microsoft","irrelevant","126665078861869056","Wed Oct 19 14:24:42 +0000 2011","RT @alligatore: #microsoft intervista #vivido sul #cloud http://t.co/ky65puA2" +"microsoft","irrelevant","126664812947181568","Wed Oct 19 14:23:39 +0000 2011","RT @zvitaly: ÐÐ½Ñ‚Ð¸Ð²Ð¸Ñ€ÑƒÑ #Microsoft Security Essentials удалÑет из ÑиÑтемы браузер #Google #Chrome, ÑÑ‡Ð¸Ñ‚Ð°Ñ ÐµÐ³Ð¾ троÑном PWS Win32/Zbot: htt ..." +"microsoft","irrelevant","126664329335541760","Wed Oct 19 14:21:44 +0000 2011","Cachando que @markrussinovich, aparte de ser un maestro #geek, también escribe novelas http://t.co/OcTuUYlo #microsoft" +"microsoft","irrelevant","126664120274653184","Wed Oct 19 14:20:54 +0000 2011","Ah, Steve Ballmer..! 
Altijd goed voor een fijn lachmomentje: http://t.co/zD14Sy6e #microsoft #iphone #mango #android #ios #apple #google" +"microsoft","irrelevant","126663390302187521","Wed Oct 19 14:18:00 +0000 2011","Grattis #microsoft ! Jag mÃ¥ste alltsÃ¥ ta bort Helvetica om jag ska köra IE9 och/eller Outlook?" +"microsoft","irrelevant","126663150148911105","Wed Oct 19 14:17:02 +0000 2011","Steve Ballmer (CEO #Microsoft) heeft flink uitgehaald naar besturingssysteem #Android van #Google. Het zou te ingewikkeld en te vrij zijn" +"microsoft","irrelevant","126662722673844224","Wed Oct 19 14:15:21 +0000 2011","Microsoft: “Şanslıyız ki Yahoo’yu satın almadık†- Bilgi Çağı http://t.co/0gAQ1FpQ aracılığıyla @bilgicagi #Microsoft #SteveBallmer #Yahoo" +"microsoft","irrelevant","126662658991718400","Wed Oct 19 14:15:05 +0000 2011","RT @lyonpat25: @iguanahosting ganamos Socios Hoster del año en Enlance 2012! Ganando Juntos! #Microsoft Felicitaciones :)" +"microsoft","irrelevant","126662635033858049","Wed Oct 19 14:15:00 +0000 2011","#Microsoft превратит любую поверхноÑть в тачÑкрин http://t.co/vkXG7AZ5" +"microsoft","irrelevant","126662553316245504","Wed Oct 19 14:14:40 +0000 2011","Spot-Premiere: Microsoft feiert die Produktfamilie / Erste Arbeit von Crispin, Porter & Bogusky: Micr... http://t.co/KKUAWLc7 #microsoft" +"microsoft","irrelevant","126662533347164161","Wed Oct 19 14:14:35 +0000 2011","Microsoft Security Essentialã®å®šç¾©ãƒ•ァイルãŒã‚¢ãƒƒãƒ—デートã—ã¾ã—ãŸï¼š1.115.102.0 > http://t.co/QuqPibic #Microsoft" +"microsoft","irrelevant","126662436966236160","Wed Oct 19 14:14:12 +0000 2011","Dr. Ogalinski - El Proyecto #OmniTouch de #Microsoft: todo el mundo es un touchscreen http://t.co/qzt26Gj8" +"microsoft","irrelevant","126661775922966528","Wed Oct 19 14:11:35 +0000 2011","videohovory na facebooku jsou skvÄ›lé. #Microsoft by mohl integrovat tuto funkci do další aktualizace #windowsphone, pak to bude geniální" +"microsoft","irrelevant","126661415510614018","Wed Oct 19 14:10:09 +0000 2011","estoy agradecido con #Dios y con #Microsoft por la oportunidad que me han dado de ser parte del grupo selecto de los #msp! Gracias Señor!!!!" +"microsoft","irrelevant","126660850051321858","Wed Oct 19 14:07:54 +0000 2011","Una video intervista con la #Microsoft per ""il protagonista dell'innovazione sei tu"" :-) http://t.co/hIgNHuTR" +"microsoft","irrelevant","126660622883631104","Wed Oct 19 14:07:00 +0000 2011","Vos mises à jour ne s'installent pas! Posez vos questions sur #Microsoft Windows Update sur le site de support ici: http://t.co/tazYoDb8 ^IT" +"microsoft","irrelevant","126660026013188097","Wed Oct 19 14:04:38 +0000 2011","RT @mhumpolec: Povidani s #Microsoft o #Azure. V Evrope uz je data centrum i v Amsterodamu, chysta se v Nemecku a Rusku. V kazdem pres 2 ..." +"microsoft","irrelevant","126659873579610113","Wed Oct 19 14:04:01 +0000 2011","#Microsoft desarrolla una pantalla táctil proyectada sobre cualquier superficie http://t.co/3IrWp4ct vía @iHerreros" +"microsoft","irrelevant","126659604628242432","Wed Oct 19 14:02:57 +0000 2011","Blog Kroker's Look @ IT: Der #Apple-Schock im PC-Markt - mit unerwartetem Potenzial für #Microsoft --> http://t.co/29vCUgQz" +"microsoft","irrelevant","126657946758295552","Wed Oct 19 13:56:22 +0000 2011","OmniTouch, el nuevo invento de #Microsoft permitirá usar un teclado en cualquier superficie. 
http://t.co/Rf9moB8q" +"microsoft","irrelevant","126657340920438785","Wed Oct 19 13:53:57 +0000 2011","#Tecnología #Bill_Gates testificará en el #juicio por #monopolio entre #Microsoft y #Novell http://t.co/o1WGjYTO ➨ @theINQ" +"microsoft","irrelevant","126656806368968704","Wed Oct 19 13:51:50 +0000 2011","Steve Ballmer deelt flinke sneer uit naar Android. #beginvaneenstijd? #microsoft #ballmer #android http://t.co/Q3aVgQhn" +"microsoft","irrelevant","126656462566080513","Wed Oct 19 13:50:28 +0000 2011","http://t.co/Qewndku6 #Nokia workers ask,is CEO Stephen Elop a #Microsoft mole??" +"microsoft","irrelevant","126656050664443904","Wed Oct 19 13:48:50 +0000 2011","Natuuuuuuurlijk vraagt een #microsoft product om tig onnodige handmatig handelingen #fail - http://t.co/ijnZkFVJ" +"microsoft","irrelevant","126655886893649921","Wed Oct 19 13:48:11 +0000 2011","Minori investimenti e maggiore efficienza con le Soluzioni #Cloud #Microsoft - Brescia, 9 novembre --> http://t.co/6gq6frlk" +"microsoft","irrelevant","126655535545204737","Wed Oct 19 13:46:47 +0000 2011","RT @MicrosoftAiuta: #Microsoft vi aspetta a #SMAU dal 19 al 21 ottobre http://t.co/lDhDVeWx, venite a trovarci! Siamo al Pad. 4 Stand A23" +"microsoft","irrelevant","126655432361123840","Wed Oct 19 13:46:22 +0000 2011","RT @microsoftfrance: Cette année encore, #Microsoft remporte le prix du meilleur Service Client 2012 http://t.co/vWHpxkVP" +"microsoft","irrelevant","126654661322211328","Wed Oct 19 13:43:19 +0000 2011","Cette année encore, #Microsoft remporte le prix du meilleur Service Client 2012 http://t.co/vWHpxkVP" +"microsoft","irrelevant","126654651654340608","Wed Oct 19 13:43:16 +0000 2011","Schaue mir #WMI #Performance Counters auf #Deutsch an, und kriege #Schmerzen… ICH WILL #SCHMERZENSGELD VON #MICROSOFT!!! #fail" +"microsoft","irrelevant","126654309894070273","Wed Oct 19 13:41:55 +0000 2011","#Job #ICT Verkaufspezialist von Infrastrukturlösungen an Geschäftskunden (w/m): Microsoft Schweiz... http://t.co/admV6cNg #Microsoft #IT" +"microsoft","irrelevant","126654232538521600","Wed Oct 19 13:41:36 +0000 2011","ESTUDIO: #Microsoft vende más pese a alto precio http://t.co/FnFP1JWU" +"microsoft","irrelevant","126653714357432320","Wed Oct 19 13:39:33 +0000 2011","Two Extremes of Touch Interaction - +#Microsoft Research http://t.co/nqM3qF7p // #OmniTouch 坿”œå¼å¤šé»žè§¸æŽ§ç³»çµ±" +"microsoft","irrelevant","126653276040073216","Wed Oct 19 13:37:48 +0000 2011","#microsoft intervista #vivido sul #cloud http://t.co/ky65puA2" +"microsoft","irrelevant","126652806546464768","Wed Oct 19 13:35:56 +0000 2011","@iguanahosting ganamos Socios Hoster del año en Enlance 2012! Ganando Juntos! #Microsoft Felicitaciones :)" +"microsoft","irrelevant","126652711188963328","Wed Oct 19 13:35:34 +0000 2011","#OmniTouch von #Microsoft verwandelt beliebige Flächen in Touchscreens" +"microsoft","irrelevant","126652683040993280","Wed Oct 19 13:35:27 +0000 2011","סטיב ב×למר מבטיח לנו ""כמה"" מכשירי חלונות פון בשבוע ×”×‘× http://t.co/GHBZfbX3††#Nokia ‎#microsoft #windowsphone" +"microsoft","irrelevant","126652553038540800","Wed Oct 19 13:34:56 +0000 2011","Steve Ballmer haalt uit naar #Android http://t.co/Zzp06KpZ Ironisch als je bedenkt dat #Microsoft goed aan Android verdient..." 
+"microsoft","irrelevant","126652025181179904","Wed Oct 19 13:32:50 +0000 2011","Programa de Estágio Microsoft - Inscrava-se ou Indique para um amigo: http://t.co/r1aVtYOf #estágio #Microsoft" +"microsoft","irrelevant","126651878351183873","Wed Oct 19 13:32:15 +0000 2011","#Microsoft, GeleceÄŸin Teknolojisini AraÅŸtırıyor! http://t.co/J45Gm98h #haberleryorumlar #arayüz #kamera" +"microsoft","irrelevant","126651164325457920","Wed Oct 19 13:29:25 +0000 2011","#Reflex Online is nu ook beschikbaar op #Microsoft Marketplace. Meer hierover op ons blog: http://t.co/OZkelPhS" +"microsoft","irrelevant","126650231159922689","Wed Oct 19 13:25:42 +0000 2011","RT @NielsMoelard: En daar gaan we weer: *pakt vergrootglas erbij* zoeken naar die leuke nieuwe collega! http://t.co/jy7fBeID #vacature # ..." +"microsoft","irrelevant","126650224625205248","Wed Oct 19 13:25:41 +0000 2011","#Microsoft hat den Prototyp ihres OmniTouch vorgestellt. Dieser macht fast jede Oberfläche zum Touchscreen. http://t.co/5370kR8v" +"microsoft","irrelevant","126650108640100352","Wed Oct 19 13:25:13 +0000 2011","RT @wissekomm: #Veeam Software presenteert nworks Management Pack 5.7 voor #Microsoft System Center http://t.co/Xm9fvcPd" +"microsoft","irrelevant","126650101304262656","Wed Oct 19 13:25:11 +0000 2011","En daar gaan we weer: *pakt vergrootglas erbij* zoeken naar die leuke nieuwe collega! http://t.co/jy7fBeID #vacature #microsoft" +"microsoft","irrelevant","126650034052792321","Wed Oct 19 13:24:55 +0000 2011","¿Piensas lanzar tu propio emprendimiento? #Microsoft te acerca los mejores recursos tecnológicos. http://t.co/EBswmyUi #SoloPensamosenTI" +"microsoft","irrelevant","126649985897996288","Wed Oct 19 13:24:44 +0000 2011","Nuove #funzioni e qualche «aggiustamento»: #Microsoft rilancia su Hotmail: Microsoft continua… http://t.co/JafjT8UB" +"microsoft","irrelevant","126649791299063808","Wed Oct 19 13:23:57 +0000 2011","#Microsoft Research cumple 20 años. Una historia de importantes innovaciones tecnológicas. ¡Vamos por más! http://t.co/NWSTmMgC" +"microsoft","irrelevant","126649528924389378","Wed Oct 19 13:22:55 +0000 2011","Ð’ #Microsoft придумали, как превратить любую поверхноÑть в ÑенÑорный Ñкран http://t.co/fBROz6Mf" +"microsoft","irrelevant","126648588129419264","Wed Oct 19 13:19:11 +0000 2011","RT @StevenBouquet: BREKEND: #Microsoft en #Linux nu beiden in de aanbieding! http://t.co/oyeqR80p" +"microsoft","irrelevant","126648260352942080","Wed Oct 19 13:17:52 +0000 2011","RT @IMPULSONEGOCIOS: #Microsoft convierte en táctil a cualquier superficie [VIDEO] http://t.co/RsXmVUxL" +"microsoft","irrelevant","126648259040120832","Wed Oct 19 13:17:52 +0000 2011","Estou com tanta dó do windows phone... CHUPA ESSA #microsoft http://t.co/jkGFN9rr trem lindo de deus!!!! android 4.0" +"microsoft","irrelevant","126648114886086657","Wed Oct 19 13:17:18 +0000 2011","Ahaaa! Ich hab mein Problemkind gefunden: KB2553065 ist der Übeltäter. #microsoft #office #update" +"microsoft","irrelevant","126648049459142656","Wed Oct 19 13:17:02 +0000 2011","#Microsoft - Lanzara nueva tecnologia #Omnitouch que usa #Kinect: Si la tendencia son las pantallas touch o ""Mul... http://t.co/A4aSHlF6" +"microsoft","irrelevant","126647771821383682","Wed Oct 19 13:15:56 +0000 2011","#Microsoft convierte en táctil a cualquier superficie [VIDEO] http://t.co/RsXmVUxL" +"microsoft","irrelevant","126647390282326017","Wed Oct 19 13:14:25 +0000 2011","BREKEND: #Microsoft en #Linux nu beiden in de aanbieding! 
http://t.co/oyeqR80p" +"microsoft","irrelevant","126646647856955392","Wed Oct 19 13:11:28 +0000 2011","Hört einen #Vortrag von Leslie Lampert, einem #Microsoft Guru" +"microsoft","irrelevant","126646439924334592","Wed Oct 19 13:10:38 +0000 2011","Microsoft CIO Leadership Circle ganz im Zeichen des neuen Arbeitens: Wien (pts021/19.10.2011/15:05) -... http://t.co/n5HN2jJp #microsoft" +"microsoft","irrelevant","126646302032396289","Wed Oct 19 13:10:06 +0000 2011","@dryab Да, потому что только наш рынок любит #Microsoft ))" +"microsoft","irrelevant","126645036426334208","Wed Oct 19 13:05:04 +0000 2011","http://t.co/VgMYdlDS found article on Heise.de #Microsoft kündigt #offiziellen SQL Server Treiber für Linux #an" +"microsoft","irrelevant","126644764383780865","Wed Oct 19 13:03:59 +0000 2011","Wauw.. Enorm inspirerende dag gehad op het #Microsoft kantoor over #socialmedia en #erp. Dank aan @JudithEva!" +"microsoft","irrelevant","126644360434565124","Wed Oct 19 13:02:23 +0000 2011","MS社ã®ã€Œãã‚ãã‚XPã‚„ã‚ã¦ã€ã«ãƒãƒƒãƒˆä½æ°‘「ã¯ï¼Ÿã€ http://t.co/YGmidiiw #Microsoft #WindowsXP" +"microsoft","irrelevant","126643786557296640","Wed Oct 19 13:00:06 +0000 2011","Nuevo #Microsoft #PocketTouch (para usar el telefono sin sacarlo del bolsillo) http://t.co/0LdAVDnj" +"microsoft","irrelevant","126643010296487936","Wed Oct 19 12:57:01 +0000 2011","#Microsoft'un desteklediÄŸi #OmniTouch Projector, tüm yüzeyleri dokunmatik ekrana dönüştürüyor http://t.co/9yt5Y9Ab aracılığıyla @geekosystem" +"microsoft","irrelevant","126642364667269120","Wed Oct 19 12:54:27 +0000 2011","Zie ik net dat ik volgende week samen met twitterloze Karsten naar een #prospect in #Ridderkerk ga om te praten over, #Microsoft #licenties." +"microsoft","irrelevant","126641501978632192","Wed Oct 19 12:51:01 +0000 2011","obrigado #cpfl obrigado #microsoft obrigado #totvs #hellonearth" +"microsoft","irrelevant","126640207167631361","Wed Oct 19 12:45:52 +0000 2011","5 meses con una #Xbox y ya me ha corrompido el perfil 6 veces. Así como va a cambiar mi impresión de #Microsoft?" +"microsoft","irrelevant","126639750756040706","Wed Oct 19 12:44:04 +0000 2011","Casa de herrero, cuchillo de palo!!! http://t.co/xxtfy71a #DCE2005 #Microsoft <= Como NO manejar errores!!! Link: http://t.co/89Cr2txm" +"microsoft","irrelevant","126638913145159681","Wed Oct 19 12:40:44 +0000 2011","7 minutes to go!! +http://t.co/VqueOskE #ebaymobile +#ebay #office #mac #apple #microsoft #office" +"microsoft","irrelevant","126638752167759872","Wed Oct 19 12:40:06 +0000 2011","#dmo #it #cloud #Cloud_Computing #Google #Microsoft Ballmer, è il software as a service la vera sfida del momento:... http://t.co/vZICdHFW" +"microsoft","irrelevant","126638520034013184","Wed Oct 19 12:39:10 +0000 2011","RT @AvanadeSpain: RT @MuyCanal: MuyCanal: Avanade y Ferranti Computer ofrecen un #ERP basado en #Microsoft Dynamics http://t.co/Maghjzc ..." +"microsoft","irrelevant","126637475153186816","Wed Oct 19 12:35:01 +0000 2011","IT-Concern enige #Microsoft dienstverlener in Gorinchem met 1 Gold & 3 Silver statussen #pinpoint http://t.co/rIPJRmZS. Dat zegt genoeg!" +"microsoft","irrelevant","126637471676104704","Wed Oct 19 12:35:00 +0000 2011","IT-Concern enige #Microsoft dienstverlener in Gorinchem met 1 Gold & 3 Silver statussen #pinpoint http://t.co/ugzVJmmr. Dat zegt genoeg!" 
+"microsoft","irrelevant","126636564469121024","Wed Oct 19 12:31:24 +0000 2011","Kinect Sports 2 : Nos premières impressions http://t.co/lFXtQIVR #kinect #microsoft" +"microsoft","irrelevant","126636005922050048","Wed Oct 19 12:29:11 +0000 2011","#Microsoft раÑÑледует проблемы пропажи клавиатуры в #Windows #Phone #Mango http://t.co/1JqdUCjr" +"microsoft","irrelevant","126633706566856704","Wed Oct 19 12:20:03 +0000 2011","Estava ali na #MICROSOFT tomando um cafezinho :D" +"microsoft","irrelevant","126632414830276608","Wed Oct 19 12:14:55 +0000 2011","RT @WindowsFrance: Avec OmniTouch, faites de n'importe quelle surface un écran tactile http://t.co/kBP15kTt #Microsoft #OmniTouch" +"microsoft","irrelevant","126631816873517056","Wed Oct 19 12:12:32 +0000 2011","No sé que sería de mi sin Skype RT @GrupoTrevenque: #Skype ya es oficialmente parte de #Microsoft ¿Utilizas es… (cont) http://t.co/nhJxMcfJ" +"microsoft","irrelevant","126630944999346176","Wed Oct 19 12:09:04 +0000 2011","Wow!RT @fmlopez48 #Microsoft trabaja en tecnología con la que cualquier superficie podrá ser usada como pantalla táctil http://t.co/1qbeGAI5" +"microsoft","irrelevant","126630702392426496","Wed Oct 19 12:08:06 +0000 2011","! RT @AlbertLorente: En España, se pasa más tiempo en sites de #Microsoft que en #Facebook y en sites de #Google... http://t.co/cjNWU8rv" +"microsoft","irrelevant","126630465246466048","Wed Oct 19 12:07:10 +0000 2011","Avec OmniTouch, faites de n'importe quelle surface un écran tactile http://t.co/kBP15kTt #Microsoft #OmniTouch" +"microsoft","irrelevant","126629195546755072","Wed Oct 19 12:02:07 +0000 2011","@renatomb estou me sentindo na #Microsoft - fazendo as escolhas por mim..." +"microsoft","irrelevant","126628979636572160","Wed Oct 19 12:01:16 +0000 2011","RT @profissionaisti: http://t.co/1ioM4Fqs O que realmente pode mudar com o Windows 8 #windows8 #microsoft" +"microsoft","irrelevant","126628891929493504","Wed Oct 19 12:00:55 +0000 2011","ff Account aan Microsoft Outlook toevoegen #microsoft" +"microsoft","irrelevant","126628699402539008","Wed Oct 19 12:00:09 +0000 2011","Betere #presentatie geven? Kijk eens naar de #microsoft #powerpoint #cursussen en #trainingen: http://t.co/fxbrysGk #cursusvoor" +"microsoft","irrelevant","126628570536742912","Wed Oct 19 11:59:38 +0000 2011","http://t.co/1ioM4Fqs O que realmente pode mudar com o Windows 8 #windows8 #microsoft" +"microsoft","irrelevant","126626670181490688","Wed Oct 19 11:52:05 +0000 2011","RT @olafiolio: Hallelujah! RT @sammydekeijne Voor links van Martin #Microsoft zijn QR codes dan weer wel handig..in een publiek zoals di ..." +"microsoft","irrelevant","126626576069693440","Wed Oct 19 11:51:43 +0000 2011","Op weg 020 meeting #microsoft. Ben benieuwd naar stavaza MD traject people managers. Morgen vlieg ik USA voor reünie Class '86!!" +"microsoft","irrelevant","126626327888539648","Wed Oct 19 11:50:43 +0000 2011","Hallelujah! RT @sammydekeijne Voor links van Martin #Microsoft zijn QR codes dan weer wel handig..in een publiek zoals dit althans :) #AUGNL" +"microsoft","irrelevant","126625929215754240","Wed Oct 19 11:49:08 +0000 2011","Voor die links van Martin #Microsoft zijn QR codes dan weer wel handig.. 
in een publiek zoals dit althans :) #AUGNL" +"microsoft","irrelevant","126625386565087232","Wed Oct 19 11:46:59 +0000 2011","RT @MuyCanal: MuyCanal: Avanade y Ferranti Computer ofrecen un #ERP basado en #Microsoft Dynamics http://t.co/MaghjzcS @msdynamicsspain #ax" +"microsoft","irrelevant","126625317157744640","Wed Oct 19 11:46:42 +0000 2011","#microsoft tablet ziet erg goed uit, wel 3 jaar te laat #adobeusergroup" +"microsoft","irrelevant","126625265928515584","Wed Oct 19 11:46:30 +0000 2011","#OmniTouch - ekran dotykowy na dÅ‚oni od #Microsoft. Czytaj wiÄ™cej @PGSTech http://t.co/Ec6mzWiz" +"microsoft","irrelevant","126624831297949696","Wed Oct 19 11:44:47 +0000 2011","#Apple, #Google, #Microsoft. Um was streiten wir eigentlich? Wer der bessere #Abhördienst ist? Oder was?" +"microsoft","irrelevant","126623895334817792","Wed Oct 19 11:41:03 +0000 2011","RT @zonaredcom: #Microsoft tiene un proyecto secreto para Kinect con las películas de #Pixar como protagonistas: http://t.co/0HhcjEgF" +"microsoft","irrelevant","126622818220785664","Wed Oct 19 11:36:47 +0000 2011","RT @inkiworld: Zo leuk om te zien dat #Microsoft #Apple dingen na maakt en er vernieuwend in denkt te zijn :P #AUGNL" +"microsoft","irrelevant","126622165595459584","Wed Oct 19 11:34:11 +0000 2011","Zo leuk om te zien dat #Microsoft #Apple dingen na maakt en er vernieuwend in denkt te zijn :P #AUGNL" +"microsoft","irrelevant","126622031163822081","Wed Oct 19 11:33:39 +0000 2011","Microsoft verlängert Umsatzgarantien für Suchpartner Yahoo: Yahoos Interims-CEO Tim Morse zufolge sol... http://t.co/Kcetw7RT #microsoft" +"microsoft","irrelevant","126622030006202368","Wed Oct 19 11:33:39 +0000 2011","Microsoft-CEO verspricht schöne Windows-Phone-Geräte: Apple sei erfolgreich, weil schöne Smartphones ... http://t.co/I26cnxh8 #microsoft" +"microsoft","irrelevant","126621969461415936","Wed Oct 19 11:33:24 +0000 2011","@Bob_Om ey bwana! what is? hehehe! Happy birthday my broda enjoy najua mostlikely umelink na manerds @dennisbett @Mwashinsky #Microsoft" +"microsoft","irrelevant","126621883411070976","Wed Oct 19 11:33:04 +0000 2011","E dai uma empresa va me contratar pra ser modelo, e eu serie socio da #Microsoft ou da #Appel" +"microsoft","irrelevant","126621712656760832","Wed Oct 19 11:32:23 +0000 2011","RT @remcovandenhout: Meest comfortabele Touch area voor tablets, volgens onderzoek #Microsoft #augnl 2thumbsUp http://t.co/yPeRGaf1" +"microsoft","irrelevant","126621298272112643","Wed Oct 19 11:30:44 +0000 2011","RT @remcovandenhout: Meest comfortabele Touch area voor tablets, volgens onderzoek #Microsoft #augnl 2thumbsUp http://t.co/ChwcvZLE" +"microsoft","irrelevant","126620982009008129","Wed Oct 19 11:29:30 +0000 2011","Meest comfortabele Touch area voor tablets, volgens onderzoek #Microsoft #augnl 2thumbsUp http://t.co/ChwcvZLE" +"microsoft","irrelevant","126620721236545536","Wed Oct 19 11:28:27 +0000 2011","Nice. Duim navigatie voor de tablet apps :) #augnl #microsoft" +"microsoft","irrelevant","126620532060848129","Wed Oct 19 11:27:41 +0000 2011","#Microsoft en el futuro: #Kinect y su tecnología avanzada http://t.co/JOVFeZRl" +"microsoft","irrelevant","126619975518666752","Wed Oct 19 11:25:29 +0000 2011","#Microsoft y el nuevo #Omnitouch, convierte cualquier superficie en pantalla tactil, http://t.co/TkgcW6y8" +"microsoft","irrelevant","126618143966756864","Wed Oct 19 11:18:12 +0000 2011","@jordanomazzoni E pretendo em breve me certificar nas soluções de virtualização #Microsoft também, #HyperV gera muitas oportuniodades." 
+"microsoft","irrelevant","126618143098548224","Wed Oct 19 11:18:12 +0000 2011","Mooie mogelijkheden met #Microsoft Surface. Volgende stap: concept naar consument? http://t.co/qnqj5Ho0" +"microsoft","irrelevant","126617262722531328","Wed Oct 19 11:14:42 +0000 2011","Tomorrow a #IPv6 presentatie #NLUUG, #Microsoft dedication to #IPv6" +"microsoft","irrelevant","126616352340447233","Wed Oct 19 11:11:05 +0000 2011","[News] Touchscreen auf allen Oberflächen... http://t.co/gpT4pTr0 #touchscreen #microsoft #omnitouch" +"microsoft","irrelevant","126615874508558336","Wed Oct 19 11:09:11 +0000 2011","Nueva tecnología convierte cualquier superficie en una pantalla multitactil. http://t.co/EDibLL5V #Microsoft #omnitouch" +"microsoft","irrelevant","126615672351498240","Wed Oct 19 11:08:23 +0000 2011","#Avanade staat op 16 in de top 100 financieel krachtigste IT bedrijven! Boven moederbedrijven #Microsoft en #Accenture! http://t.co/z5k7qgux" +"microsoft","irrelevant","126615378976718848","Wed Oct 19 11:07:13 +0000 2011","Christina Hendricks et Sean Faris dans NFS: C'est aujourd'hui qu'Electronic Arts a annoncé q... http://t.co/1wh2g6Kd #Microsoft #Xbox360" +"microsoft","irrelevant","126615034007789569","Wed Oct 19 11:05:51 +0000 2011","Bij #augnl #touchme #microsoft http://t.co/MA84ARZF" +"microsoft","irrelevant","126614513784074240","Wed Oct 19 11:03:47 +0000 2011","ultiem zwaktebod: concurrentie hekelen om je eigen product te pushen. Ballmer: Android is te complex http://t.co/xq0tRp44 #microsoft #fud" +"microsoft","irrelevant","126614370150129664","Wed Oct 19 11:03:12 +0000 2011","Vanmiddag woon ik een #ETTU evenement over zaakgericht werken met #SharePoint2010 bij #microsoft op #schiphol bij" +"microsoft","irrelevant","126614145662599169","Wed Oct 19 11:02:19 +0000 2011","Microsoft entwickelt Sensor für Arbeitskleidung: Es geht nicht etwa um den Grad der Verschmutzung, so... http://t.co/rzTZ9G1P #microsoft" +"microsoft","irrelevant","126614144299446272","Wed Oct 19 11:02:19 +0000 2011","Microsoft startet globale Dachkampagne für Consumer-Produkte: Ende des Monats feiert Microsoft eine M... http://t.co/qMX8opY0 #microsoft" +"microsoft","irrelevant","126614136242180097","Wed Oct 19 11:02:17 +0000 2011","RT @SPIEGELONLINE: Wie ist es #Apple gelungen, seinen größten Rivalen #Microsoft endgültig abzuhängen? Der @guardian erklärt's http://t. ..." +"microsoft","irrelevant","126613470245437440","Wed Oct 19 10:59:38 +0000 2011","RT @pimentacom: #CPqD e #Microsoft testam opção para banda larga. Leia matéria que foi destaque no Brasil Econômico: http://t.co/wImxVTlB" +"microsoft","irrelevant","126613334098325504","Wed Oct 19 10:59:05 +0000 2011","Two more hours for SALE!! + +http://t.co/VqueOskE #ebaymobile #ebay #sale #mac #office #microsoft #apple #2011 #original #new" +"microsoft","irrelevant","126612194594000896","Wed Oct 19 10:54:34 +0000 2011","#Microsoft vi aspetta a #SMAU dal 19 al 21 ottobre http://t.co/lDhDVeWx, venite a trovarci! Siamo al Pad. 4 Stand A23" +"microsoft","irrelevant","126611679961300993","Wed Oct 19 10:52:31 +0000 2011","#Apple es la firma tecnológica más valorada por delante de #Microsoft e #IBM http://t.co/7866c2bL #tecnify" +"microsoft","irrelevant","126611107266834433","Wed Oct 19 10:50:14 +0000 2011","Que raroooo en #microsoft les funciona todo menos el departamento de bajas... Que casualidad!! 
Veremos mañana" +"microsoft","irrelevant","126610365852303361","Wed Oct 19 10:47:18 +0000 2011","Estoy flipando con #microsoft y #xbox no me dejan quitar mi tarjeta de credito para cobrarme la suscripcion gold!!! Maldito gates!!" +"microsoft","irrelevant","126608861808431107","Wed Oct 19 10:41:19 +0000 2011","Un teclado táctil sobre cualquier superficie · http://t.co/596vTdrm http://t.co/zhyeGWaZ #Microsoft #Tecnologia" +"microsoft","irrelevant","126607587406913536","Wed Oct 19 10:36:15 +0000 2011","Wie ist es #Apple gelungen, seinen größten Rivalen #Microsoft endgültig abzuhängen? Der @guardian erklärt's http://t.co/yE6lT3Bo (CvD)" +"microsoft","irrelevant","126606198911930368","Wed Oct 19 10:30:44 +0000 2011","#Microsoft se réjouit d’avoir loupé le rachat de #Yahoo!" +"microsoft","irrelevant","126606101671186432","Wed Oct 19 10:30:21 +0000 2011","Ballmer: ""Necesitas ser un científico para manejar Android"" http://t.co/7iEeAtNR #Microsoft #Steve_Ballmer #SteveBallmer #Windows_Phone" +"microsoft","irrelevant","126605924273111042","Wed Oct 19 10:29:39 +0000 2011","IMEã«ç§‹ç”°å¼è¾žæ›¸ãªã‚“ã¦ã‚ã£ãŸã‚‰ãŠã‚‚ã—ã‚ã„ã®ã« #Akita #IME #Microsoft" +"microsoft","irrelevant","126604075809771520","Wed Oct 19 10:22:18 +0000 2011","Terminaux Windows 8 empêchant d'installer Linux : la polémique enfle http://t.co/WIBbwi5t #Microsoft" +"microsoft","irrelevant","126601340242767872","Wed Oct 19 10:11:26 +0000 2011","#microsoft satisfait d'avoir rate #yahoo depuis quand les échecs sont des victoires? M #ballmer va falloir laisser la place a un visionnaire" +"microsoft","irrelevant","126599691881299968","Wed Oct 19 10:04:53 +0000 2011","Top 10 de páginas web en las que invertimos más tiempo los españoles http://t.co/By5CAFSD via @trecebits #microsoft #tic" +"microsoft","irrelevant","126599445168144384","Wed Oct 19 10:03:54 +0000 2011","GameFly's 'Under $20' sale has too many good values http://t.co/7eBTsjPW #3ds #deadspace2 #ds #lanoire #microsoft" +"microsoft","irrelevant","126598693351723010","Wed Oct 19 10:00:55 +0000 2011","#FailSteveJobs #Microsoft โดย ARIP ฮาๆ มีสับสนนิดโหน่ย XD cc @failinth =P http://t.co/zPzNMwVe" +"microsoft","irrelevant","126598545062105088","Wed Oct 19 10:00:19 +0000 2011","Microsoft OmniTouch, cuando cualquier superficie puede ser una ""pantalla táctil"" http://t.co/ZvZxxzUl #Microsoft #OmniTouch #evento" +"microsoft","irrelevant","126597416693665793","Wed Oct 19 09:55:50 +0000 2011","Улучшим продукты компании #Microsoft вмеÑте! http://t.co/TheQ6sG9" +"microsoft","irrelevant","126596658929733632","Wed Oct 19 09:52:50 +0000 2011","Plan Carrera #microsoft technical:Certificación desarrollador y diseñador apps empresariales #visualstudio2010 http://t.co/cVd2b2Rz Gratuito" +"microsoft","irrelevant","126596412187226112","Wed Oct 19 09:51:51 +0000 2011","#Job #ICT Sachbearbeiter Immobilienbewirtschaftung (m/w): Gfeller Treuhand und Verwaltungs AG, Dübendorf http://t.co/9ZrHM9sa #Microsoft #IT" +"microsoft","irrelevant","126596388615229441","Wed Oct 19 09:51:45 +0000 2011","#Microsoft dirà che #Google ha usato il brevetto dell'animazione del cerchio?! | Google Presentations New Version http://t.co/vmzshHUQ" +"microsoft","irrelevant","126595424810307584","Wed Oct 19 09:47:55 +0000 2011","RT @cynblackstone: Zeer enthousiaste reacties op bedrijfsbezoek aan #Microsoft voor de (interim) #secretaresse, dank Kelly en Brigitte v ..." 
+"microsoft","irrelevant","126592053055459328","Wed Oct 19 09:34:32 +0000 2011","Unterstützenswerte Aktion der #fsf+ http://t.co/RCOt2NWi #microsoft-" +"microsoft","irrelevant","126591243294748672","Wed Oct 19 09:31:19 +0000 2011","Steve Ballmer, CEO de #Microsoft ""Necesitas ser un científico para manejar Android"" Gracias majo! Jajjajaa" +"microsoft","irrelevant","126590333520855040","Wed Oct 19 09:27:42 +0000 2011","Un teclado tactil para cualquier superficie.. http://t.co/SQXjkJAo #microsoft" +"microsoft","irrelevant","126590035314229249","Wed Oct 19 09:26:31 +0000 2011","#Microsoft tester browser sikkerhet. Rart hvordan #IE9 plutselig ble best og langt bedre enn #Chrome og #Firefox. http://t.co/dUthF0nO" +"microsoft","irrelevant","126589888266108929","Wed Oct 19 09:25:55 +0000 2011","Wie gut, dass #Google beim #Nexus und #Android4 den selben Fehler macht wie #Microsoft. Konzentration auf Spaß, nicht auf's Business ... ^.^" +"microsoft","irrelevant","126589139150839808","Wed Oct 19 09:22:57 +0000 2011","RT @NielsMoelard: Ik zoek een paar nieuwe collega's. Interesse? Kijk op onze site voor actuele #Microsoft #vacatures bit.ly/phUYrg RT = ..." +"microsoft","irrelevant","126589085304369152","Wed Oct 19 09:22:44 +0000 2011","Ik zoek een paar nieuwe collega's. Interesse? Kijk op onze site voor actuele #Microsoft #vacatures bit.ly/phUYrg RT = fijn #Zwolle" +"microsoft","irrelevant","126588920958955521","Wed Oct 19 09:22:05 +0000 2011","Il n'y a pas qu'à la maison que #windows plante :D http://t.co/TYh0AbxL #bug #BSOD #microsoft #plantage #lieuxpublics" +"microsoft","irrelevant","126588570961068032","Wed Oct 19 09:20:41 +0000 2011","Gibt es nun auch Sicherheitslücken bei #Microsoft? Mehrere Seiten berichten über gehackte Xbox-Live-Accounts: http://t.co/4GZ6fVkU #xbox" +"microsoft","irrelevant","126586819713310720","Wed Oct 19 09:13:44 +0000 2011","Pozdrowienia z sali A #mts2011 #Microsoft #surface obecnie http://t.co/QfQkHAYd" +"microsoft","irrelevant","126586599772389376","Wed Oct 19 09:12:51 +0000 2011","sunt la http://t.co/x6icubdz #microsoft #events" +"microsoft","irrelevant","126586563147743232","Wed Oct 19 09:12:43 +0000 2011","RT @tinkengil: Ähm ja, genau #Microsoft. http://t.co/lQjuq0s7 - hatte ich auch schon ^^" +"microsoft","irrelevant","126585997814280192","Wed Oct 19 09:10:28 +0000 2011","RT @fdruel: Une nouvelle interface #haptique signée #Microsoft et #CanergieMellon : #OmniTouch. Tactile et ""wearable"" (cc @Billaut) http ..." +"microsoft","irrelevant","126585826955104256","Wed Oct 19 09:09:47 +0000 2011","Conoce Microsoft Dynamics Nav, Software una completa gestión integrada de tu negocio http://t.co/oK70bVgk #navision #microsoft" +"microsoft","irrelevant","126585200355454976","Wed Oct 19 09:07:18 +0000 2011","Une nouvelle interface #haptique signée #Microsoft et #CanergieMellon : #OmniTouch. Tactile et ""wearable"" (cc @Billaut) http://t.co/6ODVjEQG" +"microsoft","irrelevant","126583539662733312","Wed Oct 19 09:00:42 +0000 2011","Nieuw partnership #DigiProfs , #Microsoft en #IP-Randsteden voor #Office365 . 2000 Gratis intrductie cursussen ter beschikking." +"microsoft","irrelevant","126583374096764928","Wed Oct 19 09:00:02 +0000 2011","RT @bestrelations: ¿Por qué necesito jugar al golf los domingos? http://t.co/hvoEIp2Z Muy estilo USA, pero ingeniosa campaña #Microsoft" +"microsoft","irrelevant","126582476121444352","Wed Oct 19 08:56:28 +0000 2011","Povidani s #Microsoft o #Azure. V Evrope uz je data centrum i v Amsterodamu, chysta se v Nemecku a Rusku. 
V kazdem pres 250 000 serveru." +"microsoft","irrelevant","126581464052678656","Wed Oct 19 08:52:27 +0000 2011","Windows Phone News: Nokia, Sony und kommende Updates - Windows-News: http://t.co/BlU7a24D #wp7 #nokia #microsoft" +"microsoft","irrelevant","126579540070907904","Wed Oct 19 08:44:48 +0000 2011","CEO Microsoft: Kami Beruntung Tak Akuisisi Yahoo http://t.co/Viy6W6Z0 + #Microsoft" +"microsoft","irrelevant","126579035093479425","Wed Oct 19 08:42:48 +0000 2011","Wenn #Microsoft eine touchfähige #Office Suite für iOS rausbringen würde, wäre ich sehr entzückt!" +"microsoft","irrelevant","126578736148652032","Wed Oct 19 08:41:37 +0000 2011","#Job #ICT Windows Client Engineering im IT Forschungsumfeld!: GSI Consultants, Zürich http://t.co/0OQ1qvEn #Microsoft #IT" +"microsoft","irrelevant","126577183060799488","Wed Oct 19 08:35:26 +0000 2011","RT @ITOostNederland: #GripmetICT, 10 nov in de #GrolschVeste met oa #Citrix, #VMware, #Microsoft, #RES, #Dell. Meer info kijk op: http:/ ..." +"microsoft","irrelevant","126576827476094976","Wed Oct 19 08:34:02 +0000 2011","#Forbes fordert Steve Ballmer zum Rücktritt auf http://t.co/kpO2TPt9 via @forbes #microsoft" +"microsoft","irrelevant","126576629395898368","Wed Oct 19 08:33:14 +0000 2011","Management-Tool aus der Cloud: #Microsoft erweitert #Intune um Softwareverteilung und Remote Task http://t.co/b43OHRlQ @windows" +"microsoft","irrelevant","126576294359072768","Wed Oct 19 08:31:54 +0000 2011","RT @MichaelKroker: Kroker's Look @ IT: Der #Apple-Schock im PC-Markt - mit unerwartetem Potenzial für #Microsoft --> http://t.co/29vCUgQz" +"microsoft","irrelevant","126575943706877953","Wed Oct 19 08:30:31 +0000 2011","Kroker's Look @ IT: Der #Apple-Schock im PC-Markt - mit unerwartetem Potenzial für #Microsoft --> http://t.co/29vCUgQz" +"microsoft","irrelevant","126575853818744832","Wed Oct 19 08:30:09 +0000 2011","#grantthornton geeft gouden tips #verkoop #onderneming. Ook workshops #philips #nobel #rabobank #microsoft #port4growth http://t.co/sfciksak" +"microsoft","irrelevant","126575680585596928","Wed Oct 19 08:29:28 +0000 2011","#Microsoft ruft #Cloud 4 Society Award ins Leben http://t.co/96qcjhlI via @searchsoftware" +"microsoft","irrelevant","126575637942120448","Wed Oct 19 08:29:18 +0000 2011","RT @hectorsm: Qué prioridades tenemos como Pais? Plan Nacional: http://t.co/paeNlAnn #in #microsoft" +"microsoft","irrelevant","126575368692957184","Wed Oct 19 08:28:14 +0000 2011","Oef..#Microsoft trapt Google Toolbar uit #Skype http://t.co/m2..." +"microsoft","irrelevant","126575205048000512","Wed Oct 19 08:27:35 +0000 2011","#Microsoft trapt Google Toolbar uit #Skype http://t.co/m2jCbeJI #googletoolbar" +"microsoft","irrelevant","126574756307808256","Wed Oct 19 08:25:48 +0000 2011","Según el estudio KAR de Ipsos, #Apple es la firma tecnológica más valorada por delante de #Microsoft, #HP e #IBM http://t.co/qlyQbRyd" +"microsoft","irrelevant","126574310176468992","Wed Oct 19 08:24:01 +0000 2011","Zeer enthousiaste reacties op bedrijfsbezoek aan #Microsoft voor de (interim) #secretaresse, dank Kelly en Brigitte voor de goede ontvangst!" +"microsoft","irrelevant","126573880285466625","Wed Oct 19 08:22:19 +0000 2011","En España, se pasa más tiempo en sites de #Microsoft que en #Facebook y que en sites de #Google... http://t.co/zXYauNAq via @TreceBits" +"microsoft","irrelevant","126573790980358145","Wed Oct 19 08:21:58 +0000 2011","#SPOED! Op zoek naar een #Support #Engineer! #HBO #Microsoft #sharepoint #SQL! 
Ben jij analytisch/klantgerich… (cont) http://t.co/5VqqQABY" +"microsoft","irrelevant","126573784961527808","Wed Oct 19 08:21:56 +0000 2011","#SPOED! Op zoek naar een #Support #Engineer! #HBO #Microsoft #sharepoint #SQL! Ben jij analytisch/klantgerich… (cont) http://t.co/RCaS7Kph" +"microsoft","irrelevant","126573378302783488","Wed Oct 19 08:20:19 +0000 2011","La démo de Dance Central 2 est disponible http://t.co/4iA3k1Ab #Microsoft #Xbox360 #jeux #demo" +"microsoft","irrelevant","126572846272086016","Wed Oct 19 08:18:12 +0000 2011","HATAHET zeigt Microsoft-Partnern die neue Welt der Arbeit: Die ""Neue Welt der Arbeit"" oder ""das neue ... http://t.co/Jk1pXcya #microsoft" +"microsoft","irrelevant","126570427479896064","Wed Oct 19 08:08:36 +0000 2011","#Job #ICT SAP Basis Administrator - SAP Platform & SOA: BROMsolutions AG , Zürich http://t.co/lrut8lhu #Microsoft #IT" +"microsoft","irrelevant","126569812120973312","Wed Oct 19 08:06:09 +0000 2011","#GripmetICT, 10 nov in de #GrolschVeste met oa #Citrix, #VMware, #Microsoft, #RES, #Dell. Meer info kijk op: http://t.co/O78JjLC1" +"microsoft","irrelevant","126568272819793920","Wed Oct 19 08:00:02 +0000 2011","Nos vidéos de Batman Arkham City http://t.co/07Pau5TJ #sony #microsoft #buzz" +"microsoft","irrelevant","126566960979914752","Wed Oct 19 07:54:49 +0000 2011","Outlook Connectorをインストールã™ã‚‹ã¨Outlook2007ã§hotmailアカウントãŒåˆ©ç”¨ã§ãる→ブラウザを立ã¡ä¸Šã’ãªãã¦ã‚‚メールを閲覧ã§ãã‚‹ #Microsoft #Office #Outlook" +"microsoft","irrelevant","126566043928895489","Wed Oct 19 07:51:11 +0000 2011","Workshop med @technet idag. Interessant! #citrix #Microsoft #SYSIKT" +"microsoft","irrelevant","126565939075497984","Wed Oct 19 07:50:46 +0000 2011","seit #apple #facebook und #google die großen bösen sind, ist #microsoft irgendwie wieder der sympathische nerdverein http://t.co/IUvizP0T" +"microsoft","irrelevant","126565570740101120","Wed Oct 19 07:49:18 +0000 2011","Excel ã¯è¡¨è¨ˆç®—ã ã‘ã§ãªããƒãƒ£ãƒ¼ãƒˆå›³ã‚„見栄ãˆã®ã‚ˆã„文書ã€å›³å½¢ã®ä½œæˆãªã©ã«ã‚‚利用ã§ãる。 #Microsoft #Office" +"microsoft","irrelevant","126565301801320448","Wed Oct 19 07:48:14 +0000 2011","RT @druncks: In de smartphonewereld lopen 57 patentrechtszaken. De grote winnaar lijkt #Microsoft. http://t.co/m7oqSFhA #patent #depers" +"microsoft","irrelevant","126564330933194752","Wed Oct 19 07:44:22 +0000 2011","Taper sur les autres pour dire qu'on est les meilleurs... 
Merci #microsoft #msday" +"microsoft","irrelevant","126564298209247232","Wed Oct 19 07:44:14 +0000 2011","RT @MicrosoftSrbija: Andy Malone na #mssinergija keynoteu vodi prisutne kroz #microsoft istoriju: http://t.co/OIQFwJ4f" +"microsoft","irrelevant","126564244329218048","Wed Oct 19 07:44:01 +0000 2011","Andy Malone na #mssinergija keynoteu vodi prisutne kroz #microsoft istoriju: http://t.co/OIQFwJ4f" +"microsoft","irrelevant","126563471662915584","Wed Oct 19 07:40:57 +0000 2011","RT @RomanKrd: Ðа Microsoft Management Summit 2011 проблемы Ñ Ð¸Ð½Ñ‚ÐµÑ€Ð½ÐµÑ‚ #russia #microsoft" +"microsoft","irrelevant","126563328213516288","Wed Oct 19 07:40:23 +0000 2011","Estudio: Microsoft vende más http://t.co/1VfC5PH1 #microsoft #tecnologia #tic #IT" +"microsoft","irrelevant","126561890825543680","Wed Oct 19 07:34:40 +0000 2011","Microsoft desarrolla una pantalla táctil proyectada sobre cualquier superficie http://t.co/Mg0iFSAo #microsoft #tecnologia #tic #omnitouch" +"microsoft","irrelevant","126560506097049600","Wed Oct 19 07:29:10 +0000 2011","RT @arelance: #Microsoft presenta un prototipo que permite utilizar la mano como interfaz http://t.co/kaCzpMYo #Kinect #in #fb" +"microsoft","irrelevant","126559394136723456","Wed Oct 19 07:24:45 +0000 2011","Rijk worden met patenten. In de smartphonewereld lopen 57 patentrechtszaken. De grote winnaar lijkt #Microsoft. http://t.co/owSkWPKG #patent" +"microsoft","irrelevant","126558250194829312","Wed Oct 19 07:20:12 +0000 2011","Check this site +http://t.co/1Sh9T5u8 +For best soft weres. +#microsoft +#Prince_156" +"microsoft","irrelevant","126556805877858306","Wed Oct 19 07:14:28 +0000 2011","Microsoft Research dévoile une interface futuriste avec Kinect | via @Xboxygen.com http://t.co/sRb0qLRA #Microsoft #MS #Kinect" +"microsoft","irrelevant","126556628173598720","Wed Oct 19 07:13:46 +0000 2011","#Microsoft #Silverlight v4.0.60831 & v5.0.60818 RC | fanSte.nl http://t.co/b4IRd1qa via @pryourblog" +"microsoft","irrelevant","126555304212176897","Wed Oct 19 07:08:30 +0000 2011","#Microsoft: Jetzt wird jede Hand zum Touch-Screen http://t.co/46F2PM7v #OmniTouch" +"microsoft","irrelevant","126553180711559169","Wed Oct 19 07:00:04 +0000 2011","#Skype ya es oficialmente parte de #Microsoft ¿Utilizas este servicio #voIP? ¿Que opinión te merece esta adquisición? #tecnologia" +"microsoft","irrelevant","126553028730953730","Wed Oct 19 06:59:27 +0000 2011","Definitivní: #Skype pod #Microsoft. Co to pro nás znamená? - http://t.co/L5f00xew" +"microsoft","irrelevant","126550811894480896","Wed Oct 19 06:50:39 +0000 2011","Je moet er wel een mechanische papegaai voor op je schouder hebben, maar dan heb je ook wat! Erg gaaf! 
#microsoft http://t.co/sBPd8Nbg" +"microsoft","irrelevant","126549243061207040","Wed Oct 19 06:44:25 +0000 2011","Mit Ihrem Feedback #TechNet verbessern: http://t.co/vGCoTK8z #microsoft" +"microsoft","irrelevant","126547733359230976","Wed Oct 19 06:38:25 +0000 2011","Microsoft Security Essentialã®å®šç¾©ãƒ•ァイルãŒã‚¢ãƒƒãƒ—デートã—ã¾ã—ãŸï¼š1.115.68.0 > http://t.co/lZde20Vy #Microsoft" +"microsoft","irrelevant","126547233473691649","Wed Oct 19 06:36:26 +0000 2011","Nieuwe blogpost:Topsport: http://t.co/Fz5qx2cg +#datacenter #privatecloud #microsoft" +"microsoft","irrelevant","126546908142501888","Wed Oct 19 06:35:08 +0000 2011","#game #microsoft #gamer #new experience #halo 2 #microsoft insider #halo 2 hacks #beta test team http://t.co/uk8sW8HZ Halo 2 Hacks" +"microsoft","irrelevant","126546297082748928","Wed Oct 19 06:32:42 +0000 2011","Windows Phone: Microsoft bestätigt Fehler nach Update: Abstürze beim Musikgenuss und eine verschwunde... http://t.co/hbK4Fxjk #microsoft" +"microsoft","irrelevant","126546080384040960","Wed Oct 19 06:31:51 +0000 2011","#Microsoft e #Adobe i più danneggiati dalla #pirateria http://t.co/GLCjzqXG #mercato" +"microsoft","irrelevant","126544902107570176","Wed Oct 19 06:27:10 +0000 2011","NAVIGE in TerDege Nieuw 17 opgenomen in rubriek Personalia en Bedrijven als zelfstandig ondernemer #Microsoft #Dynamics…http://t.co/TAILFVUV" +"microsoft","irrelevant","126540569844523008","Wed Oct 19 06:09:57 +0000 2011","Microsoft ha migliorato la ricerca per la barra d’avvio di Windows 8: Windows 8 abbatterà... http://t.co/9LEIev0n #windows #microsoft" +"microsoft","irrelevant","126539960890306560","Wed Oct 19 06:07:32 +0000 2011","@torbenleuschner die haben was an der Waffel #ie #microsoft #wunschdenken" +"microsoft","irrelevant","126536951984689152","Wed Oct 19 05:55:34 +0000 2011","#Microsoft vend des licences d’utilisation pour des softs qui ne lui appartiennent pas. : http://t.co/bc6v9hRT #korben" +"microsoft","irrelevant","126532894272397312","Wed Oct 19 05:39:27 +0000 2011","http://t.co/k3quwgcu #url unblocker #microsoft proxy #proxy for orkut #gmail proxy microsoft proxy server unblocked proxy site freega..." +"microsoft","irrelevant","126531176243539968","Wed Oct 19 05:32:37 +0000 2011","#Novell wärmt alten Rechtsstreit mit #Microsoft auf | ITespresso.de http://t.co/JBUyD6ou" +"microsoft","irrelevant","126530398317592576","Wed Oct 19 05:29:32 +0000 2011","RT @packard_bell: Nuevo #Microsoft #PocketTouch (para usar el telefono sin sacarlo del bolsillo) http://t.co/0LdAVDnj" +"microsoft","irrelevant","126530235402424322","Wed Oct 19 05:28:53 +0000 2011","#Windows 8: #Microsoft schafft den Startbildschirm ab... 
http://t.co/06vsmKdm" +"microsoft","irrelevant","126527536313278465","Wed Oct 19 05:18:10 +0000 2011","RT @ricklennie: Un vistazo a Windows Server 8 http://t.co/BmcJLgDV #Microsoft #MSDP" +"microsoft","irrelevant","126526431298723841","Wed Oct 19 05:13:46 +0000 2011","#Microsoft portiert #Enterprise BI-Apps auf iOS und Android | ITespresso.de http://t.co/XUEPaaR2" +"microsoft","irrelevant","126523675364171778","Wed Oct 19 05:02:49 +0000 2011","RT @puntogeek: Hace tiempo:: Según Microsoft, Firefox es spyware http://t.co/UnFkqPaJ #Firefox #Microsoft" +"microsoft","irrelevant","126523554568224771","Wed Oct 19 05:02:20 +0000 2011","Hace tiempo:: Según Microsoft, Firefox es spyware http://t.co/UnFkqPaJ #Firefox #Microsoft" +"microsoft","irrelevant","126518913294020608","Wed Oct 19 04:43:54 +0000 2011","Un vistazo a Windows Server 8 http://t.co/BmcJLgDV #Microsoft #MSDP" +"microsoft","irrelevant","126518577263153152","Wed Oct 19 04:42:34 +0000 2011","FOR SALE!! + +http://t.co/VqueOskE #ebaymobile #ebay #microsoft #apple #office #2011 #sale #new #original #mac" +"microsoft","irrelevant","126506168184078336","Wed Oct 19 03:53:15 +0000 2011","#Google è·Ÿ #Samsung 今天發佈使用 #Android 4.0 作業系統的 #Galaxy #Nexus … æŽ¥ä¸‹ä¾†ï¼Œè˜‹æžœåŠ #Microsoft 會告兩者侵權嗎?" +"microsoft","irrelevant","126506057613848576","Wed Oct 19 03:52:49 +0000 2011","no es mucha novedad que hotmail este down todos estos dias #Microsoft" +"microsoft","irrelevant","126505970317787136","Wed Oct 19 03:52:28 +0000 2011","Parece que hotmail is down #Microsoft" +"microsoft","irrelevant","126500518515310592","Wed Oct 19 03:30:48 +0000 2011","RT @packard_bell: #Microsoft presenta un teclado #touch sobre cualquier superficie http://t.co/mU1FdIhF" +"microsoft","irrelevant","126500332078505985","Wed Oct 19 03:30:04 +0000 2011","#Microsoft presenta un teclado #touch sobre cualquier superficie http://t.co/mU1FdIhF" +"microsoft","irrelevant","126495306681548800","Wed Oct 19 03:10:05 +0000 2011","At the #Microsoft #imaginecup sri lanka in Peradeniya." +"microsoft","irrelevant","126491523020898304","Wed Oct 19 02:55:03 +0000 2011","RT @deolindoo: viva #Microsoft o #google para e copiar a #apple e copia o #wp7 de tao bom que é. Incrivel como a interface metro esta ali," +"microsoft","irrelevant","126489614272827392","Wed Oct 19 02:47:28 +0000 2011","Nueva #Consola #Xbox en curso: #LinkedIn http://t.co/VUzmZEzv #Microsoft #Xbox360 #Xbox720 #Twitter via @ZthaeDigital" +"microsoft","irrelevant","126488920329433088","Wed Oct 19 02:44:43 +0000 2011","Official plush Portal turrets deploying this December http://t.co/M7h8Orkc #mac #microsoft #pc #playstation" +"microsoft","irrelevant","126487014957785088","Wed Oct 19 02:37:09 +0000 2011","#Job #ICT System- und Netzwerk Spezialist/in: planova human capital ag, St. Gallen http://t.co/WYHjUOqr #Microsoft #IT" +"microsoft","irrelevant","126485491238436866","Wed Oct 19 02:31:05 +0000 2011","este hueon no cacha que hay mouses que funcionan sobre cualquier superficie #microsoft @clubdelacomedia" +"microsoft","irrelevant","126484927649820673","Wed Oct 19 02:28:51 +0000 2011","#Alunos, vcs podem baixar a versão de avaliação do #Windows7 #Enterprise, de graça, diretamente da #Microsoft...Vejam - http://t.co/1rXnEpYl" +"twitter","positive","126883590041640960","Thu Oct 20 04:52:59 +0000 2011","i love my background!! 
#twitter" +"twitter","positive","126883448173510656","Thu Oct 20 04:52:26 +0000 2011","•Take Off Make up +• Say A Little More On #twitter +• Go to Sleep <3 + +like my plan?(;" +"twitter","positive","126883416280006656","Thu Oct 20 04:52:18 +0000 2011","Okay..I feel slightly more connected to the world and have put #facebook and #twitter on my #blackberry..its not the same..but it helps" +"twitter","positive","126883364887203840","Thu Oct 20 04:52:06 +0000 2011","@loveguru_b i miss my #twitter boo" +"twitter","positive","126883290782244864","Thu Oct 20 04:51:48 +0000 2011","*singing*! everytime I try to leave something keeps pulling me back (me back) telling me I need #Twitter & all that. lol (:" +"twitter","positive","126883211006590976","Thu Oct 20 04:51:29 +0000 2011","#twitter keeps me company when i can't sleep! (:" +"twitter","positive","126883187300384768","Thu Oct 20 04:51:23 +0000 2011","Got this new #twitter app and a fresh ass theme with it! FWM" +"twitter","positive","126882971411165185","Thu Oct 20 04:50:32 +0000 2011","@ammmylewis... Did @CarsonFratus just ditch Twitter for sleep?? #Weirdo... #Twitter>Sleep" +"twitter","positive","126882662932692992","Thu Oct 20 04:49:18 +0000 2011","I'm starting to get really concerned, sending hashtags in emails :P #twitter is taking over our lives :D" +"twitter","positive","126881835463614464","Thu Oct 20 04:46:01 +0000 2011","I cnt get over that shit lol hell naw #Twitter" +"twitter","positive","126881080178507776","Thu Oct 20 04:43:01 +0000 2011","Today I was introduced as BigDealDawson at #LGFW ! O #twitter and #social media I love you! Teehee xx" +"twitter","positive","126880912754475008","Thu Oct 20 04:42:21 +0000 2011","#Twitter > #Facebook" +"twitter","positive","126880559162077184","Thu Oct 20 04:40:57 +0000 2011","@BrianCrockerJr yeah, it shows. glad you have #twitter :)" +"twitter","positive","126880385605976064","Thu Oct 20 04:40:15 +0000 2011","I've pretty much abandoned Facebook for Twitter. #twitter'slegit" +"twitter","positive","126879785908580352","Thu Oct 20 04:37:52 +0000 2011","Gotta love #Twitter - shit goes round the World like lightning-on-speed..." +"twitter","positive","126879662851887104","Thu Oct 20 04:37:23 +0000 2011","@LuCkY_2_HaVe #Twitter" +"twitter","positive","126878670685085696","Thu Oct 20 04:33:27 +0000 2011","Hello #twitter!" +"twitter","positive","126877750131818497","Thu Oct 20 04:29:47 +0000 2011","In that #twitter mood!" +"twitter","positive","126877362632667136","Thu Oct 20 04:28:15 +0000 2011","When I say I'm going to bed that really means I'm gonna sit on twitter for 30 min then go to bed! #Twitter<3" +"twitter","positive","126877263311536128","Thu Oct 20 04:27:51 +0000 2011","I lovee #Twitter #swaag!" +"twitter","positive","126877209813188608","Thu Oct 20 04:27:38 +0000 2011","Dear #Twitter, I fucking missed you today, >> internet was down :/" +"twitter","positive","126877171926040576","Thu Oct 20 04:27:29 +0000 2011","My 100 tweet goes to #twitter . Thanks for providing me with another way of keeping me busy during school!" +"twitter","positive","126877056578486272","Thu Oct 20 04:27:02 +0000 2011","#Twitter TEAMO♥ Chao!" +"twitter","positive","126876600083025920","Thu Oct 20 04:25:13 +0000 2011","@Garr_Williams isn't this place great? +#twitter" +"twitter","positive","126876125107462144","Thu Oct 20 04:23:20 +0000 2011","RT @EverettColdwell: @SusanFelicity One really good thing with #twitter is that most people appear to be on the progressive, left, socia ..." 
+"twitter","positive","126876107881455616","Thu Oct 20 04:23:16 +0000 2011","RT @MattMarcheski: @AlexMufferi #twitter>facebook" +"twitter","positive","126875441217798144","Thu Oct 20 04:20:37 +0000 2011","There is no social media without #Twitter... Who could be #Twitterless?" +"twitter","positive","126875378013843456","Thu Oct 20 04:20:22 +0000 2011","@AlexMufferi #twitter>facebook" +"twitter","positive","126874748469788672","Thu Oct 20 04:17:51 +0000 2011","Guess I'm addicted to #Twitter! Writing the answers to 15 marks University exam questions in less than 140 characters! :P" +"twitter","positive","126874346873556993","Thu Oct 20 04:16:16 +0000 2011","@SusanFelicity One really good thing with #twitter is that most people appear to be on the progressive, left, social side. This scares gov." +"twitter","positive","126873707066048513","Thu Oct 20 04:13:43 +0000 2011","Apples to oranges maybe, but #Twitter is way more engaging than #FB. Content vs. Stalkerismo" +"twitter","positive","126873518385274882","Thu Oct 20 04:12:58 +0000 2011","Laying down on my bed and I'm on my favorite app. #twitternation #twitter > #facebook" +"twitter","positive","126873128348561409","Thu Oct 20 04:11:25 +0000 2011","@TheMarketaire This whole #Facebook change makes more nervous than excited. What about privacy, etc.? I much prefer #Twitter" +"twitter","positive","126872791197814784","Thu Oct 20 04:10:05 +0000 2011","#Twitter #twitter :)" +"twitter","positive","126872175490764802","Thu Oct 20 04:07:38 +0000 2011","#Twitter <33333" +"twitter","positive","126870551032643584","Thu Oct 20 04:01:11 +0000 2011","RT@jeffbullas Twitters impressive numbers http://t.co/VJRSsfYn #Twitter #SMM #SocialMedia" +"twitter","positive","126870402751397889","Thu Oct 20 04:00:35 +0000 2011","fuck that facebook bullshit. #twitter bitch" +"twitter","positive","126869964144644097","Thu Oct 20 03:58:51 +0000 2011","#Twitter is hella coo... like farreal i love twitter." +"twitter","positive","126869134238679042","Thu Oct 20 03:55:33 +0000 2011","FUCK #facebook #twitter is way better #teamfollowback #followme" +"twitter","positive","126868475892338688","Thu Oct 20 03:52:56 +0000 2011","Haven't been on this Twitter shit in a while man. Here because looks like #AboutThatLife be haven hella fun on #Twitter" +"twitter","positive","126868330098331648","Thu Oct 20 03:52:21 +0000 2011","twitter just helped me find the song from the end of a TV show i just watched. #Twitter > ..." +"twitter","positive","126867350476697601","Thu Oct 20 03:48:28 +0000 2011","#twitter addict :)" +"twitter","positive","126865422174785536","Thu Oct 20 03:40:48 +0000 2011","RT @drewbezanson: You can literally go back and forth between #Facebook, #twitter, #text and #email all do. #technology is too good." +"twitter","positive","126864056366804992","Thu Oct 20 03:35:22 +0000 2011","@tas82011 Thnx. Isn't #Twitter pretty damn amazing! Hope yr enjoying it. Fast like you and I!" +"twitter","positive","126863821594832897","Thu Oct 20 03:34:26 +0000 2011","Dear #twitter I have missed you. Promise i'll keep in touch more often" +"twitter","positive","126863766334873600","Thu Oct 20 03:34:13 +0000 2011","#Twitter is interesting 24hrs a day. #Facebook is interesing 2hrs a day." +"twitter","positive","126862820578050048","Thu Oct 20 03:30:28 +0000 2011","RT @derryXOwners: I like #twitter" +"twitter","positive","126862735953768448","Thu Oct 20 03:30:07 +0000 2011","But fb still sucks & #twitter beats it anyday!" 
+"twitter","positive","126862443275235328","Thu Oct 20 03:28:58 +0000 2011","I love #twitter #twitterswag" +"twitter","positive","126862309497905152","Thu Oct 20 03:28:26 +0000 2011","I like #twitter" +"twitter","positive","126862124201947136","Thu Oct 20 03:27:42 +0000 2011","This twitter shit is funny and entertaining ... I haven't did shit all day but my fingers are hurting #twitter" +"twitter","positive","126861364227608577","Thu Oct 20 03:24:40 +0000 2011","RT @TreyHob21: #twitter gives everyone that voice that they believe should be heard by people... unlike real life... lol" +"twitter","positive","126860944352612353","Thu Oct 20 03:23:00 +0000 2011","#twitter i love you!" +"twitter","positive","126860415085973504","Thu Oct 20 03:20:54 +0000 2011","@KylieMessier yeahh you did! It was about time. Caught the #twitter bug and now i'm hooked!" +"twitter","positive","126860046981279744","Thu Oct 20 03:19:26 +0000 2011","@science_alex hashtags are used here #twitter>facebook" +"twitter","positive","126859858443112449","Thu Oct 20 03:18:41 +0000 2011","I keep forgettin how much i really like #Twitter lol" +"twitter","positive","126859371094360064","Thu Oct 20 03:16:45 +0000 2011","#twitter :)" +"twitter","positive","126858606695030784","Thu Oct 20 03:13:43 +0000 2011","#facebook, #Twitter , #spongebob, #nirvana. Great way to spend the night. :D" +"twitter","positive","126858477942476800","Thu Oct 20 03:13:12 +0000 2011","RT @ahhfuckitsguss: #twitter can be so useful on letting out your feeling when you cant in the real world ." +"twitter","positive","126858393909608448","Thu Oct 20 03:12:52 +0000 2011","My 3 biggest obsessions: #twitter, #dancemoms, and #desperatehousewives. <3" +"twitter","positive","126857095088840706","Thu Oct 20 03:07:43 +0000 2011","#twitter is jumpin as usual :)" +"twitter","positive","126857082199744513","Thu Oct 20 03:07:39 +0000 2011","My Facebook messed up and I had to make a new one so... add me! Haha at least #twitter is reliable" +"twitter","negative","126883562652844033","Thu Oct 20 04:52:53 +0000 2011","#Twitter y #Facebook OFF" +"twitter","negative","126883300227817472","Thu Oct 20 04:51:51 +0000 2011","#Twitter are you freaking kidding me #wth... http://t.co/zKn2bu5R" +"twitter","negative","126882964582838272","Thu Oct 20 04:50:30 +0000 2011","RT @mainey_maine: RT @ItalianJoya i better be able to see my RT's tomorrow #twitter and tell that lil blue ass bird, (cont) http://t.co/ ..." +"twitter","negative","126882934568390656","Thu Oct 20 04:50:23 +0000 2011","Just hit my hourly usage limit on #twitter. How does that even happen? All I'm doing is listing people...and I was almost done! #ugh" +"twitter","negative","126882761733705728","Thu Oct 20 04:49:42 +0000 2011","RT @FuckingShinez: #Twitter = #Dead ""this is why im never on it now""" +"twitter","negative","126881698783834112","Thu Oct 20 04:45:28 +0000 2011","I hate #twitter right now.." 
+"twitter","negative","126881658854064128","Thu Oct 20 04:45:19 +0000 2011","I'm not getting all of my @ replies #twitter if it wasn't for my emails telling me that some1 replied to me I wouldn't even know it #nobueno" +"twitter","negative","126881376074076161","Thu Oct 20 04:44:12 +0000 2011","Wtf is a tweet , sounds like tha dam cartoon network #tweeted #tweeter #twitter #twitterer" +"twitter","negative","126881010301419520","Thu Oct 20 04:42:44 +0000 2011","#TWITTER SUCKS FOR HAVING A #FOLLOW LIMIT =\" +"twitter","negative","126880978185625600","Thu Oct 20 04:42:37 +0000 2011","@ZackSiezmagraff A few people are reporting #twitter retweets not working. Probably a technical problem." +"twitter","negative","126880813991202816","Thu Oct 20 04:41:58 +0000 2011","So my mentions don't work? And my TL don't be wanting to update. Is #twitter moving servers or doing maintenance?" +"twitter","negative","126879988602519552","Thu Oct 20 04:38:41 +0000 2011","Well... @Paging_Dr_A has gotten back on #twitter.. there goes my TL lol" +"twitter","negative","126879964619485185","Thu Oct 20 04:38:35 +0000 2011","Anyone in #twitter land know if retweets are broken? Haven't seen any since Tuesday." +"twitter","negative","126878518310223874","Thu Oct 20 04:32:50 +0000 2011","RT @ItalianJoya i better be able to see my RT's tomorrow #twitter and tell that lil blue ass bird, (cont) http://t.co/xGHoev8k" +"twitter","negative","126878448575717376","Thu Oct 20 04:32:34 +0000 2011","RT @ItalianJoya: i better be able to see my RT's tomorrow #twitter and tell that lil blue ass bird, and fat ass over capacity whale to!" +"twitter","negative","126877484271665152","Thu Oct 20 04:28:44 +0000 2011","#Twitter ain't showing my current mentions or my tweets retweeted #Ughh" +"twitter","negative","126877335399051264","Thu Oct 20 04:28:08 +0000 2011","i better be able to see my RT's tomorrow #twitter and tell that lil blue ass bird, and fat ass over capacity whale to!" +"twitter","negative","126877245347348480","Thu Oct 20 04:27:47 +0000 2011","#Twitter, will you please fix this RT problem?" +"twitter","negative","126877135926337537","Thu Oct 20 04:27:21 +0000 2011","#Twitter = #Dead ""this is why im never on it now""" +"twitter","negative","126876956443688960","Thu Oct 20 04:26:38 +0000 2011","Im gonna need #Twitter to get all these problems fixed ASAP." +"twitter","negative","126876682207502336","Thu Oct 20 04:25:32 +0000 2011","#Twitter'sMalfunctioningAgain" +"twitter","negative","126876493153452032","Thu Oct 20 04:24:47 +0000 2011","#Twitter is blowing me ; I can't see my retweets ;(" +"twitter","negative","126876140269862912","Thu Oct 20 04:23:23 +0000 2011","Man #Twitter Is Gettin #BORING" +"twitter","negative","126876046028050432","Thu Oct 20 04:23:01 +0000 2011","#twitter app also doesn't show me the conversation when I touch a tweet." +"twitter","negative","126875653210521600","Thu Oct 20 04:21:27 +0000 2011","Anyone else having trouble with the (i)Pad #twitter app not updating @'s? I get notified but can't see them in the app." +"twitter","negative","126874389210861568","Thu Oct 20 04:16:26 +0000 2011","@Licia0321 Damn what the hell, I think my #Twitter might need a damn #Exorcist!!" +"twitter","negative","126873912624693249","Thu Oct 20 04:14:32 +0000 2011","#Twitter got me all messed up everytime i compose a text msg i get stressed I only have 140 caracteres! 
all dis networkin is gettin confusin" +"twitter","negative","126873860745330689","Thu Oct 20 04:14:20 +0000 2011","@8Sunni Yeah shit happens, #Twitter is a real whore sometimes!" +"twitter","negative","126872492118769664","Thu Oct 20 04:08:53 +0000 2011","#Twitter is Fawkin up!!!" +"twitter","negative","126872316142559232","Thu Oct 20 04:08:12 +0000 2011","#twitter show my fuckin retweets bitch" +"twitter","negative","126872265328562176","Thu Oct 20 04:07:59 +0000 2011","RT @FucknWithFatty: #Twitter needs to get their shit together cause they fucking up! I'm not getting my RT's or mentions. -______________-" +"twitter","negative","126871878886363136","Thu Oct 20 04:06:27 +0000 2011","#twitter is sooo trash ritenow with all dezz #highscoolmemories -__-" +"twitter","negative","126871286545788928","Thu Oct 20 04:04:06 +0000 2011","WHY DFF IS #TWITTER STILL LAGGIN ON SHOWIN WHO RETWEETS UR SHITT" +"twitter","negative","126870923591692288","Thu Oct 20 04:02:39 +0000 2011","People should be more interesting on #facebook and #twitter at midnight. I'm getting bored." +"twitter","negative","126870920018137088","Thu Oct 20 04:02:39 +0000 2011","Tried explaining #Twitter to my mom, but her ""why would you want to do that?"" argument was pretty bulletproof. #igiveup" +"twitter","negative","126870745258266626","Thu Oct 20 04:01:57 +0000 2011","#twitter is a fad. Was addicted now I don't rly care." +"twitter","negative","126870358816067584","Thu Oct 20 04:00:25 +0000 2011","RT @mixedchik22: #Twitter.... Side Affects include: Procrastination, No Sleep, Unable to Focus on ANYTHING, Subtweeting, RT, TT, and Low ..." +"twitter","negative","126870162510057473","Thu Oct 20 03:59:38 +0000 2011","Okay #Twitter, it'd be muchhh appreciated if you could start working on my computer soon." +"twitter","negative","126869855621218304","Thu Oct 20 03:58:25 +0000 2011","#Twitter.... Side Affects include: Procrastination, No Sleep, Unable to Focus on ANYTHING, Subtweeting, RT, TT, and Low Batteries" +"twitter","negative","126869842769870848","Thu Oct 20 03:58:22 +0000 2011","RT @nagoul1: #Retweets section of my #twitter account has not been working for 17 hours! Is Everyone else having this problem? #RT #RTs" +"twitter","negative","126869706639544320","Thu Oct 20 03:57:49 +0000 2011","#Retweets section of my #twitter account has not been working for 17 hours! Is Everyone else having this problem? #RT #RTs" +"twitter","negative","126869466054275073","Thu Oct 20 03:56:52 +0000 2011","for some reason #twitter isnt allowing me to see my tweets that got retweeted." +"twitter","negative","126869063023607808","Thu Oct 20 03:55:16 +0000 2011","Good lord #twitter send my bloody tweets! #ihateyourightnow" +"twitter","negative","126869039233511425","Thu Oct 20 03:55:10 +0000 2011","I feel all alone on #twitter. #itstolate" +"twitter","negative","126868993263943682","Thu Oct 20 03:54:59 +0000 2011","So im about to make another #twitter account since this one fucked up" +"twitter","negative","126868622026080256","Thu Oct 20 03:53:31 +0000 2011","My DM's Come in So Fucking Late, Damn #Twitter" +"twitter","negative","126868404182319105","Thu Oct 20 03:52:39 +0000 2011","Yep it's official, my tweets that get RT'd aren't showing me when they get RT'd. Dammit #Twitter, sometimes it's #Love/Hate thing we have!!" +"twitter","negative","126867983455879168","Thu Oct 20 03:50:59 +0000 2011","@MsTapoutBarbie Yep figured my shit is jacked. Dammit #Twitter!!" 
+"twitter","negative","126867876115259394","Thu Oct 20 03:50:33 +0000 2011","dear @twitter / #twitter FIX YOUR SHIT. we cant see our retweets or mentions!" +"twitter","negative","126867543955738624","Thu Oct 20 03:49:14 +0000 2011","#Twitter got DEAD as fuck all of a sudden!!!" +"twitter","negative","126867446220062720","Thu Oct 20 03:48:50 +0000 2011","I HATE #TWITTER" +"twitter","negative","126866560756363264","Thu Oct 20 03:45:19 +0000 2011","My #Twitter acting funnyyyyyyy...." +"twitter","negative","126866003094290434","Thu Oct 20 03:43:06 +0000 2011","still not quite sure how to use this/what the point of it is... #twitter? #idontunderstand" +"twitter","negative","126865431142219776","Thu Oct 20 03:40:50 +0000 2011","#twitter why must you be so difficult?" +"twitter","negative","126864648577351681","Thu Oct 20 03:37:43 +0000 2011","Thank you #twitter for not giving me the 10 tweets that have been tweeted at me in the past 3 days! Lol #blameitontheiphone #ineedtoupdate" +"twitter","negative","126864610446942209","Thu Oct 20 03:37:34 +0000 2011","#Twitter is messed up followers keep switching to same numbers -___- #FIXIT!" +"twitter","negative","126864575508381696","Thu Oct 20 03:37:26 +0000 2011","I need to get off #twitter" +"twitter","negative","126864510194683904","Thu Oct 20 03:37:10 +0000 2011","RT @Prettynesz: #twitter still not showin my mothafuckin #retweets . ." +"twitter","negative","126864059206336513","Thu Oct 20 03:35:23 +0000 2011","#Twitter timeline and mentions up the shit :(" +"twitter","negative","126862065649459200","Thu Oct 20 03:27:28 +0000 2011","So much garbage. #Twitter" +"twitter","negative","126861410864087042","Thu Oct 20 03:24:51 +0000 2011","Who the hell uses #twitter on a tv man. #panasonic #epicfail." +"twitter","negative","126861382078578688","Thu Oct 20 03:24:45 +0000 2011","#twitter still not showin my mothafuckin #retweets . ." +"twitter","negative","126861309185761280","Thu Oct 20 03:24:27 +0000 2011","#stupid fucking #Twitter #Twitterapp give me my Damn #mentions NOOWWWWWW!!!!! UGH! #DRAWLIN AS ALWAYS #TMW #PROMOALLDAY #FOLLOWLIMITISGAY TO" +"twitter","negative","126861228797722624","Thu Oct 20 03:24:08 +0000 2011","RT @JolieeSharmeda #twitter be fucking up" +"twitter","negative","126859898897174528","Thu Oct 20 03:18:51 +0000 2011","@KushKitty420 you, #Facebook #twitter and the #tv are all #distractions whenever I wanna study ;(" +"twitter","negative","126858852976181250","Thu Oct 20 03:14:42 +0000 2011","RT @VanillaLatte_xD: #omfg .. #Twitter , Why fxck up ? I wanna see my Retweets ""/" +"twitter","negative","126858607789740032","Thu Oct 20 03:13:43 +0000 2011","RT @nicolebanyots: #twitter UH, SHOW ME MY RETWEETS, OR ILL GO BACK TO FACEBOOK. K? K." +"twitter","neutral","126883719368818688","Thu Oct 20 04:53:30 +0000 2011",".@chrispirillo Re. #twitter 1,402,239 positive opinions http://t.co/uUd7ea3v? vs. 688,546 negative http://t.co/Q7Kf5xKz?" +"twitter","neutral","126883711131201536","Thu Oct 20 04:53:28 +0000 2011","""@ClearlyCassi_: I'll never forget the day we met, or how we kissed the first time."" /// I met you on #twitter and I haven't kiss you yet :p" +"twitter","neutral","126883672682004480","Thu Oct 20 04:53:19 +0000 2011","RT @Sour_D_Reyyy: @obsexxed69 got one of the dopest blogs on #Twitter and #Tumblr...#RealTalk" +"twitter","neutral","126883640671076352","Thu Oct 20 04:53:11 +0000 2011","RT @Str8Talk123: Need to step my #twitter game! 
Where is #teamfollowback #miami #ff @JAYSCANDRETH @WhatupHOE1211" +"twitter","neutral","126883630369882112","Thu Oct 20 04:53:09 +0000 2011","Caramba, a mais de 4 dias ninguem fala em mim ou de mim aki no #twitter. Nem mesmo minha noiva. #Fuieskecido..." +"twitter","neutral","126883597239066625","Thu Oct 20 04:53:01 +0000 2011","@codytigernord Just a reminder that you fail on #twitter" +"twitter","neutral","126883517706674176","Thu Oct 20 04:52:42 +0000 2011","RT @ThisIsYunGFRESH: JOIN OUR #TT ON #TWITTER FOLLOW @ThisIsYungFRESH & @macDej #TT #RatchetSymptoms" +"twitter","neutral","126883457614884864","Thu Oct 20 04:52:28 +0000 2011","Sneaky, sneaky. Seven ways to spy on your competition, using #Twitter - http://t.co/RxopvNGQ - via @ruhanirabin @AskAaronLee" +"twitter","neutral","126883452820783104","Thu Oct 20 04:52:27 +0000 2011","People on #Twitter n #Facebook all day, wat are you really doin wit yo life....#ImJusAskin" +"twitter","neutral","126883438954422274","Thu Oct 20 04:52:23 +0000 2011","My cuz @Robbase101 is new to #twitter drop him a follow and show him some love. Esp if you like #Canes #Jets #Yankees & #Knicks #sports" +"twitter","neutral","126883335875203072","Thu Oct 20 04:51:59 +0000 2011","#ThingsWeAllHate false advertisement via #twitter lol" +"twitter","neutral","126883224587739136","Thu Oct 20 04:51:32 +0000 2011","RT......zZzZzZzZzZzZzZzZzZzZ...... #Twitter off" +"twitter","neutral","126883185396170752","Thu Oct 20 04:51:23 +0000 2011","Ha! Even #twitter is telling me we're meant to be!" +"twitter","neutral","126883158942695425","Thu Oct 20 04:51:17 +0000 2011","Sleep time. #twitter off" +"twitter","neutral","126883124595527681","Thu Oct 20 04:51:08 +0000 2011","#Twitter should have Emoticons" +"twitter","neutral","126882970811379712","Thu Oct 20 04:50:32 +0000 2011","JOIN OUR #TT ON #TWITTER FOLLOW @ThisIsYungFRESH & @macDej #TT #RatchetSymptoms" +"twitter","neutral","126882885553758208","Thu Oct 20 04:50:11 +0000 2011","Welp! Meh nd @RenaeDaniels Bout tah Call it ah Niqht Nite #Twitter" +"twitter","neutral","126882832319651840","Thu Oct 20 04:49:59 +0000 2011","I wanna make the goose bump on your inner thigh show +#twitter after dark ;)" +"twitter","neutral","126882800585539585","Thu Oct 20 04:49:51 +0000 2011","Going in :( #work. Break at 2:30 and 5:30 #twitter time. See ya'll in the #am" +"twitter","neutral","126882730154803200","Thu Oct 20 04:49:34 +0000 2011","01:50 no #msn e no #twitter +e #Facebook." +"twitter","neutral","126882726061146112","Thu Oct 20 04:49:33 +0000 2011","@kaiylw0lf So #Tumblr is way more addictive than #Twitter... but i still love both of them. and they can coincide with each other! #bromance" +"twitter","neutral","126882653893967872","Thu Oct 20 04:49:16 +0000 2011","Boaa noite #twitter *-*" +"twitter","neutral","126882617843924992","Thu Oct 20 04:49:08 +0000 2011","#TroyHopson needs a #twitter. Swear" +"twitter","neutral","126882559522111488","Thu Oct 20 04:48:54 +0000 2011","@D_REALRogers BE SLEEP N WHO NOEZ WHERE I BE BUT HOW IT's it U so called sleep but every 5 sec u got a new #twitter post up #btfu" +"twitter","neutral","126882542610690049","Thu Oct 20 04:48:50 +0000 2011","My #twitter age is 1 year 19 days 13 hours 37 minutes 17 seconds. Find out yours at http://t.co/XhRUA9Dz #twittertime" +"twitter","neutral","126882493059170304","Thu Oct 20 04:48:38 +0000 2011","#Twitter Off!" 
+"twitter","neutral","126882453943103488","Thu Oct 20 04:48:29 +0000 2011","@OFA_IA @BarackObama #itheperson wld like all gov reps 2 have the date and state they reside in posted on their #twitter pic. 4 #wethepeople" +"twitter","neutral","126882427661582336","Thu Oct 20 04:48:22 +0000 2011","Yooooooo #twitter ! @NowThatsBASIC_ is the new #AmberCole who wanna go 1st ? ayyeee she does this shit for a living ." +"twitter","neutral","126882349588815873","Thu Oct 20 04:48:04 +0000 2011","RT @yaksys: My #twitter age is 1 year 243 days 22 hours 31 minutes 44 seconds. Find out yours at http://t.co/dniOkchP #twittertime" +"twitter","neutral","126882248644493312","Thu Oct 20 04:47:40 +0000 2011","@PrettyFuckn_Lee Welcum too #Twitter Cuz ;)" +"twitter","neutral","126882244982878208","Thu Oct 20 04:47:39 +0000 2011","1*Birthday* Shout-Out to 2 of my favorite people on #twitter!! @lizpope & @TheTinyJEWELBox Happy Birthday girls!" +"twitter","neutral","126882193967550464","Thu Oct 20 04:47:27 +0000 2011","@ThatsRealTalkin I'm doing the Tallman #Twitter list for fun!! I just wanna see how many men 6'3 and up are on Twitter! It's just for fun :)" +"twitter","neutral","126882122077184000","Thu Oct 20 04:47:09 +0000 2011","RT @zaibatsu: Should You Follow Back On Twitter? http://t.co/4Wwdy96I #sm #twitter" +"twitter","neutral","126882090343079937","Thu Oct 20 04:47:02 +0000 2011","A nigga can't even go to sleep nd wake up good ppl be trip'n over #Twitter" +"twitter","neutral","126882054259474432","Thu Oct 20 04:46:53 +0000 2011","I really dnt like to #follow people who never on #twitter" +"twitter","neutral","126881996629753856","Thu Oct 20 04:46:40 +0000 2011","“@JTBrownn: #Twitter are freaking kidding #wth .. http://t.co/Mgn9GzZ1†+Haha wow" +"twitter","neutral","126881960416120832","Thu Oct 20 04:46:31 +0000 2011","8am dentist appointment, i really should be going to bed...#makesnomovetogetupfromcomputer #twitter" +"twitter","neutral","126881887049351168","Thu Oct 20 04:46:13 +0000 2011","#Twitter'OFF" +"twitter","neutral","126881736364785664","Thu Oct 20 04:45:37 +0000 2011","My #twitter age is 1 year 65 days 18 hours 47 minutes 24 seconds. Find out yours at http://t.co/tQMSBDrx #twittertime" +"twitter","neutral","126881682266652672","Thu Oct 20 04:45:25 +0000 2011","RT @D_SNAPS: RT @Charlielace Instead of updating your #Twitter why don't you update your life!" +"twitter","neutral","126881626583076864","Thu Oct 20 04:45:11 +0000 2011","@MauroGarcia92 dale ok..!solo #twitter !!" +"twitter","neutral","126881596086288385","Thu Oct 20 04:45:04 +0000 2011","Good Night #Twitter." +"twitter","neutral","126881589606105089","Thu Oct 20 04:45:02 +0000 2011","RT @twinkleG_ray: Quit acting all hard. It's only #twitter" +"twitter","neutral","126881580521234432","Thu Oct 20 04:45:00 +0000 2011","@ManwitPURPOSE50 what are you doing now on #twitter????" +"twitter","neutral","126881523755528192","Thu Oct 20 04:44:47 +0000 2011","@IDGAFBoutYou llssss.yea I kno.lol.I ain't been fuckin wit dis joint.I guess #twitter missed me tho huh.slsss.lol" +"twitter","neutral","126881380503273472","Thu Oct 20 04:44:13 +0000 2011","Why do I always blow up #twitter at like 1am? 
#sorryfollowers" +"twitter","neutral","126881317894893568","Thu Oct 20 04:43:58 +0000 2011","@sarahhjessica your seriously addicted 2 #twitter" +"twitter","neutral","126881309015539712","Thu Oct 20 04:43:56 +0000 2011","WHTS GOIN ON #TWITTER FAM" +"twitter","neutral","126881203642040320","Thu Oct 20 04:43:30 +0000 2011","Raza Abbas NumeroUno :: (RazaNumeroUno) : thanks for #following me on #Twitter! Happy Tweeting !!!" +"twitter","neutral","126881167541665792","Thu Oct 20 04:43:22 +0000 2011","RT @smart__reMarks RT @Its_kcheese: some stuff you only find on #TWITTER lolz <---- LMAO" +"twitter","neutral","126881136398962688","Thu Oct 20 04:43:14 +0000 2011","Lls. && im actually tlkin about #twitter" +"twitter","neutral","126881090446163968","Thu Oct 20 04:43:03 +0000 2011","Thanks 2 my new #Twitter connections! @ecogreenvalet1,@brunodgama,@inez260,@moniqueprothero,@leviti09,@ChristyJBPL,@TonyMicheal, Chk Em Out!" +"twitter","neutral","126881073366958080","Thu Oct 20 04:42:59 +0000 2011","Yu cant answer , But yu can b on #Fb Nd #Twitter" +"twitter","neutral","126881072167399425","Thu Oct 20 04:42:59 +0000 2011","Twitter Buzz Builds for the Occupy Wall Street Movement [CHARTS] http://t.co/h8geayT5 #twitter" +"twitter","neutral","126880978273697792","Thu Oct 20 04:42:37 +0000 2011","I don't see the point of following a news station on #twitter if there's breaking news people are gunna tweet about it anyway #obvs" +"twitter","neutral","126880926268526592","Thu Oct 20 04:42:24 +0000 2011","on oovoo with @_LoveIsThe_Ki dumb ass #excuseme #twitter" +"twitter","neutral","126880883822166017","Thu Oct 20 04:42:14 +0000 2011","RT @Its_kcheese: some stuff you only find on #TWITTER lolz" +"twitter","neutral","126880854361391104","Thu Oct 20 04:42:07 +0000 2011","Quit acting all hard. It's only #twitter" +"twitter","neutral","126880672190185472","Thu Oct 20 04:41:24 +0000 2011","@DylawnWard 100 tweets already? Damn son #twitter #whore #twitwhore #gi #get #it?" +"twitter","neutral","126880656352481280","Thu Oct 20 04:41:20 +0000 2011","""@socialfans: Top Twitter Related Sites http://t.co/JoDmoruF #design"" #twitter" +"twitter","neutral","126880621170659328","Thu Oct 20 04:41:12 +0000 2011","#Follow my main #Twitter @AikoChristine need more #followers help me get to 500" +"twitter","neutral","126880571233280000","Thu Oct 20 04:41:00 +0000 2011","High as a mother fucker I'm addicted to #Twitter can't even go to sleep lol" +"twitter","neutral","126880481361920000","Thu Oct 20 04:40:38 +0000 2011","RT @BigMelo216_TSA: On da phone wit @AllThisASS & she was HAPPY AF!!! she got out #Twitter JAIL! thirsty lol" +"twitter","neutral","126880429256093696","Thu Oct 20 04:40:26 +0000 2011","RT @MonicaaC_: Damn tired! .. Need some rest! Night #twitter" +"twitter","neutral","126880399912742912","Thu Oct 20 04:40:19 +0000 2011","I just installed the new Twidroyd for #Twitter on my #Android Phone - It's #Free, give it a try. http://t.co/Xm2blIfC" +"twitter","neutral","126880329913995264","Thu Oct 20 04:40:02 +0000 2011","Can an algorithm be wrong? #Twitter Trends, the specter of #censorship, and our faith in the algorithms around us http://t.co/3NxZT7hd" +"twitter","neutral","126880253145657344","Thu Oct 20 04:39:44 +0000 2011","SO I HAD HER MAKE A #TWITTER SHE'S HOT & SEXY EVERY ONE FOLLOW ONE 1 MA NEW FOLLOWER'S @Rocioo08 LEGOOO! ! !" 
+"twitter","neutral","126880223433195520","Thu Oct 20 04:39:37 +0000 2011","@STARANDDRAMA & JAMMERS302 BE #KILLIN #TWITTER #FOLLOW US" +"twitter","neutral","126880178705141762","Thu Oct 20 04:39:26 +0000 2011","Zzzzzzzzzzzz... #night #Twitter #GodBless" +"twitter","neutral","126880108718989313","Thu Oct 20 04:39:09 +0000 2011","Adult swim #twitter" +"twitter","neutral","126880102226206720","Thu Oct 20 04:39:08 +0000 2011","RT ""@RobyQQ: Your #Twitter followers either like you for your sense of humor or sex appeal."" -- I guess, I'm funny then... LOL!" +"twitter","neutral","126880068252336128","Thu Oct 20 04:39:00 +0000 2011","Did a couple of changes..#Twitter" +"twitter","neutral","126880036488880128","Thu Oct 20 04:38:52 +0000 2011","So im chillin at work finally gettin a chance 2 be on #Twitter wat I miss so far?¿" +"twitter","neutral","126880035507412992","Thu Oct 20 04:38:52 +0000 2011","My #twitter age is 1 year 243 days 22 hours 31 minutes 44 seconds. Find out yours at http://t.co/dniOkchP #twittertime" +"twitter","neutral","126880030486822912","Thu Oct 20 04:38:51 +0000 2011","#TWITTER OFF." +"twitter","neutral","126880007741128704","Thu Oct 20 04:38:45 +0000 2011","RT @alibabaoglan: Presentation: 7 Best Ideas to Use #Twitter More Efficiently - http://t.co/AYMeFITC #howto" +"twitter","neutral","126879867731062784","Thu Oct 20 04:38:12 +0000 2011","Need to study vocabb :/ +#twitter addicted" +"twitter","neutral","126879759866142720","Thu Oct 20 04:37:46 +0000 2011","Nite #twitter" +"twitter","neutral","126879759316697088","Thu Oct 20 04:37:46 +0000 2011","@SGarney I put the picture up on #twitter. I did it cause everyone on twitter is cool and your not. #getsamanthaofftwittereveryone" +"twitter","neutral","126879729189011457","Thu Oct 20 04:37:39 +0000 2011","My #twitter age is 1 year 325 days 10 hours 18 minutes 7 seconds. Find out yours at http://t.co/0zAtZhKP #twittertime" +"twitter","neutral","126879677737480192","Thu Oct 20 04:37:27 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 21" +"twitter","neutral","126879608120418305","Thu Oct 20 04:37:10 +0000 2011","yeah im gon #twitter but fuck wit me...." +"twitter","neutral","126879548687130624","Thu Oct 20 04:36:57 +0000 2011","@simonsays_payme @KaleMurphy @chatoney #twitter addict.. Tweeting while being a Lil BITCH #jerkoffmore #KYS #SMILE http://t.co/7JDvu514" +"twitter","neutral","126879506521792513","Thu Oct 20 04:36:46 +0000 2011","#JustBecause you got a few followers on #twitter don't make you a celebrity" +"twitter","neutral","126879491124506624","Thu Oct 20 04:36:42 +0000 2011","People that do this "" #This #Is #My #Twitter "" and pound everything. Needs to delete their Twitter." +"twitter","neutral","126879484736585729","Thu Oct 20 04:36:41 +0000 2011","RT @Jackiebaaby_: #'s are #for #twitter #NOT #facebook. Duuuuhhh" +"twitter","neutral","126879463450488832","Thu Oct 20 04:36:36 +0000 2011","@_ahooten23 welcomee to #twitter" +"twitter","neutral","126879462041206784","Thu Oct 20 04:36:35 +0000 2011","Finally off to bed..Nighty night #twitter world!" +"twitter","neutral","126879413013987328","Thu Oct 20 04:36:24 +0000 2011","Twitter Buzz Builds for the Occupy Wall Street Movement [CHARTS] #twitter #occupywallstreet #teamfollowback #autofollow http://t.co/IRYn5Vis" +"twitter","neutral","126879328712658944","Thu Oct 20 04:36:03 +0000 2011","RT @Gaabisfc_: My #twitter age is 219 days 12 hours 26 minutes 28 seconds. 
Find out yours at http://t.co/eOCFm1kD #twittertime" +"twitter","neutral","126879257975734272","Thu Oct 20 04:35:47 +0000 2011","According to #twitter I have the same amount of followers as followees....#notgoingtolast" +"twitter","neutral","126879221724356608","Thu Oct 20 04:35:38 +0000 2011","#twitter is trippin" +"twitter","neutral","126879149003509760","Thu Oct 20 04:35:21 +0000 2011","Is there a way that I can post voice notes on #twitter??" +"twitter","neutral","126879122298372097","Thu Oct 20 04:35:14 +0000 2011","RT @TweezardCom: #twitter Adam Levine Takes Aim at 'Evil' Fox News, Starts Twitter Spat http://t.co/qcYTszff #tweezard" +"twitter","neutral","126879106347433984","Thu Oct 20 04:35:10 +0000 2011","GN #Twitter Homies" +"twitter","neutral","126879073220829184","Thu Oct 20 04:35:03 +0000 2011","I can't believe @mr_malie504 is not on #Twitter" +"twitter","neutral","126879068800024576","Thu Oct 20 04:35:01 +0000 2011","@95hm You can directly share your #Facebook photos on #Twitter with http://t.co/wYD5RnxN" +"twitter","neutral","126879028706672640","Thu Oct 20 04:34:52 +0000 2011","@StaceHendrix Pls let us know if you need help w/ music marketing promo. We got #Twitter & #Youtube on lock. http://t.co/EWFffl35" +"twitter","neutral","126879022188724224","Thu Oct 20 04:34:50 +0000 2011","@obsexxed69 got one of the dopest blogs on #Twitter and #Tumblr...#RealTalk" +"twitter","neutral","126878948561924096","Thu Oct 20 04:34:33 +0000 2011","i aint did shit on #twitter untill every girl of #http://t.co/STr7gO6W following me #goalset lol" +"twitter","neutral","126878824574095360","Thu Oct 20 04:34:03 +0000 2011","RT @igorlilmonster: My #twitter age is 2 years 153 days 5 hours 6 minutes 25 seconds. Find out yours at http://t.co/NjTHu7tM #twittertime" +"twitter","neutral","126878811009728513","Thu Oct 20 04:34:00 +0000 2011","#Twitter Off n.n" +"twitter","neutral","126878688682835968","Thu Oct 20 04:33:31 +0000 2011","ALRIGHT I'm going to sleep. #GoodNight #Twitter" +"twitter","neutral","126878622509309952","Thu Oct 20 04:33:15 +0000 2011","@JakeDizzleWins We Need To Speak Sum Real Shit On #Twitter" +"twitter","neutral","126878620953231360","Thu Oct 20 04:33:15 +0000 2011","@PuraGenteBienAv me podrian dar el #face o #twitter del chavito qe siempre traee lentess transparentes le dicn #chicken little x fiiss :)" +"twitter","neutral","126878545338310656","Thu Oct 20 04:32:57 +0000 2011","woke up dis morning so i'm stil alive #twitter" +"twitter","neutral","126878539487252480","Thu Oct 20 04:32:55 +0000 2011","3,2,1,0 #Twitter OFF" +"twitter","neutral","126878426312351744","Thu Oct 20 04:32:28 +0000 2011","Just because you follow me on #twitter does NOT mean we cool. As you can see I didnt follow yo thirsty ass back. #dummy -.- xD" +"twitter","neutral","126878409023426560","Thu Oct 20 04:32:24 +0000 2011","getn off gn #twitter" +"twitter","neutral","126878311497474048","Thu Oct 20 04:32:01 +0000 2011","@KylisInATL HEY! Get Back On #Twitter :-)" +"twitter","neutral","126878176063389696","Thu Oct 20 04:31:29 +0000 2011","RT @Charlielace Instead of updating your #Twitter why don't you update your life!" +"twitter","neutral","126878062846554114","Thu Oct 20 04:31:02 +0000 2011","#Occupy #Twitter...Let's make Twitter a Social Media not a just a Bulletin Board. Engage! Interact! Socialize!" +"twitter","neutral","126878058278952960","Thu Oct 20 04:31:01 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! 
http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 1" +"twitter","neutral","126877998115852288","Thu Oct 20 04:30:46 +0000 2011","Your #Twitter followers either like you for your sense of humor or sex appeal." +"twitter","neutral","126877997717405697","Thu Oct 20 04:30:46 +0000 2011","facebook oof , #twitter of... night night every1!" +"twitter","neutral","126877971637211136","Thu Oct 20 04:30:40 +0000 2011","#twitter Adam Levine Takes Aim at 'Evil' Fox News, Starts Twitter Spat http://t.co/dZmPhbut #tweezard" +"twitter","neutral","126877965064740864","Thu Oct 20 04:30:38 +0000 2011","#Niggas on here talking bout sharing one condom with his boy to run a train on the same girl only on #twitter you #nasty muthafucka" +"twitter","neutral","126877936514105344","Thu Oct 20 04:30:32 +0000 2011","@jacobstanton2 #twitter has kind of turned into our #lives" +"twitter","neutral","126877900610875393","Thu Oct 20 04:30:23 +0000 2011","I wanna RT a lot of tweets.. but my app won't work. What's the best #Twitter #App??" +"twitter","neutral","126877729856557056","Thu Oct 20 04:29:42 +0000 2011","#Twitter FDP" +"twitter","neutral","126877666040225792","Thu Oct 20 04:29:27 +0000 2011","@Marlorific well, I feel great about being your first #twitter friend :)" +"twitter","neutral","126877629600108544","Thu Oct 20 04:29:18 +0000 2011","How to tweet... like a #celeb ;) http://t.co/cQfsjDwB #twitter" +"twitter","neutral","126877612214726657","Thu Oct 20 04:29:14 +0000 2011","RT @an_anonyma: U can't see who RTed Ur stuff, & this is because of #twitter #censorship? Jeez, people. It's #binaryfaires teaching your ..." +"twitter","neutral","126877585488609282","Thu Oct 20 04:29:08 +0000 2011","Hey I'm doing this, at this place, around this time (you should come). #Twitter" +"twitter","neutral","126877557550354433","Thu Oct 20 04:29:01 +0000 2011","I need 2 unfollow some of these wannabe G's! RT @SteveGr8ofTeeZ: RT @The_Kase Do real thugs have #twitter? #confusedbymytimeline //Hell no." +"twitter","neutral","126877527674322944","Thu Oct 20 04:28:54 +0000 2011","U can't see who RTed Ur stuff, & this is because of #twitter #censorship? Jeez, people. It's #binaryfaires teaching your vain ass a lesson!" +"twitter","neutral","126877467507040257","Thu Oct 20 04:28:40 +0000 2011","#Twitter must be sharing, caring, interacting, socializing...Don't K+ does who ignore you...Give a K+ to thise who truly are #social" +"twitter","neutral","126877420178522113","Thu Oct 20 04:28:28 +0000 2011","#goodmourning #twitter" +"twitter","neutral","126877416630136832","Thu Oct 20 04:28:28 +0000 2011","got my blade for these #twitter streets third shift" +"twitter","neutral","126877186689998848","Thu Oct 20 04:27:33 +0000 2011","RT @callmePinoc: RT @Crazy_Golfer: ""If you lose followers on #Twitter don't worry even Jesus lost followers"" ~Unknown #quote via @smqueue" +"twitter","neutral","126877128204627970","Thu Oct 20 04:27:19 +0000 2011","Ppl emphasize to make thyself look greater then they are! #twitter!" +"twitter","neutral","126877032704507904","Thu Oct 20 04:26:56 +0000 2011","~~~~> RT @RealJaythoven #twitter and #Facebook gone get sumbody beat up mane everybody b so reckless" +"twitter","neutral","126876948206075904","Thu Oct 20 04:26:36 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 25" +"twitter","neutral","126876905696796672","Thu Oct 20 04:26:26 +0000 2011","@The_DJ_Show #welcome to #twitter my #friend. 
#hashtags" +"twitter","neutral","126876843272974337","Thu Oct 20 04:26:11 +0000 2011","#Twitter #Followers #Free: http://t.co/63VeKuZr" +"twitter","neutral","126876834548822017","Thu Oct 20 04:26:09 +0000 2011","M'kiite #twitter Off" +"twitter","neutral","126876823723315200","Thu Oct 20 04:26:06 +0000 2011","Wishing I can sleep all day 2morrow but I got practice... GN #Twitter" +"twitter","neutral","126876778806509568","Thu Oct 20 04:25:55 +0000 2011","@Bthompson16 if I was #toocool for #twitter then I wouldn't go on it and as a result my life would be boring so I don't think I am" +"twitter","neutral","126876760062181376","Thu Oct 20 04:25:51 +0000 2011","“@RealJaythoven #twitter and #Facebook gone get sumbody beat up mane everybody b so recklessâ€" +"twitter","neutral","126876750788567040","Thu Oct 20 04:25:49 +0000 2011","RT @Charlielace: Instead of updating your #Twitter why don't you update your life!" +"twitter","neutral","126876736137871361","Thu Oct 20 04:25:45 +0000 2011","Karate kid, skittles and cranberry juice. Goodnight #twitter" +"twitter","neutral","126876733877133312","Thu Oct 20 04:25:45 +0000 2011","you got 18 mentions go click on them , run , run , run :::::::: only 1 new ::::: ha ha ha ha #Twitter was kidding with u #Twit2alsh" +"twitter","neutral","126876684925415425","Thu Oct 20 04:25:33 +0000 2011","#Twitter OFF ☑" +"twitter","neutral","126876682928926720","Thu Oct 20 04:25:33 +0000 2011","RT @SharonHayes: Top 50 #Twitter Acronyms, Abbreviations and Initialisms http://t.co/EQGBHNq6 /via @ruhanirabin" +"twitter","neutral","126876593531518976","Thu Oct 20 04:25:11 +0000 2011","Thank you, me too. Looking forward to reviewing your book @WarrenWhitlock #twitter #authors #socialmedia" +"twitter","neutral","126876559977095168","Thu Oct 20 04:25:03 +0000 2011","#Twitter is hella funny when got people on here you grew up with....lmao" +"twitter","neutral","126876501755957249","Thu Oct 20 04:24:49 +0000 2011","@THEmattbruening Took your #Twitter virginity. #bitch" +"twitter","neutral","126876490364223488","Thu Oct 20 04:24:47 +0000 2011","Instead of updating your #Twitter why don't you update your life!" +"twitter","neutral","126876467631104000","Thu Oct 20 04:24:41 +0000 2011","Top thing you would change about #twitter - go!" +"twitter","neutral","126876452762296321","Thu Oct 20 04:24:38 +0000 2011","RT @Te_AmoTam: #twitter is not #eharmony" +"twitter","neutral","126876394339831808","Thu Oct 20 04:24:24 +0000 2011","RT @jeffbullas: Twitter Reveals It’s Latest Growth Numbers http://t.co/SujtK4fS #Twitter #SMM #SocialMedia" +"twitter","neutral","126876099786444800","Thu Oct 20 04:23:14 +0000 2011","My #twitter age is 2 years 160 days 2 hours 28 minutes 32 seconds. Find out yours at http://t.co/3YI2oDVK #twittertime" +"twitter","neutral","126876078592638976","Thu Oct 20 04:23:09 +0000 2011","WordPress Plugin - 2-Klicks-Button -... 
http://t.co/UjIvD1xu #2-Klicks-Button #Facebook #Google+ #Like #Socialshareprivacy #Twitter" +"twitter","neutral","126876009797656576","Thu Oct 20 04:22:52 +0000 2011","@alexbabb9 Alexthe_ASSholeBabb should be your new #Twitter name" +"twitter","neutral","126876004730933250","Thu Oct 20 04:22:51 +0000 2011","#twitter and #Facebook gone get sumbody beat up mane everybody b so reckless" +"twitter","neutral","126875958929145856","Thu Oct 20 04:22:40 +0000 2011","Facebook and #Twitter Off" +"twitter","neutral","126875958694260736","Thu Oct 20 04:22:40 +0000 2011","@jacobstanton2 you're actually #toocool for #twitter :)" +"twitter","neutral","126875943284379649","Thu Oct 20 04:22:36 +0000 2011","#twitter is not #eharmony" +"twitter","neutral","126875894177480705","Thu Oct 20 04:22:25 +0000 2011","RT @The_Kase Do real thugs have #twitter? #confusedbymytimeline // Hell no." +"twitter","neutral","126875887093293056","Thu Oct 20 04:22:23 +0000 2011","wow good old classic #HoodMemories some #Classic moments there i had to share on #Twitter" +"twitter","neutral","126875872165769216","Thu Oct 20 04:22:19 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 26" +"twitter","neutral","126875805505691648","Thu Oct 20 04:22:03 +0000 2011","RT @Lecko_alx: My #twitter age is 2 years 94 days 1 hour 40 minutes 33 seconds. Find out yours at http://t.co/spItzFcx #twittertime" +"twitter","neutral","126875787818319872","Thu Oct 20 04:21:59 +0000 2011","#Twitter&FBOff" +"twitter","neutral","126875761469689856","Thu Oct 20 04:21:53 +0000 2011","My #twitter age is 2 years 153 days 5 hours 6 minutes 25 seconds. Find out yours at http://t.co/NjTHu7tM #twittertime" +"twitter","neutral","126875734005399552","Thu Oct 20 04:21:46 +0000 2011","RT @Crazy_Golfer: ""If you lose followers on #Twitter don't worry even Jesus lost followers"" ~Unknown #quote via @smqueue" +"twitter","neutral","126875692855078912","Thu Oct 20 04:21:37 +0000 2011","Got to do it!RT @ElderMLTillerJr: I am cracking up at @BishopWSThomas advertising for #Twitter LOL" +"twitter","neutral","126875612341207040","Thu Oct 20 04:21:17 +0000 2011","I tried to explain why you would do ""The #Twitter "" : “The Twitter”: http://t.co/opjnrXlb @jefferypjacobs" +"twitter","neutral","126875579172663296","Thu Oct 20 04:21:09 +0000 2011","YAAAAAAAAY @BoogieDeBeast made my Tallman #Twitter list standing 6'6 BaYbeeeee" +"twitter","neutral","126875562663874560","Thu Oct 20 04:21:06 +0000 2011","Damn tired! .. Need some rest! Night #twitter" +"twitter","neutral","126875457265205248","Thu Oct 20 04:20:40 +0000 2011","RT @Xo_Rosibel_: HEY EVERYBODY THAT HAS A #TWITTER FOLLOW ME NOWW @Aye_Cassandra @TeenSwagQuotess @_FcknJane @autumn_tarbania @Ayoo_Mari ..." +"twitter","neutral","126875416760815616","Thu Oct 20 04:20:31 +0000 2011","#twitter that's wassup ! lol!" +"twitter","neutral","126875301157404672","Thu Oct 20 04:20:03 +0000 2011","@jacobstanton2 @Ryan_Burke4 doesn't take much to be cool on #twitter" +"twitter","neutral","126875280018112512","Thu Oct 20 04:19:58 +0000 2011","Getting some blank rows in my #Twitter feed on the website. Are you?" +"twitter","neutral","126875274901065728","Thu Oct 20 04:19:57 +0000 2011","if u couldnt tweet from your phone a lot of people wouldnt be on #twitter -- #broke #fact" +"twitter","neutral","126875267674284032","Thu Oct 20 04:19:55 +0000 2011","RT @_Shelly: mm but yeo .. #twitter .. free up @_tashaaiWESTEND ! free up @_tashaaiWESTEND ! 
free up @_tashaaiWESTEND !" +"twitter","neutral","126875204772311040","Thu Oct 20 04:19:40 +0000 2011","My #twitter age is 1 year 148 days 1 hour 15 minutes 56 seconds. Find out yours at http://t.co/hcR3FSpD #twittertime" +"twitter","neutral","126875183725297664","Thu Oct 20 04:19:35 +0000 2011","- #twitter please forgive my last slew of tweets I'm having a rough night , but I pray you'll forgive me .#tosleepigo" +"twitter","neutral","126875160623058944","Thu Oct 20 04:19:30 +0000 2011","Where is #Twitter" +"twitter","neutral","126875159800987649","Thu Oct 20 04:19:29 +0000 2011","Goodmorning #twitter! :)" +"twitter","neutral","126875080084033536","Thu Oct 20 04:19:10 +0000 2011","RT @LIL_HAM05: @Nikki_BoBo #Twitter after dark" +"twitter","neutral","126874943341330432","Thu Oct 20 04:18:38 +0000 2011","bye bye #twitter ;)" +"twitter","neutral","126874922017488896","Thu Oct 20 04:18:33 +0000 2011","@Nikki_BoBo #Twitter after dark" +"twitter","neutral","126874909933711361","Thu Oct 20 04:18:30 +0000 2011","@natemarozzi that's what #Twitter is for" +"twitter","neutral","126874886378496000","Thu Oct 20 04:18:24 +0000 2011","Hello !!! Centuries past till to write on #twitter !!! +Love you !!! :)" +"twitter","neutral","126874883207610368","Thu Oct 20 04:18:24 +0000 2011","@LOvely_MsToya relaxin, my mind! And #twitter" +"twitter","neutral","126874847614734336","Thu Oct 20 04:18:15 +0000 2011","my phone is off......thank GOD.# FUCK SPRINT . IM ON #TWITTER 2NITE +#LINAS" +"twitter","neutral","126874811296251904","Thu Oct 20 04:18:06 +0000 2011","RT @netteo81: RT @nine_oh SIMPLY DA BEST ON #TWITTER MY TWEEPS @jbdancingmachin @EpicQuotesOrg @gowhitedawg (cont) http://t.co/rcpe86Bw" +"twitter","neutral","126874799099219969","Thu Oct 20 04:18:03 +0000 2011","RT @Luisfitq: Ty!RT @nine_oh: SIMPLY DA BEST ON #TWITTER MY TWEEPS @MsCarieFWR @dizzleb19 @LBN_Prod @Federal_and_Co @KimDeanMusic @mscre ..." +"twitter","neutral","126874783756455936","Thu Oct 20 04:18:00 +0000 2011","I figgured if my #twitter Lady-Gems friends get new one's that I can too! =) My New Boo http://t.co/dXN6NpIB" +"twitter","neutral","126874662268452864","Thu Oct 20 04:17:31 +0000 2011","Ohemgee. Be a follower..... #twitter" +"twitter","neutral","126874568655777793","Thu Oct 20 04:17:09 +0000 2011","How to Keep Up With Over 100,000 #Twitter Followers http://t.co/XD5GST4L for those of you with this many tweeps rock on, i'm not there YET=)" +"twitter","neutral","126874543770976256","Thu Oct 20 04:17:03 +0000 2011","RT @MyHoopDreams: Im Chillin Man Trying To Stay #Humble Its Just Hard When Niggas Hatin And Pump Fakin Via #Twitter" +"twitter","neutral","126874532555399168","Thu Oct 20 04:17:00 +0000 2011","How strategic is our technology agenda? http://t.co/bmeiy5rq #Twitter #Strategy" +"twitter","neutral","126874503455318016","Thu Oct 20 04:16:53 +0000 2011","RT @iGainFollows: Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autop ..." +"twitter","neutral","126874467824697344","Thu Oct 20 04:16:45 +0000 2011","#uberSocial on #blackberry #crackberry is better than the actual #twitter site. It's wack on the iphone. #CoolStoryBro" +"twitter","neutral","126874421829971968","Thu Oct 20 04:16:34 +0000 2011","Do real thugs have #twitter? 
#confusedbymytimeline" +"twitter","neutral","126874384139948032","Thu Oct 20 04:16:25 +0000 2011","YAAAAAAAAY @Freakbull_ made my Tallman #Twitter standing 6'4 BaYbeeeee" +"twitter","neutral","126874374077816832","Thu Oct 20 04:16:22 +0000 2011","finally logged back in!!!! what's up #twitter!!!" +"twitter","neutral","126874332176711681","Thu Oct 20 04:16:12 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 13" +"twitter","neutral","126874244629016576","Thu Oct 20 04:15:51 +0000 2011","#Twitter lies RT @amerikusl I lose all my friends once they get pregnant :-(" +"twitter","neutral","126874228296396800","Thu Oct 20 04:15:47 +0000 2011","RT @nagoul1: Can anyone please explain to us why the #Retweet ""issue"" is happening to many, but not to others? #twitter #tweeting #tweeps" +"twitter","neutral","126874200253276160","Thu Oct 20 04:15:41 +0000 2011","#twitter&facebookOff good night everyone bye bye" +"twitter","neutral","126874174722539520","Thu Oct 20 04:15:35 +0000 2011","Can anyone please explain to us why the #Retweet ""issue"" is happening to many, but not to others? #twitter #tweeting #tweeps" +"twitter","neutral","126874159606267904","Thu Oct 20 04:15:31 +0000 2011","Why everytime I spazz on #twitter yall think some chick done hurt my feelings..I aint w/ Annette no more.. Lol" +"twitter","neutral","126874138836074497","Thu Oct 20 04:15:26 +0000 2011","Niggas be in they feelings on #twitter hella funny" +"twitter","neutral","126874058062184448","Thu Oct 20 04:15:07 +0000 2011","Wowww | Can watching #Twitter trends help predict the future? http://t.co/2bRzlsGc via @zite" +"twitter","neutral","126874038164393984","Thu Oct 20 04:15:02 +0000 2011","#twitter off :) good night for everybody !" +"twitter","neutral","126873977284079616","Thu Oct 20 04:14:48 +0000 2011","Facebook is becoming trashier and trashier #twitter" +"twitter","neutral","126873903552405504","Thu Oct 20 04:14:30 +0000 2011","My #twitter age is 2 years 0 days 3 hours 31 minutes 17 seconds. Find out yours at http://t.co/xNCmtWWb #twittertime" +"twitter","neutral","126873883172274176","Thu Oct 20 04:14:25 +0000 2011","RT @SharonHayes: Top 50 #Twitter Acronyms, Abbreviations and Initialisms http://t.co/nEqHcJsY /via @ruhanirabin" +"twitter","neutral","126873866575425536","Thu Oct 20 04:14:21 +0000 2011","«@BigBellyBigCuz RT @Free_Nuttball #fuckyoumean #Pittsburgh don't got the best #twitter.»" +"twitter","neutral","126873786715873280","Thu Oct 20 04:14:02 +0000 2011","I don't know why people choose to talk to me on #Twitter! .when you have my number ..!!!!" +"twitter","neutral","126873686987902976","Thu Oct 20 04:13:38 +0000 2011","GoodNight #Twitter:)" +"twitter","neutral","126873680654516224","Thu Oct 20 04:13:37 +0000 2011","watching a ""new"" interview with @L1LD3BB13 giving props to #twitter glad to see her out on her own doing her own thang;" +"twitter","neutral","126873665601146882","Thu Oct 20 04:13:33 +0000 2011","#Facebook Y #Twitter Off xd" +"twitter","neutral","126873546017357825","Thu Oct 20 04:13:05 +0000 2011","Some1 shld create a Business where they take ppl's #twitter streams over a period of time & make them into a nice book. #keepsake #urwelcome" +"twitter","neutral","126873484746952705","Thu Oct 20 04:12:50 +0000 2011","THANX ! @DesertBeacon @oldivory @bennettgina @yywhy #TWITTER Love and early #FF in case i cant see ! KEEP IT GOING ! 
#P2" +"twitter","neutral","126873420192423936","Thu Oct 20 04:12:35 +0000 2011","#Twitter Introduces Free Twitter Web Analytics Tool http://t.co/HdLjWzaw via @HubSpot #reporting" +"twitter","neutral","126873418846044160","Thu Oct 20 04:12:34 +0000 2011","Sleep. Finally. #Twitter" +"twitter","neutral","126873366304010240","Thu Oct 20 04:12:22 +0000 2011","Trying to identify a #constantcontact expert - anyone on #twitter?" +"twitter","neutral","126873364173299712","Thu Oct 20 04:12:21 +0000 2011","Bedtime good night #twitter #goodnightworld" +"twitter","neutral","126873323014590464","Thu Oct 20 04:12:12 +0000 2011","I'm goin 2 bed #TWITTER dead tonight" +"twitter","neutral","126873153015263233","Thu Oct 20 04:11:31 +0000 2011","Top 50 #Twitter Acronyms, Abbreviations and Initialisms http://t.co/nEqHcJsY /via @ruhanirabin" +"twitter","neutral","126873062439260160","Thu Oct 20 04:11:09 +0000 2011","@SincerelytiGG_ I guess dat wat #twitter does lol" +"twitter","neutral","126873003920330752","Thu Oct 20 04:10:55 +0000 2011","Tired A'f >.< #Twitter Off" +"twitter","neutral","126873001198239744","Thu Oct 20 04:10:55 +0000 2011","Goodnight #twitter " +"twitter","neutral","126872982315474945","Thu Oct 20 04:10:50 +0000 2011","Ok so my #EX girlfriend hacked my #TWITTER this should be funny" +"twitter","neutral","126872958068207616","Thu Oct 20 04:10:45 +0000 2011","RT @Free_Nuttball: #fuckyoumean #Pittsburgh don't got the best #twitter." +"twitter","neutral","126872890007236608","Thu Oct 20 04:10:28 +0000 2011","RT @Free_Nuttball #fuckyoumean #Pittsburgh don't got the best #twitter." +"twitter","neutral","126872886748266496","Thu Oct 20 04:10:28 +0000 2011","#Recruiting Tip: Ask yourself: why am I not building my personal brand on #LinkedIn, #Twitter, and #Facebook?" +"twitter","neutral","126872872294694912","Thu Oct 20 04:10:24 +0000 2011","“@Free_Nuttball #fuckyoumean #Pittsburgh don't got the best #twitter.â€" +"twitter","neutral","126872871254491137","Thu Oct 20 04:10:24 +0000 2011","RT @iLoveMyTats: Lmao. RT @MissAyunnaaa Well that's my que, bye #Twitter , #oomf finna start quotin lines from #ATL lol" +"twitter","neutral","126872821375827968","Thu Oct 20 04:10:12 +0000 2011","""Straight Outta Hak Mob"" RT @Jhon_Doe_Smokes: Out of my 7 months of actually gettin on #Twitter. I've only bee… (cont) http://t.co/qDM3X8BB" +"twitter","neutral","126872777130123264","Thu Oct 20 04:10:01 +0000 2011","#fuckyoumean #Pittsburgh don't got the best #twitter." +"twitter","neutral","126872773925679105","Thu Oct 20 04:10:01 +0000 2011","@DJ_Nasty16 I know. You my only compo. #we be #reppin #twitter" +"twitter","neutral","126872528235921409","Thu Oct 20 04:09:02 +0000 2011","#Twitter Of" +"twitter","neutral","126872511492263937","Thu Oct 20 04:08:58 +0000 2011","pinto el sueño #Twitter #OFF" +"twitter","neutral","126872465635938304","Thu Oct 20 04:08:47 +0000 2011","RT @wbasinger: RT @TeachHub: 50 Ways to Use #Twitter in the Classroom http://t.co/AFHx3m2n" +"twitter","neutral","126872432517709825","Thu Oct 20 04:08:39 +0000 2011","#Twitter after dark! #GoodNight!! an yall thought a was abt to get #Nasty!!lol" +"twitter","neutral","126872371901640705","Thu Oct 20 04:08:25 +0000 2011","People aren't the same as they wanna be perceived on #twitter...Please believe I'm that crazy bitch at work who slaps everyones ass in line" +"twitter","neutral","126872357099945984","Thu Oct 20 04:08:21 +0000 2011","@serendipityjane is the ultimate #twitter bff! 
She uses #EFT to turn your frown upsidedown!#Follow her & learn about her VIP class.It rocks!" +"twitter","neutral","126872338158452736","Thu Oct 20 04:08:17 +0000 2011","even if I skip the newspaper, I don't worry coz I get all news updates at #Twitter. :)" +"twitter","neutral","126872320043257857","Thu Oct 20 04:08:12 +0000 2011","I'm on one tonight ...#tripping ! Lol eff yal ...its #twitter !" +"twitter","neutral","126872299709280256","Thu Oct 20 04:08:08 +0000 2011","RT @Ashu180489: Progress is a Process , Surprised to see a sensible topic trending worldwide in #twitter" +"twitter","neutral","126872290968338432","Thu Oct 20 04:08:06 +0000 2011","@SusanFelicity Many people pay little attention to important issues. I strongly believe social media, like #twitter, is changing this." +"twitter","neutral","126872267257950209","Thu Oct 20 04:08:00 +0000 2011","@BenFolds p.s. I would have never used the word ""playa"" over ""player"" if #Twitter wasn't so restrictive..." +"twitter","neutral","126872221292576768","Thu Oct 20 04:07:49 +0000 2011","#Twitter'OFF :| GN'♥" +"twitter","neutral","126872201663229952","Thu Oct 20 04:07:44 +0000 2011","RT @MinisterOnline: its ok to put up bible verses & encouraging post about God on #Twitter, but make sure you stand by your words, cause ..." +"twitter","neutral","126872199838699520","Thu Oct 20 04:07:44 +0000 2011","Hitting the sack early. G-night #twitter" +"twitter","neutral","126872156763201536","Thu Oct 20 04:07:34 +0000 2011","RT @KanvasKings: 1,500 #followers on #Twitter and 2,500 #fans on #Facebook? Takeover imminent? #HellYeah. ALL SHIRTS JUST $15 til Monday!" +"twitter","neutral","126872143081390080","Thu Oct 20 04:07:30 +0000 2011","Follow me on #Twitter @CatchASet_ #TeamFollow <3" +"twitter","neutral","126872051242893312","Thu Oct 20 04:07:08 +0000 2011","Progress is a Process , Surprised to see a sensible topic trending worldwide in #twitter" +"twitter","neutral","126872004652580864","Thu Oct 20 04:06:57 +0000 2011","@glaad I already purple mine!!1 Both accounts on #Facebook and #Twitter" +"twitter","neutral","126871971827953664","Thu Oct 20 04:06:49 +0000 2011","its ok to put up bible verses & encouraging post about God on #Twitter, but make sure you stand by your words, cause your an example." +"twitter","neutral","126871955419836417","Thu Oct 20 04:06:45 +0000 2011","These Hoes Follow Me Like #Twitter !" +"twitter","neutral","126871924059013120","Thu Oct 20 04:06:38 +0000 2011","I just installed Twidroyd for #Twitter on my #Android Phone - It's #Free and you can mute users :) http://t.co/eM2yDlRs" +"twitter","neutral","126871857277308930","Thu Oct 20 04:06:22 +0000 2011","RT @ChrisPirillo: Tired of those spammy Twitter DMs? http://t.co/atMIoMoO #twitter #spam" +"twitter","neutral","126871758757306368","Thu Oct 20 04:05:59 +0000 2011","#twitter OFF<3" +"twitter","neutral","126871669871620096","Thu Oct 20 04:05:37 +0000 2011","Shout out to all the single mothers on #Twitter" +"twitter","neutral","126871663299137536","Thu Oct 20 04:05:36 +0000 2011","im tired gonna get to bed peace out #twitter" +"twitter","neutral","126871626133417985","Thu Oct 20 04:05:27 +0000 2011","Well...#twitter Off,,,," +"twitter","neutral","126871606952853504","Thu Oct 20 04:05:22 +0000 2011","Aye @Me_Cedez get my bay out da way @Ful1time ( swirlZ) gotta #Twitter" +"twitter","neutral","126871562707144704","Thu Oct 20 04:05:12 +0000 2011","Let's go see what is trending on #Twitter tonight?" 
+"twitter","neutral","126871562098982912","Thu Oct 20 04:05:12 +0000 2011","He Like Kissing The Little Pink Puss Lls #Twitter This After Dark" +"twitter","neutral","126871520785072129","Thu Oct 20 04:05:02 +0000 2011","#Twittertip: How to Keep Up With Over 100,000 #Twitter Followers http://t.co/o3oH3YPB RT @anntran_" +"twitter","neutral","126871511326924800","Thu Oct 20 04:05:00 +0000 2011","#Twitter (of)" +"twitter","neutral","126871458998788096","Thu Oct 20 04:04:47 +0000 2011","RT“@reimeraaron: #twitter keeps me from sleeping! #soaddictedâ€" +"twitter","neutral","126871458726162432","Thu Oct 20 03:58:32 +0000 2011","Shout ou to all my followers! #TWitTER" +"twitter","neutral","126871437322629120","Thu Oct 20 04:04:42 +0000 2011","RT @TeachHub 50 Ways to Use #Twitter in the Classroom http://t.co/lJiN0See #education #ntchat" +"twitter","neutral","126871372164120576","Thu Oct 20 04:04:26 +0000 2011","@Slater_Boy Needs to take his ass to sleep... Its grown folks hour on #twitter" +"twitter","neutral","126871299741069312","Thu Oct 20 04:04:09 +0000 2011","#Twitter Buzz Builds for the Occupy Wall Street Movement [CHARTS] http://t.co/mTdvRoIj" +"twitter","neutral","126871291998371840","Thu Oct 20 04:04:07 +0000 2011","#wendywilliams just said she was a woman bitch please stop lien on #twitter and on #nationaltv we all know your a man" +"twitter","neutral","126871201778905088","Thu Oct 20 04:03:46 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 18" +"twitter","neutral","126871143431938048","Thu Oct 20 04:03:32 +0000 2011","U put w/eva comes to mind on #twitter but let me try n do it ima be a rat or something smh the nerve of dese #NIGGAS!!!!'" +"twitter","neutral","126871134439346177","Thu Oct 20 04:03:30 +0000 2011","GN #Twitter.......z.z.z.z.z.z Z Z z z z" +"twitter","neutral","126871086863355905","Thu Oct 20 04:03:18 +0000 2011","Twitter Buzz Builds for the Occupy Wall Street Movement [CHARTS] http://t.co/E44Qd1u2 #Twitter" +"twitter","neutral","126871066760065024","Thu Oct 20 04:03:14 +0000 2011","i've been slackin on my #twitter game" +"twitter","neutral","126871041225134080","Thu Oct 20 04:03:08 +0000 2011","RT @AHennessyWorld: #Twitter I apologize 4 being out of line regardless what sum1 does I promise 2 remain humble keep it from public ey ..." +"twitter","neutral","126871013937000450","Thu Oct 20 04:03:01 +0000 2011","Brilliant (if you have a spare 12mins to watch): Have I Shared Too Much? http://t.co/tEwNhhUk #Twitter #LinkedIn #Facebook #in" +"twitter","neutral","126871003132465152","Thu Oct 20 04:02:58 +0000 2011","#twitter keeps me from sleeping! #soaddicted" +"twitter","neutral","126870943489466368","Thu Oct 20 04:02:44 +0000 2011","#twitter be making my ass stay up later than planed" +"twitter","neutral","126870941992091648","Thu Oct 20 04:02:44 +0000 2011","Follow your interests. #twitter" +"twitter","neutral","126870940570226688","Thu Oct 20 04:02:44 +0000 2011","Lmao. RT @MissAyunnaaa Well that's my que, bye #Twitter , #oomf finna start quotin lines from #ATL lol" +"twitter","neutral","126870931040768000","Thu Oct 20 04:02:41 +0000 2011","@twitter + @tumblr = Drugs are severe. #twitter #tumblr" +"twitter","neutral","126870815877771264","Thu Oct 20 04:02:14 +0000 2011","RT @extremejohn Stuck on Facebook? I’m so sorry. http://t.co/aGHbaQmn #Twitter #Google #Facebook" +"twitter","neutral","126870807191363584","Thu Oct 20 04:02:12 +0000 2011","GN #twitter off." 
+"twitter","neutral","126870792960086018","Thu Oct 20 04:02:08 +0000 2011","why am i #rambling on #facebook i keep forgetting i have #twitter.. then remember and come back.. every 3 seconds.. -_- ... o_O" +"twitter","neutral","126870788564467713","Thu Oct 20 04:02:07 +0000 2011","#Twitter I apologize 4 being out of line regardless what sum1 does I promise 2 remain humble keep it from public eye!iloveya#HennessyNation" +"twitter","neutral","126870756926816256","Thu Oct 20 04:02:00 +0000 2011","17 Things You Didn’t Know About #Twitter: http://t.co/LL61rx2R via @TweetSmarter RT @sharonhayes @ChoiceZnewZ #facts" +"twitter","neutral","126870706943295489","Thu Oct 20 04:01:48 +0000 2011","#goodnight #twitter" +"twitter","neutral","126870699108339712","Thu Oct 20 04:01:46 +0000 2011","I think @SchwastySmack needs to cool her roll on her #twitter game." +"twitter","neutral","126870651112927232","Thu Oct 20 04:01:35 +0000 2011","Things that #heatmeup : when people put the # on facebookposts when they dont even have a #twitter. #getaclue" +"twitter","neutral","126870623086592000","Thu Oct 20 04:01:28 +0000 2011","goodnite #twitter (:" +"twitter","neutral","126870593764208640","Thu Oct 20 04:01:21 +0000 2011","Out of #curiosity, #doesanyoneknow if it's against the #Twitter #TOS to use #hashtags so #eggregiously as I seem to be doing #tonight?" +"twitter","neutral","126870454299398144","Thu Oct 20 04:00:48 +0000 2011","RT @JonJars: #twitter; for those statuses you'd never post on #facebook" +"twitter","neutral","126870356563722240","Thu Oct 20 04:00:24 +0000 2011","@TheRealJedaD If her name ain't #Twitter then ur not bcuz u been on here for a minute goin HAM...lol, I'm Just Sayin!!" +"twitter","neutral","126870255703306241","Thu Oct 20 04:00:00 +0000 2011","Tired of those spammy Twitter DMs? http://t.co/atMIoMoO #twitter #spam" +"twitter","neutral","126870119363260416","Thu Oct 20 03:59:28 +0000 2011","@bhammz Welcome to #Twitter" +"twitter","neutral","126870091341115392","Thu Oct 20 03:59:21 +0000 2011","its funny how you can take time out your precious #twitter time to monitor or judge what in saying" +"twitter","neutral","126869939536674816","Thu Oct 20 03:58:45 +0000 2011","#Twitter honey i don't freaking know anymore ><""" +"twitter","neutral","126869876588552192","Thu Oct 20 03:58:30 +0000 2011","@cpmorris congrats on joining #twitter" +"twitter","neutral","126869850751631361","Thu Oct 20 03:58:24 +0000 2011","@Adaddy101 @johnodame .. all this flirting over #twitter is starting to make me think you guys got a crush on each other #bromance." +"twitter","neutral","126869816584839168","Thu Oct 20 03:58:16 +0000 2011","#Twitter it's been a pleasure but I have to go exercise and eat dinner! Possibly be back b4 I sleep? We'll c! I am #BLESSED & we all are! =)" +"twitter","neutral","126869769608642560","Thu Oct 20 03:58:04 +0000 2011","#OkUPrettyBut you never talk shit on #Twitter @HannahMcCall1 :)" +"twitter","neutral","126869765049434112","Thu Oct 20 03:58:03 +0000 2011","#ihateitwhen i think of a cool twit,by da tym i grab da fone & load #twitter i would have forgotten it!!! #dumbass" +"twitter","neutral","126869751476654080","Thu Oct 20 03:58:00 +0000 2011","Lol itss ben a #year since i had a #twitter but i jus start using it" +"twitter","neutral","126869749098487809","Thu Oct 20 03:57:59 +0000 2011","RT @KiLLiN_EmSoftly: who follows ppl then unfollows them?!.. #YourLameAsHell tryna make it seem like your somebody IMPORTANT!! it's onl ..." 
+"twitter","neutral","126869742068842496","Thu Oct 20 03:57:58 +0000 2011","I want to be a #Twitter Cypher." +"twitter","neutral","126869644534489088","Thu Oct 20 03:57:35 +0000 2011","RT Is @Wahooly the long awaited way to make clear the ROI of #Twitter? http://t.co/rjY5WGr0 #socialmedia /via @socialmedia2day #gosocial" +"twitter","neutral","126869604348862464","Thu Oct 20 03:57:25 +0000 2011","It's #real when you tell ur man how ur gfs roasted your ex on #twitter and he's like good job babe lol" +"twitter","neutral","126869604214652929","Thu Oct 20 03:57:25 +0000 2011","#twitter; for those statuses you'd never post on #facebook" +"twitter","neutral","126869575202643968","Thu Oct 20 03:57:18 +0000 2011","@msleamichele @NayaRivera @DiannaAgron @MsAmberPRiley please could u somehow convince #hemo to join #twitter we need her awesomeness!! #glee" +"twitter","neutral","126869488007262208","Thu Oct 20 03:56:57 +0000 2011","I just installed the new Twidroyd for #Twitter on my #Android Phone - It's #Free, give it a try. http://t.co/eKh5JmGp" +"twitter","neutral","126869448069095424","Thu Oct 20 03:56:48 +0000 2011","Its 11:56 &, im going to sleep now >.< O'well :) good night #Twitter ." +"twitter","neutral","126869378452037632","Thu Oct 20 03:56:31 +0000 2011","Getting ready 4 bed... School tomorrow. We have an evacuation at school! Night #Twitter =]" +"twitter","neutral","126869302887464960","Thu Oct 20 03:56:13 +0000 2011","#Twitter off! See ya :D @justinbieber Love u and have #SweetDreams. ♥" +"twitter","neutral","126869296994451457","Thu Oct 20 03:56:12 +0000 2011","Goodnight everyone #twitter off" +"twitter","neutral","126869175053467649","Thu Oct 20 03:55:43 +0000 2011","i want him to make a #Twitter" +"twitter","neutral","126868990730575872","Thu Oct 20 03:54:59 +0000 2011","Good Night #Twitter && #TwitterTweeters :)" +"twitter","neutral","126868950637219840","Thu Oct 20 03:54:49 +0000 2011","RT @Hirudineajigane: #Twitter Thank you.Read those great tweets.helps live better! :) Bows. http://t.co/FNKBW24F" +"twitter","neutral","126868911361757185","Thu Oct 20 03:54:40 +0000 2011","@mpeagler http://t.co/1EYtmczJ @GetGlue #Twitter" +"twitter","neutral","126868888074989568","Thu Oct 20 03:54:34 +0000 2011","#twitter/off" +"twitter","neutral","126868868512743425","Thu Oct 20 03:54:30 +0000 2011","#Twitter Thank you.Read those great tweets.helps live better! :) Bows." +"twitter","neutral","126868831493820416","Thu Oct 20 03:54:21 +0000 2011","I might call it a night goodnight #twitter" +"twitter","neutral","126868783431303168","Thu Oct 20 03:54:09 +0000 2011","search #facebook for ""FACEBOOK/TWITTER INDIE ARTIST MUSIC NETWORK (WORLDWIDE) +"" add yourself for more #twitter followers! network here!" +"twitter","neutral","126868754033426434","Thu Oct 20 03:54:02 +0000 2011","RT @iTweetTechNews: Famous wrestler Razor Ramon is trending on #Twitter." +"twitter","neutral","126868751533617152","Thu Oct 20 03:54:02 +0000 2011","RT @nine_oh: SIMPLY DA BEST ON #TWITTER MY TWEEPS @Mui_Dziak @Ecofriendly211 @wstein @imCHOKOLATEwhoU @SoDear2MyHeart @sscoop4 @PyrexPress" +"twitter","neutral","126868627109584896","Thu Oct 20 03:53:32 +0000 2011","Gonna Crash Out Pretty Tired, Night #Twitter<3" +"twitter","neutral","126868570226425856","Thu Oct 20 03:53:18 +0000 2011","#Twitter: What's happening? #Facebook: What's on your mind? #Myspace: Where is everyone?" 
+"twitter","neutral","126868533991849988","Thu Oct 20 03:53:10 +0000 2011","@FATBOYFRESH87 Hey brotha I forgot you had a #twitter" +"twitter","neutral","126868465377226752","Thu Oct 20 03:56:01 +0000 2011","So what #twitter needs is a ""save tweet"" text function. So I could ""st @xyz 5"" to save the 5th most recent msg on twitter site to read later" +"twitter","neutral","126868429864046592","Thu Oct 20 03:52:45 +0000 2011","RT @SomalyMam: You follow me on #twitter, Like my #Foundation on #facebook http://t.co/3kGgPh7z! Together we can stop #humantrafficking" +"twitter","neutral","126868429796933632","Thu Oct 20 03:52:45 +0000 2011","I think I'm addicted to checking my #email, #twitter and #facebook" +"twitter","neutral","126868397282689025","Thu Oct 20 03:52:37 +0000 2011","#Retweet #RT #BabyMamaDrama #BabyDaddyDrama #Twitter #S/O to everyone who read my blog www.cpc504.blogspot.com" +"twitter","neutral","126868239203573760","Thu Oct 20 03:51:59 +0000 2011","Thanks Irma @TellJuicy ""TWITTER 4 BUSINESS"" November 16 http://t.co/gOrDf5dj @KingstonCC #Twitter #Melbourne #Business" +"twitter","neutral","126868233084080130","Thu Oct 20 03:51:58 +0000 2011","@RashadHurst Pls let us know if you need help w/ music marketing promo. We got #Twitter & #Youtube on lock. http://t.co/fcPBcqLo" +"twitter","neutral","126868090687459328","Thu Oct 20 03:51:24 +0000 2011","#IReallyHateWhenPeople TyP3 L!k3 tH!S Or #Use #Hashtags #In #Every #Word #On #Twitter" +"twitter","neutral","126868017106780160","Thu Oct 20 03:51:07 +0000 2011","#Twitter Buzz Builds for the #Occupy_Wall_Street Movement [CHARTS] http://t.co/gHCez2Yk" +"twitter","neutral","126868008902737920","Thu Oct 20 03:51:05 +0000 2011","enjoy #Twitter Buzz Builds for the #Occupy_Wall_Street Movement [CHARTS] http://t.co/vKjwATSr great" +"twitter","neutral","126867948471189504","Thu Oct 20 03:50:50 +0000 2011","RT @jeffbullas: What Is A #Facebook “Like†Worth For Your Business? http://t.co/BsT25W2u #SMM #SocialMedia #Twitter" +"twitter","neutral","126867947418427393","Thu Oct 20 03:50:50 +0000 2011","Shes a freaked out philosopher on #twitter lol girl bye" +"twitter","neutral","126867879382614016","Thu Oct 20 03:50:34 +0000 2011","gotta get back on my #twitter #grind" +"twitter","neutral","126867786680111104","Thu Oct 20 03:50:12 +0000 2011","Best night inna while :) #newbestfriends wish you kids had #Twitter #amaturehour" +"twitter","neutral","126867774738927617","Thu Oct 20 03:50:09 +0000 2011","RT @BucBoyy: - Ima Addicted To #Twitter." +"twitter","neutral","126867767914799104","Thu Oct 20 03:50:07 +0000 2011","#LOVATICS! Wanna be in a fan book for @ddlovato let me know and ill send wat needs2b in it via #twitter" +"twitter","neutral","126867760964841473","Thu Oct 20 03:50:05 +0000 2011","Off to sleeep , #goodnight #twitter" +"twitter","neutral","126867711807598592","Thu Oct 20 03:49:54 +0000 2011","To much #twitter taking a break. ♣" +"twitter","neutral","126867680966881280","Thu Oct 20 03:49:46 +0000 2011","these #chickz on #twitter stay with they #mommy" +"twitter","neutral","126867577925406721","Thu Oct 20 03:49:22 +0000 2011","Wow jus made a #TWITTER i reallly dnt kno how o2 use it so i neea alittle help" +"twitter","neutral","126867506127310848","Thu Oct 20 03:49:05 +0000 2011","#twitter should i upload tweetdeck for my laptop?!" +"twitter","neutral","126867487659786240","Thu Oct 20 03:49:00 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! 
http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 23" +"twitter","neutral","126867439177842688","Thu Oct 20 03:48:49 +0000 2011","Some people take themselves way too seriously...especially on #Twitter. It's amusing...to say the least. Bless your heart..." +"twitter","neutral","126867400250490880","Thu Oct 20 03:48:39 +0000 2011","#twitter goin hammer" +"twitter","neutral","126867371364319232","Thu Oct 20 03:48:33 +0000 2011","@IamTinaThompson @JudgeGregMathis is on #twitter also!!" +"twitter","neutral","126867320005066752","Thu Oct 20 03:48:20 +0000 2011","Welcome to #twitter @jackpot15!!" +"twitter","neutral","126867304549072896","Thu Oct 20 03:48:17 +0000 2011","Hello! Welcome to #twitter RT “@beabea210: @ACCU_MED ....hello Accu-Med!!!â€" +"twitter","neutral","126867275113447424","Thu Oct 20 03:48:10 +0000 2011","@karth_vader_ when did you change your nick again? I no longer see any of your tweets on my timeline. Anything wrong with #twitter?" +"twitter","neutral","126867260106227713","Thu Oct 20 03:48:06 +0000 2011","- Ima Addicted To #Twitter." +"twitter","neutral","126867201776037888","Thu Oct 20 03:47:52 +0000 2011","Everyone be sure to follow me on #twitter @mellee_love and check out my boy @TheRealDWI CT stand up!!" +"twitter","neutral","126867198642884608","Thu Oct 20 03:47:51 +0000 2011","RT @OKadeState: @Paigemorse1 tweeted! Its official she is still alive #Twitter" +"twitter","neutral","126867164253798400","Thu Oct 20 03:47:43 +0000 2011","Good morning #twitter." +"twitter","neutral","126867147673714688","Thu Oct 20 03:47:39 +0000 2011","Ways in which you can use #Twitter to your professional advantage http://t.co/E3xK1w67 @writeychap" +"twitter","neutral","126867119592837121","Thu Oct 20 03:47:33 +0000 2011","@winnipeg_rt @andkatmy #Winnipeg needs better local news coverage, tired of not being in the know Thanks #twitter" +"twitter","neutral","126867067776405506","Thu Oct 20 03:47:20 +0000 2011","BRB #twitter;p" +"twitter","neutral","126867012336095232","Thu Oct 20 03:47:07 +0000 2011","Thank you @logoninjas for following us on #twitter - awesome name btw!" +"twitter","neutral","126866981377941504","Thu Oct 20 03:47:00 +0000 2011","Good night #Twitter Off(:" +"twitter","neutral","126866968912478208","Thu Oct 20 03:46:57 +0000 2011","RT @frizzy_drizzy: @iTaylor_Simpson u post on #twitter to much like ery 5 sec #Damn" +"twitter","neutral","126866968753086464","Thu Oct 20 03:46:57 +0000 2011","@king_fizzy & @_Bigrob step your #twitter game up @elijahcrago is starting to catch up" +"twitter","neutral","126866861521502208","Thu Oct 20 03:46:31 +0000 2011","Good Morning #Twitter! Have a nice day. :}" +"twitter","neutral","126866854240202753","Thu Oct 20 03:46:29 +0000 2011","SO IT'S BEEN A LOOONG MINUTE +BUT #TWITTER, IM BACK +& WITH A CLOTHING LINE! + +#SWAGG" +"twitter","neutral","126866804575440897","Thu Oct 20 03:46:17 +0000 2011","@iTaylor_Simpson u post on #twitter to much like ery 5 sec #Damn" +"twitter","neutral","126866628859281408","Thu Oct 20 03:45:36 +0000 2011","#Everybody on #twitter go in tha.#bathroom and say #bloodymary3 times wit tha light off" +"twitter","neutral","126866575700664320","Thu Oct 20 03:45:23 +0000 2011","Thought about changing my #twitter profile to I'm tall, dark & single, but don't think I could take the responses if any! Lol. Night folks." 
+"twitter","neutral","126866562111123456","Thu Oct 20 03:45:20 +0000 2011","Follow your interests #twitter" +"twitter","neutral","126866557325426688","Thu Oct 20 03:45:18 +0000 2011","Out of my 7 months of actually gettin on #Twitter. I've only been in #TwitterJail lik 3 times.... #ThugLife! They were long sentences! Lol:)" +"twitter","neutral","126866520486846465","Thu Oct 20 03:45:10 +0000 2011","What Is A #Facebook “Like†Worth For Your Business? http://t.co/BsT25W2u #SMM #SocialMedia #Twitter" +"twitter","neutral","126866446323171328","Thu Oct 20 03:44:52 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 27" +"twitter","neutral","126866438517567488","Thu Oct 20 03:44:50 +0000 2011","Dear #twitter, Now that we all know I'm awesome could you write me a new ""all about me"" for my profile because really you do know me best." +"twitter","neutral","126866422012981248","Thu Oct 20 03:44:46 +0000 2011","bout to pull up... he see me on #twitter his ass gone flip" +"twitter","neutral","126866421719384065","Thu Oct 20 03:44:46 +0000 2011","Shout out to my man @swaggination for reaching 1000 tweets #twitterswag #twitterdeity #twittermilestone #twitter #twitter #tweets" +"twitter","neutral","126866413053939712","Thu Oct 20 03:44:44 +0000 2011","Goin to bed #GOODNIGHT #TWITTER" +"twitter","neutral","126866411275554816","Thu Oct 20 03:44:44 +0000 2011","Being able to share our thoughts with the world is awesome>>Hooray for #Twitter" +"twitter","neutral","126866376899035136","Thu Oct 20 03:44:35 +0000 2011","@grahamgoingham it's a true delight to see you on #Twitter. P.S. you rocked the house tonight." +"twitter","neutral","126866353561927680","Thu Oct 20 03:44:30 +0000 2011","Goodnight #Twitter :)" +"twitter","neutral","126866303138013184","Thu Oct 20 03:44:18 +0000 2011","RT @SomalyMam: You follow me on #twitter, Like my #Foundation on #facebook http://t.co/acaP03y9! Together we can stop #humantrafficking" +"twitter","neutral","126866277213016064","Thu Oct 20 03:44:12 +0000 2011","nitee +#Twitter & #Followers" +"twitter","neutral","126866187790450688","Thu Oct 20 03:43:50 +0000 2011","Time for bed good night #twitter" +"twitter","neutral","126866177778655233","Thu Oct 20 03:43:48 +0000 2011","wanted to say nite #twitter" +"twitter","neutral","126866083708801024","Thu Oct 20 03:43:26 +0000 2011","Everybody a somebody on #twitter!!!!!! FAKE ASSES" +"twitter","neutral","126866034048241664","Thu Oct 20 03:43:14 +0000 2011","RT @nataliajones: Use #Twitter to generate traffic to your website http://t.co/72YmN3kD" +"twitter","neutral","126865968021516288","Thu Oct 20 03:42:58 +0000 2011","who follows ppl then unfollows them?!.. #YourLameAsHell tryna make it seem like your somebody IMPORTANT!! it's only #TWITTER! smh.." +"twitter","neutral","126865954645884928","Thu Oct 20 03:42:55 +0000 2011","#Followme dijo #Twitter :D" +"twitter","neutral","126865903521505280","Thu Oct 20 03:42:43 +0000 2011","That shit lame trynna fuck with somebody through a DM on #twitter thoo ?" +"twitter","neutral","126865888724004864","Thu Oct 20 03:42:39 +0000 2011","If @Mr_A_Johnson is up. #twitter is gona have some technical difficulties" +"twitter","neutral","126865881069391872","Thu Oct 20 03:42:37 +0000 2011","RT @Nawwaf91: Some people take ' following & unfollowing ' personally. #Twitter" +"twitter","neutral","126865879848853505","Thu Oct 20 03:42:37 +0000 2011","coming back soon!!!! #twitter..." 
+"twitter","neutral","126865837800951808","Thu Oct 20 03:42:27 +0000 2011","Damn girl I'll follow u to the end of the road + +#twitter" +"twitter","neutral","126865802434580480","Thu Oct 20 03:42:19 +0000 2011","twitter people when someone blocks u can they see ur twitts direct to them? #twitter" +"twitter","neutral","126865779349127170","Thu Oct 20 03:42:13 +0000 2011","#twitter I'm off to bed with two crazy pups. #lovethem http://t.co/FSLFpMOb" +"twitter","neutral","126865691923062784","Thu Oct 20 03:41:52 +0000 2011","Lets not throw CJ under the bus yet.. Hes still a 100 million dollar #twitter guy!" +"twitter","neutral","126865511878361090","Thu Oct 20 03:41:09 +0000 2011","need #facebook fans, #twitter followers and #youtube views by nvida2: i need… http://t.co/tK6IHH2B #freelance #job" +"twitter","neutral","126865453221027843","Thu Oct 20 03:40:55 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 10" +"twitter","neutral","126865436590604288","Thu Oct 20 03:40:51 +0000 2011","Gud morning #twitter" +"twitter","neutral","126865419830177794","Thu Oct 20 03:40:47 +0000 2011","I didnt know there wad a test..RT ""@DashBurst: Downloadable #Twitter Cheat Sheet - http://t.co/bHXpqYo5""" +"twitter","neutral","126865247465254912","Thu Oct 20 03:40:06 +0000 2011","RT @x_JustLucky_x: I don't know why I even bother checking FB anymore. I stare for two minutes and just log off lol #Twitter>Fb" +"twitter","neutral","126865215915687936","Thu Oct 20 03:39:59 +0000 2011","Boa noite #Twitter (:" +"twitter","neutral","126865145812107264","Thu Oct 20 03:39:42 +0000 2011","#twitter off'" +"twitter","neutral","126865121938116608","Thu Oct 20 03:39:36 +0000 2011","Swaagg me outt #twitter" +"twitter","neutral","126865091026100224","Thu Oct 20 03:39:29 +0000 2011","1,500 #followers on #Twitter and 2,500 #fans on #Facebook? Takeover imminent? #HellYeah. ALL SHIRTS JUST $15 til Monday!" +"twitter","neutral","126865038479867904","Thu Oct 20 03:39:16 +0000 2011","I had my #Twitter account for like 3 years now.....and I only made 4,783 tweets? o_O" +"twitter","neutral","126865005009309696","Thu Oct 20 03:39:08 +0000 2011","RT @extremejohn Stuck on Facebook? I’m so sorry. http://t.co/JcJBfZ8W #Twitter #Google #Facebook" +"twitter","neutral","126864987078660097","Thu Oct 20 03:39:04 +0000 2011","#Twitter Buzz Builds for the Occupy Wall Street Movement [CHARTS] http://t.co/DAr3Ef5T #uncategorized" +"twitter","neutral","126864979432456193","Thu Oct 20 03:39:02 +0000 2011","Twitter's Thinking About Paying People to Tweet - Technology - http://t.co/zA9h5o5U #PayMe #Twitter" +"twitter","neutral","126864974097293312","Thu Oct 20 03:39:01 +0000 2011","So sleepy and tired, going to bed! Nigh #Twitter !" +"twitter","neutral","126864886402777088","Thu Oct 20 03:38:40 +0000 2011","I just installed the new Twidroyd for #Twitter on my #Android Phone - It's #Free, give it a try. http://t.co/UGPTuUcZ" +"twitter","neutral","126864870032408576","Thu Oct 20 03:38:36 +0000 2011","@iamRoyaltyFRESH Pls let us know if you need help w/ music marketing promo. We got #Twitter & #Youtube on lock. http://t.co/U0HzoVQF" +"twitter","neutral","126864861576704000","Thu Oct 20 03:38:34 +0000 2011","What is the difference between #twitter and #googleplus??" 
+"twitter","neutral","126864793373122560","Thu Oct 20 03:38:18 +0000 2011","@thiagozito #Twitter, #MsN, #Orkut #Facebook & o Principal #Musicaa Haah'" +"twitter","neutral","126864745587412992","Thu Oct 20 03:38:07 +0000 2011","i just realized... if i have a ridiculously long name in #twitter, no one could reply to me. *hmm* the possibilities." +"twitter","neutral","126864657817407490","Thu Oct 20 03:37:46 +0000 2011","I just installed the new Twidroyd for #Twitter on my #Android Phone - It's #Free, give it a try. http://t.co/aik96UXm" +"twitter","neutral","126864641174417408","Thu Oct 20 03:37:42 +0000 2011","RT @CNBC: The World's 10 Most Tweeted Moments - Link to Story http://t.co/iKZiM8i5 #Twitter" +"twitter","neutral","126864610958647296","Thu Oct 20 03:37:34 +0000 2011","booiiiii +me feeel like some slp a come on +Gd nite #twitter" +"twitter","neutral","126864491890749440","Thu Oct 20 03:37:06 +0000 2011","#Twitter #NYERRR" +"twitter","neutral","126864475247742977","Thu Oct 20 03:37:02 +0000 2011","#twitter is great once you get the hang of it“@HaxxDubbz: @CBMonarch @Bran_Day I told her to get one to be a cool kid lolâ€" +"twitter","neutral","126864423037046784","Thu Oct 20 03:36:50 +0000 2011","#twitter #OFF" +"twitter","neutral","126864404196241408","Thu Oct 20 03:36:45 +0000 2011","@MindPhucked you changed your #twitter name?!" +"twitter","neutral","126864301293182977","Thu Oct 20 03:36:21 +0000 2011","wait... i don't really care what people are doing in their daily lives #twitter" +"twitter","neutral","126864271501041664","Thu Oct 20 03:36:14 +0000 2011","Tweet tweet #twitter" +"twitter","neutral","126864244716208129","Thu Oct 20 03:36:07 +0000 2011","@ChinkiiBeauty y u up..#Twitter" +"twitter","neutral","126864237200023553","Thu Oct 20 03:36:05 +0000 2011","why tha fuck ppl post everything on #twitter u cant #im on here what is it for someone tell me please" +"twitter","neutral","126864231718076417","Thu Oct 20 03:36:04 +0000 2011","@BADDchikkENT See What I Mean #Twitter o_O" +"twitter","neutral","126864167343894529","Thu Oct 20 03:35:49 +0000 2011","#twitter off '" +"twitter","neutral","126864141561507840","Thu Oct 20 03:35:43 +0000 2011","The Power of #Twitter http://t.co/wtVvZ8G4" +"twitter","neutral","126864136226357248","Thu Oct 20 03:35:41 +0000 2011","CAN SOMEONE EXPLAIN TAGGED TO ME LIKE DONT GET MAD AT ME CAUSE #TWITTER AN FB MY SHIT!!" +"twitter","neutral","126864131289661441","Thu Oct 20 03:35:40 +0000 2011","Somebody help me? Is apple not on Twitter? That seems very weird. #apple #twitter" +"twitter","neutral","126864105347878912","Thu Oct 20 03:35:34 +0000 2011","Ugh idk how to use this thing #twitter #helpme" +"twitter","neutral","126864096388849665","Thu Oct 20 03:35:32 +0000 2011","Who will be our #Twitter #winner 4 #daytimewdonna giveaway for $100 this Fri. Tweet about our show, our guests, why you LOVE us use #dwd100" +"twitter","neutral","126864070249947136","Thu Oct 20 03:35:26 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 29" +"twitter","neutral","126864043154751489","Thu Oct 20 03:35:19 +0000 2011","#Oomf dnt start dis sh*t on #twitter" +"twitter","neutral","126864007784185856","Thu Oct 20 03:35:11 +0000 2011","@garbage Welcome to #Twitter!" +"twitter","neutral","126863975223795712","Thu Oct 20 03:35:03 +0000 2011","Good night #Twitter and #TheLegionoftheFallen. 5:45am cimes awfully early!" 
+"twitter","neutral","126863972778508289","Thu Oct 20 03:35:02 +0000 2011","Tips for Reaching Out to Twitter's 50 Million Daily Users http://t.co/IF3IVLWN #twitter #mediagofer #socialmedia @SMsavvy @PRmate @KMiones" +"twitter","neutral","126863957767110656","Thu Oct 20 03:34:59 +0000 2011","What do u think of Location Lock's new #twitter #logo? Can u see the cross hairs for when GPS tracking locks on 2 ur fleet vehicle?" +"twitter","neutral","126863949584023552","Thu Oct 20 03:34:57 +0000 2011","GN #TWITTER" +"twitter","neutral","126863942634057728","Thu Oct 20 03:34:55 +0000 2011","#Twitter #Melbourne #Business ""TWITTER 4 BUSINESS"" November 16 http://t.co/wiZ4omU0 @KingstonCC @KeithKeller" +"twitter","neutral","126863921465393152","Thu Oct 20 03:34:50 +0000 2011","Twitter Buzz Builds for the Occupy Wall Street Movement [CHARTS] http://t.co/3xZgtWFx #twitter" +"twitter","neutral","126863918646820864","Thu Oct 20 03:34:49 +0000 2011","Use #Twitter to generate traffic to your website http://t.co/72YmN3kD" +"twitter","neutral","126863876066254848","Thu Oct 20 03:34:39 +0000 2011","I just installed Twidroyd for #Twitter on my #Android Phone - It's #Free and you can mute users :) http://t.co/kIHs1HRm" +"twitter","neutral","126863870689165312","Thu Oct 20 03:34:38 +0000 2011","#twitter off!!*" +"twitter","neutral","126863814619709441","Thu Oct 20 03:34:25 +0000 2011","RT @WinklrSprinklr: @JenWeedin listen here, @ShaunaWalters8 is new to #twitter and needs practice. support her here brat! #goodsamarit ..." +"twitter","neutral","126863776476708864","Thu Oct 20 03:34:15 +0000 2011","Personally,i dont have time for that and dont really care about what everyone in the world on #facebook and #twitter care about how i look." +"twitter","neutral","126863772877996034","Thu Oct 20 03:34:15 +0000 2011","I just installed Twidroyd for #Twitter on my #Android Phone - It's #Free and you can mute users :) http://t.co/JlrUWRf6" +"twitter","neutral","126863571912114177","Thu Oct 20 03:33:27 +0000 2011","I can live without facebook. just #twitter and #tumblr and #youtube proven and tested. LOL give me back my facebook nowww! LOL" +"twitter","neutral","126863525481156608","Thu Oct 20 03:33:16 +0000 2011","@SeanL44 It may be a little much. Haha #droptheknife and add @frankhenchUSA to the picture like on your #Twitter account. #growuppeterpan" +"twitter","neutral","126863470397366272","Thu Oct 20 03:33:03 +0000 2011","Can you say #twitter those #followers who are #following me??" +"twitter","neutral","126863457642483712","Thu Oct 20 03:32:59 +0000 2011","@HectorAlteto welcome to the #twitter" +"twitter","neutral","126863409680625664","Thu Oct 20 03:32:48 +0000 2011","RT @BucBoyy: - It's So Funny Because She Got A #Twitter Now Lol." +"twitter","neutral","126863392764989440","Thu Oct 20 03:32:44 +0000 2011","@dellmanning Its always on this #Twitter shit doe" +"twitter","neutral","126863275450310656","Thu Oct 20 03:32:16 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 19" +"twitter","neutral","126863240041996289","Thu Oct 20 03:32:08 +0000 2011","I just installed the new Twidroyd for #Twitter on my #Android Phone - It's #Free, give it a try. 
http://t.co/dN3qwkkb" +"twitter","neutral","126863216046374912","Thu Oct 20 03:32:02 +0000 2011","@Angelique_lique yooo my nigga i sooo blowed i forgot bout #twitter but im bkkkkkk" +"twitter","neutral","126863212762247168","Thu Oct 20 03:32:01 +0000 2011","time to change my twitter app, what's the best one for my iphone? Help please? #twitterapps : #twitter vs. #tweetcaster vs. #ubersoical ?" +"twitter","neutral","126863190691811328","Thu Oct 20 03:31:56 +0000 2011","Man i love #Twitter!!" +"twitter","neutral","126863104280760320","Thu Oct 20 03:31:35 +0000 2011","RT @Juani_2: You doing a lot of hating guy RT @Blue_Chipperz: This nigga need to go to #Twitter Jail" +"twitter","neutral","126863072269840384","Thu Oct 20 03:31:28 +0000 2011","RT @MontrelCox: It's twitter. Get over it or go back to Facebook and unfollow me. This is my free forum to say exactly what thee fuck I ..." +"twitter","neutral","126863060794224640","Thu Oct 20 03:31:25 +0000 2011","Well I'm kinda really #bored right now.... time to waste some time reading #quotes on #twitter" +"twitter","neutral","126862999720951808","Thu Oct 20 03:31:10 +0000 2011","@JenWeedin listen here, @ShaunaWalters8 is new to #twitter and needs practice. support her here brat! #goodsamaritanswag" +"twitter","neutral","126862947346694144","Thu Oct 20 03:30:58 +0000 2011","RT @Grazitti: #Twitter to Reformat All URLs With T.co http://t.co/jnmnbs9W" +"twitter","neutral","126862939159412740","Thu Oct 20 03:30:56 +0000 2011","Get your #Twitter #marketing #campaigns built by an #SEO expert: http://t.co/7FttDde4" +"twitter","neutral","126862902325022720","Thu Oct 20 03:30:47 +0000 2011","37 Ways to Use #Twitter for Business http://t.co/HGhMpoSE" +"twitter","neutral","126862899804246016","Thu Oct 20 03:30:46 +0000 2011","37 Ways to Use #Twitter for Business http://t.co/MT9RdrCz" +"twitter","neutral","126862897639993344","Thu Oct 20 03:30:46 +0000 2011","#Twitter to Reformat All URLs With T.co http://t.co/WQh2pHEA" +"twitter","neutral","126862892128677888","Thu Oct 20 03:30:45 +0000 2011","#Twitter to Reformat All URLs With T.co http://t.co/jnmnbs9W" +"twitter","neutral","126862842052874240","Thu Oct 20 03:30:33 +0000 2011","Do you Share More #jokes #quotes #music #photos or #news #articles on #Facebook or #Twitter?" +"twitter","neutral","126862821635002368","Thu Oct 20 03:30:28 +0000 2011","RT @JLachance04: What a day @davislachance and @braedonrice get #twitter @BigDenMoments" +"twitter","neutral","126862767520096257","Thu Oct 20 03:30:15 +0000 2011","@GForbz3 ma g wass good with you!! Didn't even kno you was on #twitter when was the last time you touched down in the city??" +"twitter","neutral","126862734863241217","Thu Oct 20 03:30:07 +0000 2011","Don’t seek followers unless you’re ready to treat them like the real people they are http://t.co/qa1JFSka #Twitter RT @TweetSmarter" +"twitter","neutral","126862728181714944","Thu Oct 20 03:30:06 +0000 2011","@Katieec7 lmao I love it please be on #twitter all the time so I have another friend to @mention" +"twitter","neutral","126862726311059457","Thu Oct 20 03:30:05 +0000 2011","gooodnight #twitter" +"twitter","neutral","126862714957078528","Thu Oct 20 03:30:02 +0000 2011","so #twitter who wants to buy me #lunch?" +"twitter","neutral","126862636922044417","Thu Oct 20 03:29:44 +0000 2011","RT @elishiawindfohr: #Twitter: Each second there are 2900 new tweets, for a total of 250 million per day and 7.5 billion per month. Rate ..." 
+"twitter","neutral","126862618836221954","Thu Oct 20 03:29:39 +0000 2011","Hates facebook whores #twitter" +"twitter","neutral","126862573697114112","Thu Oct 20 03:29:29 +0000 2011","#Twitter #Off'" +"twitter","neutral","126862552025137152","Thu Oct 20 03:29:24 +0000 2011","Let's be honest #twitter, nothing's #trending en #español." +"twitter","neutral","126862547214286848","Thu Oct 20 03:29:22 +0000 2011","I hate when my phne do what it want on #twitter" +"twitter","neutral","126862517216612354","Thu Oct 20 03:29:15 +0000 2011","Waz Sup #Twitter" +"twitter","neutral","126862494437351425","Thu Oct 20 03:29:10 +0000 2011","AHHHH!!!!! FUCK IT!!! Goodnight!! +#twitter" +"twitter","neutral","126862407309082625","Thu Oct 20 03:28:49 +0000 2011","alrite back to this lab report. Later #Twitter" +"twitter","neutral","126862391924375552","Thu Oct 20 03:28:45 +0000 2011","#twitter and fb# off" +"twitter","neutral","126862244427468800","Thu Oct 20 03:28:10 +0000 2011","#twitter I hope all of you never stop dreaming! Because dreams fuel your soul! I love dreaming big and seeing what happens!" +"twitter","neutral","126862244075151361","Thu Oct 20 03:28:10 +0000 2011","Ahhh!! Why is #Twitter so #addicting" +"twitter","neutral","126862170502860800","Thu Oct 20 03:27:53 +0000 2011","@deferom lol omg you should write #none #twitter #foreveralone. Don't forget the hashtags." +"twitter","neutral","126862130136879104","Thu Oct 20 03:27:43 +0000 2011","@teggyboy00 oh wow lmao thank no wonder why no one answered me !! addicted #twitter" +"twitter","neutral","126862039225352192","Thu Oct 20 03:27:21 +0000 2011","- It's So Funny Because She Got A #Twitter Now Lol." +"twitter","neutral","126862030979334144","Thu Oct 20 03:27:19 +0000 2011","RT @AmySimendinger: @rachiecrewz dude you can't just makeup your own fucking hashtags...learn how to use #twitter" +"twitter","neutral","126862000981671938","Thu Oct 20 03:27:12 +0000 2011","Put your twitter on autopilot and let it gain followers for you!! http://t.co/hze1TqAl #TeamFollowBack #Twitter #autopilot #1000aday 9" +"twitter","neutral","126861997127110657","Thu Oct 20 03:27:11 +0000 2011","Goodnight #twitter see you in the morning for another #successful day!" +"twitter","neutral","126861952369700865","Thu Oct 20 03:27:01 +0000 2011","#Twitter'Offf :D" +"twitter","neutral","126861941372239872","Thu Oct 20 03:26:58 +0000 2011","@Obamaphile @vegascharlieb your not in #twitter jail again r u ?" +"twitter","neutral","126861916286103552","Thu Oct 20 03:26:52 +0000 2011","RT @mmurrayphoto: Sounds like #CJ20K is going down sooner than later. #twitter #lookout" +"twitter","neutral","126861895436206081","Thu Oct 20 03:26:47 +0000 2011","Well that's my que, bye #Twitter , #oomf finna start quotin lines from #ATL lol" +"twitter","neutral","126861880194109440","Thu Oct 20 03:26:43 +0000 2011","#Twitter-on-" +"twitter","neutral","126861879598530561","Thu Oct 20 03:26:43 +0000 2011","AND HOW EVERYTIME I #GAIN A #FOLLOWER IT SAY I #LOSE ONE? #FUCKINUP #TWITTER VERY HEAVILY......#PROMOALLDAY #TMW #FOLLOWLIMITISGAY #STEPITUP" +"twitter","neutral","126861823797493760","Thu Oct 20 03:26:30 +0000 2011","@ZackSiam you just might be the most influential man in the #twitter universe" +"twitter","neutral","126861820169437184","Thu Oct 20 03:26:29 +0000 2011","RT @aDUBatSDUB: #facebook, #Twitter , #spongebob, #nirvana. Great way to spend the night. 
:D" +"twitter","neutral","126861776133431296","Thu Oct 20 03:26:19 +0000 2011","Funny how social networks prod you to be always on ya keypad, #twitter-what's happening, #Fb-what's on your mind?" +"twitter","neutral","126861734144245760","Thu Oct 20 03:26:09 +0000 2011","Going to bed now #goodnight #twitter. Sleeping in tomorrow cuz it's fall break #winning" +"twitter","neutral","126861715752222720","Thu Oct 20 03:26:04 +0000 2011","RT @JOMANERH: My #twitter age is 234 days 13 hours 41 minutes 16 seconds. Find out yours at http://t.co/MT0VAJwd #twittertime" +"twitter","neutral","126861701168631808","Thu Oct 20 03:26:01 +0000 2011","Sounds like #CJ20K is going down sooner than later. #twitter #lookout" +"twitter","neutral","126861637931118592","Thu Oct 20 03:25:46 +0000 2011","@Presidential_FU do me wrong #twitter" +"twitter","neutral","126861630695940096","Thu Oct 20 03:25:44 +0000 2011","@HOLLYcash mann u can have ah olelady but them hoes for everybody son I been doin dis u stop catchin feelings its #twitter" +"twitter","neutral","126861453209767936","Thu Oct 20 03:25:02 +0000 2011","Being Choosy With Twitter: The Follow-Back Conundrum http://t.co/K2rRC2ir #socialmedia #twitter #mediagofer @media_guerilla @SMsavvy" +"twitter","neutral","126861421408567296","Thu Oct 20 03:24:54 +0000 2011","Back in the #twitter game" +"twitter","neutral","126861418078277632","Thu Oct 20 03:24:53 +0000 2011","NIGGAS B TALK REACLESS OVA #TWITTER LIKE I DNT GOT DAT TOOL N MI TRUCK #BASKETBALL OV COURSE" +"twitter","neutral","126861321995173890","Thu Oct 20 03:24:30 +0000 2011","ok man down... I will own #TWITTER 2morrow!" +"twitter","neutral","126861285307584514","Thu Oct 20 03:24:22 +0000 2011","#twitter" +"twitter","neutral","126861195058757634","Thu Oct 20 03:24:00 +0000 2011","Novo #Nick in #twitter. Hehe' (:" +"twitter","neutral","126861181108498432","Thu Oct 20 03:23:57 +0000 2011","Sometimes I get so lost in #twitter that I end up living vicariously through other people #massconfusion" +"twitter","neutral","126861149726715904","Thu Oct 20 03:23:49 +0000 2011","“@JennyOrWuuut: I wishh #twitter would tell me who retweets my stuff.â€it does" +"twitter","neutral","126861036803473408","Thu Oct 20 03:23:22 +0000 2011","RT @JennyOrWuuut: I wishh #twitter would tell me who retweets my stuff." +"twitter","neutral","126861011813810176","Thu Oct 20 03:23:16 +0000 2011","#Twitter Buzz Builds for the Occupy Wall Street Movement [CHARTS] #SocialMedia #SMM http://t.co/X55u6i4w" +"twitter","neutral","126860964992794624","Thu Oct 20 03:23:05 +0000 2011","How about @MattSloyer on the #twitter!" +"twitter","neutral","126860933988483073","Thu Oct 20 03:22:58 +0000 2011","@briannaa___ Pls let us know if you need help w/ music marketing promo. We got #Twitter & #Youtube on lock. http://t.co/8HFfjJv5" +"twitter","neutral","126860932881186817","Thu Oct 20 03:22:58 +0000 2011","Night #twitter lord please let that girl get some sleep" +"twitter","neutral","126860898567589888","Thu Oct 20 03:22:49 +0000 2011","“@KekeLauren @IGoHard_NoBONER Yooooo ""Mena Malia"" lOl I love your #Twitter nameâ€" +"twitter","neutral","126860835560755200","Thu Oct 20 03:22:34 +0000 2011","Its new #Theme after #Updates have to b use with it. Though I like #Old theme of #Twitter for #Android" +"twitter","neutral","126860802392195072","Thu Oct 20 03:22:26 +0000 2011","I wishh #twitter would tell me who retweets my stuff." 
+"twitter","neutral","126860800978722816","Thu Oct 20 03:22:26 +0000 2011","Told myself I was in the bed #early tonite & somehow I end up back in the computer lab on #Twitter...smh guess it's the #Gemini in me" +"twitter","neutral","126860744913469440","Thu Oct 20 03:22:13 +0000 2011","If u say u 100% REAL all the time...KILL YOURSELF ASAP!..Everybody have their fake moments so don't Front for #Twitter -----_________-------" +"twitter","neutral","126860714118885376","Thu Oct 20 03:22:05 +0000 2011","#twitter is like therapy for sum people say wat you feel & get alot off yo chest.. And don't care wat the next person say" +"twitter","neutral","126860700915208193","Thu Oct 20 03:22:02 +0000 2011","Whats a good #twitter ap for #iOS that links to #FB accounts to post links? #help" +"twitter","neutral","126860691255721984","Thu Oct 20 03:22:00 +0000 2011","#Twitter Buzz Builds for the #Occupy_Wall_Street Movement [CHARTS] http://t.co/znFYtK9P @mashable #social" +"twitter","neutral","126860597013917697","Thu Oct 20 03:21:37 +0000 2011","#2000tweets !!!! #milestone #twitter" +"twitter","neutral","126860563740495872","Thu Oct 20 03:21:30 +0000 2011","#twitter off" +"twitter","neutral","126860527497515008","Thu Oct 20 03:21:21 +0000 2011","Woooow I just log out from #twitter and there's more than 60 #tweets on my #timeline" +"twitter","neutral","126860492261167104","Thu Oct 20 03:21:12 +0000 2011","#twitter Scientists Use Twitter To Track Flu Epidemics, How to Stay Healthy This Flu Season (RT This) http://t.co/ZhgyinIi" +"twitter","neutral","126860373117775872","Thu Oct 20 03:20:44 +0000 2011","@Teah2015 I had to inform my #twitter account that you weren't a stranger and give you permission to see my stuff. Not sure why that is.." +"twitter","neutral","126860341941518336","Thu Oct 20 03:20:37 +0000 2011","RT @SomalyMam: You follow me on #twitter, Like my #Foundation on #facebook http://t.co/A1zgaK5l! Together we can stop #humantrafficking" +"twitter","neutral","126860267178049536","Thu Oct 20 03:20:19 +0000 2011","@IGoHard_NoBONER Yooooo ""Mena Malia"" lOl I love your #Twitter name" +"twitter","neutral","126860173053669376","Thu Oct 20 03:19:56 +0000 2011","Goodnight #twitter" +"twitter","neutral","126860038525562880","Thu Oct 20 03:19:24 +0000 2011","groupies follow me like #twitter.....lol jk" +"twitter","neutral","126859978941276161","Thu Oct 20 03:19:10 +0000 2011","I just installed the new Twidroyd for #Twitter on my #Android Phone - It's #Free, give it a try. http://t.co/nvNns7VR" +"twitter","neutral","126859887404777472","Thu Oct 20 03:18:48 +0000 2011","RT @loveable_letty8: Night #twitter" +"twitter","neutral","126859856782163968","Thu Oct 20 03:18:41 +0000 2011","RT @Capo_Young: ONE SERIOUS STATEMENT THO ! +#Twitter BLOCKED the #Troy Davis #TT +#Twitter was COOL w #AmberCole +in my opinion .... BU ..." +"twitter","neutral","126859833088552960","Thu Oct 20 03:18:35 +0000 2011","Some users are reporting problems with missing mentions. 
More info from #Twitter : http://t.co/dTQ23G5G" +"twitter","neutral","126859789883015168","Thu Oct 20 03:18:25 +0000 2011","- My Cute Friend @MelodyBtcc Finally Got A #Twitter :)" +"twitter","neutral","126859782601703424","Thu Oct 20 03:18:23 +0000 2011","From my blog: ##changingdiapers Party #Summary & Winners List http://t.co/QfZHtNJs #twitter #upcomingparty" +"twitter","neutral","126859745154957312","Thu Oct 20 03:18:14 +0000 2011","Bout to tweet about tweeting #twitter #Seahawkswag" +"twitter","neutral","126859710740701185","Thu Oct 20 03:18:06 +0000 2011","Goodnight, #Twitter." +"twitter","neutral","126859623671152640","Thu Oct 20 03:17:45 +0000 2011","AbOut TO BAMMER On #Twitter" +"twitter","neutral","126859623532732417","Thu Oct 20 03:17:45 +0000 2011","it runs the world #twitter" +"twitter","neutral","126859604985511937","Thu Oct 20 03:17:41 +0000 2011","Ain't really been on all day! Suppppp #Twitter" +"twitter","neutral","126859530305929216","Thu Oct 20 03:17:23 +0000 2011","#Twitter how do you know that I think that @SofiaVergara is the funniest member of the #ModernFamily cast?!" +"twitter","neutral","126859485322035200","Thu Oct 20 03:17:12 +0000 2011","GOODNITEE #TWITTER!!!" +"twitter","neutral","126859432511537152","Thu Oct 20 03:17:00 +0000 2011","Meu twitter tá ficando velho já, My #twitter age is 285 days 8 hours 52 minutes 40 seconds" +"twitter","neutral","126859340622725120","Thu Oct 20 03:16:38 +0000 2011","My #twitter age is 285 days 8 hours 52 minutes 38 seconds. Find out yours at http://t.co/q5mvW7VF #twittertime" +"twitter","neutral","126859326294999041","Thu Oct 20 03:16:34 +0000 2011","ONE SERIOUS STATEMENT THO ! +#Twitter BLOCKED the #Troy Davis #TT +#Twitter was COOL w #AmberCole +in my opinion .... BULLSHIT ." +"twitter","neutral","126859246213136384","Thu Oct 20 03:16:15 +0000 2011","#Twitter-off" +"twitter","neutral","126859124016300032","Thu Oct 20 03:15:46 +0000 2011","I OUT #Twitter Till 2mar" +"twitter","neutral","126859115657043968","Thu Oct 20 03:15:44 +0000 2011","#twitter off!!..." +"twitter","neutral","126859053757501440","Thu Oct 20 03:15:30 +0000 2011","RT @MizzSpicey: Now tuned in to http://t.co/DYMDmU3z with @deejaydenco! #WorldWide #Twitter #Facebook join me!!! #RT" +"twitter","neutral","126859044756520960","Thu Oct 20 03:15:27 +0000 2011","#Twitter Im coming home Im coming home tell the world that Im coming home lls" +"twitter","neutral","126858999512580096","Thu Oct 20 03:15:17 +0000 2011","#TeamFollowBack #TFB #MustFollow #MF #FollowNow #FN #FollowFriday #FF #IFB #Twitter #Follow @rudybren @chtvn @ulchky @anibalfreitas45" +"twitter","neutral","126858961159864320","Thu Oct 20 03:15:07 +0000 2011","Wordd ""@BblowPoundz This #TT was well needed cuz I thought #Twitter was dien." +"twitter","neutral","126858958894931968","Thu Oct 20 03:15:07 +0000 2011","Thanks for the #follow don't forget to add us to your #Twitter list" +"twitter","neutral","126858953673027584","Thu Oct 20 03:15:06 +0000 2011","#Retail friends, follow @MicrosoftHelps on #Twitter. A GREAT resource and the official account for @Microsoft #CustomerService & Support!" +"twitter","neutral","126858718762639360","Thu Oct 20 03:14:10 +0000 2011","Mo'fucka i dont give a fuck it its #FaceBook or #Twitter i express my self how ever da fuck i can..so go suck a dick u bitch.! Haha" +"twitter","neutral","126858698520932352","Thu Oct 20 03:14:05 +0000 2011","I wonder if I was to get at a celebrity's neck on #twitter. Will I be on the news or radio. 
#planForFame lmfao" +"twitter","neutral","126858281867149312","Thu Oct 20 03:12:25 +0000 2011","I Feel Like #Promoting Anyones #Twitter Page (♦MENTION ME IF YOU WANT TO GET #PROMOTED♦) [ #PromoteBack ] +#Followback" +"twitter","neutral","126858276339056640","Thu Oct 20 03:12:24 +0000 2011","#bedtime #goodnight #twitter" +"twitter","neutral","126858233032871937","Thu Oct 20 03:12:14 +0000 2011","Night #Twitter" +"twitter","neutral","126858194046816256","Thu Oct 20 03:12:05 +0000 2011","the marijuana loud so the hoes follow like #Twitter" +"twitter","neutral","126858149859831808","Thu Oct 20 03:11:54 +0000 2011","This #TT was well needed cuz I thought #Twitter was dien." +"twitter","neutral","126857916631355393","Thu Oct 20 03:10:58 +0000 2011","@asshsmith never #twitter is where i #speakmymind #hahahahahaha" +"twitter","neutral","126857676134166528","Thu Oct 20 03:10:01 +0000 2011","Twhistler makes Twitter sing! #twitter http://t.co/wwNZ6dx6" +"twitter","neutral","126857475034071040","Thu Oct 20 03:09:13 +0000 2011","“@Blue_Chipperz: This nigga need to go to #Twitter Jailâ€DAAAYUM DOG !! lOl" +"twitter","neutral","126857044677505024","Thu Oct 20 03:07:30 +0000 2011","RT @J_Roc616: She lovin #JohnnyVegas!! But no #Twitter" +"twitter","neutral","126856873738633216","Thu Oct 20 03:06:50 +0000 2011","I love my #twitter name." +"twitter","neutral","126856848778342402","Thu Oct 20 03:06:44 +0000 2011","Something something qanda. Something something the world is serious #twitter" +"twitter","neutral","126856541453291520","Thu Oct 20 03:05:31 +0000 2011","This nigga need to go to #Twitter Jail" +"twitter","neutral","126856421907243009","Thu Oct 20 03:05:02 +0000 2011","#Tweetdeck working but not #Twitter for #Android" +"twitter","neutral","126856387211960320","Thu Oct 20 03:04:54 +0000 2011","#TWITTER #OFF DO #BrUnODeLuCaS :ã€" +"twitter","neutral","126856150980374528","Thu Oct 20 03:03:57 +0000 2011","gud mrng #twitter & all my frnds too. ;)" +"twitter","neutral","126856031367204865","Thu Oct 20 03:03:29 +0000 2011","Increase your #twitter followers and or your #fan count via -http://bit.ly/qMc4K7" +"twitter","neutral","126855856414404608","Thu Oct 20 03:02:47 +0000 2011","☼ #SOLAR ☺ @RhondaParsons ☼ @SOLar_sister ( A simpLe @ #symboL and AN #underscore ) This is #twitter WHERE WE #twEEt ☼" +"twitter","neutral","126855838047547392","Thu Oct 20 03:02:43 +0000 2011","@madtruckman 'Modern Day Autograph"", I like the way you put that. #twitter" +"twitter","neutral","126855191571070976","Thu Oct 20 03:00:09 +0000 2011","62 Ways to Use #Twitter for Business: http://t.co/jYXK7OkC #tweets #socialmedia" +"twitter","neutral","126854358817181696","Thu Oct 20 02:56:50 +0000 2011","Log off #Facebook On #Twitter , But I Think i'm bout to going to sleep..." +"twitter","neutral","126853667738497025","Thu Oct 20 02:54:05 +0000 2011","""#twitter's dumb, I don't like it."" Hush up, Justin." +"twitter","neutral","126853298996252674","Thu Oct 20 02:52:37 +0000 2011","It's almost 4:20. Where is your bong? Is it packed! Let's rip a bowl over the time space continuum that is #Twitter" +"twitter","irrelevant","126883777938067457","Thu Oct 20 04:53:44 +0000 2011","Noches #Twitter fue un dia bonito :)" +"twitter","irrelevant","126883741481177088","Thu Oct 20 04:53:36 +0000 2011","@EddDarkfawn Eso digo yo! pa estar uno emboltijado viendo pelis de terror y no bajando los canastos en #Twitter! 
ajajajajaja" +"twitter","irrelevant","126883512073719808","Thu Oct 20 04:52:41 +0000 2011","@isa13mp27 jajajaja isaaa ..!!lo mas en #twitter !!" +"twitter","irrelevant","126883431308197888","Thu Oct 20 04:52:22 +0000 2011","oh. kita anak baru ni ya... ceritanya di #twitter ni .." +"twitter","irrelevant","126883226760384512","Thu Oct 20 04:51:33 +0000 2011","Ya ni en #Twitter estoy a salvo" +"twitter","irrelevant","126883122519343105","Thu Oct 20 04:51:08 +0000 2011","No se por que #twitter saca mi lado filosófico" +"twitter","irrelevant","126883074888826880","Thu Oct 20 04:50:57 +0000 2011","PartyLiveFree #TWITTER FOLLOWERS, #FACEBOOK LIKES, YOUTUBE VIEWS, EMAIL BLASTS, ETC. http://t.co/fJ8BigTe" +"twitter","irrelevant","126882987244662784","Thu Oct 20 04:50:36 +0000 2011","Ù†ÙØ³ÙŠ ÙŠÙˆÙ… يعدي علي تويتر من غير مشاكل Ùنية #Twitter" +"twitter","irrelevant","126882787029553153","Thu Oct 20 04:49:48 +0000 2011","uuchas... #twitter menso! dormish bye! xD" +"twitter","irrelevant","126882743819833345","Thu Oct 20 04:49:38 +0000 2011","awwwns Qeriido #Twitter! Grasiias poqe en ti puedo desahoqar miis indirectaaas!:D" +"twitter","irrelevant","126882703000879105","Thu Oct 20 04:49:28 +0000 2011","#Twitter, #Facebook, #Google+ ,#YouTube, #Msn ,#Gmail #OFF A dormir se ha dicho =)" +"twitter","irrelevant","126882629365661696","Thu Oct 20 04:49:10 +0000 2011","#Twitter, #Facebook, #Google+ ,YouTube, #Msn ,#Gmail #OFF A dormir se ha dicho =)" +"twitter","irrelevant","126882613569912832","Thu Oct 20 04:49:07 +0000 2011","Chavez to return to Venezuela after medical tests - Boston Globe #twitter #news" +"twitter","irrelevant","126882562202271744","Thu Oct 20 04:48:54 +0000 2011","Me estoy jeteando, pero no puedo dejarte, #Twitter" +"twitter","irrelevant","126882507621797889","Thu Oct 20 04:48:41 +0000 2011",":P RT @eduardo_666_: @AleCP93 Hey broo graxiias por recordarme qe existe #Twitter jajajaja!!! Teniia siglos de no usarlo jajaja!!! :)" +"twitter","irrelevant","126882498536939521","Thu Oct 20 04:48:39 +0000 2011","Tõ De PaqueeRa em uma Linda Mulher Aquii no #twitter <~~Linda" +"twitter","irrelevant","126882470703529985","Thu Oct 20 04:48:33 +0000 2011","Estoy dando clases de #twitter @jasaro10" +"twitter","irrelevant","126882264360558592","Thu Oct 20 04:47:43 +0000 2011","El #twitter me roba mis pensamientos! Hahaha!" +"twitter","irrelevant","126882080050262016","Thu Oct 20 04:46:59 +0000 2011","Epa!... Pa' los que estamos estrenando #twitter @RogerAlexiis jajaja ✔" +"twitter","irrelevant","126881828337483776","Thu Oct 20 04:45:59 +0000 2011","Me he vuelto un ser nocturno gracias a las redes sociales, #Twitter como una de mis favoritas #MoreDigitalLessHuman" +"twitter","irrelevant","126881827339243521","Thu Oct 20 04:45:59 +0000 2011","@AleCP93 Hey broo graxiias por recordarme qe existe #Twitter jajajaja!!! +Teniia siglos de no usarlo jajaja!!! :)" +"twitter","irrelevant","126881659516755969","Thu Oct 20 04:45:19 +0000 2011","#twitter تويتر حيث تنتقل الثرثرة من اللسان الى الاصابع" +"twitter","irrelevant","126881629145808896","Thu Oct 20 04:45:12 +0000 2011","Me tokó bolber a komprar la aplicacion de #twitter x q c abia trabado :S" +"twitter","irrelevant","126881619335327744","Thu Oct 20 04:45:10 +0000 2011","Que le pasa al #Twitter... 
#Chingado" +"twitter","irrelevant","126881591627759616","Thu Oct 20 04:45:03 +0000 2011","#IOS 5: Cómo integrar #Tweetbot en lugar de #Twitter http://t.co/tjzscoAl" +"twitter","irrelevant","126881518151929856","Thu Oct 20 04:44:45 +0000 2011","Ahh actualicé #Twitter en el telefono!! Ahora si me van a llegar las menciones #pffffff" +"twitter","irrelevant","126881495481724931","Thu Oct 20 04:44:40 +0000 2011","@Juanbatero1 puchis, apareciste en #twitter voss.... q onda...q congelada la q me estoy dando vos..." +"twitter","irrelevant","126881485264400385","Thu Oct 20 04:44:38 +0000 2011","@pani_mondragon =O como dices eso =( pss no tengo servicios en la bb solamente #twitter" +"twitter","irrelevant","126881462334144513","Thu Oct 20 04:44:32 +0000 2011","Tengo que estudiar para mis expos de mañana pero no... yo en #twitter" +"twitter","irrelevant","126881427550773248","Thu Oct 20 04:44:24 +0000 2011","#TeamFollowBack #TFB #MustFollow #MF #FollowNow #FN #FollowFriday #FF #IFB #Twitter #Follow @MilliondollaPD" +"twitter","irrelevant","126881398316466178","Thu Oct 20 04:44:17 +0000 2011","@Victorafrosan X. G. R. ? No pues ni el nombre de tu novia me se como para tener su #Twitter" +"twitter","irrelevant","126881392956153856","Thu Oct 20 04:44:16 +0000 2011","aa laa gran laa compuu se trava sii tengo #twitter y #facebook aviiertooos" +"twitter","irrelevant","126881358848065536","Thu Oct 20 04:44:07 +0000 2011","#twitter Mii MediO de #DesahOgo..!!" +"twitter","irrelevant","126881232997974016","Thu Oct 20 04:43:37 +0000 2011","RT @itoHedz: #Twitter donde las peleas entre personas son mas comunes que frutas en un mercado." +"twitter","irrelevant","126881227729928193","Thu Oct 20 04:43:36 +0000 2011","@Paatty_cl #EsNaco Componerle una cacion a Mf y presumir de ella en #Twitter" +"twitter","irrelevant","126881169169063937","Thu Oct 20 04:43:22 +0000 2011","paradão , no , #twitter" +"twitter","irrelevant","126881114936717313","Thu Oct 20 04:43:09 +0000 2011","@ElieDzR aqui en casa escuchamdo musica en el #ipod y jodiendo en #twitter jaja #normal" +"twitter","irrelevant","126881095378665473","Thu Oct 20 04:43:05 +0000 2011","#Twitter donde las peleas entre personas son mas comunes que frutas en un mercado." +"twitter","irrelevant","126881035748261888","Thu Oct 20 04:42:50 +0000 2011","RT @Viny1970: só vejo @DadynhaSantos dando #RT em conversas no #twitter --'" +"twitter","irrelevant","126881008002940928","Thu Oct 20 04:42:44 +0000 2011","#CHISTE : EN QUE SE PARECEN LOS PITUFOS AL #TWITTER ? R= EN QUE LOS 2 TIENEN EL PAJARO AZUL XD" +"twitter","irrelevant","126880978324037632","Thu Oct 20 04:42:37 +0000 2011","Saindo #Twitter !" +"twitter","irrelevant","126880901903826945","Thu Oct 20 04:42:19 +0000 2011","#TeamFollowBack #TFB #MustFollow #MF #FollowNow #FN #FollowFriday #FF #IFB #Twitter #Follow @AmandaDGAF_16" +"twitter","irrelevant","126880815928975360","Thu Oct 20 04:41:58 +0000 2011","só vejo @DadynhaSantos dando #RT em conversas no #twitter --'" +"twitter","irrelevant","126880805610987520","Thu Oct 20 04:41:56 +0000 2011","Hasta mañana mi gente linda de #Twitter y #Facebook +sweet dream <3" +"twitter","irrelevant","126880734152634368","Thu Oct 20 04:41:39 +0000 2011","3 razones por las que #Twitter mejora la escritura: http://t.co/MBbzZGOR" +"twitter","irrelevant","126880709427208192","Thu Oct 20 04:41:33 +0000 2011","Y pues ya me voy a dormir... Comper se quedan es su #Twitter !!" 
+"twitter","irrelevant","126880705996259328","Thu Oct 20 04:41:32 +0000 2011","Lo primero que pongo cuando abro el navegar es #Twitter Luego #Facebook y de ultimo #Hotmail! jajaja!" +"twitter","irrelevant","126880699587371008","Thu Oct 20 04:41:30 +0000 2011","A ver si me meto en la #adiccion esta de la #mierda de #twitter" +"twitter","irrelevant","126880644105121792","Thu Oct 20 04:41:17 +0000 2011","si es ese dia, voi a kumplir 2 años kon el #twitter :D" +"twitter","irrelevant","126880580485910529","Thu Oct 20 04:41:02 +0000 2011","Hoy #MiDia #Facebook #Twitter #Noticias todo ha estado super divertido y acelerado ¿Qué estará pasando?" +"twitter","irrelevant","126880556775522304","Thu Oct 20 04:40:56 +0000 2011","kreo ke mi aniversario en el #twitter es el 24 de Dic. :O" +"twitter","irrelevant","126880519391686656","Thu Oct 20 04:40:47 +0000 2011",":o hoy no e utilizado el #twitter para nada :s" +"twitter","irrelevant","126880484797063168","Thu Oct 20 04:40:39 +0000 2011","Hasta el #queque de tarea pero heme aqui en #twitter y #FB jijijiji ya falta menos para terminar #thanksGod" +"twitter","irrelevant","126880477943570433","Thu Oct 20 04:40:37 +0000 2011","#Facebook ah quedado en el olvido gracias a #Twitter" +"twitter","irrelevant","126880436906491904","Thu Oct 20 04:40:28 +0000 2011","¡#twitter Tribune está disponible! http://t.co/RBNiWUmI" +"twitter","irrelevant","126880353817337856","Thu Oct 20 04:40:08 +0000 2011","La arrolladora en #Tapachula, esperenlo 4 de Nov.,..!! >>XD<< --> #recomendado,...!! #DalePLAY ,..!! >XD< #FollowME en #Twitter @Fm_antonio" +"twitter","irrelevant","126880217124974594","Thu Oct 20 04:39:35 +0000 2011","Buenas noches a todos ya casi fin d semana y maÑana clase a las 7 ya el ultimo jalon #twitter off" +"twitter","irrelevant","126880194588975104","Thu Oct 20 04:39:30 +0000 2011","Saludoss pa To' L@s Twitter@s Nocturn@s.. Jeje.. @elBellacoMusic Nose pero Ahoritaa me Sumbo Mas pal #Twitter q pal #Face..! #TadeModa ;)" +"twitter","irrelevant","126880098401001473","Thu Oct 20 04:39:07 +0000 2011","- La vida es Corta por lo Tanto: Besa Poco a Poco, Rie Locamente, Ama de Verdad y Perdona Rapidamente! - .. + +GoodNight #fb & #twitter :) ..." +"twitter","irrelevant","126880095334973440","Thu Oct 20 04:39:06 +0000 2011","Ganun? Di ba kasama ang wit? lol RT @RobyQQ Your #Twitter followers either like you for your sense of humor or sex appeal." +"twitter","irrelevant","126880015357984768","Thu Oct 20 04:38:47 +0000 2011","@_kellykatiucia q nada vc q é linda mesmo! :) só no me lenbro o dia q eu segui vc, Mais ta bom, mais uma pessoal especial no meu #Twitter:)" +"twitter","irrelevant","126879958529343488","Thu Oct 20 04:38:34 +0000 2011","quitandole las telarañas al #twitter jajaja" +"twitter","irrelevant","126879710054580224","Thu Oct 20 04:37:34 +0000 2011","Ammazza... Anche su #twitter la gente dorme a quest'ora" +"twitter","irrelevant","126879705046597632","Thu Oct 20 04:37:33 +0000 2011","Como esta la gent ermosa del #twitter n.n" +"twitter","irrelevant","126879692635635712","Thu Oct 20 04:37:30 +0000 2011","Etre influent sur #Twitter: publication de contenus qui engagent à l'action. 
http://t.co/H2qmDwpP EN via @jeffbullas" +"twitter","irrelevant","126879567385337856","Thu Oct 20 04:37:00 +0000 2011","ÐаÑтоÑщий твиттерÑнин как только попадает в толпу ÑтремитьÑÑ Ñ‚ÑƒÑ‚ же как можно быÑтрее попаÑть в #twitter" +"twitter","irrelevant","126879538415271936","Thu Oct 20 04:36:53 +0000 2011","RT @shezyyee: Sunaa karo merii jaan in se un se afsaane / sab ajnabi hain yahaan kaun kis ko pehchane #Kaifi #Shair #Twitter" +"twitter","irrelevant","126879341559824384","Thu Oct 20 04:36:06 +0000 2011","o/ RT""@Cjas0007 Quien anda activo por el #Twitter ahorita???""" +"twitter","irrelevant","126879308663894016","Thu Oct 20 04:35:59 +0000 2011","#TOMANOCU EU SIGO GENTE QE SO POSTA COISA EN CANADEIS, UOL, #twitter doido --`" +"twitter","irrelevant","126879295195987968","Thu Oct 20 04:35:55 +0000 2011","#Adomani #Twitter" +"twitter","irrelevant","126879277722505216","Thu Oct 20 04:35:51 +0000 2011","@christianortizh ps tiene pedos y la neta lo entiendo sabes?.. Y luego yo contandolo en el #Twitter #noesdedios" +"twitter","irrelevant","126879219484606464","Thu Oct 20 04:35:37 +0000 2011","Comooo es q me dices q he llegado alos limites de uso #twitter no q lo nuestro duraria siempre,..... X)" +"twitter","irrelevant","126879210177441792","Thu Oct 20 04:35:35 +0000 2011","#twitter OFF Hasta mañana xd =l" +"twitter","irrelevant","126879164258201601","Thu Oct 20 04:35:24 +0000 2011","o #TWITTER é #FODA - Esposa defende Luis Fabiano no Twitter - Jogo Aberto 18/10/2011 => http://t.co/QdFWoDMD o bixo pegou!" +"twitter","irrelevant","126879138605834240","Thu Oct 20 04:35:18 +0000 2011","buenas noches #Twitter W:" +"twitter","irrelevant","126879046071103488","Thu Oct 20 04:34:56 +0000 2011","jajajaja mi mama se metio en mi #twitter y en mi #facebook y escribio jajajaj te amo mami sos la mejor!!" +"twitter","irrelevant","126878948431900672","Thu Oct 20 04:34:33 +0000 2011","RT @Buzzcontinue: 34 des 50 plus grandes villes de France n'ont pas de compte #twitter ! http://t.co/S73ylVU6 cc @lagazettefr @e_r_w_a_n" +"twitter","irrelevant","126878924411125760","Thu Oct 20 04:34:27 +0000 2011","#Twitter Off!!" +"twitter","irrelevant","126878914625802241","Thu Oct 20 04:34:25 +0000 2011","E o meu #Primo @eduuaardo_ que nunca mais apareceu pelo #Twitter? Saudade de você primãao ♥" +"twitter","irrelevant","126878849656037377","Thu Oct 20 04:34:09 +0000 2011","Porque mi app de #Twitter para #Android no actualiza las menciones?" +"twitter","irrelevant","126878819826139136","Thu Oct 20 04:34:02 +0000 2011","definitivamente irei MATAR esse #Twitter +pq tento trocar a porr# da minha foto de smp +da EEEEEERRRO *-* EEEERRO + +#Tomanocú" +"twitter","irrelevant","126878801970995200","Thu Oct 20 04:33:58 +0000 2011","@AleeRetana cambiale el fondo a este tu coso vos!! XD esta aburriiiidooo.. x) aaa por cierto... WOW! ya no te abruma el #twitter x) +#Welcome" +"twitter","irrelevant","126878766675918848","Thu Oct 20 04:33:49 +0000 2011","RT @NadaBebbars: اللى ÙÙ‰ قلبى على تويترى #fact #twitter #lol" +"twitter","irrelevant","126878720098177024","Thu Oct 20 04:33:38 +0000 2011","Quien anda activo por el #Twitter ahorita???" 
+"twitter","irrelevant","126878709192990720","Thu Oct 20 04:33:36 +0000 2011","Demasiado #Twitter por hoy nos vemos #TwitterOff" +"twitter","irrelevant","126878489935753216","Thu Oct 20 04:32:43 +0000 2011","el #twitter x cel no s!rve e$ un ascΘ(...)" +"twitter","irrelevant","126878477424148480","Thu Oct 20 04:32:40 +0000 2011","@bruna_rafaelah xau amanha eu to d volto com meu #Twitter +COM FE EM DEUS +#TwiTTerOff" +"twitter","irrelevant","126878461464821760","Thu Oct 20 04:32:37 +0000 2011","La mamachy ya tiene #Twitter |||| Sigamosla es ella @DianaSanchez_27" +"twitter","irrelevant","126878337342771201","Thu Oct 20 04:32:07 +0000 2011","Buenas Noches a Todos!!! Llego mi momento. Momento #Twitter de reflexiones y analisis. Pensamientos encontrados en lo profundo de la noche." +"twitter","irrelevant","126878307617746944","Thu Oct 20 04:32:00 +0000 2011","@chihuahuentauro jajajaja #Facebook es otra cosa, insultas a #Twitter jajajajajaja" +"twitter","irrelevant","126878294867058688","Thu Oct 20 04:31:57 +0000 2011","My #Twitter #Diary مذكرات تويتري is out! http://t.co/JLsrgOXI â–¸ Top stories today via @socialwediatips" +"twitter","irrelevant","126878250541645824","Thu Oct 20 04:31:46 +0000 2011","Shit! Que la gente no sabe que existe #Twitter cambian de estado en #BBM y en #FB mas que el pañal de un recien nacido.!" +"twitter","irrelevant","126878057662398464","Thu Oct 20 04:31:00 +0000 2011","@crucius89 miiiiiiiiiiiiiiira la rotita tenia #twitter aunque esta novata aun" +"twitter","irrelevant","126878055091277824","Thu Oct 20 04:31:00 +0000 2011","@Omardelos @RonaldElApostol Hace un tiempo q el varon tira su rema 11x17 aqui en #Twitter :D" +"twitter","irrelevant","126877892855611392","Thu Oct 20 04:30:21 +0000 2011","#Twitter #twit2alsh +تويتر يثبت لى يوميا انه ÙÙ‰ الأصل كان عراÙÙ‡ كل ما أقوله حاجه يقوللى ما انا عار٠+OOoooops u already tweeted zaaaat go away" +"twitter","irrelevant","126877830150762496","Thu Oct 20 04:30:06 +0000 2011","YouTube SEO: + +It's time for another excerpt from the second edition of Music 3.0: A Survival Gui... http://t.co/ePAgAnJB #twitter #news" +"twitter","irrelevant","126877791911292928","Thu Oct 20 04:29:57 +0000 2011","RT @VLAD5564: #RT this #Follow all new music #TFB #nymusic. #newmusic #5000aday #1000aday #twitter #dipset #skullgang #730dips #byrdgan ..." +"twitter","irrelevant","126877775830327296","Thu Oct 20 04:29:53 +0000 2011","Sunaa karo merii jaan in se un se afsaane / sab ajnabi hain yahaan kaun kis ko pehchane #Kaifi #Shair #Twitter" +"twitter","irrelevant","126877738702344192","Thu Oct 20 04:29:44 +0000 2011","Me identifico mas con #TWITTER q con #FACE xq aqui puedo expresar todo lo q siento x #JUSTIN sin temor a ser criticada!!" +"twitter","irrelevant","126877737410502659","Thu Oct 20 04:29:44 +0000 2011","Nee, heb je vast de tweets gemist tussen @ecoumans & mij?! Jou zal ik zeker nomineren. 
RT ""@dretoorn: Zo, ben weer bij #twitter""" +"twitter","irrelevant","126877693563244544","Thu Oct 20 04:29:34 +0000 2011","@majufedz @EliasMM me retiro otro dia compartimos nuestras #leccionesparalavida con el mundo de #twitter jajaja" +"twitter","irrelevant","126877589955543040","Thu Oct 20 04:29:09 +0000 2011","Ella dice que yo soy su Niño del #Twitter *-----*" +"twitter","irrelevant","126877547899269120","Thu Oct 20 04:28:59 +0000 2011","#twitter #detik Mengaku Masih Baru di Kabinet, Cicip Baru Siapkan Program Pekan Depan http://t.co/Y3yOaRiN" +"twitter","irrelevant","126877547878289408","Thu Oct 20 04:28:59 +0000 2011","#twitter #detik Hakim Imas Menangis Ikuti Sidang Perdana - Hakim Imas Dianasari tak kuasa menahan air matanya saat d... http://t.co/G3wgjL6R" +"twitter","irrelevant","126877547710521344","Thu Oct 20 04:28:59 +0000 2011","#twitter #detik Mahfudz: Ada Parpol yang Mendorong SBY Melepas Menteri PKS http://t.co/hoyXdbDq" +"twitter","irrelevant","126877547576311808","Thu Oct 20 04:28:59 +0000 2011","#twitter #detik Buntut Kongres Rakyat Papua, 2 Orang Ditemukan Tewas http://t.co/S0lUgv4B" +"twitter","irrelevant","126877547244945408","Thu Oct 20 04:28:59 +0000 2011","#twitter #detik PKS Akan Abaikan Kontrak Politik dengan SBY - Partai Keadilan Sejahtera (PKS) masih berada di koalis... http://t.co/BbqFnSzm" +"twitter","irrelevant","126877540928331777","Thu Oct 20 04:28:57 +0000 2011","Creo que hoy es mi ultimo día en #Twitter" +"twitter","irrelevant","126877498981089280","Thu Oct 20 04:28:47 +0000 2011","Segun lo que me explicarón, fue victima de un hackeo #Twitter" +"twitter","irrelevant","126877457675595776","Thu Oct 20 04:28:37 +0000 2011","#Twitter #Pause Cenare :9" +"twitter","irrelevant","126877358740344832","Thu Oct 20 04:28:14 +0000 2011","Skype: geovannyacaro como en #twitter @geovannyacaro" +"twitter","irrelevant","126877288418639872","Thu Oct 20 04:27:57 +0000 2011","Nu doet alleen #twitter het" +"twitter","irrelevant","126877230587576320","Thu Oct 20 04:27:43 +0000 2011","#twitter off me llama mi camita jajaja.." +"twitter","irrelevant","126877113348403201","Thu Oct 20 04:27:15 +0000 2011","Ahhhh mi #twitter no caaargaaaaaa!!!" +"twitter","irrelevant","126876910788673536","Thu Oct 20 04:26:27 +0000 2011","We're hiring! Immediate openings! #socialmedia #facebook #twitter #hiring #job #owner #marketing #networking http://t.co/3LNiJC9U" +"twitter","irrelevant","126876805134159872","Thu Oct 20 04:26:02 +0000 2011","Dime porque demonios existes #Twitter ?! Oh si! Para causarle insomnio a cualquera que pase no?" +"twitter","irrelevant","126876741913415681","Thu Oct 20 04:25:47 +0000 2011","RT @DKhazhintsev: Губернатор @VShport раÑÑказывает про #twitter #ХабаровÑк" +"twitter","irrelevant","126876728206438400","Thu Oct 20 04:25:43 +0000 2011","Доброе утро #twitter" +"twitter","irrelevant","126876695989985280","Thu Oct 20 04:25:36 +0000 2011","Ternuritas los que creen me la paso conectado al #twitter no saben de las aplicaciones jajaja #Oaxaca" +"twitter","irrelevant","126876651551338496","Thu Oct 20 04:25:25 +0000 2011","#Twitter-Splitter (7) – Gottes Kindergarten: Willkommen in der größten Irrenanstalt der Welt! http://t.co/97ynzVRE via @Salecker" +"twitter","irrelevant","126876632874106881","Thu Oct 20 04:25:21 +0000 2011","jisuis, geral nas festinha acha hype ficar só no telephone téc téc téc, gentem, dá um tempo. #twitter #biscates #feios #sms #aloneinthedark" +"twitter","irrelevant","126876630491729920","Thu Oct 20 04:25:20 +0000 2011","good afternoon #twitter... 
+---unkaboggable talaga si # vice ganda sa kanyang # no other horse with @IAmDerekRamsay,Pooh and Chocoleit... :)" +"twitter","irrelevant","126876586170523648","Thu Oct 20 04:25:10 +0000 2011","a dormir #twitter off" +"twitter","irrelevant","126876547025084416","Thu Oct 20 04:25:00 +0000 2011","troquei meu avatar do #twitter rs." +"twitter","irrelevant","126876538653245440","Thu Oct 20 04:24:58 +0000 2011","Vuelvo a el #Twitter hasta el 07/11/2011 by _ @LLLF_" +"twitter","irrelevant","126876463965278208","Thu Oct 20 04:24:40 +0000 2011","Ya me esta empezando a Gustar #Twitter!!:DD" +"twitter","irrelevant","126876452326080512","Thu Oct 20 04:24:38 +0000 2011","@LuceroMexico Ja lu! chiste! en qe se parese papa pitufo al #twitter??? +en qe los 2 tienen el pajarito Azul ..! jaja" +"twitter","irrelevant","126876390388793344","Thu Oct 20 04:24:23 +0000 2011","#nomames esta bien frio el #twitter, casi NAIDE twittea xD." +"twitter","irrelevant","126876299556962305","Thu Oct 20 04:24:01 +0000 2011","I just installed the new Twidroyd for #Twitter on my #Android Phone - It's #Free, give it a try. http://t.co/VDYd7QaW" +"twitter","irrelevant","126876220460761088","Thu Oct 20 04:23:42 +0000 2011","Google+ to Support Pseudonyms http://t.co/ms1eBXpx #google+ #googlesocialmedia #googleventures #facebook #twitter #socialmediamarketing" +"twitter","irrelevant","126876206003003392","Thu Oct 20 04:23:39 +0000 2011","RT @NickElPoeta: Saludos a usted que a estás horas sigue en el #facebook y el #twitter.. +bendiciones" +"twitter","irrelevant","126876188294643712","Thu Oct 20 04:23:35 +0000 2011","#TwitTer Donde la gente Te confiesan Su Vida sin Preguntar. O_o" +"twitter","irrelevant","126876082593992704","Thu Oct 20 04:23:09 +0000 2011","Image update >> : AIDS ad campaign : use condoms - http://t.co/qZqblIZB #funny #humour #twitter" +"twitter","irrelevant","126876080538787841","Thu Oct 20 04:23:09 +0000 2011","checando ... #Facebook #Twitter" +"twitter","irrelevant","126876073337159680","Thu Oct 20 04:23:07 +0000 2011","que quieren decir las listas en #Twitter???" +"twitter","irrelevant","126876037689774080","Thu Oct 20 04:22:59 +0000 2011","Niemiecki rzÄ…d uruchomiÅ‚ w Å›rodÄ™ wÅ‚asny kanaÅ‚ na YouTube - Wiadomosci 24: Niemiecki rzÄ…d uruchomiÅ‚ w Å›r... http://t.co/n6Wp2QZv #twitter" +"twitter","irrelevant","126875903266529281","Thu Oct 20 04:22:27 +0000 2011","- Pasen Buenas Noches #Twitter'OFF ♥" +"twitter","irrelevant","126875893481209858","Thu Oct 20 04:22:24 +0000 2011","Luego de haberme acostado ayer a las 4 de la ma?ana... ahora entiendo por que me ta picando el sue?o!! #Twitter!!!! dejame dormiiiirr!!!" +"twitter","irrelevant","126875748484124672","Thu Oct 20 04:21:50 +0000 2011","Puta! No se por que traje los cuadernos para estudiar a la cama si sigo en #twitter #OhShit! T_T" +"twitter","irrelevant","126875741869719552","Thu Oct 20 04:21:48 +0000 2011","nossa cade o povo desse #Twitter meu deus" +"twitter","irrelevant","126875719912538112","Thu Oct 20 04:21:43 +0000 2011","meu #twitter sem graça . boa noite e beijos :9" +"twitter","irrelevant","126875637720940544","Thu Oct 20 04:21:23 +0000 2011","RT @DicasECommerce_: Use o Twitter Profissionalmente! ~ Dicas E-Commerce e Renda Extra!: http://t.co/k6yAfacy #Twitter #Blog #Ecommerce ..." 
+"twitter","irrelevant","126875605395456001","Thu Oct 20 04:21:16 +0000 2011","Twitter reconoce el éxito de los tweets patrocinados y prepara nuevos formatos publicitarios http://t.co/CkQiNbQb #twitter" +"twitter","irrelevant","126875567168561152","Thu Oct 20 04:21:07 +0000 2011","@diegomendesr_ popularizar o #twitter ?? Tipo ter seguidores é isso???" +"twitter","irrelevant","126875553314783232","Thu Oct 20 04:21:03 +0000 2011","Se que en estos momentos transmiten soy tu fan, cortesía de #twitter y de @aNgEluSrO" +"twitter","irrelevant","126875508662210560","Thu Oct 20 04:20:53 +0000 2011","Ya no sé si la noticia del Zoo de Ohio que me llegó a mi cel. fue sacada o no de #twitter... creo que sí" +"twitter","irrelevant","126875401887821825","Thu Oct 20 04:20:27 +0000 2011","#Tecnologia, sabias que #twitter genera 250 millones de publicaciones al día http://t.co/CA2dK0xH" +"twitter","irrelevant","126875254437056512","Thu Oct 20 04:19:52 +0000 2011","Buenas noches para todos felices sueños muuaahhzz!!! gunaiiii #twitter" +"twitter","irrelevant","126875239438229504","Thu Oct 20 04:19:48 +0000 2011","#twitter off.. La cama me llama!!!" +"twitter","irrelevant","126875209897750528","Thu Oct 20 04:19:41 +0000 2011","@Jess_Haddad D; tonces deja el vicio de #Twitter!! xD" +"twitter","irrelevant","126875201072922624","Thu Oct 20 04:19:39 +0000 2011","Buenas noches a todos #Twitter off" +"twitter","irrelevant","126875171008163840","Thu Oct 20 04:19:32 +0000 2011","RT @NegooHB: Somos 2 Intao @DehTavares que tem o #Twitter #HAHA" +"twitter","irrelevant","126875123457331200","Thu Oct 20 04:19:21 +0000 2011","tempat asik buat curhat #twitter" +"twitter","irrelevant","126875039621578752","Thu Oct 20 04:19:01 +0000 2011","RT @teresarod: Tengo lo que a mi me basta para ser feliz #(8) #twitter OFF como? No sabia!!" +"twitter","irrelevant","126875031983759360","Thu Oct 20 04:18:59 +0000 2011","PartyLiveFree #TWITTER FOLLOWERS, #FACEBOOK LIKES, YOUTUBE VIEWS, EMAIL BLASTS, ETC. #getmorefacebookfanpagelikes" +"twitter","irrelevant","126875011221946368","Thu Oct 20 04:18:54 +0000 2011","RT @Kcyoungboss: RT @nine_oh SIMPLY DA BEST ON #TWITTER MY TWEEPS @Kcyoungboss @JenniferWarters @TONYTECH818 @Kozak2012 @3010lys @TheLeg ..." +"twitter","irrelevant","126874994180497408","Thu Oct 20 04:18:50 +0000 2011","mi sobrina ve un perfil sin foto en #twitter y me dice: ¿¿¿QUIEN ES ESE HUEVO??? +""""""JAJAJA,,,," +"twitter","irrelevant","126874967433428992","Thu Oct 20 04:18:44 +0000 2011","@JhodeMrd1 pero siempre pones cosas como pa lee las lea ah? El debe de tener #twitter......Y si no lo es lo va hacer o ya fue tu novio??" +"twitter","irrelevant","126874894943260674","Thu Oct 20 04:18:26 +0000 2011","#TeamFollowBack #TFB #MustFollow #MF #FollowNow #FN #FollowFriday #FF #IFB #Twitter #Follow #IFollowBack GET A #SHOUTOUT FOR FOLLOWING ME!!" +"twitter","irrelevant","126874749321216000","Thu Oct 20 04:17:52 +0000 2011","Buenas noches #twitter" +"twitter","irrelevant","126874719772356608","Thu Oct 20 04:17:45 +0000 2011","Eita q o povo do #twitter foi todos dormir!!!" +"twitter","irrelevant","126874707780841472","Thu Oct 20 04:17:42 +0000 2011","@Yerson_Aponte Que conste quedo en el #Twitter !! xD" +"twitter","irrelevant","126874706119888896","Thu Oct 20 04:17:41 +0000 2011","@ACRIS_REYS Agora q estava legal acabou. Deixa eu t perguntar. Como eu popularizo o meu #twitter ? 
Vc tem dicas?;)*" +"twitter","irrelevant","126874654055989248","Thu Oct 20 04:17:29 +0000 2011","as indiretaa ta solta hoje nesse #twitter UAHSUAHSUASA' E SO PRA ELA S2" +"twitter","irrelevant","126874645587701760","Thu Oct 20 04:17:27 +0000 2011","#Twitter se tarda para cambiar de avatar .-" +"twitter","irrelevant","126874549550714880","Thu Oct 20 04:17:04 +0000 2011","Hasta Mañana Que Descansen #Twitter Off !!" +"twitter","irrelevant","126874482798370816","Thu Oct 20 04:16:48 +0000 2011","[10/20]何ãŒã©ã†ã™ã”ã„ï¼ï¼Ÿ Android4.0ã®æ–°æ©Ÿèƒ½ï¼ ⇒ http://t.co/BUGFa8Xj #twitter" +"twitter","irrelevant","126874479291924480","Thu Oct 20 04:16:47 +0000 2011","ãºã‚“ã¦ã‚‹ã®ãƒ‡ã‚¸ã‚¿ãƒ«ãƒšãƒ³ã€ã‚ªãƒ¼ãƒªãƒƒãƒ‰ç¤¾ã®ã‚¯ãƒ©ã‚¦ãƒ‰åž‹ãƒ†ã‚­ã‚¹ãƒˆåŒ–サービスã¨é€£æº ~スマートフォンã‹ã‚‰ã®æ“作ã§ã€æ‰‹æ›¸ãã®ãƒ‡ã‚¸ã‚¿ãƒ«åŒ–ã‹ã‚‰ã€ãƒ†ã‚­ã‚¹ãƒˆåŒ–ãŒå®Œçµï½žï¼ˆCNET Japan) ⇒ http://t.co/jtdAcA2W #twitter" +"twitter","irrelevant","126874451500474368","Thu Oct 20 04:16:41 +0000 2011","lo mejor del #twitter es que ahora ya puedes darle el #pesame a tus #artistas desde tu celularrrrrrr!! #sarcasmo" +"twitter","irrelevant","126874366775525377","Thu Oct 20 04:16:20 +0000 2011","Buenas noches îŒ #twitter friends! Me despido q me voy a poner hacer mi tarea de acct îŽ q flojera! Pero no se me permite fail any classes îƒ" +"twitter","irrelevant","126874348110888960","Thu Oct 20 04:16:16 +0000 2011","Bueno... #twitter OFF ! .... Mañanaa a casa ensamble con @NatiiGutiierrez & @pinkangel_26. :D !! Bona Noche ♥ :)" +"twitter","irrelevant","126874313423994880","Thu Oct 20 04:16:08 +0000 2011","Bueno... Mañana Es Un Día Largo Y Agitado ;) #TWITTER OF" +"twitter","irrelevant","126874273280303104","Thu Oct 20 04:15:58 +0000 2011","#LaAficion al #Twitter es #Perjudicial para la #SaludMental . Jaja #SiSeñor @JunsCastro! #Hablamos! #Chao jaja #DileaTuMenteQueDescance..." +"twitter","irrelevant","126874268779810817","Thu Oct 20 04:15:57 +0000 2011","RT @juancamilo890: @Marbelle30 ...Esta familia de #twitter cada vez crece mas y mas rápidamente estas en 71.406 seguidores y estoy segur ..." +"twitter","irrelevant","126874165105008640","Thu Oct 20 04:15:32 +0000 2011","Gamit m din pala #twitter now @akosiadrienne haha! (>̯-Ì®<)" +"twitter","irrelevant","126874164039659520","Thu Oct 20 04:15:32 +0000 2011","#Twitter Jogo de Brasil x Argentina em Guadalajara é destaque no Twitter - Terra Brasil http://t.co/8kak5aO7" +"twitter","irrelevant","126874157840474113","Thu Oct 20 04:15:31 +0000 2011","hahaha los zombies qe usan #twitter cmo chat sn zombienpndejs...hahahahhaha" +"twitter","irrelevant","126874145408561152","Thu Oct 20 04:15:28 +0000 2011","@_Pao92 ah bueno!!! jeje ta bn... ps aqui #fb y #twitter . jeje x cierto ya no me sta siguiendo vdd?? :'( ahorita vi mis seguidores y no sta" +"twitter","irrelevant","126874136017518593","Thu Oct 20 04:15:25 +0000 2011","RT @Xuuxinha_: o #twitter pedi : no que esta pensando agora ? se fosse pra mim só falar isso, iria ficar escrevendo o dia inteiro milena ..." +"twitter","irrelevant","126874079239217153","Thu Oct 20 04:15:12 +0000 2011","Querida @mamá, te escribo esta #carta para dejarte tranquila, no estoy todo el tiempo en #Twitter. Fav para papá y un RT para ti." 
+"twitter","irrelevant","126874040261545985","Thu Oct 20 04:15:03 +0000 2011","Visualiza tus emails, menciones en #twitter, eventos de #facebook y más, desde Centro de notificaciones de #IOS 5 http://t.co/BsFmwKFH" +"twitter","irrelevant","126874002084990976","Thu Oct 20 04:14:53 +0000 2011","Buenas Noches que descansen #Twitter #OF ZzzzzZzzzzz" +"twitter","irrelevant","126873944501399552","Thu Oct 20 04:14:40 +0000 2011","Ahora el #Facebook Imita al #Msn y #Twitter :/" +"twitter","irrelevant","126873912515624960","Thu Oct 20 04:14:32 +0000 2011","#GoodNightPeople Hasta Mñn #Twitter :B" +"twitter","irrelevant","126873902499635200","Thu Oct 20 04:14:30 +0000 2011","Somos 2 Intao @DehTavares que tem o #Twitter #HAHA" +"twitter","irrelevant","126873886938763264","Thu Oct 20 04:14:26 +0000 2011","HABER FACEBOOK por que no entretienes a tu gente para que ya no se nos vaya #TWITTER tanta plebe ¬¬" +"twitter","irrelevant","126873874909507584","Thu Oct 20 04:14:23 +0000 2011","Acaso #twitter muestra la mayor parte de lo que pensamos???" +"twitter","irrelevant","126873816319262721","Thu Oct 20 04:14:09 +0000 2011","¿Desde cuándo #Twitter desactivó para todos la opción ""Los RTs de esta persona aparecerán en tu TL""?" +"twitter","irrelevant","126873785512116225","Thu Oct 20 04:14:02 +0000 2011","#BOANOITE #TWITTER . SAINDO \Õ . #CHUPAFLAMENGO" +"twitter","irrelevant","126873756437200896","Thu Oct 20 04:13:55 +0000 2011","@IsabelSalazarJ @PremiosTWCo y en el HT #Twitter eres tambn cabeza con 44 votos Isa, tienes lo votos regados con HT #yovotopor tienes 1" +"twitter","irrelevant","126873660442148865","Thu Oct 20 04:13:32 +0000 2011","Creo k ya me saldre de #Twitter" +"twitter","irrelevant","126873596080558082","Thu Oct 20 04:13:17 +0000 2011","ja volto #twitter , vou na cozinha ." +"twitter","irrelevant","126873574895140864","Thu Oct 20 04:13:12 +0000 2011","Disculpame #twitter te e tenido algo avandonado este dia" +"twitter","irrelevant","126873447912587264","Thu Oct 20 04:12:41 +0000 2011","pues esto fue todo por hoy! ;D bye #FB sigo un rato más por #twitter" +"twitter","irrelevant","126873417126383616","Thu Oct 20 04:12:34 +0000 2011","Ya me dieron ganas de iniciar una campaña mediática en #twitter... para mandar al Espinoza al INEA #SóloenMéxico" +"twitter","irrelevant","126873407487881217","Thu Oct 20 04:12:32 +0000 2011","vo sair daki do #twitter já :) só vo ver uns videos aki , e já to indo ," +"twitter","irrelevant","126873260385239040","Thu Oct 20 04:11:57 +0000 2011","Lo Bueno de #twitter es que no cualquier babos@ Tiene . Claro que hay unos casos que............." +"twitter","irrelevant","126873138079346688","Thu Oct 20 04:11:27 +0000 2011","#TeamFollowBack #TFB #MustFollow #MF #FollowNow #FN #FollowFriday #FF #IFB #Twitter #Follow @CocaCola_Korea @ThisIsDoyin_" +"twitter","irrelevant","126873037982281729","Thu Oct 20 04:11:04 +0000 2011","lo mejor de #twitter es que no tenes que esperar un monton a que carge tu foto:D♥" +"twitter","irrelevant","126873004494954496","Thu Oct 20 04:10:56 +0000 2011","#twitter Off a dormir para no pensar en ti!" +"twitter","irrelevant","126872948022849536","Thu Oct 20 04:10:42 +0000 2011","MEU #TWITTER TA TÃO MOVIMENTADO ;D ai sim em !" +"twitter","irrelevant","126872939638439936","Thu Oct 20 04:10:40 +0000 2011","Me Ausenteii por mutivos que me DeixaramTriste mais #Twitter Estou de Volta uhuruoooo!!!!" 
+"twitter","irrelevant","126872936568201216","Thu Oct 20 04:10:39 +0000 2011","Ya tengo sueño y mañana madrugo DDD: #HastaMañana #TweetOff Pstd: #Twitter produce #insomnio y es tan #Pocesivo e.e #bye !! #siguemeytesigo" +"twitter","irrelevant","126872919480610816","Thu Oct 20 04:10:35 +0000 2011","@Brandon_Solano hahaha, ve pues xD Ok bye' de #twitter ILTSM!' (LL' <3" +"twitter","irrelevant","126872906738319360","Thu Oct 20 04:10:32 +0000 2011","@ayrontrompete Beleza. Mande mesmo por DM , que assim que eu entrar no #twitter. Eu pego e coloco lá no site !" +"twitter","irrelevant","126872886232363008","Thu Oct 20 04:10:27 +0000 2011","@elias_elegantes voltoo a gosta do #TWITTER ?" +"twitter","irrelevant","126872881417293824","Thu Oct 20 04:10:26 +0000 2011","Si eso pasa cierro mi cuenta de #twitter! D':" +"twitter","irrelevant","126872771929182209","Thu Oct 20 04:10:00 +0000 2011","hahahhaha´ no puedo usar el #twitter por laa compuu" +"twitter","irrelevant","126872763221819392","Thu Oct 20 04:09:58 +0000 2011","Saino Aki #Twitter" +"twitter","irrelevant","126872483394621440","Thu Oct 20 04:08:51 +0000 2011","@samixitoledo jajajaja pero no kiere decir q m vas a dejar de hablar adems el mio amore no tiene #Twitter. Jajaja" +"twitter","irrelevant","126872365211725824","Thu Oct 20 04:08:23 +0000 2011","Tengo lo que a mi me basta para ser feliz #(8) #twitter OFF" +"twitter","irrelevant","126872362007277568","Thu Oct 20 04:08:22 +0000 2011","yo se q el pajarito de #twitter tanbien come maicillo XD" +"twitter","irrelevant","126872326095638528","Thu Oct 20 04:08:14 +0000 2011","#Camilokas #Valelokas #Twitter@s y mi #LalyLoka Hasta mañana! Qe tengan lindos! Son increibles! LQM!! Mil bss =)" +"twitter","irrelevant","126872241693667328","Thu Oct 20 04:07:54 +0000 2011","@AndhixD yaaa hehhe q habras echo toda la tardee mmm no no maaal maaal! Haha oye algo le pasa a mi #twitter ☹" +"twitter","irrelevant","126872218025213952","Thu Oct 20 04:07:48 +0000 2011","Quem não tem um #TWITTER que atire a primeira pedraaaa" +"twitter","irrelevant","126872199620591617","Thu Oct 20 04:07:44 +0000 2011","deberia de ponerme a leer un libro en lugar de estar perdiendo el tiempo en #fb y #twitter :(" +"twitter","irrelevant","126872143593095168","Thu Oct 20 04:07:30 +0000 2011","Bonjour #twitter , reveil trés difficile pour ma part.. !" +"twitter","irrelevant","126872127986073600","Thu Oct 20 04:07:27 +0000 2011","God morgon #Sverige och #Twitter !" +"twitter","irrelevant","126872084679892993","Thu Oct 20 04:07:16 +0000 2011","Que buenas frases que hay en #twitter me parece que voy a hacer un libro! jaja" +"twitter","irrelevant","126872039138131968","Thu Oct 20 04:07:05 +0000 2011","Cualquier contenido en #twitter con palabras u oraciones soez el propietario no se hace responsable por daños a la moral d c/quien! #hedicho" +"twitter","irrelevant","126872023552102400","Thu Oct 20 04:07:02 +0000 2011","Buenas noches a todos :) #twitter off" +"twitter","irrelevant","126871950185345024","Thu Oct 20 04:06:44 +0000 2011","@nelsonfenc recupere el #twitter para mi #iPODzombi!! #EpicWin de nuevo!!! ;-)" +"twitter","irrelevant","126871942799175682","Thu Oct 20 04:06:42 +0000 2011","Quem não tem um #TWITTER que atire a primeira pedra!" +"twitter","irrelevant","126871914579898369","Thu Oct 20 04:06:36 +0000 2011","@hiarly iae meu brother .. min siga ae no #twitter .. ta lembrado de min né ?" +"twitter","irrelevant","126871909177626626","Thu Oct 20 04:06:34 +0000 2011","Justo cuando aprendes a usar Twitter, ya no podés salir. 
#Twitter" +"twitter","irrelevant","126871907302785024","Thu Oct 20 04:06:34 +0000 2011","Awwwwr me encanta #Twitter porque por este medio desquito todo mis corajes" +"twitter","irrelevant","126871890890461184","Thu Oct 20 04:06:30 +0000 2011","el #twitter es demasiado adictivo $:" +"twitter","irrelevant","126871890320035841","Thu Oct 20 04:06:30 +0000 2011","Use o Twitter Profissionalmente! ~ Dicas E-Commerce e Renda Extra!: http://t.co/k6yAfacy #Twitter #Blog #Ecommerce #Brasil #Marketing" +"twitter","irrelevant","126871852185436160","Thu Oct 20 04:06:21 +0000 2011","vou #fazer uma #montagem com minhas #fotos para colocar como #plano de fundo aki no #twitter!!!!!!!!!!!!" +"twitter","irrelevant","126871831583002625","Thu Oct 20 04:06:16 +0000 2011","@ferisykes por fi te encuentro en #Twitter :DD" +"twitter","irrelevant","126871719557341184","Thu Oct 20 04:05:49 +0000 2011","Quien no ha creado otra cuenta en #twitter para seguirse? Yo si #Mevalehuevo" +"twitter","irrelevant","126871696887132160","Thu Oct 20 04:05:44 +0000 2011","#Twitter, definitivamente... me distraes de mis labores importantes! xD" +"twitter","irrelevant","126871658991599616","Thu Oct 20 04:05:35 +0000 2011","RT @JuanJimenez_V: Tan linda #MiN' me arreglo el #Twitter de nuevoooo!! ♥" +"twitter","irrelevant","126871512195145729","Thu Oct 20 04:05:00 +0000 2011","#twitter off buenas noches:$" +"twitter","irrelevant","126871511305961473","Thu Oct 20 04:05:00 +0000 2011","##FB & ##Twitter ##Off! Buenas Nochees(: +Espero verte en mis sueños... +Naah, de todas formas te veo mañana! XD" +"twitter","irrelevant","126871498832097280","Thu Oct 20 04:04:57 +0000 2011","JESUS tantos pinches siglos sin escribir en Ti Mi Querido #Twitter<3" +"twitter","irrelevant","126871426107047936","Thu Oct 20 04:04:39 +0000 2011","A Dormiir Deskanseeeen sueñen & duermaan ricoo!! :) #Twitter OFF!!" +"twitter","irrelevant","126871277813239808","Thu Oct 20 04:04:04 +0000 2011","@Olsensn osea, #fb no me notificó del inbox de los planes de la boda...por eso uso más el #twitter ¬¬' anyway, ya contesté por fin! =)" +"twitter","irrelevant","126871263250620416","Thu Oct 20 04:04:00 +0000 2011","RT @alisalam1990: البايو ÙÙŠ تويتر مثل عناوين المنازل لا تكش٠عن صاحبها إلا إذا دخلت التايم لاين ØŒ ÙØ¥Ù…ا تجد تطابق أو كأنك دخلت بيت عنوانه ..." +"twitter","irrelevant","126871213732671488","Thu Oct 20 04:03:49 +0000 2011","Entre al #Twitter solo para desahogarme! :B" +"twitter","irrelevant","126871199362990081","Thu Oct 20 04:03:45 +0000 2011","#Twitter es como el alcohol: Empiezas por curiosidad, luego socializar, después por diversión y terminas en adicción. / soy ya alcohólico ;)" +"twitter","irrelevant","126871089929392128","Thu Oct 20 04:03:19 +0000 2011","alejado de #twitter y #facebook mi vida esta vacia, ya ni tiempo tengo de quejarme...." +"twitter","irrelevant","126871084950749184","Thu Oct 20 04:03:18 +0000 2011","·Bueno """"#twitter #OoF q paSen Buenas nOChe...!! @rubioCoTiZe #Acuerdese ya tah buenOh haha xD" +"twitter","irrelevant","126870987559026688","Thu Oct 20 04:02:55 +0000 2011","Tengo olvidado mi #twitter :S" +"twitter","irrelevant","126870943330091008","Thu Oct 20 04:02:44 +0000 2011","Tan linda #MiN' me arreglo el #Twitter de nuevoooo!! 
♥" +"twitter","irrelevant","126870919590330369","Thu Oct 20 04:02:39 +0000 2011","#Twitter Off A Mimir Tempranitoo Muahh Los Quiero A Todos Que Duerman bn" +"twitter","irrelevant","126870916729810944","Thu Oct 20 04:02:38 +0000 2011","Típico que el #Facebook a esta hora aburre y el #twitter me desvela" +"twitter","irrelevant","126870873176150017","Thu Oct 20 04:02:27 +0000 2011","RT @SaeldesN: @AndeersonF_ è Nóis Indicar eu ai no teu #twitter Pra Galera Seguir Pode Ser ' ?" +"twitter","irrelevant","126870813839335424","Thu Oct 20 04:02:13 +0000 2011","De verdd qe si me he sentidoo rara estar un Dia sin mucho #twitter :B" +"twitter","irrelevant","126870802837680128","Thu Oct 20 04:02:11 +0000 2011","@Angelina_adr Se que Utiliza más Pin que #Twitter & que #Facebook, agrega hay pin: 2178D655" +"twitter","irrelevant","126870669492359169","Thu Oct 20 04:01:39 +0000 2011","@AndeersonF_ è Nóis Indicar eu ai no teu #twitter Pra Galera Seguir Pode Ser ' ?" +"twitter","irrelevant","126870663758757888","Thu Oct 20 04:01:38 +0000 2011","@adictoTR solo pajas escribis en el #Twitter va jejee" +"twitter","irrelevant","126870566916456448","Thu Oct 20 04:01:14 +0000 2011","Están de flojera los topic tender en #twitter" +"twitter","irrelevant","126870498452832256","Thu Oct 20 04:00:58 +0000 2011","#iMessage يتجاوز تويتر ويجد رÙيقا للمشي قبل +#Twitter !! +هل الأصدقاء الواقع أقرب من أصدقاء العالم Ø§Ù„Ø§ÙØªØ±Ø§Ø¶ÙŠØŸ" +"twitter","irrelevant","126870493402898434","Thu Oct 20 04:00:57 +0000 2011","Porra mo0 cara que nao entro no #TWITTER mais um Salve pra vc's e amanha tem mais flllws pra quem fika bora laah pra quem vai ksoaksopk" +"twitter","irrelevant","126870445659131904","Thu Oct 20 04:00:46 +0000 2011","Google Apps Coming To Google+ ‘Within Days’; Company Taking A ‘Cautious Approach’ To APIs http://t.co/1rgAI0RH #twitter" +"twitter","irrelevant","126870402910797826","Thu Oct 20 04:00:35 +0000 2011","Nadie es tan lind@ como en la foto del #twitter y tampoco tan feo como en la foto de la cedula..." +"twitter","irrelevant","126870391774908416","Thu Oct 20 04:00:33 +0000 2011","@8_ivonne @1_Ddanille ahhahahaha noceee asaber porq me llamo y digo ke me cnectara a #twitter pero niiiiii cntestaaaa-!!!" +"twitter","irrelevant","126870381867966465","Thu Oct 20 04:00:30 +0000 2011","#Facebook y #Twitter me volverán loco! :$!" +"twitter","irrelevant","126870318764662784","Thu Oct 20 04:00:15 +0000 2011","Sigue ah @KhrizJoohn con su nueva cuenta en #Twitter" +"twitter","irrelevant","126870301471551489","Thu Oct 20 04:00:11 +0000 2011","@pimentta_qzl kkkkatat agora ame maais eu amo um tantão que não cabe nesse post do #twitter ..." +"twitter","irrelevant","126870289861718016","Thu Oct 20 04:00:08 +0000 2011","@Andrees_ramos Jajajajaja ya se que etsamos en #Twitter baboso pero yo lo vi en FB, INBOX pues!" +"twitter","irrelevant","126870079798378497","Thu Oct 20 03:59:18 +0000 2011","Ala tengo a #twitter asi o mas abandonado :$" +"twitter","irrelevant","126870058050912256","Thu Oct 20 03:59:13 +0000 2011","RT @Azizshalan: تويتر: قريباً بإمكان اي مستخدم الرجوع لتغريداتة القديمة والرسائل الخاصة ايضاً + +#GoodNews + #Twitter" +"twitter","irrelevant","126870004766478336","Thu Oct 20 03:59:00 +0000 2011","RT @Yvandavid: RT @Yonopienso_: RT @Anyelius12: RT @Yonopienso_: RT @Yvandavid: Que lo mataron por #Twitter ya RT @Yonopienso_: ¿Que le ..." +"twitter","irrelevant","126869995610324993","Thu Oct 20 03:58:58 +0000 2011","@adrianosabino2 Maagiina amoor! Tavaa #Off esses diias akii noo #Twitter meesmoo! 
fiica triste cmg naaum taa?" +"twitter","irrelevant","126869990535217152","Thu Oct 20 03:58:57 +0000 2011","Un generador de partículas subatomicas marca Acme es mas divertido que ver a una mosca dormir... #hedicho #Twitter #Off" +"twitter","irrelevant","126869983769788417","Thu Oct 20 03:58:55 +0000 2011","#ok #NoEntiendo todo esto del #Twitter pero ya tengo el mío... SIGANME los buenos #FF #Followers" +"twitter","irrelevant","126869957085634560","Thu Oct 20 03:58:49 +0000 2011","#twitter casi no te pelo, lo siento u.u JAJA" +"twitter","irrelevant","126869921996091392","Thu Oct 20 03:58:41 +0000 2011","@DesirePoulain Ya vi que si le entendiste al #Twitter :D" +"twitter","irrelevant","126869901922152448","Thu Oct 20 03:58:36 +0000 2011","RT @Yonopienso_: RT @Anyelius12: RT @Yonopienso_: RT @Yvandavid: Que lo mataron por #Twitter ya RT @Yonopienso_: ¿Que le paso a Osmel?xD." +"twitter","irrelevant","126869798419300352","Thu Oct 20 03:58:11 +0000 2011","RT @MizzSpicey: #TeamFollowBack #TFB #MustFollow #MF #FollowNow #FN #FollowFriday #FF #IFB #Twitter #Follow @DrunkedOutTeezy" +"twitter","irrelevant","126869705620332544","Thu Oct 20 03:57:49 +0000 2011","RT @Anyelius12: RT @Yonopienso_: RT @Yvandavid: Que lo mataron por #Twitter ya RT @Yonopienso_: ¿Que le paso a Osmel?xD." +"twitter","irrelevant","126869683449233408","Thu Oct 20 03:57:44 +0000 2011","@Unipamplona las clases empiezan el 24 de octubre ?... Espefifiquen en los 140caracteres del #twitter no coloquen enlaces que no abren.." +"twitter","irrelevant","126869567061495808","Thu Oct 20 03:57:16 +0000 2011","@raellenkoga_ kk , isso porque eu ttinha #twitter antes de você, mais ttinha esqueciido de tudo :S kkkkkkk" +"twitter","irrelevant","126869527416938496","Thu Oct 20 03:57:07 +0000 2011","Mi nuevo amor #Twitter *O* Facebook qedo atras! :DD Lo cerrare despues de mi cumpleaños & me qedare con twatter para 100pre! ^^ (:" +"twitter","irrelevant","126869515983257600","Thu Oct 20 03:57:04 +0000 2011","Los 10 nuevos mandamientos de #Twitter http://t.co/1B8iNhTV @entrebits_com" +"twitter","irrelevant","126869499046670336","Thu Oct 20 03:57:00 +0000 2011","#ooff #TWITTER NOS VEMOS" +"twitter","irrelevant","126869440276078592","Thu Oct 20 03:56:46 +0000 2011","@JonathanGodin Je crois que #Twitter devra ajuster ces petit problèmes désagréable !" +"twitter","irrelevant","126869422857142272","Thu Oct 20 03:56:42 +0000 2011","#twitter me esta absorbiendo D:" +"twitter","irrelevant","126869344851472384","Thu Oct 20 03:56:23 +0000 2011","@Marbelle30 ...Esta familia de #twitter cada vez crece mas y mas rápidamente estas en 71.406 seguidores y estoy seguro que serán muchos mas." +"twitter","irrelevant","126869327222804480","Thu Oct 20 03:56:19 +0000 2011","""Mejorar la calidad de la educación"" A la señora ministra le bastaria los 140 caracteres de #twitter para explicar su grandiosa reforma." +"twitter","irrelevant","126869308029669376","Thu Oct 20 03:56:14 +0000 2011","RT @igobythenameofA: #Follow2gain #Followme #1000ADAY #TEAMFOLLOWBACK #twitter #200aday #500aday #Followme #f4f #ff #tl #INSTANFOLLOWBAC ..." +"twitter","irrelevant","126869231693348865","Thu Oct 20 03:55:56 +0000 2011","@tryladis bienvenida a #Twitter que bueno tenerte por aquí, esto es genial :)" +"twitter","irrelevant","126869213980786688","Thu Oct 20 03:55:52 +0000 2011","A mi #twitter le pica." +"twitter","irrelevant","126869163250679810","Thu Oct 20 03:55:40 +0000 2011","RT @icurioso: La famosa frase “ME LO DIJO UN PAJARITO†ya tiene sentido gracias a #Twitter!" 
+"twitter","irrelevant","126869159341604866","Thu Oct 20 03:55:39 +0000 2011","Excelente mi Ranking Global en #Twitter estoy entre 4,551 de 18,299,114 :o Gracias a Ustedes y a sus RT y en Venezuela estoy de #250 yes Sr" +"twitter","irrelevant","126869146045644800","Thu Oct 20 03:55:36 +0000 2011","#TRUE RT @ELDESTRIPAPUTAS YO SOY LO MAS CABRON QUE HA NACIDO EN #TWITTER .. QUIEN ES TWITTERO ANTIGUO LO SABE DESDE NOVIEMBRE DEL 2010" +"twitter","irrelevant","126869110901575680","Thu Oct 20 03:55:27 +0000 2011","RT @Yonopienso_: RT @Yvandavid: Que lo mataron por #Twitter ya RT @Yonopienso_: ¿Que le paso a Osmel?xD." +"twitter","irrelevant","126869094610907136","Thu Oct 20 03:55:23 +0000 2011","#Fcbk empieza a agregar nuevas aplicaciones volviendose cada vez mas obsoleto y aburrido... #Twitter es mejor que #fcbk #hedicho #jum" +"twitter","irrelevant","126869070292320256","Thu Oct 20 03:55:18 +0000 2011","Llega un momento que #twitter es tu desahogo" +"twitter","irrelevant","126869031180451840","Thu Oct 20 03:55:08 +0000 2011","jajajaja ;) ps q mas se hace En #Twitter @PilarGarciaM" +"twitter","irrelevant","126869021969756161","Thu Oct 20 03:55:06 +0000 2011","@matudelatower Se dieron cuenta que si ponen la T de #Twitter dada vuelta queda la F de #Facebook? // PLOP PLOP PLOP" +"twitter","irrelevant","126868980664250369","Thu Oct 20 03:54:56 +0000 2011","Mañana entro hasta las 9:30 así q quiero dsvelarme, platiquenme lo q sea o al menos diganme qn sta n #twitter, den alguna señal de vida !!!" +"twitter","irrelevant","126868846517817344","Thu Oct 20 03:54:24 +0000 2011","De regreso al mundo #Twitter" +"twitter","irrelevant","126868773562089472","Thu Oct 20 03:54:07 +0000 2011","Se dieron cuenta que si ponen la T de #Twitter dada vuelta queda la F de #Facebook?" +"twitter","irrelevant","126868680003960833","Thu Oct 20 03:53:45 +0000 2011","tava fazendo o #twitter da minha irmã kkk" +"twitter","irrelevant","126868673213378560","Thu Oct 20 03:53:43 +0000 2011","Nada como darle vida de nuevo a mi #twitter..." +"twitter","irrelevant","126868623645089792","Thu Oct 20 03:53:31 +0000 2011","Vou Dormir . Boa Noite Galera Do #Twitter E Do #Facebook" +"twitter","irrelevant","126868612408553472","Thu Oct 20 03:53:28 +0000 2011","RT @IsmaelitoDrums: VolviiendoO Ah #Twitter .! haha Diios Les Bendiiga" +"twitter","irrelevant","126868611922010112","Thu Oct 20 03:53:28 +0000 2011","@TebanRk @criszambrano23 oí a estos menes ome, cuando he dicho ""jajaja, patos?"" en el #twitter?... Jamás!!" +"twitter","irrelevant","126868601721462784","Thu Oct 20 03:53:26 +0000 2011","Que según un humano puede decir 4800 palabras en 24 horas... Y cuando no hay con quien vomitarlas todas?? A cierto el chat o #twitter" +"twitter","irrelevant","126868586882007041","Thu Oct 20 03:53:22 +0000 2011","@melo_gabby Sao os assuntos mais comentados do mundo no #Twitter. Mas o que eu queria que vc visse, nao ta mais la." +"twitter","irrelevant","126868539784183808","Thu Oct 20 03:53:11 +0000 2011","HAHAHA, éee ne pelo #twitter, até #garotanachuva canta beeem, #kkk,! voo grava uma shamada de vooz pra voce ouvir! tenho mt sucesso ! #kkk" +"twitter","irrelevant","126868539641577472","Thu Oct 20 03:53:11 +0000 2011","RT @ELDESTRIPAPUTAS: YO SOY LO MAS CABRON QUE HA NACIDO EN #TWITTER .. QUIEN ES TWITTERO ANTIGUO LO SABE DESDE NOVIEMBRE DEL 2010" +"twitter","irrelevant","126868472515928064","Thu Oct 20 03:52:55 +0000 2011","Bienvenida a #twitter @adyalessio asi q @christianstraw @afetati @Zul3Rivera den #Follow please!! 
X q hubo un error d dedo hace un rato!!" +"twitter","irrelevant","126868462336348162","Thu Oct 20 03:52:53 +0000 2011","YO SOY LO MAS CABRON QUE HA NACIDO EN #TWITTER .. QUIEN ES TWITTERO ANTIGUO LO SABE DESDE NOVIEMBRE DEL 2010" +"twitter","irrelevant","126868364927827968","Thu Oct 20 03:52:29 +0000 2011","probando la videollamada en facebook......buenooooo en esto si le ganaron a #Twitter........" +"twitter","irrelevant","126868357625548800","Thu Oct 20 03:52:28 +0000 2011","@melkoff pues bienvenida vas a ver que pronto le encontraras sentido es la onda el #twitter" +"twitter","irrelevant","126868352835661824","Thu Oct 20 03:52:27 +0000 2011","#Osea jajaja me entero que ahora en el #Twitter se blokea gente mis supuestros primos me blokearon #Lol" +"twitter","irrelevant","126868349396324352","Thu Oct 20 03:52:26 +0000 2011","@luhhcavalcante Aqui no #Twitter num pode dizer ne? tudo bem... depois vc me conta das cachaça. ahuahauahuahuahuahau" +"twitter","irrelevant","126868328659693568","Thu Oct 20 03:52:21 +0000 2011","@IrinaAlfonzo por eso siempre he dicho! El #FB es para q todos vean lo q haces y chismeen y el #Twitter es mas #informativo" +"twitter","irrelevant","126868322431143937","Thu Oct 20 03:52:19 +0000 2011","me encanta la pareja de @lari_riquelme en el #bailando2011! pasame su #twitter!" +"twitter","irrelevant","126868256479920128","Thu Oct 20 03:52:04 +0000 2011","@alegarcia001. Feliz aniversario !!! Que sean muy felices juntos y que cumplan muchos años mas !!!! Jajaja #twitter" +"twitter","irrelevant","126868229409865728","Thu Oct 20 03:51:57 +0000 2011","#yoconfieso que llevo 2 dias fuera de #twitter y mi vida no ha sido la misma. Los extrané îƒ" +"twitter","irrelevant","126868208924884993","Thu Oct 20 03:51:52 +0000 2011","RT @iTuSabias: Aléjate de las redes sociales si acabas de terminar una relación, en #Twitter leerás las razones y en #Facebook verás por ..." +"twitter","irrelevant","126868204881580032","Thu Oct 20 03:51:51 +0000 2011","RT @CarsiMeister: Lite para que sirve despues de todo si tengo a #Twitter" +"twitter","irrelevant","126868198032281600","Thu Oct 20 03:51:50 +0000 2011","Na real vou sair do #Twitter!!" +"twitter","irrelevant","126868196295843840","Thu Oct 20 03:51:49 +0000 2011","@EilsonOliveira a roupa num tira , mais os dedos no #twitter heheh #saudades" +"twitter","irrelevant","126868167921373185","Thu Oct 20 03:51:42 +0000 2011","RT @Yvandavid: Que lo mataron por #Twitter ya RT @Yonopienso_: ¿Que le paso a Osmel?xD." +"twitter","irrelevant","126868148879241216","Thu Oct 20 03:51:38 +0000 2011","Che a alguien le gusta mi nuevo fondo o mi icono de #twitter ? :o" +"twitter","irrelevant","126868137827250176","Thu Oct 20 03:51:35 +0000 2011","Ta medio tela hoy el #twitter #twitteroff & goodnight 4 everyone!" +"twitter","irrelevant","126868136531197952","Thu Oct 20 03:51:35 +0000 2011","@Mayra_Moreno1 #congrats ya sabes usar #twitter jajajaja #loveu" +"twitter","irrelevant","126867955844788225","Thu Oct 20 03:50:52 +0000 2011","Hehehe =} @SidyRM Pois é,Fazer o q né? O #Twitter Não #Vive Sem Miim ! Ou Sera o #Contrário ? Sei lá ... kkk =)" +"twitter","irrelevant","126867858461433857","Thu Oct 20 03:50:29 +0000 2011","@NahuN_Robles jajaja fregues de #TWITTER x'D" +"twitter","irrelevant","126867818242248704","Thu Oct 20 03:50:19 +0000 2011","@Giovanettinico Ojo que @JorgeHillcoat le esta metiendo con #Twitter .. Lo veo entusiasmado .. Faltaria @JokinImanol nada mas .." 
+"twitter","irrelevant","126867784494891008","Thu Oct 20 03:50:11 +0000 2011","#Twitter off mii voii..!!" +"twitter","irrelevant","126867765343686656","Thu Oct 20 03:50:07 +0000 2011","Lite para que sirve despues de todo si tengo a #Twitter" +"twitter","irrelevant","126867764282540032","Thu Oct 20 03:50:06 +0000 2011","VolviiendoO Ah #Twitter .! haha Diios Les Bendiiga" +"twitter","irrelevant","126867715947368449","Thu Oct 20 03:49:55 +0000 2011","Me encanta vacilar y ver tantas cosas en el #Twitter cada vez estoy mas #enamorado de este medio de comunicacion es #unico" +"twitter","irrelevant","126867510288072705","Thu Oct 20 03:49:06 +0000 2011","@JorgeHillcoat Hola Jorge, todo bien ? Me alegro que le estes metiendo onda a #Twitter ! Mañana te cuento .. Me jodi el tobillo .. Abrazo !" +"twitter","irrelevant","126867253609234433","Thu Oct 20 03:48:05 +0000 2011","eo tenho a mania de entrar no #TWITTER i respondeeer as coisas ki mi mandão kkkkkkkkkkkkk" +"twitter","irrelevant","126867181358170112","Thu Oct 20 03:47:47 +0000 2011","Bueno,buenas noche.#twitter" +"twitter","irrelevant","126867170742374400","Thu Oct 20 03:47:45 +0000 2011","Tengooo muchiisiimo sueño #twitter off ... :) descanseen!" +"twitter","irrelevant","126867149775044608","Thu Oct 20 03:47:40 +0000 2011","Bom, vou fechar esse #Twitter, porque ele está roubando os meus assuntos ¬¬" +"twitter","irrelevant","126867019646771201","Thu Oct 20 03:47:09 +0000 2011","Policías asaltando a empresarios en plena luz del día? que falta? reggetoneros con cuentas de #Twitter? #nomamar" +"twitter","irrelevant","126867015620239360","Thu Oct 20 03:47:08 +0000 2011","Saalee puee iaa me voi salee #twitter asta mañana" +"twitter","irrelevant","126867000030007296","Thu Oct 20 03:47:04 +0000 2011","jajajajaja PTM!! Gigo, si tuvieras #Twitter llegaría a poner todas las idioteces que dices jajajajaja #EpicWin!" +"twitter","irrelevant","126866948909842432","Thu Oct 20 03:46:52 +0000 2011","no entiendo como con lo ingenioso que soy en twitter tengo tan pocos followers. posta #demasiadoego #twitter #stevejobs" +"twitter","irrelevant","126866905096130560","Thu Oct 20 03:46:41 +0000 2011","Ninguem fala cmgo no #Twitter!" +"twitter","irrelevant","126866882979573760","Thu Oct 20 03:46:36 +0000 2011","RT @sam1meta #Twitter: Rápido, contundente, y directo. 17 razones para usarlo http://t.co/itML0Lxq… @sam1meta #marketing #sm" +"twitter","irrelevant","126866835449708544","Thu Oct 20 03:46:25 +0000 2011","#Twitter كل من ايدو الو" +"twitter","irrelevant","126866833537110016","Thu Oct 20 03:46:24 +0000 2011","Saaindo aqq ;* boa noite #twitter !" +"twitter","irrelevant","126866827715420160","Thu Oct 20 03:46:23 +0000 2011","@Alx2495 jaja y entons que quieres que te explique del #Twitter ???" +"twitter","irrelevant","126866789962498048","Thu Oct 20 03:46:14 +0000 2011","@k_vanee hahaha entonces q espera para irse a dormir?? aja pero el vicio del #twitter le gana mas! hahaha uii no señito eso no se hace lol" +"twitter","irrelevant","126866773705375744","Thu Oct 20 03:46:10 +0000 2011","@Giiz_arg yo me quedare en #twitter" +"twitter","irrelevant","126866759792852992","Thu Oct 20 03:46:07 +0000 2011","@SteffiSavillon Hahaha aplicando el buen #Twitter para las buenas tiraderas!" +"twitter","irrelevant","126866701877903361","Thu Oct 20 03:45:53 +0000 2011","@Giiz_arg pero lo originales son los que ya tienen mas de un año con #twitter o algunos meses o que no?" +"twitter","irrelevant","126866689236283392","Thu Oct 20 03:45:50 +0000 2011","@Elitotora Ajajaja!! 
Pero si avisé poh!! Acá en el #twitter, ademá q' te dije q' la iba a ver... xD" +"twitter","irrelevant","126866515122331648","Thu Oct 20 03:45:08 +0000 2011","#Twitter: Rápido, contundente, y directo. 17 razones para usarlo http://t.co/m0NP3FCK @sam1meta #marketing #sm" +"twitter","irrelevant","126866508818296832","Thu Oct 20 03:45:07 +0000 2011","Agora vou tentar dormi Game Over #Twitter #Facebook" +"twitter","irrelevant","126866454845984768","Thu Oct 20 03:44:54 +0000 2011","pensando seriamente em excluir meu #twitter" +"twitter","irrelevant","126866426521849857","Thu Oct 20 03:44:47 +0000 2011","C meus amigos do #Twitter a maioria do Brasil eu tenho dificuldade p um chat qdo trabalho se madrugada! #horariodeverao no!!!" +"twitter","irrelevant","126866385757417472","Thu Oct 20 03:44:38 +0000 2011","Puta pero da Wueba dormirse....Bueno mejor sigo en #Twitter..jejeje :)" +"twitter","irrelevant","126866312130609152","Thu Oct 20 03:44:20 +0000 2011","@ArmiiBunbury :OO no sabia que tenias #twitter ;) followme & me das followback vale ;)" +"twitter","irrelevant","126866267998134272","Thu Oct 20 03:44:10 +0000 2011","#BuenDia #Twitter Visite nuestra web y recomiendenos desde el boton tweet!" +"twitter","irrelevant","126866222355722240","Thu Oct 20 03:43:59 +0000 2011","@itiinha_lais, é @EvelynJullyanne, segue de volta esse #Twitter, a gente ta fundando a Pastoral da Juventude e precisamos de ajuda! Segue bj" +"twitter","irrelevant","126866176889454592","Thu Oct 20 03:43:48 +0000 2011","Michael Jackson Quiñonez, jugador de la Selección que está en los Panamericanos es ""TT"" tendencia mundial en #Twitter." +"twitter","irrelevant","126866076016447489","Thu Oct 20 03:43:24 +0000 2011","Mee ustan sus besiitos x #twitter aw* es una princesiita!" +"twitter","irrelevant","126866058337460224","Thu Oct 20 03:43:20 +0000 2011","#Facebook esta asustado porque #twitter viene arrasando..." +"twitter","irrelevant","126866027316383744","Thu Oct 20 03:43:12 +0000 2011","Eres mil Veces mejor #Twitter :D" +"twitter","irrelevant","126866021402427392","Thu Oct 20 03:43:11 +0000 2011","@RiiickSilva A gente empolga nee com #Twitter #Rss" +"twitter","irrelevant","126865987365634048","Thu Oct 20 03:43:03 +0000 2011","RT : TAXI ที่นั่งมา คนขับบอà¸à¹€à¸„้าเป็นเสื้อà¹à¸”ง ... à¹à¸•่ด่าทัà¸à¸©à¸´à¸“ ... บอà¸à¸§à¹ˆà¸²à¸„นอุตส่าห์เลือภà¹à¸•่พอน้ำท่วมà¹à¸¥à¹‰à¸§à¸«à¸²à¸¢à¸«à¸±à¸§ #twitter" +"twitter","irrelevant","126865977970401280","Thu Oct 20 03:43:00 +0000 2011","Mention #Twitter and get a Pittsburgh HVAC and Electrical systems Checkup for $129 Contact http://t.co/KI7JJulI" +"twitter","irrelevant","126865966624813057","Thu Oct 20 03:42:58 +0000 2011","Los psicólogos se están quedando sin pacientes, el #twitter se los está robando.." +"twitter","irrelevant","126865936115445761","Thu Oct 20 03:42:50 +0000 2011","Pobres losers los que ponen en FB esto no es #Twitter, ósea no comparen por favor..." +"twitter","irrelevant","126865932806139904","Thu Oct 20 03:42:50 +0000 2011","Fun in sun @intework this summer #twitter" +"twitter","irrelevant","126865890665967618","Thu Oct 20 03:42:40 +0000 2011","de esa gente q acaba de leer el libro, vió una definición médica y la pone en #Twitter _ #wevatotal" +"twitter","irrelevant","126865868599726080","Thu Oct 20 03:42:34 +0000 2011","todos en #twitter conmocionados con #SoyTuFan2daTemporada" +"twitter","irrelevant","126865864086667264","Thu Oct 20 03:42:33 +0000 2011","Bueno y ya dejen ese ""trasteo masivo"" de frases de #twitter al #facebook ... CREATIVIDAD GENTE! . 
CREEAAATIIIVIIIDAAAD !" +"twitter","irrelevant","126865777423941633","Thu Oct 20 03:42:13 +0000 2011","@sundaaay ja installera om appen sÃ¥ hoppas de räcker + +#Twitter / @twitter pleas fix the app! Can't se mentions! And DM:s" +"twitter","irrelevant","126865704380153856","Thu Oct 20 03:41:55 +0000 2011","RT @HdzMarin1: Mi pasatiempo favorito hoy en día, es como ver hay personas bipolares, tanto en #Twitter como en la vida real..." +"twitter","irrelevant","126865703969099776","Thu Oct 20 03:41:55 +0000 2011","Nossa, meu BG aqui no #Twitter está uma coisa pavorosa... preciso recortar essa foto de novo... meu rosto ficou todo deformado... hahaha" +"twitter","irrelevant","126865661602447362","Thu Oct 20 03:41:45 +0000 2011","Me acabo de dar cuenta que he pasado el #tuit numero 1000!!! +#Twitter" +"twitter","irrelevant","126865633869709312","Thu Oct 20 03:41:38 +0000 2011","ã€#bot】相互フォロワー募集! +#followme #followmeJP #sougofollow #followdaibosyu #goen #followback #follow #sougo #daibosyu #twitter #time #japan" +"twitter","irrelevant","126865576093171712","Thu Oct 20 03:41:25 +0000 2011","RT @JhonAlexGarciaA: MI NOVIA Y COSTUMBRE DE DEJARME DE SEGUIR EN #TWITTER JAJJAA" +"twitter","irrelevant","126865573228445696","Thu Oct 20 03:41:24 +0000 2011","no tempo qe eu usava #twitter era bem melhor, fazia muitas amizades, hoje em dia #twitter serve apenas para falar bobeiras :s" +"twitter","irrelevant","126865566236553216","Thu Oct 20 03:41:22 +0000 2011","Mi pasatiempo favorito hoy en día, es como ver hay personas bipolares, tanto en #Twitter como en la vida real..." +"twitter","irrelevant","126865487584968704","Thu Oct 20 03:41:03 +0000 2011","xau #twitter." +"twitter","irrelevant","126865416671862785","Thu Oct 20 03:40:47 +0000 2011","Con tanto estudio me olvide de #twitter" +"twitter","irrelevant","126865410598502400","Thu Oct 20 03:40:45 +0000 2011","MI NOVIA Y COSTUMBRE DE DEJARME DE SEGUIR EN #TWITTER JAJJAA" +"twitter","irrelevant","126865389861871617","Thu Oct 20 03:40:40 +0000 2011","RT @JaylaStarr: Sex Games http://t.co/5qtmLHJn #@JaylaStarr #@JCannonDSD #sex #Twitter #video" +"twitter","irrelevant","126865304277090304","Thu Oct 20 03:40:20 +0000 2011","@lorenbsas @telerinonline @VeroTurismo @elisafn @arielmun @matchpoint20 ah si! ya es hora, pero #Twitter me tienta tanto! me encanta!" +"twitter","irrelevant","126865296463118337","Thu Oct 20 03:40:18 +0000 2011","Sex Games http://t.co/5qtmLHJn #@JaylaStarr #@JCannonDSD #sex #Twitter #video" +"twitter","irrelevant","126865215492071424","Thu Oct 20 03:39:59 +0000 2011","@Jerryfolio Bienvenido a #twitter" +"twitter","irrelevant","126865127977918464","Thu Oct 20 03:39:38 +0000 2011","Bienvenida al mundo #twitter... Te quiero mucho Azucena!!! @Elidaandrea!!!" +"twitter","irrelevant","126865092565417984","Thu Oct 20 03:39:29 +0000 2011","@fitotorresm Camarada @EderCarmona01 si tiene #Twitter & veremos la pelicula si la encuentra XD" +"twitter","irrelevant","126865045949919233","Thu Oct 20 03:39:18 +0000 2011","#Twitter #Off :( Quizá mañana comenzaré a Twittear desde un movil..Mi nuevo Nokia X2-01!! :)" +"twitter","irrelevant","126865038676987904","Thu Oct 20 03:39:16 +0000 2011","Me acabo de dar cuenta que he pasado el #tuit numero 1000!!! +#Twitter" +"twitter","irrelevant","126865014215802880","Thu Oct 20 03:39:11 +0000 2011","vo dorrmi bjuss ""#TWITTER" +"twitter","irrelevant","126864993760182272","Thu Oct 20 03:39:06 +0000 2011","Chucha! me equivoque.... 
perdón cierto que #twitter es ATP" +"twitter","irrelevant","126864955285831680","Thu Oct 20 03:38:57 +0000 2011","Hay dias que tengo tanto q pensar y digo lo voy a poner en #twitter pero no tengo tiempo !" +"twitter","irrelevant","126864954140803072","Thu Oct 20 03:38:56 +0000 2011","Que bueno que hay gente que no te lee aqui en #twitter :D" +"twitter","irrelevant","126864917566455809","Thu Oct 20 03:38:48 +0000 2011","el momento en el que me puedo desahogar, si efectividamente es en #twitter" +"twitter","irrelevant","126864908582260736","Thu Oct 20 03:38:45 +0000 2011","""Não sou viciada em dorgas nem em alcool mais sou no #Twitter fato""" +"twitter","irrelevant","126864848649846784","Thu Oct 20 03:38:31 +0000 2011","#Twitter OFF! Duerman super rico! :D" +"twitter","irrelevant","126864847399944192","Thu Oct 20 03:38:31 +0000 2011","Hoy no abri mi #twitter en todo el santo dia 5;" +"twitter","irrelevant","126864712766980096","Thu Oct 20 03:37:59 +0000 2011","aai galeraa do #twitter abraço para queem ficaa" +"twitter","irrelevant","126864692470751232","Thu Oct 20 03:37:54 +0000 2011","Es todo por hoy....#twitter off =)" +"twitter","irrelevant","126864623252144128","Thu Oct 20 03:37:37 +0000 2011","@dialepimu Ya decía yo ""ésta nena escribe muy bn me imagino que debe tener un perfil en #twitter""" +"twitter","irrelevant","126864485943214080","Thu Oct 20 03:37:05 +0000 2011","El dia de hoy esta muy flojo el #Twitter qe pasaa DDDDDDDD: la apocalipsis ok no jaja" +"twitter","irrelevant","126864460957749248","Thu Oct 20 03:36:59 +0000 2011","(y) hipocritas! RT: si quieren pedirle algo a Dios oreeeen que el asi nos escucha... no que lo escriben por #facebook y #twitter -.-'" +"twitter","irrelevant","126864441999507456","Thu Oct 20 03:36:54 +0000 2011","Es k algo le pasa a mi #twitter nada en lo que me taggean aparece ... Que sucede?" +"twitter","irrelevant","126864344125419520","Thu Oct 20 03:36:31 +0000 2011","RT @joe_joseff: De acuerdo a la encuesta hecha por mi la mayoría de mis amigos de #Facebook no saben utilizar #twitter xD #QueMensos" +"twitter","irrelevant","126864341071962112","Thu Oct 20 03:36:30 +0000 2011","@matheusdcb Me segue de volta Matheus ajeitei meu #twitter hj." +"twitter","irrelevant","126864316120047617","Thu Oct 20 03:36:24 +0000 2011","No me puedo dormir, pero me quedo con mi mejor amigo... #TWITTER." +"twitter","irrelevant","126864259027181568","Thu Oct 20 03:36:11 +0000 2011","Este #TWITTER se esta saturando mucho:@" +"twitter","irrelevant","126864201804300289","Thu Oct 20 03:35:57 +0000 2011","#TWITTER, minha salvação ... =D" +"twitter","irrelevant","126864188969725953","Thu Oct 20 03:35:54 +0000 2011","meu #Twitter ta a coisa mais linda rs !!" +"twitter","irrelevant","126864074045784064","Thu Oct 20 03:35:26 +0000 2011","Graacias :D RT @RaHorajty051230 @JeMaZuCe buena pic de perfil de #twitter!!" 
+"twitter","irrelevant","126864072447766528","Thu Oct 20 03:35:26 +0000 2011","De acuerdo a la encuesta hecha por mi la mayoría de mis amigos de #Facebook no saben utilizar #twitter xD #QueMensos" +"twitter","irrelevant","126864016613183489","Thu Oct 20 03:35:13 +0000 2011","jajajajaja la @mignonmc contandome un secretoo ii x el #twitter jajajaja" +"twitter","irrelevant","126863938339094531","Thu Oct 20 03:34:54 +0000 2011","#FB no es mi amigo, por eso mejor #Twitter , al menos ahorita" +"twitter","irrelevant","126863845326209024","Thu Oct 20 03:34:32 +0000 2011","@CriistianZepeda oyendo musica, chat & en #TWITTER" +"twitter","irrelevant","126863808177254400","Thu Oct 20 03:34:23 +0000 2011","RT @MizzSpicey: #TeamFollowBack #TFB #MustFollow #MF #FollowNow #FN #FollowFriday #FF #IFB #Twitter #Follow @Only1DeejayER << oooh ..." +"twitter","irrelevant","126863734265217025","Thu Oct 20 03:34:05 +0000 2011","#twitter se me hiso vicio" +"twitter","irrelevant","126863732931432448","Thu Oct 20 03:34:05 +0000 2011","RT @Daniel_Evilla: ese poco de pensamientos y frases de manes en #twitter... son pura pendejadas me imagino que no cumplen con ninguno." +"twitter","irrelevant","126863723179675648","Thu Oct 20 03:34:03 +0000 2011","RT @_MARCELOMENDES: Sigam no #Twitter, vale muito legal! www.twitter.com/TREINANDOLIDER" +"twitter","irrelevant","126863722412118017","Thu Oct 20 03:34:03 +0000 2011","Ya el #twitter como que me esta dejando de parecer divertido..." +"twitter","irrelevant","126863708889690112","Thu Oct 20 03:33:59 +0000 2011","o #twitter pedi : no que esta pensando agora ? se fosse pra mim só falar isso, iria ficar escrevendo o dia inteiro milena, milena! @mi_qw s2" +"twitter","irrelevant","126863696726200320","Thu Oct 20 03:33:56 +0000 2011","#TeamFollowBack #TFB #MustFollow #MF #FollowNow #FN #FollowFriday #FF #IFB #Twitter #Follow @Only1DeejayER << oooh DAMN nice pic!!! #Ladies" +"twitter","irrelevant","126863647845789696","Thu Oct 20 03:33:45 +0000 2011","@marthaubaque1 Que bacano hermanita que estés en la honda del #Twitter :)" +"twitter","irrelevant","126863594850746369","Thu Oct 20 03:33:32 +0000 2011","Meu #Deus ilumine aqueles que eu amo: @mayaraangelica @andreeybaltieri @Amandafl_ e o @FabrcioCamargo os meus amigos d #TWITTER :)" +"twitter","irrelevant","126863579914829824","Thu Oct 20 03:33:29 +0000 2011","mm but yeo .. #twitter .. free up @_tashaaiWESTEND ! free up @_tashaaiWESTEND ! free up @_tashaaiWESTEND !" +"twitter","irrelevant","126863563133431809","Thu Oct 20 03:33:25 +0000 2011","ese poco de pensamientos y frases de manes en #twitter... son pura pendejadas me imagino que no cumplen con ninguno." +"twitter","irrelevant","126863551355822080","Thu Oct 20 03:33:22 +0000 2011","@danielpulido03 Con el tiempo querido amigo entenderas pronto aprenderas... alabado sea #twitter el diario de #personasinviables :D" +"twitter","irrelevant","126863538626109440","Thu Oct 20 03:33:19 +0000 2011","[aa] , suavi bando de cornos & cornas , nun desejo boa noitte pra min , uma maldição pra voçs viciados en #Twitter , haha ++" +"twitter","irrelevant","126863416211156992","Thu Oct 20 03:32:50 +0000 2011","@nachogoano tiene 146692 seguidores, a la masividad de #twitter es sorprendente." 
+"twitter","irrelevant","126863368492564481","Thu Oct 20 03:32:38 +0000 2011","tengo q ir a dormir pero no qiero xD #Twitter =D" +"twitter","irrelevant","126863360779227136","Thu Oct 20 03:32:36 +0000 2011","Juelaa de verdd qe si me ahce fata #twitter" +"twitter","irrelevant","126863360686956544","Thu Oct 20 03:32:36 +0000 2011","@PorzelinCako jaja yo tmb esoy como @maffer_rdz como no tengo ni perro ps minimo #Twitter jaja" +"twitter","irrelevant","126863321327611904","Thu Oct 20 03:32:27 +0000 2011","y ustedes se ga ga ga ga cuando el mago les manda por #Twitter" +"twitter","irrelevant","126863319226265600","Thu Oct 20 03:32:26 +0000 2011","@NaniRicciHitch Mais oooh! Je #Joke comme on dit sur #twitter -___-' On papote demain! :) Bises" +"twitter","irrelevant","126863283658567680","Thu Oct 20 03:32:18 +0000 2011","#Twitter esta loco!" +"twitter","irrelevant","126863189844566016","Thu Oct 20 03:31:56 +0000 2011","A Dormir.! #Twitter OFF" +"twitter","irrelevant","126863136111333376","Thu Oct 20 03:31:43 +0000 2011","Tengo que bancar propaganda de política en la calle, en la radio, la tele, panfletos en mi casa... ahora en #Twitter también? no rompan!!" +"twitter","irrelevant","126863118398787584","Thu Oct 20 03:31:39 +0000 2011","Me voy a dormir... Goodnight #twitter" +"twitter","irrelevant","126863108802232320","Thu Oct 20 03:31:36 +0000 2011","@mnoo_ana @g6wa1998 Etha Nim. w blyala tbon #twitter :p" +"twitter","irrelevant","126863108688986112","Thu Oct 20 03:31:36 +0000 2011","QUE VIDEO FACEBOOK CON LA OPCION DE VIDEOLLAMADA LE MONTARON LA COMPETENCIA A #MSN #SKYPE #TWITTER ES ORIGINAL TIENE #TWICAM ;)" +"twitter","irrelevant","126863078112509952","Thu Oct 20 03:31:29 +0000 2011","@YasserDVM jaja ya vez una qe es ineligene y estudia en #Twitter jajaj ntc" +"twitter","irrelevant","126863012433899520","Thu Oct 20 03:31:13 +0000 2011","@maffkilleritta #twitter se puso de nena y no me dejaba contestar" +"twitter","irrelevant","126862981333139457","Thu Oct 20 03:31:06 +0000 2011","ツイッター検索 #minsyu - モãƒã‚¤ãƒ«ç‰ˆ http://t.co/EsyvJ7Q7 #twitter #æ¤œç´¢çµæžœ" +"twitter","irrelevant","126862918921891840","Thu Oct 20 03:30:51 +0000 2011","Ya chao #twitter !" +"twitter","irrelevant","126862904514445312","Thu Oct 20 03:30:48 +0000 2011","Ya Me voy Adios! #twitter por que esta muy Fome :S" +"twitter","irrelevant","126862852681240576","Thu Oct 20 03:30:35 +0000 2011","Hasta mañana #Twitter" +"twitter","irrelevant","126862832489861121","Thu Oct 20 03:30:30 +0000 2011","ô negocio parado aqui tá o #msn e o #twitter" +"twitter","irrelevant","126862813355442176","Thu Oct 20 03:30:26 +0000 2011","Maldito seas por siempre #twitter" +"twitter","irrelevant","126862789649248256","Thu Oct 20 03:30:20 +0000 2011","ãªã«ã‚„らフォロー制é™ã«å¼•ã£æŽ›ã‹ã£ãŸã‚ˆã†ã§ã™â€¦ã‚‚ã£ã¨ãƒ•ォロアーを増やã•ãªãã¡ã‚ƒâ™ªã€€#followme #followmejp #follow #フォロー #japan #日本 #twitter " +"twitter","irrelevant","126862725933576192","Thu Oct 20 03:30:05 +0000 2011","Foto nova aqui no #Twitter. Essa ficou melhor.... rs +#Adoro esse blazer. Preciso comprar um... 
#CTX arrazando nas roupas que eu visto no pgm" +"twitter","irrelevant","126862722028666880","Thu Oct 20 03:30:04 +0000 2011","#twitter #detik Ali Imron dan Imam Samudra Survei Paddys Pub http://t.co/otUl1ujQ" +"twitter","irrelevant","126862722007699456","Thu Oct 20 03:30:04 +0000 2011","#twitter #detik Ketua Dewan Adat Papua Jadi Tersangka Kasus Makar http://t.co/Jzqi1JUI" +"twitter","irrelevant","126862721793789953","Thu Oct 20 03:30:04 +0000 2011","#twitter #detik Ada Ledakan, TransJ Pinang Ranti-Pluit Tetap Beroperasi http://t.co/is9X23JR" +"twitter","irrelevant","126862721609248769","Thu Oct 20 03:30:04 +0000 2011","#twitter #detik SBY Bertemu PM Malaysia di Lombok - Setelah meresmikan Bandara Internasional Lombok, Presiden SBY me... http://t.co/h8PI2sjX" +"twitter","irrelevant","126862719927332864","Thu Oct 20 03:30:04 +0000 2011","#twitter #detik Tak Ada Api di TransJ yang Meledak di SPBG Pinang Ranti http://t.co/OTIJLXp7" +"twitter","irrelevant","126862668039598080","Thu Oct 20 03:29:51 +0000 2011","RT @edwin_ubaque: Hola @marthaubaque bienvenia a #twitter" +"twitter","irrelevant","126862654324211713","Thu Oct 20 03:29:48 +0000 2011","Seremos pelotudos, pero somos una bocha #Twitter" +"twitter","irrelevant","126862595117424641","Thu Oct 20 03:29:34 +0000 2011","Hace cuanto que no me conecto al msn, ya no le doy tanta bola al facebook, y todo por tu culpa #twitter" +"twitter","irrelevant","126862536162295808","Thu Oct 20 03:29:20 +0000 2011","tenho que ir dormir mais o #twitter naum deixa :S" +"twitter","irrelevant","126862464003477504","Thu Oct 20 03:29:03 +0000 2011","#TeamFollowBack #iFollowBack #InstantFollowBack #Follow4Follow #network #tlow #twitter" +"twitter","irrelevant","126862447041712128","Thu Oct 20 03:28:59 +0000 2011","RT @anfisabreus Как наÑтроить кроÑÑпоÑтинг комментариев Ñ Ð±Ð»Ð¾Ð³Ð° на Твиттер http://t.co/6DLxWIlU #twitter" +"twitter","irrelevant","126862429912186880","Thu Oct 20 03:28:54 +0000 2011","Hoy definitivamente noche para la risa en #twitter" +"twitter","irrelevant","126862343148802048","Thu Oct 20 03:28:34 +0000 2011","Me voy un rato del #twitter y todo lo que me entero al volver ¬¬!..." +"twitter","irrelevant","126862329269858305","Thu Oct 20 03:28:30 +0000 2011","#twitter sos un vicioooooo" +"twitter","irrelevant","126862315990679552","Thu Oct 20 03:28:27 +0000 2011","Bonjour #twitter" +"twitter","irrelevant","126862308302532608","Thu Oct 20 03:28:25 +0000 2011","El #twitter ha cambiado mi vida!" +"twitter","irrelevant","126862269148692481","Thu Oct 20 03:28:16 +0000 2011","@luisrodrigogali ajjaja noo nooo poes porqq se va a enterarr a menos q tengaa #Twitter jajaja" +"twitter","irrelevant","126862268725080065","Thu Oct 20 03:28:16 +0000 2011","me acabo de dar cuenta q tengo mas seguidores en #Twitter :D gracias y espero q sigan sumandoo mas y mas :D para seguir aun mas activo aki" +"twitter","irrelevant","126862217013497856","Thu Oct 20 03:28:04 +0000 2011","RT @guuuif: Meu twitter tá ficando velho já, My #twitter age is 285 days 8 hours 52 minutes 40 seconds" +"twitter","irrelevant","126862201440043008","Thu Oct 20 03:28:00 +0000 2011","No sé que haría sin #Twitter. Bueno sí, probablemente mis tareas. :)" +"twitter","irrelevant","126862151343284224","Thu Oct 20 03:27:48 +0000 2011","Alguien ha escuchado hablar de #blaving ? 
Es parecido al #twitter pero con voz ;)" +"twitter","irrelevant","126862150265352193","Thu Oct 20 03:27:48 +0000 2011","RT @social_net_info: ã¿ã‚“ãªã§ã¤ãã‚‹Twitter連動型çµå©šå¼ãƒ ãƒ¼ãƒ“ーサービス「Congratweet(コングラッツイート)ã€: http://t.co/9osWU51e #Twitter" +"twitter","irrelevant","126862116874489856","Thu Oct 20 03:27:40 +0000 2011","o @RafaArrudaa olha o papel de fundo do meu #TWITTER se vai raxa #HSAUHSAUASHASUHSUSHAUHSUASHUASHASHUASH'" +"twitter","irrelevant","126861992001679360","Thu Oct 20 03:27:10 +0000 2011","hace unos días tenía 711 mensajes de e-mail, lo que más quiero es leerlos todos, acabo de ver y tengo 736 y lo peor #facebook & #twitter= 0" +"twitter","irrelevant","126861988004499457","Thu Oct 20 03:27:09 +0000 2011","Arrumando meu #twitter :D" +"twitter","irrelevant","126861979762696192","Thu Oct 20 03:27:07 +0000 2011","Tinha Esquecido Do #Twitter Ç.Ç Nunca Fiz Isso !Rsrsrs" +"twitter","irrelevant","126861955762888706","Thu Oct 20 03:27:01 +0000 2011","@Markeenn meu #Twitter ==> @isaiasx19 Sumiu #rsrsr" +"twitter","irrelevant","126861933071704065","Thu Oct 20 03:26:56 +0000 2011","#putamadre hay gente que pasa todo el día poniendo sus problemas de amor cada 5 minutos en #FB consiganse una cuenta de #twitter losers!" +"twitter","irrelevant","126861782378745856","Thu Oct 20 03:26:20 +0000 2011","Hola! bienvenidos al mundo en cuando #twitter se convierte en una adicción y no podés estar un día sin entrar ... ahhh" +"twitter","irrelevant","126861771314184192","Thu Oct 20 03:26:17 +0000 2011","Ehhhh-.- yOO Diiqooo lO que quiierooo xk es Miii #Twitter :(" +"twitter","irrelevant","126861717790658561","Thu Oct 20 03:26:05 +0000 2011","kk eu queria muito mudar meu #BG aqui do #twitter :D" +"twitter","irrelevant","126861611741872128","Thu Oct 20 03:25:39 +0000 2011","@cagimo mire la última foto que monte al #Twitter, ayude a su amigo a la colección :D :D :D #Indirecta" +"twitter","irrelevant","126861593249185793","Thu Oct 20 03:25:35 +0000 2011","to saindo aqui pessoal do #twitter, bjbj e abraços." +"twitter","irrelevant","126861589122002944","Thu Oct 20 03:25:34 +0000 2011","Gracias a ti hoy puedo ser feliz ,i cuando llegaste aprendi a vivir .Gracias #twitter" +"twitter","irrelevant","126861539511775232","Thu Oct 20 03:25:22 +0000 2011","Saindo aqui do #Twitter.." +"twitter","irrelevant","126861496109121536","Thu Oct 20 03:25:12 +0000 2011","コミュニティファクトリーã€5カ国語対応ã®ã‚¹ãƒžãƒ¼ãƒˆãƒ•ォンカメラアプリ「DECOPICã€ï¼ˆCNET Japan) ⇒ http://t.co/xghcsgx5 #twitter" +"twitter","irrelevant","126861493059854336","Thu Oct 20 03:25:11 +0000 2011","アップルã€S・ジョブズæ°ã‚’ã—ã®ã¶è¿½æ‚¼ãƒšãƒ¼ã‚¸ã‚’公開(CNET Japan) ⇒ http://t.co/NnwaTSiH #twitter" +"twitter","irrelevant","126861480128819201","Thu Oct 20 03:25:08 +0000 2011","no los menciono por que me calientan el #twitter" +"twitter","irrelevant","126861380992241664","Thu Oct 20 03:24:44 +0000 2011","voou saiir do #twitter , e vou fica peelo eemi .. o paai qe se fooda k- beijo gente ." +"twitter","irrelevant","126861357831303168","Thu Oct 20 03:24:39 +0000 2011","ツイッターを利用ã—ã¦æ„Ÿè¬ã®æ°—æŒã¡ã¨ã¨ã‚‚ã«ç´„ï¼’ï¼ï¼ï¼å††ãŒæŒ¯ã‚Šè¾¼ã¾ã‚Œç¶šã‘る方法→ココ→ http://t.co/TyjUGsnQ â†ï½ºï½ºâ† :) #ビジãƒã‚¹ #稼ã #副業 #Twitter #ツイッター" +"twitter","irrelevant","126861252134830080","Thu Oct 20 03:24:14 +0000 2011","#Twitter Hoje ta #BONBANO só #ELITE poooha" +"twitter","irrelevant","126861245109387264","Thu Oct 20 03:24:12 +0000 2011","Un Algodón mkon, me invadió el #Twitter ._. 
xd" +"twitter","irrelevant","126861233138827264","Thu Oct 20 03:24:09 +0000 2011","Tenia qe comentar en #Twitter el concierto de caifanes poca modre yeah!!!!!" +"twitter","irrelevant","126861218592980992","Thu Oct 20 03:24:06 +0000 2011","que tengas #twitter no significa que sabes que es #tweetear xD" +"twitter","irrelevant","126861187605471232","Thu Oct 20 03:23:58 +0000 2011","mm por q casi nadie sabe utilizar #TWITTER??" +"twitter","irrelevant","126861118684676096","Thu Oct 20 03:23:42 +0000 2011","DICEN q estoy viciada con #twitter... Pero mmm... NO!" +"twitter","irrelevant","126861100120686592","Thu Oct 20 03:23:37 +0000 2011","@maffer_rdz si de heco igual a mi ya casi no me gusa de hecho siempre lo primero qe habro es #Twitter es mas facil qe me contacten aqi jeje" +"twitter","irrelevant","126860970583797760","Thu Oct 20 03:23:07 +0000 2011","@maxpowertoloza bienvenido #TWITTER jejejeeje buena!!!" +"twitter","irrelevant","126860953450065920","Thu Oct 20 03:23:02 +0000 2011","@Frank_2869 hey q pedo como vamos en #TWITTER" +"twitter","irrelevant","126860911557345280","Thu Oct 20 03:22:52 +0000 2011","Me acuerdo cuando abri #twitter el primer follow que di fue a #PistokO" +"twitter","irrelevant","126860890749419521","Thu Oct 20 03:22:47 +0000 2011","soy un guacamaya en #twitter" +"twitter","irrelevant","126860877344407552","Thu Oct 20 03:22:44 +0000 2011","RT @monizetru: Lo mejor de #twitter sucede en la madruga, cuando los continentes abrazan un no sé qué que a mí me (falta)>SOBRA!" +"twitter","irrelevant","126860835892109312","Thu Oct 20 03:22:34 +0000 2011","@Yesenia_Rivero puxi losiento deverdad q toy medio alejadoo de #twitter pero pronto estare al dia.. cuidate muxo. besitos a distancia bay!!!" +"twitter","irrelevant","126860830942834689","Thu Oct 20 03:22:33 +0000 2011","el #twitter se vuelve Bisiiioo" +"twitter","irrelevant","126860808251637762","Thu Oct 20 03:22:28 +0000 2011","Com 250 milhões de mensagens diárias, Twitter cogita pagar por tuítes #Twitter http://t.co/sucRDj5Y via @idgnow" +"twitter","irrelevant","126860789536657408","Thu Oct 20 03:22:23 +0000 2011","Hola #Twitter, y también hola estúpido caso clínico. u.u" +"twitter","irrelevant","126860754912681985","Thu Oct 20 03:22:15 +0000 2011","@aleExlima93 si voz lo que me perdi, pero almenos lo disfrute, #twitter lo mejor" +"twitter","irrelevant","126860740689797120","Thu Oct 20 03:22:12 +0000 2011","o @_Netex foi perguntar pra @Eriikuxa se o novo #Twitter dela é @vsf #eurir kkkkkkk' e ainda disse a ela q foi eu q perguntei ¬¬" +"twitter","irrelevant","126860719860883458","Thu Oct 20 03:22:07 +0000 2011","Realmente no se por que ponen cosas de #twitter en fb!! vv facebook es FB! Y twitter es TWITTER!! C'mon! vv" +"twitter","irrelevant","126860653167263744","Thu Oct 20 03:21:51 +0000 2011","Será que antes de tener #Twitter le decía a la gente cuanta cosa se me ocurriera?" +"twitter","irrelevant","126860610020446209","Thu Oct 20 03:21:41 +0000 2011","NOSSA *OOOOOOOO* já n basta aquela #BALEIA CHATA DO #TWITTER agora tem tartaruga! :/ rsrsrss q merda !" +"twitter","irrelevant","126860576600227841","Thu Oct 20 03:21:33 +0000 2011","#MichaelJakcsonQuiñonez wowo ya es un tema muy comentado en #twitter" +"twitter","irrelevant","126860372987740160","Thu Oct 20 03:20:44 +0000 2011","Sigo resentida porque yo quería ver solamente a cobra ☺ @Evenpro para la próxima no metan tanto la pata! #Twitter OFF" +"twitter","irrelevant","126860329463447552","Thu Oct 20 03:20:34 +0000 2011","Cuantos tweets al dia es capaz de hacer una persona? 
#twitter" +"twitter","irrelevant","126860146289803265","Thu Oct 20 03:19:50 +0000 2011","vou saindo do #TWITTER ...." +"twitter","irrelevant","126860127167987712","Thu Oct 20 03:19:45 +0000 2011","@EdiSant1 tah di mais em mlk fez um #twitter haha" +"twitter","irrelevant","126860109090521088","Thu Oct 20 03:19:41 +0000 2011","Creo q cuando una persona ya no te quiere te borrade su #twitter" +"twitter","irrelevant","126859990513369088","Thu Oct 20 03:19:13 +0000 2011","El live stream del server de IPN esta over capacity por @SoytuFan_mx no la caguen si no es #twitter" +"twitter","irrelevant","126859860640940032","Thu Oct 20 03:18:42 +0000 2011","De volta no #Twitter.... Boa noite pessoas'" +"twitter","irrelevant","126859846506127360","Thu Oct 20 03:18:39 +0000 2011","@StevenRestern Wvn soi novato en #Twitter :( me explica :D" +"twitter","irrelevant","126859794383515649","Thu Oct 20 03:18:26 +0000 2011","Abonnez vous @TunisieNews2011 et Suivez toute l' #actualité sur #Twitter | #Tunisie #TunisieNews #News" +"twitter","irrelevant","126859777308495872","Thu Oct 20 03:18:22 +0000 2011","A mimir!!!! Con el higado entrincado...! Bendita familia....!! :/ #twitter.off" +"twitter","irrelevant","126859668151738368","Thu Oct 20 03:17:56 +0000 2011","Lo mejor de #twitter sucede en la madruga, cuando los continentes abrazan un no sé qué que a mí me falta." +"twitter","irrelevant","126859627051757568","Thu Oct 20 03:17:46 +0000 2011","De novo mais uma vez eu akie de madrugada falando sozinho no #Twitter =S" +"twitter","irrelevant","126859610006110208","Thu Oct 20 03:17:42 +0000 2011","@Crissp16 Si voz aunque sea solo #twitter el #facebook no importa tanto" +"twitter","irrelevant","126859509414100992","Thu Oct 20 03:17:18 +0000 2011","YA SE PRENDIO LA MECHA ENTRE MEXICO vs ECUADOR #TWITTER! EL PARTIDO TERMINO PRENDIDO Y AHORA POR #MichaelJacksonQuiñonez LO CREEN OFENSIVO!!" +"twitter","irrelevant","126859503495938048","Thu Oct 20 03:17:17 +0000 2011","@maffer_rdz jajaja asu encerio qe si me ganas yo no tantos y eso qe casi odo el dia checo #Twitter :D" +"twitter","irrelevant","126859490061598720","Thu Oct 20 03:17:14 +0000 2011","me dio pereza buscar su usuario en #twitter :P+" +"twitter","irrelevant","126859443127332864","Thu Oct 20 03:17:02 +0000 2011","RT @mlucia187: Atrum es TT!!!! Jajajaja!!! Q buen nivel el de #twitter esta noche!! Gracias por existir #ElElegido :)" +"twitter","irrelevant","126859428455657472","Thu Oct 20 03:16:59 +0000 2011","Y asi cambie las cosas en el #facebook y en #twitter! =D" +"twitter","irrelevant","126859363079041024","Thu Oct 20 03:16:43 +0000 2011","o @jcsantos22f1 sumiuuuuuuuuuuuuuuu D, #TWITTER kkkkkk + apareceu n,msn... saudadeeees" +"twitter","irrelevant","126859354614939648","Thu Oct 20 03:16:41 +0000 2011","Hasta el puto #Twitter me esta cagando ¬¬'" +"twitter","irrelevant","126859286558158849","Thu Oct 20 03:16:25 +0000 2011","Buenas noches, sigan pasandosela bn .. ok no #Twitter Off :3" +"twitter","irrelevant","126859257856540672","Thu Oct 20 03:16:18 +0000 2011","Yo no toy en #Twitter hoy ando cansada :/" +"twitter","irrelevant","126859211593351168","Thu Oct 20 03:16:07 +0000 2011","http://t.co/9SWZtOM6 +#Twitter, faster than #earthquakes +تويتر أسرع من الزلزال! #funny #fast #tweet #earthquick #LOL" +"twitter","irrelevant","126859155175772161","Thu Oct 20 03:15:54 +0000 2011","o sea primera vez que @carlo_patricio me da reply y a #TWITTER se le ocurre borrarlo? #Carajamadre! 
:(" +"twitter","irrelevant","126858835058098176","Thu Oct 20 03:14:37 +0000 2011","#TeamFollowBack #TFB #MustFollow #MF #FollowNow #FN #FollowFriday #FF #IFB #Twitter #Follow @GabeMoormanLaw @AshliSeumanu953 @pamperedplay" +"twitter","irrelevant","126858789868670976","Thu Oct 20 03:14:27 +0000 2011","Cuando no tengo nada que hacer #twitter, cuando estoy haciendo tareas #twitter, cuando estoy en #twitter no hago NADA" +"twitter","irrelevant","126858639855206400","Thu Oct 20 03:13:51 +0000 2011","#TeamFollowBack #TFB #MustFollow #MF #FollowNow #FN #FollowFriday #FF #IFB #Twitter #Follow @all3rgict0y0u @yibumsuk @DatFix @Alleyupi" +"twitter","irrelevant","126858260216160256","Thu Oct 20 03:12:20 +0000 2011","Deiixando o #twitter BOA NOITE a todos qual quer coisa só chamarem no facebook! http://t.co/LOeZAYeE #FUI" +"twitter","irrelevant","126858248325308416","Thu Oct 20 03:12:17 +0000 2011","Now tuned in to http://t.co/DYMDmU3z with @deejaydenco! #WorldWide #Twitter #Facebook join me!!! #RT" +"twitter","irrelevant","126858186962644992","Thu Oct 20 03:12:03 +0000 2011","@jessnovelo el #twitter? Jajaja" +"twitter","irrelevant","126858148878368769","Thu Oct 20 03:11:54 +0000 2011","deus ajuda quem no #twitter madruga haha'" +"twitter","irrelevant","126858034466131969","Thu Oct 20 03:11:26 +0000 2011","Nunca comprendí para que sirve el #Twitter" +"twitter","irrelevant","126858032951996416","Thu Oct 20 03:11:26 +0000 2011","""@julioeffio: @chriseffio yo juraba que te habian hackeado el #Twitter ...TODO fue tan #minipandi"" @julioeffio love u! ♥" +"twitter","irrelevant","126858004690767872","Thu Oct 20 03:11:19 +0000 2011","@jorgetb7 jajajajajaja mejor dilo x la radio pues jaja si lo publicas en #twitter me coso el ojo noma ? Jajaja" +"twitter","irrelevant","126857921068941314","Thu Oct 20 03:10:59 +0000 2011","msn, quase ninguem, facebook paradão geral despedindo, #twitter geral ralando :S'" +"twitter","irrelevant","126857918929838080","Thu Oct 20 03:10:59 +0000 2011","a mimir Nos leemos mañanaa #Twitter Off" +"twitter","irrelevant","126857746200014849","Thu Oct 20 03:10:18 +0000 2011","@sniferl4bs O.o Estoy interesado en aprender cómo desarrollar un #bot para #Twitter y quizás para #Identica tambien, :) Luego compartes, no." +"twitter","irrelevant","126857736238530560","Thu Oct 20 03:10:15 +0000 2011","Vamos a ver que esta ofreciendo #Twitter" +"twitter","irrelevant","126857518591901698","Thu Oct 20 03:09:23 +0000 2011","@DayaThomas Amiga le doy mi Bienvenida al #Twitter!!! Kisesessesesesesesese" +"twitter","irrelevant","126857511230902272","Thu Oct 20 03:09:22 +0000 2011","Hoy si es hora de dormir ツ #twitter&facebookOFF" +"twitter","irrelevant","126857421321797634","Thu Oct 20 03:09:00 +0000 2011","uff... así con el #twitter... nada que hacer al respecto #anoquesociólogo" +"twitter","irrelevant","126857383715676160","Thu Oct 20 03:08:51 +0000 2011","@ppmorraz hahaha esos chistes de #ninel se han vuelto famosos en #twitter" +"twitter","irrelevant","126857361515216897","Thu Oct 20 03:08:46 +0000 2011","que como #FACEBOOK ya paso de moda AHORS TODA A PLEBE SE VIENE A #twitter ¬¬" +"twitter","irrelevant","126857211921174528","Thu Oct 20 03:08:10 +0000 2011","cuando van a entender que el #twitter no es un chat jajajaja" +"twitter","irrelevant","126857080513638400","Thu Oct 20 03:07:39 +0000 2011","Me dijeron q me vuelva al país de los soretes y llegue a #Twitter con todos ustedes :D" +"twitter","irrelevant","126857071667847168","Thu Oct 20 03:07:37 +0000 2011","saiiidoooo aqui do #twitter . 
só Msn agr add aii danilosooares@hotmail.com ;DD" +"twitter","irrelevant","126857049920385024","Thu Oct 20 03:07:32 +0000 2011","رقم الÙلو +والÙلورز +والتويتات +للبيع +لاعلى +سعر +#TweetOfTheDay +#Twitter http://t.co/iIwDNk0n" +"twitter","irrelevant","126856764242137088","Thu Oct 20 03:06:24 +0000 2011","Oiie gente boa madruga kkkk,depois de passar um tempão sem mexer no #twitter agora resolvir mexer de vol... x.x" +"twitter","irrelevant","126856732331884545","Thu Oct 20 03:06:16 +0000 2011","ela pensa que eu não faria por ela tuudo isso que ela citou no #twitter dela ¬.¬" +"twitter","irrelevant","126856603021484032","Thu Oct 20 03:05:45 +0000 2011","Buenas Noches Followers :3 #twitter-off" +"twitter","irrelevant","126856425371746304","Thu Oct 20 03:05:03 +0000 2011","Ciúmes do meu #Twitter não da né?! :>" +"twitter","irrelevant","126856274531991552","Thu Oct 20 03:04:27 +0000 2011","Sin más a que hacer referencia me voy pal carajo a dormir buenas noches a los escasos panas q tengo en el #twitter" +"twitter","irrelevant","126856135918620673","Thu Oct 20 03:03:54 +0000 2011","@thai_q liga pra acordar ele e fala q foi pedido #twitter kkkkk" +"twitter","irrelevant","126856097863708672","Thu Oct 20 03:03:45 +0000 2011","#Twitter ya vale 8,000 mdd http://t.co/gqxbJO7J 4.1 millones de habitantes en México tienen una cuenta" +"twitter","irrelevant","126856097431699456","Thu Oct 20 03:03:45 +0000 2011","@nataliagdl1207 hahahaha bby, obviamente no es para ti, ni para nadie en #Twitter" +"twitter","irrelevant","126855687060987904","Thu Oct 20 03:02:07 +0000 2011","me re copè con #twitter" +"twitter","irrelevant","126855171702661120","Thu Oct 20 03:00:04 +0000 2011","Buenas noches genteeee :) #twitter los quierooo ..." +"twitter","irrelevant","126854999442587648","Thu Oct 20 02:59:23 +0000 2011","#twitter tiene la mala costumbre de ponerce bno cuano yo me voy :/" +"twitter","irrelevant","126854818101858304","Thu Oct 20 02:58:40 +0000 2011","Oi @flaviasansi. Muito bem vinda ao meu #Twitter. Sempre dou followback pelo meu perfil profissional. Permaneça por aqui, certo? Abrass!" +"twitter","irrelevant","126854423317188608","Thu Oct 20 02:57:06 +0000 2011","Eles arrastaram os barcos para a praia, deixaram tudo e #seguiram Jesus.(Lucas 5-11)e este foii o primeiro #twitter da historia humana #RT" \ No newline at end of file diff --git a/minor_project/bin/activate b/minor_project/bin/activate new file mode 100644 index 0000000..6273acd --- /dev/null +++ b/minor_project/bin/activate @@ -0,0 +1,84 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + + +if [ "${BASH_SOURCE-}" = "$0" ]; then + echo "You must source this script: \$ source $0" >&2 + exit 33 +fi + +deactivate () { + unset -f pydoc >/dev/null 2>&1 + + # reset old environment variables + # ! [ -z ${VAR+_} ] returns true if VAR is declared at all + if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then + PATH="$_OLD_VIRTUAL_PATH" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then + PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then + hash -r 2>/dev/null + fi + + if ! 
[ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then + PS1="$_OLD_VIRTUAL_PS1" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + if [ ! "${1-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV='/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project' +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +if ! [ -z "${PYTHONHOME+_}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1-}" + if [ "x" != x ] ; then + PS1="${PS1-}" + else + PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}" + fi + export PS1 +fi + +# Make sure to unalias pydoc if it's already there +alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true + +pydoc () { + python -m pydoc "$@" +} + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then + hash -r 2>/dev/null +fi diff --git a/minor_project/bin/activate.csh b/minor_project/bin/activate.csh new file mode 100644 index 0000000..cad7805 --- /dev/null +++ b/minor_project/bin/activate.csh @@ -0,0 +1,55 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . + +set newline='\ +' + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH:q" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT:q" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV '/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project' + +set _OLD_VIRTUAL_PATH="$PATH:q" +setenv PATH "$VIRTUAL_ENV:q/bin:$PATH:q" + + + +if ('' != "") then + set env_name = '' +else + set env_name = '('"$VIRTUAL_ENV:t:q"') ' +endif + +if ( $?VIRTUAL_ENV_DISABLE_PROMPT ) then + if ( $VIRTUAL_ENV_DISABLE_PROMPT == "" ) then + set do_prompt = "1" + else + set do_prompt = "0" + endif +else + set do_prompt = "1" +endif + +if ( $do_prompt == "1" ) then + # Could be in a non-interactive environment, + # in which case, $prompt is undefined and we wouldn't + # care about the prompt anyway. + if ( $?prompt ) then + set _OLD_VIRTUAL_PROMPT="$prompt:q" + if ( "$prompt:q" =~ *"$newline:q"* ) then + : + else + set prompt = "$env_name:q$prompt:q" + endif + endif +endif + +unset env_name +unset do_prompt + +alias pydoc python -m pydoc + +rehash diff --git a/minor_project/bin/activate.fish b/minor_project/bin/activate.fish new file mode 100644 index 0000000..b1e1daa --- /dev/null +++ b/minor_project/bin/activate.fish @@ -0,0 +1,100 @@ +# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*. +# Do not run it directly. 
+ +function _bashify_path -d "Converts a fish path to something bash can recognize" + set fishy_path $argv + set bashy_path $fishy_path[1] + for path_part in $fishy_path[2..-1] + set bashy_path "$bashy_path:$path_part" + end + echo $bashy_path +end + +function _fishify_path -d "Converts a bash path to something fish can recognize" + echo $argv | tr ':' '\n' +end + +function deactivate -d 'Exit virtualenv mode and return to the normal environment.' + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + # https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling + if test (echo $FISH_VERSION | head -c 1) -lt 3 + set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH") + else + set -gx PATH "$_OLD_VIRTUAL_PATH" + end + set -e _OLD_VIRTUAL_PATH + end + + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME "$_OLD_VIRTUAL_PYTHONHOME" + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + and functions -q _old_fish_prompt + # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`. + set -l fish_function_path + + # Erase virtualenv's `fish_prompt` and restore the original. + functions -e fish_prompt + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + end + + set -e VIRTUAL_ENV + + if test "$argv[1]" != 'nondestructive' + # Self-destruct! + functions -e pydoc + functions -e deactivate + functions -e _bashify_path + functions -e _fishify_path + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV '/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project' + +# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling +if test (echo $FISH_VERSION | head -c 1) -lt 3 + set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH) +else + set -gx _OLD_VIRTUAL_PATH "$PATH" +end +set -gx PATH "$VIRTUAL_ENV"'/bin' $PATH + +# Unset `$PYTHONHOME` if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +function pydoc + python -m pydoc $argv +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # Copy the current `fish_prompt` function as `_old_fish_prompt`. + functions -c fish_prompt _old_fish_prompt + + function fish_prompt + # Run the user's prompt first; it might depend on (pipe)status. + set -l prompt (_old_fish_prompt) + + # Prompt override provided? + # If not, just prepend the environment name. 
+ if test -n '' + printf '%s%s' '' (set_color normal) + else + printf '%s(%s) ' (set_color normal) (basename "$VIRTUAL_ENV") + end + + string join -- \n $prompt # handle multi-line prompts + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/minor_project/bin/activate.ps1 b/minor_project/bin/activate.ps1 new file mode 100644 index 0000000..95504d3 --- /dev/null +++ b/minor_project/bin/activate.ps1 @@ -0,0 +1,60 @@ +$script:THIS_PATH = $myinvocation.mycommand.path +$script:BASE_DIR = Split-Path (Resolve-Path "$THIS_PATH/..") -Parent + +function global:deactivate([switch] $NonDestructive) { + if (Test-Path variable:_OLD_VIRTUAL_PATH) { + $env:PATH = $variable:_OLD_VIRTUAL_PATH + Remove-Variable "_OLD_VIRTUAL_PATH" -Scope global + } + + if (Test-Path function:_old_virtual_prompt) { + $function:prompt = $function:_old_virtual_prompt + Remove-Item function:\_old_virtual_prompt + } + + if ($env:VIRTUAL_ENV) { + Remove-Item env:VIRTUAL_ENV -ErrorAction SilentlyContinue + } + + if (!$NonDestructive) { + # Self destruct! + Remove-Item function:deactivate + Remove-Item function:pydoc + } +} + +function global:pydoc { + python -m pydoc $args +} + +# unset irrelevant variables +deactivate -nondestructive + +$VIRTUAL_ENV = $BASE_DIR +$env:VIRTUAL_ENV = $VIRTUAL_ENV + +New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH + +$env:PATH = "$env:VIRTUAL_ENV/bin:" + $env:PATH +if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) { + function global:_old_virtual_prompt { + "" + } + $function:_old_virtual_prompt = $function:prompt + + if ("" -ne "") { + function global:prompt { + # Add the custom prefix to the existing prompt + $previous_prompt_value = & $function:_old_virtual_prompt + ("" + $previous_prompt_value) + } + } + else { + function global:prompt { + # Add a prefix to the current prompt, but don't discard it. + $previous_prompt_value = & $function:_old_virtual_prompt + $new_prompt_value = "($( Split-Path $env:VIRTUAL_ENV -Leaf )) " + ($new_prompt_value + $previous_prompt_value) + } + } +} diff --git a/minor_project/bin/activate.xsh b/minor_project/bin/activate.xsh new file mode 100644 index 0000000..c37c92f --- /dev/null +++ b/minor_project/bin/activate.xsh @@ -0,0 +1,46 @@ +"""Xonsh activate script for virtualenv""" +from xonsh.tools import get_sep as _get_sep + +def _deactivate(args): + if "pydoc" in aliases: + del aliases["pydoc"] + + if ${...}.get("_OLD_VIRTUAL_PATH", ""): + $PATH = $_OLD_VIRTUAL_PATH + del $_OLD_VIRTUAL_PATH + + if ${...}.get("_OLD_VIRTUAL_PYTHONHOME", ""): + $PYTHONHOME = $_OLD_VIRTUAL_PYTHONHOME + del $_OLD_VIRTUAL_PYTHONHOME + + if "VIRTUAL_ENV" in ${...}: + del $VIRTUAL_ENV + + if "VIRTUAL_ENV_PROMPT" in ${...}: + del $VIRTUAL_ENV_PROMPT + + if "nondestructive" not in args: + # Self destruct! 
+ del aliases["deactivate"] + + +# unset irrelevant variables +_deactivate(["nondestructive"]) +aliases["deactivate"] = _deactivate + +$VIRTUAL_ENV = r"/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project" + +$_OLD_VIRTUAL_PATH = $PATH +$PATH = $PATH[:] +$PATH.add($VIRTUAL_ENV + _get_sep() + "bin", front=True, replace=True) + +if ${...}.get("PYTHONHOME", ""): + # unset PYTHONHOME if set + $_OLD_VIRTUAL_PYTHONHOME = $PYTHONHOME + del $PYTHONHOME + +$VIRTUAL_ENV_PROMPT = "" +if not $VIRTUAL_ENV_PROMPT: + del $VIRTUAL_ENV_PROMPT + +aliases["pydoc"] = ["python", "-m", "pydoc"] diff --git a/minor_project/bin/activate_this.py b/minor_project/bin/activate_this.py new file mode 100644 index 0000000..b382433 --- /dev/null +++ b/minor_project/bin/activate_this.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +"""Activate virtualenv for current interpreter: + +Use exec(open(this_file).read(), {'__file__': this_file}). + +This can be used when you must use an existing Python interpreter, not the virtualenv bin/python. +""" +import os +import site +import sys + +try: + abs_file = os.path.abspath(__file__) +except NameError: + raise AssertionError("You must use exec(open(this_file).read(), {'__file__': this_file}))") + +bin_dir = os.path.dirname(abs_file) +base = bin_dir[: -len("bin") - 1] # strip away the bin part from the __file__, plus the path separator + +# prepend bin to PATH (this file is inside the bin directory) +os.environ["PATH"] = os.pathsep.join([bin_dir] + os.environ.get("PATH", "").split(os.pathsep)) +os.environ["VIRTUAL_ENV"] = base # virtual env is right above bin directory + +# add the virtual environments libraries to the host python import mechanism +prev_length = len(sys.path) +for lib in "../lib/python3.6/site-packages".split(os.pathsep): + path = os.path.realpath(os.path.join(bin_dir, lib)) + site.addsitedir(path.decode("utf-8") if "" else path) +sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length] + +sys.real_prefix = sys.prefix +sys.prefix = base diff --git a/minor_project/bin/chardetect b/minor_project/bin/chardetect new file mode 100755 index 0000000..3a716ba --- /dev/null +++ b/minor_project/bin/chardetect @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from chardet.cli.chardetect import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/easy_install b/minor_project/bin/easy_install new file mode 100755 index 0000000..7053b0a --- /dev/null +++ b/minor_project/bin/easy_install @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from setuptools.command.easy_install import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/easy_install-3.6 b/minor_project/bin/easy_install-3.6 new file mode 100755 index 0000000..7053b0a --- /dev/null +++ b/minor_project/bin/easy_install-3.6 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from 
setuptools.command.easy_install import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/easy_install3 b/minor_project/bin/easy_install3 new file mode 100755 index 0000000..7053b0a --- /dev/null +++ b/minor_project/bin/easy_install3 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from setuptools.command.easy_install import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/f2py b/minor_project/bin/f2py new file mode 100755 index 0000000..73e203b --- /dev/null +++ b/minor_project/bin/f2py @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from numpy.f2py.f2py2e import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/f2py3 b/minor_project/bin/f2py3 new file mode 100755 index 0000000..73e203b --- /dev/null +++ b/minor_project/bin/f2py3 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from numpy.f2py.f2py2e import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/f2py3.6 b/minor_project/bin/f2py3.6 new file mode 100755 index 0000000..73e203b --- /dev/null +++ b/minor_project/bin/f2py3.6 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from numpy.f2py.f2py2e import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/futurize b/minor_project/bin/futurize new file mode 100755 index 0000000..fb8669a --- /dev/null +++ b/minor_project/bin/futurize @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from libfuturize.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/nltk b/minor_project/bin/nltk new file mode 100755 index 0000000..6f0e927 --- /dev/null +++ b/minor_project/bin/nltk @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from nltk.cli import cli +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli()) diff --git a/minor_project/bin/pasteurize b/minor_project/bin/pasteurize new file mode 100755 index 0000000..e63bc80 --- /dev/null +++ b/minor_project/bin/pasteurize @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor 
Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from libpasteurize.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/pip b/minor_project/bin/pip new file mode 100755 index 0000000..8e6ac24 --- /dev/null +++ b/minor_project/bin/pip @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/pip3 b/minor_project/bin/pip3 new file mode 100755 index 0000000..8e6ac24 --- /dev/null +++ b/minor_project/bin/pip3 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/pip3.6 b/minor_project/bin/pip3.6 new file mode 100755 index 0000000..8e6ac24 --- /dev/null +++ b/minor_project/bin/pip3.6 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/python b/minor_project/bin/python new file mode 120000 index 0000000..ae65fda --- /dev/null +++ b/minor_project/bin/python @@ -0,0 +1 @@ +/usr/bin/python3 \ No newline at end of file diff --git a/minor_project/bin/python3 b/minor_project/bin/python3 new file mode 120000 index 0000000..d8654aa --- /dev/null +++ b/minor_project/bin/python3 @@ -0,0 +1 @@ +python \ No newline at end of file diff --git a/minor_project/bin/python3.6 b/minor_project/bin/python3.6 new file mode 120000 index 0000000..d8654aa --- /dev/null +++ b/minor_project/bin/python3.6 @@ -0,0 +1 @@ +python \ No newline at end of file diff --git a/minor_project/bin/tqdm b/minor_project/bin/tqdm new file mode 100755 index 0000000..b46c7b2 --- /dev/null +++ b/minor_project/bin/tqdm @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from tqdm.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/wheel b/minor_project/bin/wheel new file mode 100755 index 0000000..840ea40 --- /dev/null +++ b/minor_project/bin/wheel @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from wheel.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/wheel-3.6 
b/minor_project/bin/wheel-3.6 new file mode 100755 index 0000000..840ea40 --- /dev/null +++ b/minor_project/bin/wheel-3.6 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from wheel.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/bin/wheel3 b/minor_project/bin/wheel3 new file mode 100755 index 0000000..840ea40 --- /dev/null +++ b/minor_project/bin/wheel3 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/azzam/Desktop/College/Minor Project/sentiment analysis/Sentiment-Analysis-Twitter/minor_project/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from wheel.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/minor_project/lib/python3.6/site-packages/PIL/BdfFontFile.py b/minor_project/lib/python3.6/site-packages/PIL/BdfFontFile.py new file mode 100644 index 0000000..102b72e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/BdfFontFile.py @@ -0,0 +1,110 @@ +# +# The Python Imaging Library +# $Id$ +# +# bitmap distribution font (bdf) file parser +# +# history: +# 1996-05-16 fl created (as bdf2pil) +# 1997-08-25 fl converted to FontFile driver +# 2001-05-25 fl removed bogus __init__ call +# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev) +# 2003-04-22 fl more robustification (from Graham Dumpleton) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +""" +Parse X Bitmap Distribution Format (BDF) +""" + + +from . 
import FontFile, Image + +bdf_slant = { + "R": "Roman", + "I": "Italic", + "O": "Oblique", + "RI": "Reverse Italic", + "RO": "Reverse Oblique", + "OT": "Other", +} + +bdf_spacing = {"P": "Proportional", "M": "Monospaced", "C": "Cell"} + + +def bdf_char(f): + # skip to STARTCHAR + while True: + s = f.readline() + if not s: + return None + if s[:9] == b"STARTCHAR": + break + id = s[9:].strip().decode("ascii") + + # load symbol properties + props = {} + while True: + s = f.readline() + if not s or s[:6] == b"BITMAP": + break + i = s.find(b" ") + props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii") + + # load bitmap + bitmap = [] + while True: + s = f.readline() + if not s or s[:7] == b"ENDCHAR": + break + bitmap.append(s[:-1]) + bitmap = b"".join(bitmap) + + [x, y, l, d] = [int(p) for p in props["BBX"].split()] + [dx, dy] = [int(p) for p in props["DWIDTH"].split()] + + bbox = (dx, dy), (l, -d - y, x + l, -d), (0, 0, x, y) + + try: + im = Image.frombytes("1", (x, y), bitmap, "hex", "1") + except ValueError: + # deal with zero-width characters + im = Image.new("1", (x, y)) + + return id, int(props["ENCODING"]), bbox, im + + +class BdfFontFile(FontFile.FontFile): + """Font file plugin for the X11 BDF format.""" + + def __init__(self, fp): + super().__init__() + + s = fp.readline() + if s[:13] != b"STARTFONT 2.1": + raise SyntaxError("not a valid BDF file") + + props = {} + comments = [] + + while True: + s = fp.readline() + if not s or s[:13] == b"ENDPROPERTIES": + break + i = s.find(b" ") + props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii") + if s[:i] in [b"COMMENT", b"COPYRIGHT"]: + if s.find(b"LogicalFontDescription") < 0: + comments.append(s[i + 1 : -1].decode("ascii")) + + while True: + c = bdf_char(fp) + if not c: + break + id, ch, (xy, dst, src), im = c + if 0 <= ch < len(self.glyph): + self.glyph[ch] = xy, dst, src, im diff --git a/minor_project/lib/python3.6/site-packages/PIL/BlpImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/BlpImagePlugin.py new file mode 100644 index 0000000..d5d7c0e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/BlpImagePlugin.py @@ -0,0 +1,422 @@ +""" +Blizzard Mipmap Format (.blp) +Jerome Leclanche + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ + +BLP1 files, used mostly in Warcraft III, are not fully supported. +All types of BLP2 files used in World of Warcraft are supported. + +The BLP file structure consists of a header, up to 16 mipmaps of the +texture + +Texture sizes must be powers of two, though the two dimensions do +not have to be equal; 512x256 is valid, but 512x200 is not. +The first mipmap (mipmap #0) is the full size image; each subsequent +mipmap halves both dimensions. The final mipmap should be 1x1. + +BLP files come in many different flavours: +* JPEG-compressed (type == 0) - only supported for BLP1. +* RAW images (type == 1, encoding == 1). Each mipmap is stored as an + array of 8-bit values, one per pixel, left to right, top to bottom. + Each value is an index to the palette. +* DXT-compressed (type == 1, encoding == 2): +- DXT1 compression is used if alpha_encoding == 0. + - An additional alpha bit is used if alpha_depth == 1. + - DXT3 compression is used if alpha_encoding == 1. + - DXT5 compression is used if alpha_encoding == 7. +""" + +import struct +from io import BytesIO + +from . 
import Image, ImageFile + +BLP_FORMAT_JPEG = 0 + +BLP_ENCODING_UNCOMPRESSED = 1 +BLP_ENCODING_DXT = 2 +BLP_ENCODING_UNCOMPRESSED_RAW_BGRA = 3 + +BLP_ALPHA_ENCODING_DXT1 = 0 +BLP_ALPHA_ENCODING_DXT3 = 1 +BLP_ALPHA_ENCODING_DXT5 = 7 + + +def unpack_565(i): + return (((i >> 11) & 0x1F) << 3, ((i >> 5) & 0x3F) << 2, (i & 0x1F) << 3) + + +def decode_dxt1(data, alpha=False): + """ + input: one "row" of data (i.e. will produce 4*width pixels) + """ + + blocks = len(data) // 8 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block in range(blocks): + # Decode next 8-byte block. + idx = block * 8 + color0, color1, bits = struct.unpack_from("> 2 + + a = 0xFF + if control == 0: + r, g, b = r0, g0, b0 + elif control == 1: + r, g, b = r1, g1, b1 + elif control == 2: + if color0 > color1: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + else: + r = (r0 + r1) // 2 + g = (g0 + g1) // 2 + b = (b0 + b1) // 2 + elif control == 3: + if color0 > color1: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + else: + r, g, b, a = 0, 0, 0, 0 + + if alpha: + ret[j].extend([r, g, b, a]) + else: + ret[j].extend([r, g, b]) + + return ret + + +def decode_dxt3(data): + """ + input: one "row" of data (i.e. will produce 4*width pixels) + """ + + blocks = len(data) // 16 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block in range(blocks): + idx = block * 16 + block = data[idx : idx + 16] + # Decode next 16-byte block. + bits = struct.unpack_from("<8B", block) + color0, color1 = struct.unpack_from(">= 4 + else: + high = True + a &= 0xF + a *= 17 # We get a value between 0 and 15 + + color_code = (code >> 2 * (4 * j + i)) & 0x03 + + if color_code == 0: + r, g, b = r0, g0, b0 + elif color_code == 1: + r, g, b = r1, g1, b1 + elif color_code == 2: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + elif color_code == 3: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + + ret[j].extend([r, g, b, a]) + + return ret + + +def decode_dxt5(data): + """ + input: one "row" of data (i.e. will produce 4 * width pixels) + """ + + blocks = len(data) // 16 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block in range(blocks): + idx = block * 16 + block = data[idx : idx + 16] + # Decode next 16-byte block. 
+ a0, a1 = struct.unpack_from("> alphacode_index) & 0x07 + elif alphacode_index == 15: + alphacode = (alphacode2 >> 15) | ((alphacode1 << 1) & 0x06) + else: # alphacode_index >= 18 and alphacode_index <= 45 + alphacode = (alphacode1 >> (alphacode_index - 16)) & 0x07 + + if alphacode == 0: + a = a0 + elif alphacode == 1: + a = a1 + elif a0 > a1: + a = ((8 - alphacode) * a0 + (alphacode - 1) * a1) // 7 + elif alphacode == 6: + a = 0 + elif alphacode == 7: + a = 255 + else: + a = ((6 - alphacode) * a0 + (alphacode - 1) * a1) // 5 + + color_code = (code >> 2 * (4 * j + i)) & 0x03 + + if color_code == 0: + r, g, b = r0, g0, b0 + elif color_code == 1: + r, g, b = r1, g1, b1 + elif color_code == 2: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + elif color_code == 3: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + + ret[j].extend([r, g, b, a]) + + return ret + + +class BLPFormatError(NotImplementedError): + pass + + +class BlpImageFile(ImageFile.ImageFile): + """ + Blizzard Mipmap Format + """ + + format = "BLP" + format_description = "Blizzard Mipmap Format" + + def _open(self): + self.magic = self.fp.read(4) + self._read_blp_header() + + if self.magic == b"BLP1": + decoder = "BLP1" + self.mode = "RGB" + elif self.magic == b"BLP2": + decoder = "BLP2" + self.mode = "RGBA" if self._blp_alpha_depth else "RGB" + else: + raise BLPFormatError(f"Bad BLP magic {repr(self.magic)}") + + self.tile = [(decoder, (0, 0) + self.size, 0, (self.mode, 0, 1))] + + def _read_blp_header(self): + (self._blp_compression,) = struct.unpack(" mode, rawmode + 1: ("P", "P;1"), + 4: ("P", "P;4"), + 8: ("P", "P"), + 16: ("RGB", "BGR;15"), + 24: ("RGB", "BGR"), + 32: ("RGB", "BGRX"), +} + + +def _accept(prefix): + return prefix[:2] == b"BM" + + +def _dib_accept(prefix): + return i32(prefix) in [12, 40, 64, 108, 124] + + +# ============================================================================= +# Image plugin for the Windows BMP format. 
+# ============================================================================= +class BmpImageFile(ImageFile.ImageFile): + """ Image plugin for the Windows Bitmap format (BMP) """ + + # ------------------------------------------------------------- Description + format_description = "Windows Bitmap" + format = "BMP" + + # -------------------------------------------------- BMP Compression values + COMPRESSIONS = {"RAW": 0, "RLE8": 1, "RLE4": 2, "BITFIELDS": 3, "JPEG": 4, "PNG": 5} + for k, v in COMPRESSIONS.items(): + vars()[k] = v + + def _bitmap(self, header=0, offset=0): + """ Read relevant info about the BMP """ + read, seek = self.fp.read, self.fp.seek + if header: + seek(header) + file_info = {} + # read bmp header size @offset 14 (this is part of the header size) + file_info["header_size"] = i32(read(4)) + file_info["direction"] = -1 + + # -------------------- If requested, read header at a specific position + # read the rest of the bmp header, without its size + header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4) + + # -------------------------------------------------- IBM OS/2 Bitmap v1 + # ----- This format has different offsets because of width/height types + if file_info["header_size"] == 12: + file_info["width"] = i16(header_data, 0) + file_info["height"] = i16(header_data, 2) + file_info["planes"] = i16(header_data, 4) + file_info["bits"] = i16(header_data, 6) + file_info["compression"] = self.RAW + file_info["palette_padding"] = 3 + + # --------------------------------------------- Windows Bitmap v2 to v5 + # v3, OS/2 v2, v4, v5 + elif file_info["header_size"] in (40, 64, 108, 124): + file_info["y_flip"] = header_data[7] == 0xFF + file_info["direction"] = 1 if file_info["y_flip"] else -1 + file_info["width"] = i32(header_data, 0) + file_info["height"] = ( + i32(header_data, 4) + if not file_info["y_flip"] + else 2 ** 32 - i32(header_data, 4) + ) + file_info["planes"] = i16(header_data, 8) + file_info["bits"] = i16(header_data, 10) + file_info["compression"] = i32(header_data, 12) + # byte size of pixel data + file_info["data_size"] = i32(header_data, 16) + file_info["pixels_per_meter"] = ( + i32(header_data, 20), + i32(header_data, 24), + ) + file_info["colors"] = i32(header_data, 28) + file_info["palette_padding"] = 4 + self.info["dpi"] = tuple( + int(x / 39.3701 + 0.5) for x in file_info["pixels_per_meter"] + ) + if file_info["compression"] == self.BITFIELDS: + if len(header_data) >= 52: + for idx, mask in enumerate( + ["r_mask", "g_mask", "b_mask", "a_mask"] + ): + file_info[mask] = i32(header_data, 36 + idx * 4) + else: + # 40 byte headers only have the three components in the + # bitfields masks, ref: + # https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx + # See also + # https://github.com/python-pillow/Pillow/issues/1293 + # There is a 4th component in the RGBQuad, in the alpha + # location, but it is listed as a reserved component, + # and it is not generally an alpha channel + file_info["a_mask"] = 0x0 + for mask in ["r_mask", "g_mask", "b_mask"]: + file_info[mask] = i32(read(4)) + file_info["rgb_mask"] = ( + file_info["r_mask"], + file_info["g_mask"], + file_info["b_mask"], + ) + file_info["rgba_mask"] = ( + file_info["r_mask"], + file_info["g_mask"], + file_info["b_mask"], + file_info["a_mask"], + ) + else: + raise OSError(f"Unsupported BMP header type ({file_info['header_size']})") + + # ------------------ Special case : header is reported 40, which + # ---------------------- is shorter than real size for bpp >= 
16 + self._size = file_info["width"], file_info["height"] + + # ------- If color count was not found in the header, compute from bits + file_info["colors"] = ( + file_info["colors"] + if file_info.get("colors", 0) + else (1 << file_info["bits"]) + ) + + # ---------------------- Check bit depth for unusual unsupported values + self.mode, raw_mode = BIT2MODE.get(file_info["bits"], (None, None)) + if self.mode is None: + raise OSError(f"Unsupported BMP pixel depth ({file_info['bits']})") + + # ---------------- Process BMP with Bitfields compression (not palette) + if file_info["compression"] == self.BITFIELDS: + SUPPORTED = { + 32: [ + (0xFF0000, 0xFF00, 0xFF, 0x0), + (0xFF0000, 0xFF00, 0xFF, 0xFF000000), + (0xFF, 0xFF00, 0xFF0000, 0xFF000000), + (0x0, 0x0, 0x0, 0x0), + (0xFF000000, 0xFF0000, 0xFF00, 0x0), + ], + 24: [(0xFF0000, 0xFF00, 0xFF)], + 16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)], + } + MASK_MODES = { + (32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX", + (32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR", + (32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA", + (32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA", + (32, (0x0, 0x0, 0x0, 0x0)): "BGRA", + (24, (0xFF0000, 0xFF00, 0xFF)): "BGR", + (16, (0xF800, 0x7E0, 0x1F)): "BGR;16", + (16, (0x7C00, 0x3E0, 0x1F)): "BGR;15", + } + if file_info["bits"] in SUPPORTED: + if ( + file_info["bits"] == 32 + and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]] + ): + raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])] + self.mode = "RGBA" if "A" in raw_mode else self.mode + elif ( + file_info["bits"] in (24, 16) + and file_info["rgb_mask"] in SUPPORTED[file_info["bits"]] + ): + raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])] + else: + raise OSError("Unsupported BMP bitfields layout") + else: + raise OSError("Unsupported BMP bitfields layout") + elif file_info["compression"] == self.RAW: + if file_info["bits"] == 32 and header == 22: # 32-bit .cur offset + raw_mode, self.mode = "BGRA", "RGBA" + else: + raise OSError(f"Unsupported BMP compression ({file_info['compression']})") + + # --------------- Once the header is processed, process the palette/LUT + if self.mode == "P": # Paletted for 1, 4 and 8 bit images + + # ---------------------------------------------------- 1-bit images + if not (0 < file_info["colors"] <= 65536): + raise OSError(f"Unsupported BMP Palette size ({file_info['colors']})") + else: + padding = file_info["palette_padding"] + palette = read(padding * file_info["colors"]) + greyscale = True + indices = ( + (0, 255) + if file_info["colors"] == 2 + else list(range(file_info["colors"])) + ) + + # ----------------- Check if greyscale and ignore palette if so + for ind, val in enumerate(indices): + rgb = palette[ind * padding : ind * padding + 3] + if rgb != o8(val) * 3: + greyscale = False + + # ------- If all colors are grey, white or black, ditch palette + if greyscale: + self.mode = "1" if file_info["colors"] == 2 else "L" + raw_mode = self.mode + else: + self.mode = "P" + self.palette = ImagePalette.raw( + "BGRX" if padding == 4 else "BGR", palette + ) + + # ---------------------------- Finally set the tile data for the plugin + self.info["compression"] = file_info["compression"] + self.tile = [ + ( + "raw", + (0, 0, file_info["width"], file_info["height"]), + offset or self.fp.tell(), + ( + raw_mode, + ((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3), + file_info["direction"], + ), + ) + ] + + def _open(self): + """ Open file, check magic number and read header """ + # read 14 
bytes: magic number, filesize, reserved, header final offset + head_data = self.fp.read(14) + # choke if the file does not have the required magic bytes + if not _accept(head_data): + raise SyntaxError("Not a BMP file") + # read the start position of the BMP image data (u32) + offset = i32(head_data, 10) + # load bitmap information (offset=raster info) + self._bitmap(offset=offset) + + +# ============================================================================= +# Image plugin for the DIB format (BMP alias) +# ============================================================================= +class DibImageFile(BmpImageFile): + + format = "DIB" + format_description = "Windows Bitmap" + + def _open(self): + self._bitmap() + + +# +# -------------------------------------------------------------------- +# Write BMP file + + +SAVE = { + "1": ("1", 1, 2), + "L": ("L", 8, 256), + "P": ("P", 8, 256), + "RGB": ("BGR", 24, 0), + "RGBA": ("BGRA", 32, 0), +} + + +def _dib_save(im, fp, filename): + _save(im, fp, filename, False) + + +def _save(im, fp, filename, bitmap_header=True): + try: + rawmode, bits, colors = SAVE[im.mode] + except KeyError as e: + raise OSError(f"cannot write mode {im.mode} as BMP") from e + + info = im.encoderinfo + + dpi = info.get("dpi", (96, 96)) + + # 1 meter == 39.3701 inches + ppm = tuple(map(lambda x: int(x * 39.3701 + 0.5), dpi)) + + stride = ((im.size[0] * bits + 7) // 8 + 3) & (~3) + header = 40 # or 64 for OS/2 version 2 + image = stride * im.size[1] + + # bitmap header + if bitmap_header: + offset = 14 + header + colors * 4 + file_size = offset + image + if file_size > 2 ** 32 - 1: + raise ValueError("File size is too large for the BMP format") + fp.write( + b"BM" # file type (magic) + + o32(file_size) # file size + + o32(0) # reserved + + o32(offset) # image data offset + ) + + # bitmap info header + fp.write( + o32(header) # info header size + + o32(im.size[0]) # width + + o32(im.size[1]) # height + + o16(1) # planes + + o16(bits) # depth + + o32(0) # compression (0=uncompressed) + + o32(image) # size of bitmap + + o32(ppm[0]) # resolution + + o32(ppm[1]) # resolution + + o32(colors) # colors used + + o32(colors) # colors important + ) + + fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) + + if im.mode == "1": + for i in (0, 255): + fp.write(o8(i) * 4) + elif im.mode == "L": + for i in range(256): + fp.write(o8(i) * 4) + elif im.mode == "P": + fp.write(im.im.getpalette("RGB", "BGRX")) + + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, stride, -1))]) + + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(BmpImageFile.format, BmpImageFile, _accept) +Image.register_save(BmpImageFile.format, _save) + +Image.register_extension(BmpImageFile.format, ".bmp") + +Image.register_mime(BmpImageFile.format, "image/bmp") + +Image.register_open(DibImageFile.format, DibImageFile, _dib_accept) +Image.register_save(DibImageFile.format, _dib_save) + +Image.register_extension(DibImageFile.format, ".dib") + +Image.register_mime(DibImageFile.format, "image/bmp") diff --git a/minor_project/lib/python3.6/site-packages/PIL/BufrStubImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/BufrStubImagePlugin.py new file mode 100644 index 0000000..48f21e1 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/BufrStubImagePlugin.py @@ -0,0 +1,73 @@ +# +# The Python Imaging Library +# $Id$ +# +# BUFR stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for 
information on usage and redistribution. +# + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler): + """ + Install application-specific BUFR image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix): + return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC" + + +class BufrStubImageFile(ImageFile.StubImageFile): + + format = "BUFR" + format_description = "BUFR" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(4)): + raise SyntaxError("Not a BUFR file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise OSError("BUFR save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept) +Image.register_save(BufrStubImageFile.format, _save) + +Image.register_extension(BufrStubImageFile.format, ".bufr") diff --git a/minor_project/lib/python3.6/site-packages/PIL/ContainerIO.py b/minor_project/lib/python3.6/site-packages/PIL/ContainerIO.py new file mode 100644 index 0000000..45e80b3 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ContainerIO.py @@ -0,0 +1,120 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a class to read from a container file +# +# History: +# 1995-06-18 fl Created +# 1995-09-07 fl Added readline(), readlines() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +import io + + +class ContainerIO: + """ + A file object that provides read access to a part of an existing + file (for example a TAR file). + """ + + def __init__(self, file, offset, length): + """ + Create file object. + + :param file: Existing file. + :param offset: Start of region, in bytes. + :param length: Size of region, in bytes. + """ + self.fh = file + self.pos = 0 + self.offset = offset + self.length = length + self.fh.seek(offset) + + ## + # Always false. + + def isatty(self): + return False + + def seek(self, offset, mode=io.SEEK_SET): + """ + Move file pointer. + + :param offset: Offset in bytes. + :param mode: Starting position. Use 0 for beginning of region, 1 + for current offset, and 2 for end of region. You cannot move + the pointer outside the defined region. + """ + if mode == 1: + self.pos = self.pos + offset + elif mode == 2: + self.pos = self.length + offset + else: + self.pos = offset + # clamp + self.pos = max(0, min(self.pos, self.length)) + self.fh.seek(self.offset + self.pos) + + def tell(self): + """ + Get current file pointer. + + :returns: Offset from start of region, in bytes. + """ + return self.pos + + def read(self, n=0): + """ + Read data. + + :param n: Number of bytes to read. If omitted or zero, + read until end of region. + :returns: An 8-bit string. + """ + if n: + n = min(n, self.length - self.pos) + else: + n = self.length - self.pos + if not n: # EOF + return b"" if "b" in self.fh.mode else "" + self.pos = self.pos + n + return self.fh.read(n) + + def readline(self): + """ + Read a line of text. + + :returns: An 8-bit string. 
+ """ + s = b"" if "b" in self.fh.mode else "" + newline_character = b"\n" if "b" in self.fh.mode else "\n" + while True: + c = self.read(1) + if not c: + break + s = s + c + if c == newline_character: + break + return s + + def readlines(self): + """ + Read multiple lines of text. + + :returns: A list of 8-bit strings. + """ + lines = [] + while True: + s = self.readline() + if not s: + break + lines.append(s) + return lines diff --git a/minor_project/lib/python3.6/site-packages/PIL/CurImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/CurImagePlugin.py new file mode 100644 index 0000000..42af5ca --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/CurImagePlugin.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Cursor support for PIL +# +# notes: +# uses BmpImagePlugin.py to read the bitmap data. +# +# history: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# +from . import BmpImagePlugin, Image +from ._binary import i16le as i16 +from ._binary import i32le as i32 + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:4] == b"\0\0\2\0" + + +## +# Image plugin for Windows Cursor files. + + +class CurImageFile(BmpImagePlugin.BmpImageFile): + + format = "CUR" + format_description = "Windows Cursor" + + def _open(self): + + offset = self.fp.tell() + + # check magic + s = self.fp.read(6) + if not _accept(s): + raise SyntaxError("not a CUR file") + + # pick the largest cursor in the file + m = b"" + for i in range(i16(s, 4)): + s = self.fp.read(16) + if not m: + m = s + elif s[0] > m[0] and s[1] > m[1]: + m = s + if not m: + raise TypeError("No cursors were found") + + # load as bitmap + self._bitmap(i32(m, 12) + offset) + + # patch up the bitmap height + self._size = self.size[0], self.size[1] // 2 + d, e, o, a = self.tile[0] + self.tile[0] = d, (0, 0) + self.size, o, a + + return + + +# +# -------------------------------------------------------------------- + +Image.register_open(CurImageFile.format, CurImageFile, _accept) + +Image.register_extension(CurImageFile.format, ".cur") diff --git a/minor_project/lib/python3.6/site-packages/PIL/DcxImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/DcxImagePlugin.py new file mode 100644 index 0000000..de21db8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/DcxImagePlugin.py @@ -0,0 +1,89 @@ +# +# The Python Imaging Library. +# $Id$ +# +# DCX file handling +# +# DCX is a container file format defined by Intel, commonly used +# for fax applications. Each DCX file consists of a directory +# (a list of file offsets) followed by a set of (usually 1-bit) +# PCX files. +# +# History: +# 1995-09-09 fl Created +# 1996-03-20 fl Properly derived from PcxImageFile. +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2002-07-30 fl Fixed file handling +# +# Copyright (c) 1997-98 by Secret Labs AB. +# Copyright (c) 1995-96 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from . import Image +from ._binary import i32le as i32 +from .PcxImagePlugin import PcxImageFile + +MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == MAGIC + + +## +# Image plugin for the Intel DCX format. 
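# Illustrative sketch only (not referenced by the plugin): since DCX is
# registered with Image.open(), the individual PCX frames are reached through
# the standard seek()/tell() interface. The file name below is hypothetical.
def _example_iterate_dcx_frames(path="fax.dcx"):
    from PIL import Image

    with Image.open(path) as im:
        for frame in range(im.n_frames):
            im.seek(frame)
            im.load()  # decode the PCX frame at the stored offset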
+ + +class DcxImageFile(PcxImageFile): + + format = "DCX" + format_description = "Intel DCX" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # Header + s = self.fp.read(4) + if not _accept(s): + raise SyntaxError("not a DCX file") + + # Component directory + self._offset = [] + for i in range(1024): + offset = i32(self.fp.read(4)) + if not offset: + break + self._offset.append(offset) + + self.__fp = self.fp + self.frame = None + self.n_frames = len(self._offset) + self.is_animated = self.n_frames > 1 + self.seek(0) + + def seek(self, frame): + if not self._seek_check(frame): + return + self.frame = frame + self.fp = self.__fp + self.fp.seek(self._offset[frame]) + PcxImageFile._open(self) + + def tell(self): + return self.frame + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + +Image.register_open(DcxImageFile.format, DcxImageFile, _accept) + +Image.register_extension(DcxImageFile.format, ".dcx") diff --git a/minor_project/lib/python3.6/site-packages/PIL/DdsImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/DdsImagePlugin.py new file mode 100644 index 0000000..df2d006 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/DdsImagePlugin.py @@ -0,0 +1,190 @@ +""" +A Pillow loader for .dds files (S3TC-compressed aka DXTC) +Jerome Leclanche + +Documentation: + https://web.archive.org/web/20170802060935/http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ +""" + +import struct +from io import BytesIO + +from . import Image, ImageFile + +# Magic ("DDS ") +DDS_MAGIC = 0x20534444 + +# DDS flags +DDSD_CAPS = 0x1 +DDSD_HEIGHT = 0x2 +DDSD_WIDTH = 0x4 +DDSD_PITCH = 0x8 +DDSD_PIXELFORMAT = 0x1000 +DDSD_MIPMAPCOUNT = 0x20000 +DDSD_LINEARSIZE = 0x80000 +DDSD_DEPTH = 0x800000 + +# DDS caps +DDSCAPS_COMPLEX = 0x8 +DDSCAPS_TEXTURE = 0x1000 +DDSCAPS_MIPMAP = 0x400000 + +DDSCAPS2_CUBEMAP = 0x200 +DDSCAPS2_CUBEMAP_POSITIVEX = 0x400 +DDSCAPS2_CUBEMAP_NEGATIVEX = 0x800 +DDSCAPS2_CUBEMAP_POSITIVEY = 0x1000 +DDSCAPS2_CUBEMAP_NEGATIVEY = 0x2000 +DDSCAPS2_CUBEMAP_POSITIVEZ = 0x4000 +DDSCAPS2_CUBEMAP_NEGATIVEZ = 0x8000 +DDSCAPS2_VOLUME = 0x200000 + +# Pixel Format +DDPF_ALPHAPIXELS = 0x1 +DDPF_ALPHA = 0x2 +DDPF_FOURCC = 0x4 +DDPF_PALETTEINDEXED8 = 0x20 +DDPF_RGB = 0x40 +DDPF_LUMINANCE = 0x20000 + + +# dds.h + +DDS_FOURCC = DDPF_FOURCC +DDS_RGB = DDPF_RGB +DDS_RGBA = DDPF_RGB | DDPF_ALPHAPIXELS +DDS_LUMINANCE = DDPF_LUMINANCE +DDS_LUMINANCEA = DDPF_LUMINANCE | DDPF_ALPHAPIXELS +DDS_ALPHA = DDPF_ALPHA +DDS_PAL8 = DDPF_PALETTEINDEXED8 + +DDS_HEADER_FLAGS_TEXTURE = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT +DDS_HEADER_FLAGS_MIPMAP = DDSD_MIPMAPCOUNT +DDS_HEADER_FLAGS_VOLUME = DDSD_DEPTH +DDS_HEADER_FLAGS_PITCH = DDSD_PITCH +DDS_HEADER_FLAGS_LINEARSIZE = DDSD_LINEARSIZE + +DDS_HEIGHT = DDSD_HEIGHT +DDS_WIDTH = DDSD_WIDTH + +DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS_TEXTURE +DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS_COMPLEX | DDSCAPS_MIPMAP +DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS_COMPLEX + +DDS_CUBEMAP_POSITIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEX +DDS_CUBEMAP_NEGATIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEX +DDS_CUBEMAP_POSITIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEY +DDS_CUBEMAP_NEGATIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEY +DDS_CUBEMAP_POSITIVEZ = 
DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEZ +DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEZ + + +# DXT1 +DXT1_FOURCC = 0x31545844 + +# DXT3 +DXT3_FOURCC = 0x33545844 + +# DXT5 +DXT5_FOURCC = 0x35545844 + + +# dxgiformat.h + +DXGI_FORMAT_R8G8B8A8_TYPELESS = 27 +DXGI_FORMAT_R8G8B8A8_UNORM = 28 +DXGI_FORMAT_R8G8B8A8_UNORM_SRGB = 29 +DXGI_FORMAT_BC7_TYPELESS = 97 +DXGI_FORMAT_BC7_UNORM = 98 +DXGI_FORMAT_BC7_UNORM_SRGB = 99 + + +class DdsImageFile(ImageFile.ImageFile): + format = "DDS" + format_description = "DirectDraw Surface" + + def _open(self): + magic, header_size = struct.unpack(" 0: + s = fp.read(min(lengthfile, 100 * 1024)) + if not s: + break + lengthfile -= len(s) + f.write(s) + + # Build Ghostscript command + command = [ + "gs", + "-q", # quiet mode + "-g%dx%d" % size, # set output geometry (pixels) + "-r%fx%f" % res, # set input DPI (dots per inch) + "-dBATCH", # exit after processing + "-dNOPAUSE", # don't pause between pages + "-dSAFER", # safe mode + "-sDEVICE=ppmraw", # ppm driver + f"-sOutputFile={outfile}", # output file + # adjust for image origin + "-c", + f"{-bbox[0]} {-bbox[1]} translate", + "-f", + infile, # input file + # showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272) + "-c", + "showpage", + ] + + if gs_windows_binary is not None: + if not gs_windows_binary: + raise OSError("Unable to locate Ghostscript on paths") + command[0] = gs_windows_binary + + # push data through Ghostscript + try: + startupinfo = None + if sys.platform.startswith("win"): + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + subprocess.check_call(command, startupinfo=startupinfo) + out_im = Image.open(outfile) + out_im.load() + finally: + try: + os.unlink(outfile) + if infile_temp: + os.unlink(infile_temp) + except OSError: + pass + + im = out_im.im.copy() + out_im.close() + return im + + +class PSFile: + """ + Wrapper for bytesio object that treats either CR or LF as end of line. + """ + + def __init__(self, fp): + self.fp = fp + self.char = None + + def seek(self, offset, whence=io.SEEK_SET): + self.char = None + self.fp.seek(offset, whence) + + def readline(self): + s = self.char or b"" + self.char = None + + c = self.fp.read(1) + while c not in b"\r\n": + s = s + c + c = self.fp.read(1) + + self.char = self.fp.read(1) + # line endings can be 1 or 2 of \r \n, in either order + if self.char in b"\r\n": + self.char = None + + return s.decode("latin-1") + + +def _accept(prefix): + return prefix[:4] == b"%!PS" or (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) + + +## +# Image plugin for Encapsulated PostScript. This plugin supports only +# a few variants of this format. + + +class EpsImageFile(ImageFile.ImageFile): + """EPS File Parser for the Python Imaging Library""" + + format = "EPS" + format_description = "Encapsulated Postscript" + + mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"} + + def _open(self): + (length, offset) = self._find_offset(self.fp) + + # Rewrap the open file pointer in something that will + # convert line endings and decode to latin-1. + fp = PSFile(self.fp) + + # go to offset - start of "%!PS" + fp.seek(offset) + + box = None + + self.mode = "RGB" + self._size = 1, 1 # FIXME: huh? 
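        # The 1 x 1 size above is only a placeholder; it is replaced once the
        # %%BoundingBox DSC comment has been parsed below.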
+ + # + # Load EPS header + + s_raw = fp.readline() + s = s_raw.strip("\r\n") + + while s_raw: + if s: + if len(s) > 255: + raise SyntaxError("not an EPS file") + + try: + m = split.match(s) + except re.error as e: + raise SyntaxError("not an EPS file") from e + + if m: + k, v = m.group(1, 2) + self.info[k] = v + if k == "BoundingBox": + try: + # Note: The DSC spec says that BoundingBox + # fields should be integers, but some drivers + # put floating point values there anyway. + box = [int(float(i)) for i in v.split()] + self._size = box[2] - box[0], box[3] - box[1] + self.tile = [ + ("eps", (0, 0) + self.size, offset, (length, box)) + ] + except Exception: + pass + + else: + m = field.match(s) + if m: + k = m.group(1) + + if k == "EndComments": + break + if k[:8] == "PS-Adobe": + self.info[k[:8]] = k[9:] + else: + self.info[k] = "" + elif s[0] == "%": + # handle non-DSC PostScript comments that some + # tools mistakenly put in the Comments section + pass + else: + raise OSError("bad EPS header") + + s_raw = fp.readline() + s = s_raw.strip("\r\n") + + if s and s[:1] != "%": + break + + # + # Scan for an "ImageData" descriptor + + while s[:1] == "%": + + if len(s) > 255: + raise SyntaxError("not an EPS file") + + if s[:11] == "%ImageData:": + # Encoded bitmapped image. + x, y, bi, mo = s[11:].split(None, 7)[:4] + + if int(bi) != 8: + break + try: + self.mode = self.mode_map[int(mo)] + except ValueError: + break + + self._size = int(x), int(y) + return + + s = fp.readline().strip("\r\n") + if not s: + break + + if not box: + raise OSError("cannot determine EPS bounding box") + + def _find_offset(self, fp): + + s = fp.read(160) + + if s[:4] == b"%!PS": + # for HEAD without binary preview + fp.seek(0, io.SEEK_END) + length = fp.tell() + offset = 0 + elif i32(s, 0) == 0xC6D3D0C5: + # FIX for: Some EPS file not handled correctly / issue #302 + # EPS can contain binary data + # or start directly with latin coding + # more info see: + # https://web.archive.org/web/20160528181353/http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf + offset = i32(s, 4) + length = i32(s, 8) + else: + raise SyntaxError("not an EPS file") + + return (length, offset) + + def load(self, scale=1): + # Load EPS via Ghostscript + if not self.tile: + return + self.im = Ghostscript(self.tile, self.size, self.fp, scale) + self.mode = self.im.mode + self._size = self.im.size + self.tile = [] + + def load_seek(self, *args, **kwargs): + # we can't incrementally load, so force ImageFile.parser to + # use our custom load method by defining this method. + pass + + +# +# -------------------------------------------------------------------- + + +def _save(im, fp, filename, eps=1): + """EPS Writer for the Python Imaging Library.""" + + # + # make sure image data is available + im.load() + + # + # determine PostScript image mode + if im.mode == "L": + operator = (8, 1, "image") + elif im.mode == "RGB": + operator = (8, 3, "false 3 colorimage") + elif im.mode == "CMYK": + operator = (8, 4, "false 4 colorimage") + else: + raise ValueError("image mode is not supported") + + base_fp = fp + wrapped_fp = False + if fp != sys.stdout: + fp = io.TextIOWrapper(fp, encoding="latin-1") + wrapped_fp = True + + try: + if eps: + # + # write EPS header + fp.write("%!PS-Adobe-3.0 EPSF-3.0\n") + fp.write("%%Creator: PIL 0.1 EpsEncode\n") + # fp.write("%%CreationDate: %s"...) 
+ fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size) + fp.write("%%Pages: 1\n") + fp.write("%%EndComments\n") + fp.write("%%Page: 1 1\n") + fp.write("%%ImageData: %d %d " % im.size) + fp.write('%d %d 0 1 1 "%s"\n' % operator) + + # + # image header + fp.write("gsave\n") + fp.write("10 dict begin\n") + fp.write(f"/buf {im.size[0] * operator[1]} string def\n") + fp.write("%d %d scale\n" % im.size) + fp.write("%d %d 8\n" % im.size) # <= bits + fp.write(f"[{im.size[0]} 0 0 -{im.size[1]} 0 {im.size[1]}]\n") + fp.write("{ currentfile buf readhexstring pop } bind\n") + fp.write(operator[2] + "\n") + if hasattr(fp, "flush"): + fp.flush() + + ImageFile._save(im, base_fp, [("eps", (0, 0) + im.size, 0, None)]) + + fp.write("\n%%%%EndBinary\n") + fp.write("grestore end\n") + if hasattr(fp, "flush"): + fp.flush() + finally: + if wrapped_fp: + fp.detach() + + +# +# -------------------------------------------------------------------- + + +Image.register_open(EpsImageFile.format, EpsImageFile, _accept) + +Image.register_save(EpsImageFile.format, _save) + +Image.register_extensions(EpsImageFile.format, [".ps", ".eps"]) + +Image.register_mime(EpsImageFile.format, "application/postscript") diff --git a/minor_project/lib/python3.6/site-packages/PIL/ExifTags.py b/minor_project/lib/python3.6/site-packages/PIL/ExifTags.py new file mode 100644 index 0000000..f1c037e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ExifTags.py @@ -0,0 +1,318 @@ +# +# The Python Imaging Library. +# $Id$ +# +# EXIF tags +# +# Copyright (c) 2003 by Secret Labs AB +# +# See the README file for information on usage and redistribution. +# + +""" +This module provides constants and clear-text names for various +well-known EXIF tags. +""" + + +TAGS = { + # possibly incomplete + 0x000B: "ProcessingSoftware", + 0x00FE: "NewSubfileType", + 0x00FF: "SubfileType", + 0x0100: "ImageWidth", + 0x0101: "ImageLength", + 0x0102: "BitsPerSample", + 0x0103: "Compression", + 0x0106: "PhotometricInterpretation", + 0x0107: "Thresholding", + 0x0108: "CellWidth", + 0x0109: "CellLength", + 0x010A: "FillOrder", + 0x010D: "DocumentName", + 0x010E: "ImageDescription", + 0x010F: "Make", + 0x0110: "Model", + 0x0111: "StripOffsets", + 0x0112: "Orientation", + 0x0115: "SamplesPerPixel", + 0x0116: "RowsPerStrip", + 0x0117: "StripByteCounts", + 0x0118: "MinSampleValue", + 0x0119: "MaxSampleValue", + 0x011A: "XResolution", + 0x011B: "YResolution", + 0x011C: "PlanarConfiguration", + 0x011D: "PageName", + 0x0120: "FreeOffsets", + 0x0121: "FreeByteCounts", + 0x0122: "GrayResponseUnit", + 0x0123: "GrayResponseCurve", + 0x0124: "T4Options", + 0x0125: "T6Options", + 0x0128: "ResolutionUnit", + 0x0129: "PageNumber", + 0x012D: "TransferFunction", + 0x0131: "Software", + 0x0132: "DateTime", + 0x013B: "Artist", + 0x013C: "HostComputer", + 0x013D: "Predictor", + 0x013E: "WhitePoint", + 0x013F: "PrimaryChromaticities", + 0x0140: "ColorMap", + 0x0141: "HalftoneHints", + 0x0142: "TileWidth", + 0x0143: "TileLength", + 0x0144: "TileOffsets", + 0x0145: "TileByteCounts", + 0x014A: "SubIFDs", + 0x014C: "InkSet", + 0x014D: "InkNames", + 0x014E: "NumberOfInks", + 0x0150: "DotRange", + 0x0151: "TargetPrinter", + 0x0152: "ExtraSamples", + 0x0153: "SampleFormat", + 0x0154: "SMinSampleValue", + 0x0155: "SMaxSampleValue", + 0x0156: "TransferRange", + 0x0157: "ClipPath", + 0x0158: "XClipPathUnits", + 0x0159: "YClipPathUnits", + 0x015A: "Indexed", + 0x015B: "JPEGTables", + 0x015F: "OPIProxy", + 0x0200: "JPEGProc", + 0x0201: "JpegIFOffset", + 0x0202: "JpegIFByteCount", + 0x0203: 
"JpegRestartInterval", + 0x0205: "JpegLosslessPredictors", + 0x0206: "JpegPointTransforms", + 0x0207: "JpegQTables", + 0x0208: "JpegDCTables", + 0x0209: "JpegACTables", + 0x0211: "YCbCrCoefficients", + 0x0212: "YCbCrSubSampling", + 0x0213: "YCbCrPositioning", + 0x0214: "ReferenceBlackWhite", + 0x02BC: "XMLPacket", + 0x1000: "RelatedImageFileFormat", + 0x1001: "RelatedImageWidth", + 0x1002: "RelatedImageLength", + 0x4746: "Rating", + 0x4749: "RatingPercent", + 0x800D: "ImageID", + 0x828D: "CFARepeatPatternDim", + 0x828E: "CFAPattern", + 0x828F: "BatteryLevel", + 0x8298: "Copyright", + 0x829A: "ExposureTime", + 0x829D: "FNumber", + 0x83BB: "IPTCNAA", + 0x8649: "ImageResources", + 0x8769: "ExifOffset", + 0x8773: "InterColorProfile", + 0x8822: "ExposureProgram", + 0x8824: "SpectralSensitivity", + 0x8825: "GPSInfo", + 0x8827: "ISOSpeedRatings", + 0x8828: "OECF", + 0x8829: "Interlace", + 0x882A: "TimeZoneOffset", + 0x882B: "SelfTimerMode", + 0x9000: "ExifVersion", + 0x9003: "DateTimeOriginal", + 0x9004: "DateTimeDigitized", + 0x9101: "ComponentsConfiguration", + 0x9102: "CompressedBitsPerPixel", + 0x9201: "ShutterSpeedValue", + 0x9202: "ApertureValue", + 0x9203: "BrightnessValue", + 0x9204: "ExposureBiasValue", + 0x9205: "MaxApertureValue", + 0x9206: "SubjectDistance", + 0x9207: "MeteringMode", + 0x9208: "LightSource", + 0x9209: "Flash", + 0x920A: "FocalLength", + 0x920B: "FlashEnergy", + 0x920C: "SpatialFrequencyResponse", + 0x920D: "Noise", + 0x9211: "ImageNumber", + 0x9212: "SecurityClassification", + 0x9213: "ImageHistory", + 0x9214: "SubjectLocation", + 0x9215: "ExposureIndex", + 0x9216: "TIFF/EPStandardID", + 0x927C: "MakerNote", + 0x9286: "UserComment", + 0x9290: "SubsecTime", + 0x9291: "SubsecTimeOriginal", + 0x9292: "SubsecTimeDigitized", + 0x9400: "AmbientTemperature", + 0x9401: "Humidity", + 0x9402: "Pressure", + 0x9403: "WaterDepth", + 0x9404: "Acceleration", + 0x9405: "CameraElevationAngle", + 0x9C9B: "XPTitle", + 0x9C9C: "XPComment", + 0x9C9D: "XPAuthor", + 0x9C9E: "XPKeywords", + 0x9C9F: "XPSubject", + 0xA000: "FlashPixVersion", + 0xA001: "ColorSpace", + 0xA002: "ExifImageWidth", + 0xA003: "ExifImageHeight", + 0xA004: "RelatedSoundFile", + 0xA005: "ExifInteroperabilityOffset", + 0xA20B: "FlashEnergy", + 0xA20C: "SpatialFrequencyResponse", + 0xA20E: "FocalPlaneXResolution", + 0xA20F: "FocalPlaneYResolution", + 0xA210: "FocalPlaneResolutionUnit", + 0xA214: "SubjectLocation", + 0xA215: "ExposureIndex", + 0xA217: "SensingMethod", + 0xA300: "FileSource", + 0xA301: "SceneType", + 0xA302: "CFAPattern", + 0xA401: "CustomRendered", + 0xA402: "ExposureMode", + 0xA403: "WhiteBalance", + 0xA404: "DigitalZoomRatio", + 0xA405: "FocalLengthIn35mmFilm", + 0xA406: "SceneCaptureType", + 0xA407: "GainControl", + 0xA408: "Contrast", + 0xA409: "Saturation", + 0xA40A: "Sharpness", + 0xA40B: "DeviceSettingDescription", + 0xA40C: "SubjectDistanceRange", + 0xA420: "ImageUniqueID", + 0xA430: "CameraOwnerName", + 0xA431: "BodySerialNumber", + 0xA432: "LensSpecification", + 0xA433: "LensMake", + 0xA434: "LensModel", + 0xA435: "LensSerialNumber", + 0xA500: "Gamma", + 0xC4A5: "PrintImageMatching", + 0xC612: "DNGVersion", + 0xC613: "DNGBackwardVersion", + 0xC614: "UniqueCameraModel", + 0xC615: "LocalizedCameraModel", + 0xC616: "CFAPlaneColor", + 0xC617: "CFALayout", + 0xC618: "LinearizationTable", + 0xC619: "BlackLevelRepeatDim", + 0xC61A: "BlackLevel", + 0xC61B: "BlackLevelDeltaH", + 0xC61C: "BlackLevelDeltaV", + 0xC61D: "WhiteLevel", + 0xC61E: "DefaultScale", + 0xC61F: "DefaultCropOrigin", + 0xC620: 
"DefaultCropSize", + 0xC621: "ColorMatrix1", + 0xC622: "ColorMatrix2", + 0xC623: "CameraCalibration1", + 0xC624: "CameraCalibration2", + 0xC625: "ReductionMatrix1", + 0xC626: "ReductionMatrix2", + 0xC627: "AnalogBalance", + 0xC628: "AsShotNeutral", + 0xC629: "AsShotWhiteXY", + 0xC62A: "BaselineExposure", + 0xC62B: "BaselineNoise", + 0xC62C: "BaselineSharpness", + 0xC62D: "BayerGreenSplit", + 0xC62E: "LinearResponseLimit", + 0xC62F: "CameraSerialNumber", + 0xC630: "LensInfo", + 0xC631: "ChromaBlurRadius", + 0xC632: "AntiAliasStrength", + 0xC633: "ShadowScale", + 0xC634: "DNGPrivateData", + 0xC635: "MakerNoteSafety", + 0xC65A: "CalibrationIlluminant1", + 0xC65B: "CalibrationIlluminant2", + 0xC65C: "BestQualityScale", + 0xC65D: "RawDataUniqueID", + 0xC68B: "OriginalRawFileName", + 0xC68C: "OriginalRawFileData", + 0xC68D: "ActiveArea", + 0xC68E: "MaskedAreas", + 0xC68F: "AsShotICCProfile", + 0xC690: "AsShotPreProfileMatrix", + 0xC691: "CurrentICCProfile", + 0xC692: "CurrentPreProfileMatrix", + 0xC6BF: "ColorimetricReference", + 0xC6F3: "CameraCalibrationSignature", + 0xC6F4: "ProfileCalibrationSignature", + 0xC6F6: "AsShotProfileName", + 0xC6F7: "NoiseReductionApplied", + 0xC6F8: "ProfileName", + 0xC6F9: "ProfileHueSatMapDims", + 0xC6FA: "ProfileHueSatMapData1", + 0xC6FB: "ProfileHueSatMapData2", + 0xC6FC: "ProfileToneCurve", + 0xC6FD: "ProfileEmbedPolicy", + 0xC6FE: "ProfileCopyright", + 0xC714: "ForwardMatrix1", + 0xC715: "ForwardMatrix2", + 0xC716: "PreviewApplicationName", + 0xC717: "PreviewApplicationVersion", + 0xC718: "PreviewSettingsName", + 0xC719: "PreviewSettingsDigest", + 0xC71A: "PreviewColorSpace", + 0xC71B: "PreviewDateTime", + 0xC71C: "RawImageDigest", + 0xC71D: "OriginalRawFileDigest", + 0xC71E: "SubTileBlockSize", + 0xC71F: "RowInterleaveFactor", + 0xC725: "ProfileLookTableDims", + 0xC726: "ProfileLookTableData", + 0xC740: "OpcodeList1", + 0xC741: "OpcodeList2", + 0xC74E: "OpcodeList3", + 0xC761: "NoiseProfile", +} +"""Maps EXIF tags to tag names.""" + + +GPSTAGS = { + 0: "GPSVersionID", + 1: "GPSLatitudeRef", + 2: "GPSLatitude", + 3: "GPSLongitudeRef", + 4: "GPSLongitude", + 5: "GPSAltitudeRef", + 6: "GPSAltitude", + 7: "GPSTimeStamp", + 8: "GPSSatellites", + 9: "GPSStatus", + 10: "GPSMeasureMode", + 11: "GPSDOP", + 12: "GPSSpeedRef", + 13: "GPSSpeed", + 14: "GPSTrackRef", + 15: "GPSTrack", + 16: "GPSImgDirectionRef", + 17: "GPSImgDirection", + 18: "GPSMapDatum", + 19: "GPSDestLatitudeRef", + 20: "GPSDestLatitude", + 21: "GPSDestLongitudeRef", + 22: "GPSDestLongitude", + 23: "GPSDestBearingRef", + 24: "GPSDestBearing", + 25: "GPSDestDistanceRef", + 26: "GPSDestDistance", + 27: "GPSProcessingMethod", + 28: "GPSAreaInformation", + 29: "GPSDateStamp", + 30: "GPSDifferential", + 31: "GPSHPositioningError", +} +"""Maps EXIF GPS tags to tag names.""" diff --git a/minor_project/lib/python3.6/site-packages/PIL/FitsStubImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/FitsStubImagePlugin.py new file mode 100644 index 0000000..c2ce865 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/FitsStubImagePlugin.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library +# $Id$ +# +# FITS stub adapter +# +# Copyright (c) 1998-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler): + """ + Install application-specific FITS image handler. + + :param handler: Handler object. 
+ """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix): + return prefix[:6] == b"SIMPLE" + + +class FITSStubImageFile(ImageFile.StubImageFile): + + format = "FITS" + format_description = "FITS" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(6)): + raise SyntaxError("Not a FITS file") + + # FIXME: add more sanity checks here; mandatory header items + # include SIMPLE, BITPIX, NAXIS, etc. + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise OSError("FITS save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(FITSStubImageFile.format, FITSStubImageFile, _accept) +Image.register_save(FITSStubImageFile.format, _save) + +Image.register_extensions(FITSStubImageFile.format, [".fit", ".fits"]) diff --git a/minor_project/lib/python3.6/site-packages/PIL/FliImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/FliImagePlugin.py new file mode 100644 index 0000000..f2d4857 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/FliImagePlugin.py @@ -0,0 +1,171 @@ +# +# The Python Imaging Library. +# $Id$ +# +# FLI/FLC file handling. +# +# History: +# 95-09-01 fl Created +# 97-01-03 fl Fixed parser, setup decoder tile +# 98-07-15 fl Renamed offset attribute to avoid name clash +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile, ImagePalette +from ._binary import i16le as i16 +from ._binary import i32le as i32 +from ._binary import o8 + +# +# decoder + + +def _accept(prefix): + return len(prefix) >= 6 and i16(prefix, 4) in [0xAF11, 0xAF12] + + +## +# Image plugin for the FLI/FLC animation format. Use the seek +# method to load individual frames. 
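# Illustrative sketch only (not referenced by the plugin): walk an FLI/FLC
# animation with ImageSequence and export each frame. The file name is
# hypothetical; the per-frame delay in milliseconds is exposed by the plugin
# through info["duration"].
def _example_export_fli_frames(path="anim.flc"):
    from PIL import Image, ImageSequence

    with Image.open(path) as im:
        print(im.n_frames, "frames,", im.info.get("duration"), "ms per frame")
        for index, frame in enumerate(ImageSequence.Iterator(im)):
            frame.convert("RGB").save(f"frame_{index:03d}.png")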
+ + +class FliImageFile(ImageFile.ImageFile): + + format = "FLI" + format_description = "Autodesk FLI/FLC Animation" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # HEAD + s = self.fp.read(128) + if not ( + _accept(s) + and i16(s, 14) in [0, 3] # flags + and s[20:22] == b"\x00\x00" # reserved + ): + raise SyntaxError("not an FLI/FLC file") + + # frames + self.n_frames = i16(s, 6) + self.is_animated = self.n_frames > 1 + + # image characteristics + self.mode = "P" + self._size = i16(s, 8), i16(s, 10) + + # animation speed + duration = i32(s, 16) + magic = i16(s, 4) + if magic == 0xAF11: + duration = (duration * 1000) // 70 + self.info["duration"] = duration + + # look for palette + palette = [(a, a, a) for a in range(256)] + + s = self.fp.read(16) + + self.__offset = 128 + + if i16(s, 4) == 0xF100: + # prefix chunk; ignore it + self.__offset = self.__offset + i32(s) + s = self.fp.read(16) + + if i16(s, 4) == 0xF1FA: + # look for palette chunk + s = self.fp.read(6) + if i16(s, 4) == 11: + self._palette(palette, 2) + elif i16(s, 4) == 4: + self._palette(palette, 0) + + palette = [o8(r) + o8(g) + o8(b) for (r, g, b) in palette] + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + # set things up to decode first frame + self.__frame = -1 + self.__fp = self.fp + self.__rewind = self.fp.tell() + self.seek(0) + + def _palette(self, palette, shift): + # load palette + + i = 0 + for e in range(i16(self.fp.read(2))): + s = self.fp.read(2) + i = i + s[0] + n = s[1] + if n == 0: + n = 256 + s = self.fp.read(n * 3) + for n in range(0, len(s), 3): + r = s[n] << shift + g = s[n + 1] << shift + b = s[n + 2] << shift + palette[i] = (r, g, b) + i += 1 + + def seek(self, frame): + if not self._seek_check(frame): + return + if frame < self.__frame: + self._seek(0) + + for f in range(self.__frame + 1, frame + 1): + self._seek(f) + + def _seek(self, frame): + if frame == 0: + self.__frame = -1 + self.__fp.seek(self.__rewind) + self.__offset = 128 + else: + # ensure that the previous frame was loaded + self.load() + + if frame != self.__frame + 1: + raise ValueError(f"cannot seek to frame {frame}") + self.__frame = frame + + # move to next frame + self.fp = self.__fp + self.fp.seek(self.__offset) + + s = self.fp.read(4) + if not s: + raise EOFError + + framesize = i32(s) + + self.decodermaxblock = framesize + self.tile = [("fli", (0, 0) + self.size, self.__offset, None)] + + self.__offset += framesize + + def tell(self): + return self.__frame + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + +# +# registry + +Image.register_open(FliImageFile.format, FliImageFile, _accept) + +Image.register_extensions(FliImageFile.format, [".fli", ".flc"]) diff --git a/minor_project/lib/python3.6/site-packages/PIL/FontFile.py b/minor_project/lib/python3.6/site-packages/PIL/FontFile.py new file mode 100644 index 0000000..c5fc80b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/FontFile.py @@ -0,0 +1,111 @@ +# +# The Python Imaging Library +# $Id$ +# +# base class for raster font file parsers +# +# history: +# 1997-06-05 fl created +# 1997-08-19 fl restrict image width +# +# Copyright (c) 1997-1998 by Secret Labs AB +# Copyright (c) 1997-1998 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +import os + +from . 
import Image, _binary + +WIDTH = 800 + + +def puti16(fp, values): + """Write network order (big-endian) 16-bit sequence""" + for v in values: + if v < 0: + v += 65536 + fp.write(_binary.o16be(v)) + + +class FontFile: + """Base class for raster font file handlers.""" + + bitmap = None + + def __init__(self): + + self.info = {} + self.glyph = [None] * 256 + + def __getitem__(self, ix): + return self.glyph[ix] + + def compile(self): + """Create metrics and bitmap""" + + if self.bitmap: + return + + # create bitmap large enough to hold all data + h = w = maxwidth = 0 + lines = 1 + for glyph in self: + if glyph: + d, dst, src, im = glyph + h = max(h, src[3] - src[1]) + w = w + (src[2] - src[0]) + if w > WIDTH: + lines += 1 + w = src[2] - src[0] + maxwidth = max(maxwidth, w) + + xsize = maxwidth + ysize = lines * h + + if xsize == 0 and ysize == 0: + return "" + + self.ysize = h + + # paste glyphs into bitmap + self.bitmap = Image.new("1", (xsize, ysize)) + self.metrics = [None] * 256 + x = y = 0 + for i in range(256): + glyph = self[i] + if glyph: + d, dst, src, im = glyph + xx = src[2] - src[0] + # yy = src[3] - src[1] + x0, y0 = x, y + x = x + xx + if x > WIDTH: + x, y = 0, y + h + x0, y0 = x, y + x = xx + s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0 + self.bitmap.paste(im.crop(src), s) + self.metrics[i] = d, dst, s + + def save(self, filename): + """Save font""" + + self.compile() + + # font data + self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG") + + # font metrics + with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp: + fp.write(b"PILfont\n") + fp.write(f";;;;;;{self.ysize};\n".encode("ascii")) # HACK!!! + fp.write(b"DATA\n") + for id in range(256): + m = self.metrics[id] + if not m: + puti16(fp, [0] * 10) + else: + puti16(fp, m[0] + m[1] + m[2]) diff --git a/minor_project/lib/python3.6/site-packages/PIL/FpxImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/FpxImagePlugin.py new file mode 100644 index 0000000..5e38546 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/FpxImagePlugin.py @@ -0,0 +1,242 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library. +# $Id$ +# +# FlashPix support for PIL +# +# History: +# 97-01-25 fl Created (reads uncompressed RGB images only) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# +import olefile + +from . import Image, ImageFile +from ._binary import i32le as i32 + +# we map from colour field tuples to (mode, rawmode) descriptors +MODES = { + # opacity + (0x00007FFE): ("A", "L"), + # monochrome + (0x00010000,): ("L", "L"), + (0x00018000, 0x00017FFE): ("RGBA", "LA"), + # photo YCC + (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"), + (0x00028000, 0x00028001, 0x00028002, 0x00027FFE): ("RGBA", "YCCA;P"), + # standard RGB (NIFRGB) + (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"), + (0x00038000, 0x00038001, 0x00038002, 0x00037FFE): ("RGBA", "RGBA"), +} + + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:8] == olefile.MAGIC + + +## +# Image plugin for the FlashPix images. 
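# Illustrative sketch only (not referenced by the plugin): reading a FlashPix
# file requires the olefile package; Image.open() exposes the highest
# resolution subimage, which can then be converted like any other image.
# The file names are hypothetical.
def _example_convert_fpx(path="scan.fpx", out="scan.png"):
    from PIL import Image

    with Image.open(path) as im:
        print(im.format, im.mode, im.size)
        im.save(out)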
+ + +class FpxImageFile(ImageFile.ImageFile): + + format = "FPX" + format_description = "FlashPix" + + def _open(self): + # + # read the OLE directory and see if this is a likely + # to be a FlashPix file + + try: + self.ole = olefile.OleFileIO(self.fp) + except OSError as e: + raise SyntaxError("not an FPX file; invalid OLE file") from e + + if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B": + raise SyntaxError("not an FPX file; bad root CLSID") + + self._open_index(1) + + def _open_index(self, index=1): + # + # get the Image Contents Property Set + + prop = self.ole.getproperties( + [f"Data Object Store {index:06d}", "\005Image Contents"] + ) + + # size (highest resolution) + + self._size = prop[0x1000002], prop[0x1000003] + + size = max(self.size) + i = 1 + while size > 64: + size = size / 2 + i += 1 + self.maxid = i - 1 + + # mode. instead of using a single field for this, flashpix + # requires you to specify the mode for each channel in each + # resolution subimage, and leaves it to the decoder to make + # sure that they all match. for now, we'll cheat and assume + # that this is always the case. + + id = self.maxid << 16 + + s = prop[0x2000002 | id] + + colors = [] + bands = i32(s, 4) + if bands > 4: + raise OSError("Invalid number of bands") + for i in range(bands): + # note: for now, we ignore the "uncalibrated" flag + colors.append(i32(s, 8 + i * 4) & 0x7FFFFFFF) + + self.mode, self.rawmode = MODES[tuple(colors)] + + # load JPEG tables, if any + self.jpeg = {} + for i in range(256): + id = 0x3000001 | (i << 16) + if id in prop: + self.jpeg[i] = prop[id] + + self._open_subimage(1, self.maxid) + + def _open_subimage(self, index=1, subimage=0): + # + # setup tile descriptors for a given subimage + + stream = [ + f"Data Object Store {index:06d}", + f"Resolution {subimage:04d}", + "Subimage 0000 Header", + ] + + fp = self.ole.openstream(stream) + + # skip prefix + fp.read(28) + + # header stream + s = fp.read(36) + + size = i32(s, 4), i32(s, 8) + # tilecount = i32(s, 12) + tilesize = i32(s, 16), i32(s, 20) + # channels = i32(s, 24) + offset = i32(s, 28) + length = i32(s, 32) + + if size != self.size: + raise OSError("subimage mismatch") + + # get tile descriptors + fp.seek(28 + offset) + s = fp.read(i32(s, 12) * length) + + x = y = 0 + xsize, ysize = size + xtile, ytile = tilesize + self.tile = [] + + for i in range(0, len(s), length): + + compression = i32(s, i + 8) + + if compression == 0: + self.tile.append( + ( + "raw", + (x, y, x + xtile, y + ytile), + i32(s, i) + 28, + (self.rawmode), + ) + ) + + elif compression == 1: + + # FIXME: the fill decoder is not implemented + self.tile.append( + ( + "fill", + (x, y, x + xtile, y + ytile), + i32(s, i) + 28, + (self.rawmode, s[12:16]), + ) + ) + + elif compression == 2: + + internal_color_conversion = s[14] + jpeg_tables = s[15] + rawmode = self.rawmode + + if internal_color_conversion: + # The image is stored as usual (usually YCbCr). + if rawmode == "RGBA": + # For "RGBA", data is stored as YCbCrA based on + # negative RGB. The following trick works around + # this problem : + jpegmode, rawmode = "YCbCrK", "CMYK" + else: + jpegmode = None # let the decoder decide + + else: + # The image is stored as defined by rawmode + jpegmode = rawmode + + self.tile.append( + ( + "jpeg", + (x, y, x + xtile, y + ytile), + i32(s, i) + 28, + (rawmode, jpegmode), + ) + ) + + # FIXME: jpeg tables are tile dependent; the prefix + # data must be placed in the tile descriptor itself! 
+ + if jpeg_tables: + self.tile_prefix = self.jpeg[jpeg_tables] + + else: + raise OSError("unknown/invalid compression") + + x = x + xtile + if x >= xsize: + x, y = 0, y + ytile + if y >= ysize: + break # isn't really required + + self.stream = stream + self.fp = None + + def load(self): + + if not self.fp: + self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"]) + + return ImageFile.ImageFile.load(self) + + +# +# -------------------------------------------------------------------- + + +Image.register_open(FpxImageFile.format, FpxImageFile, _accept) + +Image.register_extension(FpxImageFile.format, ".fpx") diff --git a/minor_project/lib/python3.6/site-packages/PIL/FtexImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/FtexImagePlugin.py new file mode 100644 index 0000000..9006612 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/FtexImagePlugin.py @@ -0,0 +1,106 @@ +""" +A Pillow loader for .ftc and .ftu files (FTEX) +Jerome Leclanche + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ + +Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001 + +The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a +packed custom format called FTEX. This file format uses file extensions FTC +and FTU. +* FTC files are compressed textures (using standard texture compression). +* FTU files are not compressed. +Texture File Format +The FTC and FTU texture files both use the same format. This +has the following structure: +{header} +{format_directory} +{data} +Where: +{header} = { + u32:magic, + u32:version, + u32:width, + u32:height, + u32:mipmap_count, + u32:format_count +} + +* The "magic" number is "FTEX". +* "width" and "height" are the dimensions of the texture. +* "mipmap_count" is the number of mipmaps in the texture. +* "format_count" is the number of texture formats (different versions of the +same texture) in this file. + +{format_directory} = format_count * { u32:format, u32:where } + +The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB +uncompressed textures. +The texture data for a format starts at the position "where" in the file. + +Each set of texture data in the file has the following structure: +{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } } +* "mipmap_size" is the number of bytes in that mip level. For compressed +textures this is the size of the texture data compressed with DXT1. For 24 bit +uncompressed textures, this is 3 * width * height. Following this are the image +bytes for that mipmap level. + +Note: All data is stored in little-Endian (Intel) byte order. +""" + +import struct +from io import BytesIO + +from . import Image, ImageFile + +MAGIC = b"FTEX" +FORMAT_DXT1 = 0 +FORMAT_UNCOMPRESSED = 1 + + +class FtexImageFile(ImageFile.ImageFile): + format = "FTEX" + format_description = "Texture File Format (IW2:EOC)" + + def _open(self): + struct.unpack("= 8 and i32(prefix, 0) >= 20 and i32(prefix, 4) in (1, 2) + + +## +# Image plugin for the GIMP brush format. 
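# Illustrative sketch only (not referenced by the plugin): the brush spacing
# and embedded comment are exposed through the info dictionary of the opened
# image. The file name is hypothetical.
def _example_inspect_gbr(path="brush.gbr"):
    from PIL import Image

    with Image.open(path) as im:
        print(im.size, im.mode, im.info["spacing"], im.info["comment"])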
+ + +class GbrImageFile(ImageFile.ImageFile): + + format = "GBR" + format_description = "GIMP brush file" + + def _open(self): + header_size = i32(self.fp.read(4)) + version = i32(self.fp.read(4)) + if header_size < 20: + raise SyntaxError("not a GIMP brush") + if version not in (1, 2): + raise SyntaxError(f"Unsupported GIMP brush version: {version}") + + width = i32(self.fp.read(4)) + height = i32(self.fp.read(4)) + color_depth = i32(self.fp.read(4)) + if width <= 0 or height <= 0: + raise SyntaxError("not a GIMP brush") + if color_depth not in (1, 4): + raise SyntaxError(f"Unsupported GIMP brush color depth: {color_depth}") + + if version == 1: + comment_length = header_size - 20 + else: + comment_length = header_size - 28 + magic_number = self.fp.read(4) + if magic_number != b"GIMP": + raise SyntaxError("not a GIMP brush, bad magic number") + self.info["spacing"] = i32(self.fp.read(4)) + + comment = self.fp.read(comment_length)[:-1] + + if color_depth == 1: + self.mode = "L" + else: + self.mode = "RGBA" + + self._size = width, height + + self.info["comment"] = comment + + # Image might not be small + Image._decompression_bomb_check(self.size) + + # Data is an uncompressed block of w * h * bytes/pixel + self._data_size = width * height * color_depth + + def load(self): + if self.im: + # Already loaded + return + + self.im = Image.core.new(self.mode, self.size) + self.frombytes(self.fp.read(self._data_size)) + + +# +# registry + + +Image.register_open(GbrImageFile.format, GbrImageFile, _accept) +Image.register_extension(GbrImageFile.format, ".gbr") diff --git a/minor_project/lib/python3.6/site-packages/PIL/GdImageFile.py b/minor_project/lib/python3.6/site-packages/PIL/GdImageFile.py new file mode 100644 index 0000000..9c34ada --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/GdImageFile.py @@ -0,0 +1,90 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GD file handling +# +# History: +# 1996-04-12 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +""" +.. note:: + This format cannot be automatically recognized, so the + class is not registered for use with :py:func:`PIL.Image.open()`. To open a + gd file, use the :py:func:`PIL.GdImageFile.open()` function instead. + +.. warning:: + THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This + implementation is provided for convenience and demonstrational + purposes only. +""" + + +from . import ImageFile, ImagePalette, UnidentifiedImageError +from ._binary import i16be as i16 +from ._binary import i32be as i32 + + +class GdImageFile(ImageFile.ImageFile): + """ + Image plugin for the GD uncompressed format. Note that this format + is not supported by the standard :py:func:`PIL.Image.open()` function. To use + this plugin, you have to import the :py:mod:`PIL.GdImageFile` module and + use the :py:func:`PIL.GdImageFile.open()` function. 
+ """ + + format = "GD" + format_description = "GD uncompressed images" + + def _open(self): + + # Header + s = self.fp.read(1037) + + if not i16(s) in [65534, 65535]: + raise SyntaxError("Not a valid GD 2.x .gd file") + + self.mode = "L" # FIXME: "P" + self._size = i16(s, 2), i16(s, 4) + + trueColor = s[6] + trueColorOffset = 2 if trueColor else 0 + + # transparency index + tindex = i32(s, 7 + trueColorOffset) + if tindex < 256: + self.info["transparency"] = tindex + + self.palette = ImagePalette.raw( + "XBGR", s[7 + trueColorOffset + 4 : 7 + trueColorOffset + 4 + 256 * 4] + ) + + self.tile = [ + ("raw", (0, 0) + self.size, 7 + trueColorOffset + 4 + 256 * 4, ("L", 0, 1)) + ] + + +def open(fp, mode="r"): + """ + Load texture from a GD image file. + + :param filename: GD file name, or an opened file handle. + :param mode: Optional mode. In this version, if the mode argument + is given, it must be "r". + :returns: An image instance. + :raises OSError: If the image could not be read. + """ + if mode != "r": + raise ValueError("bad mode") + + try: + return GdImageFile(fp) + except SyntaxError as e: + raise UnidentifiedImageError("cannot identify this image file") from e diff --git a/minor_project/lib/python3.6/site-packages/PIL/GifImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/GifImagePlugin.py new file mode 100644 index 0000000..7c083bd --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/GifImagePlugin.py @@ -0,0 +1,888 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GIF file handling +# +# History: +# 1995-09-01 fl Created +# 1996-12-14 fl Added interlace support +# 1996-12-30 fl Added animation support +# 1997-01-05 fl Added write support, fixed local colour map bug +# 1997-02-23 fl Make sure to load raster data in getdata() +# 1997-07-05 fl Support external decoder (0.4) +# 1998-07-09 fl Handle all modes when saving (0.5) +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) +# 2001-04-17 fl Added palette optimization (0.7) +# 2002-06-06 fl Added transparency support for save (0.8) +# 2004-02-24 fl Disable interlacing for small images +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import itertools +import math +import os +import subprocess + +from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence +from ._binary import i16le as i16 +from ._binary import o8 +from ._binary import o16le as o16 + +# -------------------------------------------------------------------- +# Identify/read GIF files + + +def _accept(prefix): + return prefix[:6] in [b"GIF87a", b"GIF89a"] + + +## +# Image plugin for GIF images. This plugin supports both GIF87 and +# GIF89 images. 
+ + +class GifImageFile(ImageFile.ImageFile): + + format = "GIF" + format_description = "Compuserve GIF" + _close_exclusive_fp_after_loading = False + + global_palette = None + + def data(self): + s = self.fp.read(1) + if s and s[0]: + return self.fp.read(s[0]) + return None + + def _open(self): + + # Screen + s = self.fp.read(13) + if not _accept(s): + raise SyntaxError("not a GIF file") + + self.info["version"] = s[:6] + self._size = i16(s, 6), i16(s, 8) + self.tile = [] + flags = s[10] + bits = (flags & 7) + 1 + + if flags & 128: + # get global palette + self.info["background"] = s[11] + # check if palette contains colour indices + p = self.fp.read(3 << bits) + for i in range(0, len(p), 3): + if not (i // 3 == p[i] == p[i + 1] == p[i + 2]): + p = ImagePalette.raw("RGB", p) + self.global_palette = self.palette = p + break + + self.__fp = self.fp # FIXME: hack + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None + self._seek(0) # get ready to read first frame + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self.seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + if self._n_frames is not None: + self._is_animated = self._n_frames != 1 + else: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if not self._seek_check(frame): + return + if frame < self.__frame: + if frame != 0: + self.im = None + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError as e: + self.seek(last_frame) + raise EOFError("no more images in GIF file") from e + + def _seek(self, frame): + + if frame == 0: + # rewind + self.__offset = 0 + self.dispose = None + self.dispose_extent = [0, 0, 0, 0] # x0, y0, x1, y1 + self.__frame = -1 + self.__fp.seek(self.__rewind) + self._prev_im = None + self.disposal_method = 0 + else: + # ensure that the previous frame was loaded + if not self.im: + self.load() + + if frame != self.__frame + 1: + raise ValueError(f"cannot seek to frame {frame}") + self.__frame = frame + + self.tile = [] + + self.fp = self.__fp + if self.__offset: + # backup to last frame + self.fp.seek(self.__offset) + while self.data(): + pass + self.__offset = 0 + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + + from copy import copy + + self.palette = copy(self.global_palette) + + info = {} + while True: + + s = self.fp.read(1) + if not s or s == b";": + break + + elif s == b"!": + # + # extensions + # + s = self.fp.read(1) + block = self.data() + if s[0] == 249: + # + # graphic control extension + # + flags = block[0] + if flags & 1: + info["transparency"] = block[3] + info["duration"] = i16(block, 1) * 10 + + # disposal method - find the value of bits 4 - 6 + dispose_bits = 0b00011100 & flags + dispose_bits = dispose_bits >> 2 + if dispose_bits: + # only set the dispose if it is not + # unspecified. 
I'm not sure if this is + # correct, but it seems to prevent the last + # frame from looking odd for some animations + self.disposal_method = dispose_bits + elif s[0] == 254: + # + # comment extension + # + while block: + if "comment" in info: + info["comment"] += block + else: + info["comment"] = block + block = self.data() + continue + elif s[0] == 255: + # + # application extension + # + info["extension"] = block, self.fp.tell() + if block[:11] == b"NETSCAPE2.0": + block = self.data() + if len(block) >= 3 and block[0] == 1: + info["loop"] = i16(block, 1) + while self.data(): + pass + + elif s == b",": + # + # local image + # + s = self.fp.read(9) + + # extent + x0, y0 = i16(s, 0), i16(s, 2) + x1, y1 = x0 + i16(s, 4), y0 + i16(s, 6) + if x1 > self.size[0] or y1 > self.size[1]: + self._size = max(x1, self.size[0]), max(y1, self.size[1]) + self.dispose_extent = x0, y0, x1, y1 + flags = s[8] + + interlace = (flags & 64) != 0 + + if flags & 128: + bits = (flags & 7) + 1 + self.palette = ImagePalette.raw("RGB", self.fp.read(3 << bits)) + + # image data + bits = self.fp.read(1)[0] + self.__offset = self.fp.tell() + self.tile = [ + ("gif", (x0, y0, x1, y1), self.__offset, (bits, interlace)) + ] + break + + else: + pass + # raise OSError, "illegal GIF tag `%x`" % s[0] + + try: + if self.disposal_method < 2: + # do not dispose or none specified + self.dispose = None + elif self.disposal_method == 2: + # replace with background colour + Image._decompression_bomb_check(self.size) + self.dispose = Image.core.fill("P", self.size, self.info["background"]) + else: + # replace with previous contents + if self.im: + self.dispose = self.im.copy() + + # only dispose the extent in this frame + if self.dispose: + self.dispose = self._crop(self.dispose, self.dispose_extent) + except (AttributeError, KeyError): + pass + + if not self.tile: + # self.__fp = None + raise EOFError + + for k in ["transparency", "duration", "comment", "extension", "loop"]: + if k in info: + self.info[k] = info[k] + elif k in self.info: + del self.info[k] + + self.mode = "L" + if self.palette: + self.mode = "P" + + def tell(self): + return self.__frame + + def load_end(self): + ImageFile.ImageFile.load_end(self) + + # if the disposal method is 'do not dispose', transparent + # pixels should show the content of the previous frame + if self._prev_im and self._prev_disposal_method == 1: + # we do this by pasting the updated area onto the previous + # frame which we then use as the current image content + updated = self._crop(self.im, self.dispose_extent) + self._prev_im.paste(updated, self.dispose_extent, updated.convert("RGBA")) + self.im = self._prev_im + self._prev_im = self.im.copy() + self._prev_disposal_method = self.disposal_method + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + +# -------------------------------------------------------------------- +# Write GIF files + + +RAWMODE = {"1": "L", "L": "L", "P": "P"} + + +def _normalize_mode(im, initial_call=False): + """ + Takes an image (or frame), returns an image in a mode that is appropriate + for saving in a Gif. + + It may return the original image, or it may return an image converted to + palette or 'L' mode. + + UNDONE: What is the point of mucking with the initial call palette, for + an image that shouldn't have a palette, or it would be a mode 'P' and + get returned in the RAWMODE clause. 
+ + :param im: Image object + :param initial_call: Default false, set to true for a single frame. + :returns: Image object + """ + if im.mode in RAWMODE: + im.load() + return im + if Image.getmodebase(im.mode) == "RGB": + if initial_call: + palette_size = 256 + if im.palette: + palette_size = len(im.palette.getdata()[1]) // 3 + return im.convert("P", palette=Image.ADAPTIVE, colors=palette_size) + else: + return im.convert("P") + return im.convert("L") + + +def _normalize_palette(im, palette, info): + """ + Normalizes the palette for image. + - Sets the palette to the incoming palette, if provided. + - Ensures that there's a palette for L mode images + - Optimizes the palette if necessary/desired. + + :param im: Image object + :param palette: bytes object containing the source palette, or .... + :param info: encoderinfo + :returns: Image object + """ + source_palette = None + if palette: + # a bytes palette + if isinstance(palette, (bytes, bytearray, list)): + source_palette = bytearray(palette[:768]) + if isinstance(palette, ImagePalette.ImagePalette): + source_palette = bytearray( + itertools.chain.from_iterable( + zip( + palette.palette[:256], + palette.palette[256:512], + palette.palette[512:768], + ) + ) + ) + + if im.mode == "P": + if not source_palette: + source_palette = im.im.getpalette("RGB")[:768] + else: # L-mode + if not source_palette: + source_palette = bytearray(i // 3 for i in range(768)) + im.palette = ImagePalette.ImagePalette("RGB", palette=source_palette) + + used_palette_colors = _get_optimize(im, info) + if used_palette_colors is not None: + return im.remap_palette(used_palette_colors, source_palette) + + im.palette.palette = source_palette + return im + + +def _write_single_frame(im, fp, palette): + im_out = _normalize_mode(im, True) + for k, v in im_out.info.items(): + im.encoderinfo.setdefault(k, v) + im_out = _normalize_palette(im_out, palette, im.encoderinfo) + + for s in _get_global_header(im_out, im.encoderinfo): + fp.write(s) + + # local image header + flags = 0 + if get_interlace(im): + flags = flags | 64 + _write_local_header(fp, im, (0, 0), flags) + + im_out.encoderconfig = (8, get_interlace(im)) + ImageFile._save(im_out, fp, [("gif", (0, 0) + im.size, 0, RAWMODE[im_out.mode])]) + + fp.write(b"\0") # end of image data + + +def _write_multiple_frames(im, fp, palette): + + duration = im.encoderinfo.get("duration", im.info.get("duration")) + disposal = im.encoderinfo.get("disposal", im.info.get("disposal")) + + im_frames = [] + frame_count = 0 + background_im = None + for imSequence in itertools.chain([im], im.encoderinfo.get("append_images", [])): + for im_frame in ImageSequence.Iterator(imSequence): + # a copy is required here since seek can still mutate the image + im_frame = _normalize_mode(im_frame.copy()) + if frame_count == 0: + for k, v in im_frame.info.items(): + im.encoderinfo.setdefault(k, v) + im_frame = _normalize_palette(im_frame, palette, im.encoderinfo) + + encoderinfo = im.encoderinfo.copy() + if isinstance(duration, (list, tuple)): + encoderinfo["duration"] = duration[frame_count] + if isinstance(disposal, (list, tuple)): + encoderinfo["disposal"] = disposal[frame_count] + frame_count += 1 + + if im_frames: + # delta frame + previous = im_frames[-1] + if encoderinfo.get("disposal") == 2: + if background_im is None: + background = _get_background( + im, + im.encoderinfo.get("background", im.info.get("background")), + ) + background_im = Image.new("P", im_frame.size, background) + background_im.putpalette(im_frames[0]["im"].palette) + base_im = 
background_im + else: + base_im = previous["im"] + if _get_palette_bytes(im_frame) == _get_palette_bytes(base_im): + delta = ImageChops.subtract_modulo(im_frame, base_im) + else: + delta = ImageChops.subtract_modulo( + im_frame.convert("RGB"), base_im.convert("RGB") + ) + bbox = delta.getbbox() + if not bbox: + # This frame is identical to the previous frame + if duration: + previous["encoderinfo"]["duration"] += encoderinfo["duration"] + continue + else: + bbox = None + im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo}) + + if len(im_frames) > 1: + for frame_data in im_frames: + im_frame = frame_data["im"] + if not frame_data["bbox"]: + # global header + for s in _get_global_header(im_frame, frame_data["encoderinfo"]): + fp.write(s) + offset = (0, 0) + else: + # compress difference + frame_data["encoderinfo"]["include_color_table"] = True + + im_frame = im_frame.crop(frame_data["bbox"]) + offset = frame_data["bbox"][:2] + _write_frame_data(fp, im_frame, offset, frame_data["encoderinfo"]) + return True + elif "duration" in im.encoderinfo and isinstance( + im.encoderinfo["duration"], (list, tuple) + ): + # Since multiple frames will not be written, add together the frame durations + im.encoderinfo["duration"] = sum(im.encoderinfo["duration"]) + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +def _save(im, fp, filename, save_all=False): + # header + if "palette" in im.encoderinfo or "palette" in im.info: + palette = im.encoderinfo.get("palette", im.info.get("palette")) + else: + palette = None + im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True) + + if not save_all or not _write_multiple_frames(im, fp, palette): + _write_single_frame(im, fp, palette) + + fp.write(b";") # end of file + + if hasattr(fp, "flush"): + fp.flush() + + +def get_interlace(im): + interlace = im.encoderinfo.get("interlace", 1) + + # workaround for @PIL153 + if min(im.size) < 16: + interlace = 0 + + return interlace + + +def _write_local_header(fp, im, offset, flags): + transparent_color_exists = False + try: + transparency = im.encoderinfo["transparency"] + except KeyError: + pass + else: + transparency = int(transparency) + # optimize the block away if transparent color is not used + transparent_color_exists = True + + used_palette_colors = _get_optimize(im, im.encoderinfo) + if used_palette_colors is not None: + # adjust the transparency index after optimize + try: + transparency = used_palette_colors.index(transparency) + except ValueError: + transparent_color_exists = False + + if "duration" in im.encoderinfo: + duration = int(im.encoderinfo["duration"] / 10) + else: + duration = 0 + + disposal = int(im.encoderinfo.get("disposal", 0)) + + if transparent_color_exists or duration != 0 or disposal: + packed_flag = 1 if transparent_color_exists else 0 + packed_flag |= disposal << 2 + if not transparent_color_exists: + transparency = 0 + + fp.write( + b"!" + + o8(249) # extension intro + + o8(4) # length + + o8(packed_flag) # packed fields + + o16(duration) # duration + + o8(transparency) # transparency index + + o8(0) + ) + + if "comment" in im.encoderinfo and 1 <= len(im.encoderinfo["comment"]): + fp.write(b"!" 
+ o8(254)) # extension intro + comment = im.encoderinfo["comment"] + if isinstance(comment, str): + comment = comment.encode() + for i in range(0, len(comment), 255): + subblock = comment[i : i + 255] + fp.write(o8(len(subblock)) + subblock) + fp.write(o8(0)) + if "loop" in im.encoderinfo: + number_of_loops = im.encoderinfo["loop"] + fp.write( + b"!" + + o8(255) # extension intro + + o8(11) + + b"NETSCAPE2.0" + + o8(3) + + o8(1) + + o16(number_of_loops) # number of loops + + o8(0) + ) + include_color_table = im.encoderinfo.get("include_color_table") + if include_color_table: + palette_bytes = _get_palette_bytes(im) + color_table_size = _get_color_table_size(palette_bytes) + if color_table_size: + flags = flags | 128 # local color table flag + flags = flags | color_table_size + + fp.write( + b"," + + o16(offset[0]) # offset + + o16(offset[1]) + + o16(im.size[0]) # size + + o16(im.size[1]) + + o8(flags) # flags + ) + if include_color_table and color_table_size: + fp.write(_get_header_palette(palette_bytes)) + fp.write(o8(8)) # bits + + +def _save_netpbm(im, fp, filename): + + # Unused by default. + # To use, uncomment the register_save call at the end of the file. + # + # If you need real GIF compression and/or RGB quantization, you + # can use the external NETPBM/PBMPLUS utilities. See comments + # below for information on how to enable this. + tempfile = im._dump() + + try: + with open(filename, "wb") as f: + if im.mode != "RGB": + subprocess.check_call( + ["ppmtogif", tempfile], stdout=f, stderr=subprocess.DEVNULL + ) + else: + # Pipe ppmquant output into ppmtogif + # "ppmquant 256 %s | ppmtogif > %s" % (tempfile, filename) + quant_cmd = ["ppmquant", "256", tempfile] + togif_cmd = ["ppmtogif"] + quant_proc = subprocess.Popen( + quant_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL + ) + togif_proc = subprocess.Popen( + togif_cmd, + stdin=quant_proc.stdout, + stdout=f, + stderr=subprocess.DEVNULL, + ) + + # Allow ppmquant to receive SIGPIPE if ppmtogif exits + quant_proc.stdout.close() + + retcode = quant_proc.wait() + if retcode: + raise subprocess.CalledProcessError(retcode, quant_cmd) + + retcode = togif_proc.wait() + if retcode: + raise subprocess.CalledProcessError(retcode, togif_cmd) + finally: + try: + os.unlink(tempfile) + except OSError: + pass + + +# Force optimization so that we can test performance against +# cases where it took lots of memory and time previously. +_FORCE_OPTIMIZE = False + + +def _get_optimize(im, info): + """ + Palette optimization is a potentially expensive operation. + + This function determines if the palette should be optimized using + some heuristics, then returns the list of palette entries in use. + + :param im: Image object + :param info: encoderinfo + :returns: list of indexes of palette entries in use, or None + """ + if im.mode in ("P", "L") and info and info.get("optimize", 0): + # Potentially expensive operation. + + # The palette saves 3 bytes per color not used, but palette + # lengths are restricted to 3*(2**N) bytes. Max saving would + # be 768 -> 6 bytes if we went all the way down to 2 colors. + # * If we're over 128 colors, we can't save any space. + # * If there aren't any holes, it's not worth collapsing. + # * If we have a 'large' image, the palette is in the noise. 
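+        # The checks below apply those heuristics: L mode (or _FORCE_OPTIMIZE)
+        # is always scanned, P mode only for images with fewer than 512*512 pixels.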
+ + # create the new palette if not every color is used + optimise = _FORCE_OPTIMIZE or im.mode == "L" + if optimise or im.width * im.height < 512 * 512: + # check which colors are used + used_palette_colors = [] + for i, count in enumerate(im.histogram()): + if count: + used_palette_colors.append(i) + + if optimise or ( + len(used_palette_colors) <= 128 + and max(used_palette_colors) > len(used_palette_colors) + ): + return used_palette_colors + + +def _get_color_table_size(palette_bytes): + # calculate the palette size for the header + if not palette_bytes: + return 0 + elif len(palette_bytes) < 9: + return 1 + else: + return math.ceil(math.log(len(palette_bytes) // 3, 2)) - 1 + + +def _get_header_palette(palette_bytes): + """ + Returns the palette, null padded to the next power of 2 (*3) bytes + suitable for direct inclusion in the GIF header + + :param palette_bytes: Unpadded palette bytes, in RGBRGB form + :returns: Null padded palette + """ + color_table_size = _get_color_table_size(palette_bytes) + + # add the missing amount of bytes + # the palette has to be 2< 0: + palette_bytes += o8(0) * 3 * actual_target_size_diff + return palette_bytes + + +def _get_palette_bytes(im): + """ + Gets the palette for inclusion in the gif header + + :param im: Image object + :returns: Bytes, len<=768 suitable for inclusion in gif header + """ + return im.palette.palette + + +def _get_background(im, infoBackground): + background = 0 + if infoBackground: + background = infoBackground + if isinstance(background, tuple): + # WebPImagePlugin stores an RGBA value in info["background"] + # So it must be converted to the same format as GifImagePlugin's + # info["background"] - a global color table index + background = im.palette.getcolor(background) + return background + + +def _get_global_header(im, info): + """Return a list of strings representing a GIF header""" + + # Header Block + # http://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp + + version = b"87a" + for extensionKey in ["transparency", "duration", "loop", "comment"]: + if info and extensionKey in info: + if (extensionKey == "duration" and info[extensionKey] == 0) or ( + extensionKey == "comment" and not (1 <= len(info[extensionKey]) <= 255) + ): + continue + version = b"89a" + break + else: + if im.info.get("version") == b"89a": + version = b"89a" + + background = _get_background(im, info.get("background")) + + palette_bytes = _get_palette_bytes(im) + color_table_size = _get_color_table_size(palette_bytes) + + return [ + b"GIF" # signature + + version # version + + o16(im.size[0]) # canvas width + + o16(im.size[1]), # canvas height + # Logical Screen Descriptor + # size of global color table + global color table flag + o8(color_table_size + 128), # packed fields + # background + reserved/aspect + o8(background) + o8(0), + # Global Color Table + _get_header_palette(palette_bytes), + ] + + +def _write_frame_data(fp, im_frame, offset, params): + try: + im_frame.encoderinfo = params + + # local image header + _write_local_header(fp, im_frame, offset, 0) + + ImageFile._save( + im_frame, fp, [("gif", (0, 0) + im_frame.size, 0, RAWMODE[im_frame.mode])] + ) + + fp.write(b"\0") # end of image data + finally: + del im_frame.encoderinfo + + +# -------------------------------------------------------------------- +# Legacy GIF utilities + + +def getheader(im, palette=None, info=None): + """ + Legacy Method to get Gif data from image. + + Warning:: May modify image data. 
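+    (The image's ``palette`` and ``im`` attributes are replaced by their
+    normalized versions.)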
+ + :param im: Image object + :param palette: bytes object containing the source palette, or .... + :param info: encoderinfo + :returns: tuple of(list of header items, optimized palette) + + """ + used_palette_colors = _get_optimize(im, info) + + if info is None: + info = {} + + if "background" not in info and "background" in im.info: + info["background"] = im.info["background"] + + im_mod = _normalize_palette(im, palette, info) + im.palette = im_mod.palette + im.im = im_mod.im + header = _get_global_header(im, info) + + return header, used_palette_colors + + +# To specify duration, add the time in milliseconds to getdata(), +# e.g. getdata(im_frame, duration=1000) +def getdata(im, offset=(0, 0), **params): + """ + Legacy Method + + Return a list of strings representing this image. + The first string is a local image header, the rest contains + encoded image data. + + :param im: Image object + :param offset: Tuple of (x, y) pixels. Defaults to (0,0) + :param \\**params: E.g. duration or other encoder info parameters + :returns: List of Bytes containing gif encoded frame data + + """ + + class Collector: + data = [] + + def write(self, data): + self.data.append(data) + + im.load() # make sure raster data is available + + fp = Collector() + + _write_frame_data(fp, im, offset, params) + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GifImageFile.format, GifImageFile, _accept) +Image.register_save(GifImageFile.format, _save) +Image.register_save_all(GifImageFile.format, _save_all) +Image.register_extension(GifImageFile.format, ".gif") +Image.register_mime(GifImageFile.format, "image/gif") + +# +# Uncomment the following line if you wish to use NETPBM/PBMPLUS +# instead of the built-in "uncompressed" GIF encoder + +# Image.register_save(GifImageFile.format, _save_netpbm) diff --git a/minor_project/lib/python3.6/site-packages/PIL/GimpGradientFile.py b/minor_project/lib/python3.6/site-packages/PIL/GimpGradientFile.py new file mode 100644 index 0000000..7ab7f99 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/GimpGradientFile.py @@ -0,0 +1,140 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read (and render) GIMP gradient files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +""" +Stuff to translate curve segments to palette values (derived from +the corresponding code in GIMP, written by Federico Mena Quintero. +See the GIMP distribution for more information.) 
+""" + + +from math import log, pi, sin, sqrt + +from ._binary import o8 + +EPSILON = 1e-10 +"""""" # Enable auto-doc for data member + + +def linear(middle, pos): + if pos <= middle: + if middle < EPSILON: + return 0.0 + else: + return 0.5 * pos / middle + else: + pos = pos - middle + middle = 1.0 - middle + if middle < EPSILON: + return 1.0 + else: + return 0.5 + 0.5 * pos / middle + + +def curved(middle, pos): + return pos ** (log(0.5) / log(max(middle, EPSILON))) + + +def sine(middle, pos): + return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0 + + +def sphere_increasing(middle, pos): + return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2) + + +def sphere_decreasing(middle, pos): + return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2) + + +SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing] +"""""" # Enable auto-doc for data member + + +class GradientFile: + + gradient = None + + def getpalette(self, entries=256): + + palette = [] + + ix = 0 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + for i in range(entries): + + x = i / (entries - 1) + + while x1 < x: + ix += 1 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + w = x1 - x0 + + if w < EPSILON: + scale = segment(0.5, 0.5) + else: + scale = segment((xm - x0) / w, (x - x0) / w) + + # expand to RGBA + r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5)) + g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5)) + b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5)) + a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5)) + + # add to palette + palette.append(r + g + b + a) + + return b"".join(palette), "RGBA" + + +class GimpGradientFile(GradientFile): + """File handler for GIMP's gradient format.""" + + def __init__(self, fp): + + if fp.readline()[:13] != b"GIMP Gradient": + raise SyntaxError("not a GIMP gradient file") + + line = fp.readline() + + # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do + if line.startswith(b"Name: "): + line = fp.readline().strip() + + count = int(line) + + gradient = [] + + for i in range(count): + + s = fp.readline().split() + w = [float(x) for x in s[:11]] + + x0, x1 = w[0], w[2] + xm = w[1] + rgb0 = w[3:7] + rgb1 = w[7:11] + + segment = SEGMENTS[int(s[11])] + cspace = int(s[12]) + + if cspace != 0: + raise OSError("cannot handle HSV colour space") + + gradient.append((x0, x1, xm, rgb0, rgb1, segment)) + + self.gradient = gradient diff --git a/minor_project/lib/python3.6/site-packages/PIL/GimpPaletteFile.py b/minor_project/lib/python3.6/site-packages/PIL/GimpPaletteFile.py new file mode 100644 index 0000000..10fd3ad --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/GimpPaletteFile.py @@ -0,0 +1,56 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read GIMP palette files +# +# History: +# 1997-08-23 fl Created +# 2004-09-07 fl Support GIMP 2.0 palette files. +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1997-2004. +# +# See the README file for information on usage and redistribution. 
+# + +import re + +from ._binary import o8 + + +class GimpPaletteFile: + """File handler for GIMP's palette format.""" + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [o8(i) * 3 for i in range(256)] + + if fp.readline()[:12] != b"GIMP Palette": + raise SyntaxError("not a GIMP palette file") + + for i in range(256): + + s = fp.readline() + if not s: + break + + # skip fields and comment lines + if re.match(br"\w+:|#", s): + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = tuple(map(int, s.split()[:3])) + if len(v) != 3: + raise ValueError("bad palette entry") + + self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/minor_project/lib/python3.6/site-packages/PIL/GribStubImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/GribStubImagePlugin.py new file mode 100644 index 0000000..b9bdd16 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/GribStubImagePlugin.py @@ -0,0 +1,73 @@ +# +# The Python Imaging Library +# $Id$ +# +# GRIB stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler): + """ + Install application-specific GRIB image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix): + return prefix[0:4] == b"GRIB" and prefix[7] == 1 + + +class GribStubImageFile(ImageFile.StubImageFile): + + format = "GRIB" + format_description = "GRIB" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not a GRIB file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise OSError("GRIB save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept) +Image.register_save(GribStubImageFile.format, _save) + +Image.register_extension(GribStubImageFile.format, ".grib") diff --git a/minor_project/lib/python3.6/site-packages/PIL/Hdf5StubImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/Hdf5StubImagePlugin.py new file mode 100644 index 0000000..362f2d3 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/Hdf5StubImagePlugin.py @@ -0,0 +1,73 @@ +# +# The Python Imaging Library +# $Id$ +# +# HDF5 stub adapter +# +# Copyright (c) 2000-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler): + """ + Install application-specific HDF5 image handler. + + :param handler: Handler object. 
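+        (As with the other stub plugins, the handler object is expected to
+        provide ``open(image)`` and ``load(image)`` for reading, and
+        ``save(image, fp, filename)`` if saving is to be supported.)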
+ """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix): + return prefix[:8] == b"\x89HDF\r\n\x1a\n" + + +class HDF5StubImageFile(ImageFile.StubImageFile): + + format = "HDF5" + format_description = "HDF5" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not an HDF file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise OSError("HDF5 save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept) +Image.register_save(HDF5StubImageFile.format, _save) + +Image.register_extensions(HDF5StubImageFile.format, [".h5", ".hdf"]) diff --git a/minor_project/lib/python3.6/site-packages/PIL/IcnsImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/IcnsImagePlugin.py new file mode 100644 index 0000000..2a63d75 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/IcnsImagePlugin.py @@ -0,0 +1,383 @@ +# +# The Python Imaging Library. +# $Id$ +# +# macOS icns file decoder, based on icns.py by Bob Ippolito. +# +# history: +# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies. +# +# Copyright (c) 2004 by Bob Ippolito. +# Copyright (c) 2004 by Secret Labs. +# Copyright (c) 2004 by Fredrik Lundh. +# Copyright (c) 2014 by Alastair Houghton. +# +# See the README file for information on usage and redistribution. +# + +import io +import os +import shutil +import struct +import subprocess +import sys +import tempfile + +from PIL import Image, ImageFile, PngImagePlugin, features + +enable_jpeg2k = features.check_codec("jpg_2000") +if enable_jpeg2k: + from PIL import Jpeg2KImagePlugin + +HEADERSIZE = 8 + + +def nextheader(fobj): + return struct.unpack(">4sI", fobj.read(HEADERSIZE)) + + +def read_32t(fobj, start_length, size): + # The 128x128 icon seems to have an extra header for some reason. + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(4) + if sig != b"\x00\x00\x00\x00": + raise SyntaxError("Unknown signature, expecting 0x00000000") + return read_32(fobj, (start + 4, length - 4), size) + + +def read_32(fobj, start_length, size): + """ + Read a 32bit RGB icon resource. Seems to be either uncompressed or + an RLE packbits-like scheme. 
+ """ + (start, length) = start_length + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + if length == sizesq * 3: + # uncompressed ("RGBRGBGB") + indata = fobj.read(length) + im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1) + else: + # decode image + im = Image.new("RGB", pixel_size, None) + for band_ix in range(3): + data = [] + bytesleft = sizesq + while bytesleft > 0: + byte = fobj.read(1) + if not byte: + break + byte = byte[0] + if byte & 0x80: + blocksize = byte - 125 + byte = fobj.read(1) + for i in range(blocksize): + data.append(byte) + else: + blocksize = byte + 1 + data.append(fobj.read(blocksize)) + bytesleft -= blocksize + if bytesleft <= 0: + break + if bytesleft != 0: + raise SyntaxError(f"Error reading channel [{repr(bytesleft)} left]") + band = Image.frombuffer("L", pixel_size, b"".join(data), "raw", "L", 0, 1) + im.im.putband(band.im, band_ix) + return {"RGB": im} + + +def read_mk(fobj, start_length, size): + # Alpha masks seem to be uncompressed + start = start_length[0] + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + band = Image.frombuffer("L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1) + return {"A": band} + + +def read_png_or_jpeg2000(fobj, start_length, size): + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(12) + if sig[:8] == b"\x89PNG\x0d\x0a\x1a\x0a": + fobj.seek(start) + im = PngImagePlugin.PngImageFile(fobj) + return {"RGBA": im} + elif ( + sig[:4] == b"\xff\x4f\xff\x51" + or sig[:4] == b"\x0d\x0a\x87\x0a" + or sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a" + ): + if not enable_jpeg2k: + raise ValueError( + "Unsupported icon subimage format (rebuild PIL " + "with JPEG 2000 support to fix this)" + ) + # j2k, jpc or j2c + fobj.seek(start) + jp2kstream = fobj.read(length) + f = io.BytesIO(jp2kstream) + im = Jpeg2KImagePlugin.Jpeg2KImageFile(f) + if im.mode != "RGBA": + im = im.convert("RGBA") + return {"RGBA": im} + else: + raise ValueError("Unsupported icon subimage format") + + +class IcnsFile: + + SIZES = { + (512, 512, 2): [(b"ic10", read_png_or_jpeg2000)], + (512, 512, 1): [(b"ic09", read_png_or_jpeg2000)], + (256, 256, 2): [(b"ic14", read_png_or_jpeg2000)], + (256, 256, 1): [(b"ic08", read_png_or_jpeg2000)], + (128, 128, 2): [(b"ic13", read_png_or_jpeg2000)], + (128, 128, 1): [ + (b"ic07", read_png_or_jpeg2000), + (b"it32", read_32t), + (b"t8mk", read_mk), + ], + (64, 64, 1): [(b"icp6", read_png_or_jpeg2000)], + (32, 32, 2): [(b"ic12", read_png_or_jpeg2000)], + (48, 48, 1): [(b"ih32", read_32), (b"h8mk", read_mk)], + (32, 32, 1): [ + (b"icp5", read_png_or_jpeg2000), + (b"il32", read_32), + (b"l8mk", read_mk), + ], + (16, 16, 2): [(b"ic11", read_png_or_jpeg2000)], + (16, 16, 1): [ + (b"icp4", read_png_or_jpeg2000), + (b"is32", read_32), + (b"s8mk", read_mk), + ], + } + + def __init__(self, fobj): + """ + fobj is a file-like object as an icns resource + """ + # signature : (start, length) + self.dct = dct = {} + self.fobj = fobj + sig, filesize = nextheader(fobj) + if sig != b"icns": + raise SyntaxError("not an icns file") + i = HEADERSIZE + while i < filesize: + sig, blocksize = nextheader(fobj) + if blocksize <= 0: + raise SyntaxError("invalid block header") + i += HEADERSIZE + blocksize -= HEADERSIZE + dct[sig] = (i, blocksize) + fobj.seek(blocksize, io.SEEK_CUR) + i += blocksize + + def itersizes(self): + sizes = [] + for size, fmts in self.SIZES.items(): + for (fmt, reader) in 
fmts: + if fmt in self.dct: + sizes.append(size) + break + return sizes + + def bestsize(self): + sizes = self.itersizes() + if not sizes: + raise SyntaxError("No 32bit icon resources found") + return max(sizes) + + def dataforsize(self, size): + """ + Get an icon resource as {channel: array}. Note that + the arrays are bottom-up like windows bitmaps and will likely + need to be flipped or transposed in some way. + """ + dct = {} + for code, reader in self.SIZES[size]: + desc = self.dct.get(code) + if desc is not None: + dct.update(reader(self.fobj, desc, size)) + return dct + + def getimage(self, size=None): + if size is None: + size = self.bestsize() + if len(size) == 2: + size = (size[0], size[1], 1) + channels = self.dataforsize(size) + + im = channels.get("RGBA", None) + if im: + return im + + im = channels.get("RGB").copy() + try: + im.putalpha(channels["A"]) + except KeyError: + pass + return im + + +## +# Image plugin for Mac OS icons. + + +class IcnsImageFile(ImageFile.ImageFile): + """ + PIL image support for Mac OS .icns files. + Chooses the best resolution, but will possibly load + a different size image if you mutate the size attribute + before calling 'load'. + + The info dictionary has a key 'sizes' that is a list + of sizes that the icns file has. + """ + + format = "ICNS" + format_description = "Mac OS icns resource" + + def _open(self): + self.icns = IcnsFile(self.fp) + self.mode = "RGBA" + self.info["sizes"] = self.icns.itersizes() + self.best_size = self.icns.bestsize() + self.size = ( + self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2], + ) + + @property + def size(self): + return self._size + + @size.setter + def size(self, value): + info_size = value + if info_size not in self.info["sizes"] and len(info_size) == 2: + info_size = (info_size[0], info_size[1], 1) + if ( + info_size not in self.info["sizes"] + and len(info_size) == 3 + and info_size[2] == 1 + ): + simple_sizes = [ + (size[0] * size[2], size[1] * size[2]) for size in self.info["sizes"] + ] + if value in simple_sizes: + info_size = self.info["sizes"][simple_sizes.index(value)] + if info_size not in self.info["sizes"]: + raise ValueError("This is not one of the allowed sizes of this image") + self._size = value + + def load(self): + if len(self.size) == 3: + self.best_size = self.size + self.size = ( + self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2], + ) + + Image.Image.load(self) + if self.im and self.im.size == self.size: + # Already loaded + return + self.load_prepare() + # This is likely NOT the best way to do it, but whatever. + im = self.icns.getimage(self.best_size) + + # If this is a PNG or JPEG 2000, it won't be loaded yet + im.load() + + self.im = im.im + self.mode = im.mode + self.size = im.size + self.load_end() + + +def _save(im, fp, filename): + """ + Saves the image as a series of PNG files, + that are then converted to a .icns file + using the macOS command line utility 'iconutil'. + + macOS only. 
+ """ + if hasattr(fp, "flush"): + fp.flush() + + # create the temporary set of pngs + with tempfile.TemporaryDirectory(".iconset") as iconset: + provided_images = { + im.width: im for im in im.encoderinfo.get("append_images", []) + } + last_w = None + second_path = None + for w in [16, 32, 128, 256, 512]: + prefix = f"icon_{w}x{w}" + + first_path = os.path.join(iconset, prefix + ".png") + if last_w == w: + shutil.copyfile(second_path, first_path) + else: + im_w = provided_images.get(w, im.resize((w, w), Image.LANCZOS)) + im_w.save(first_path) + + second_path = os.path.join(iconset, prefix + "@2x.png") + im_w2 = provided_images.get(w * 2, im.resize((w * 2, w * 2), Image.LANCZOS)) + im_w2.save(second_path) + last_w = w * 2 + + # iconutil -c icns -o {} {} + + fp_only = not filename + if fp_only: + f, filename = tempfile.mkstemp(".icns") + os.close(f) + convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset] + convert_proc = subprocess.Popen( + convert_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL + ) + + convert_proc.stdout.close() + + retcode = convert_proc.wait() + + if retcode: + raise subprocess.CalledProcessError(retcode, convert_cmd) + + if fp_only: + with open(filename, "rb") as f: + fp.write(f.read()) + + +Image.register_open(IcnsImageFile.format, IcnsImageFile, lambda x: x[:4] == b"icns") +Image.register_extension(IcnsImageFile.format, ".icns") + +if sys.platform == "darwin": + Image.register_save(IcnsImageFile.format, _save) + + Image.register_mime(IcnsImageFile.format, "image/icns") + + +if __name__ == "__main__": + + if len(sys.argv) < 2: + print("Syntax: python IcnsImagePlugin.py [file]") + sys.exit() + + with open(sys.argv[1], "rb") as fp: + imf = IcnsImageFile(fp) + for size in imf.info["sizes"]: + imf.size = size + imf.save("out-%s-%s-%s.png" % size) + with Image.open(sys.argv[1]) as im: + im.save("out.png") + if sys.platform == "windows": + os.startfile("out.png") diff --git a/minor_project/lib/python3.6/site-packages/PIL/IcoImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/IcoImagePlugin.py new file mode 100644 index 0000000..e1bfa7a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/IcoImagePlugin.py @@ -0,0 +1,328 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Icon support for PIL +# +# History: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis +# . +# https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki +# +# Icon format references: +# * https://en.wikipedia.org/wiki/ICO_(file_format) +# * https://msdn.microsoft.com/en-us/library/ms997538.aspx + + +import struct +import warnings +from io import BytesIO +from math import ceil, log + +from . 
import BmpImagePlugin, Image, ImageFile, PngImagePlugin +from ._binary import i16le as i16 +from ._binary import i32le as i32 + +# +# -------------------------------------------------------------------- + +_MAGIC = b"\0\0\1\0" + + +def _save(im, fp, filename): + fp.write(_MAGIC) # (2+2) + sizes = im.encoderinfo.get( + "sizes", + [(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128), (256, 256)], + ) + width, height = im.size + sizes = filter( + lambda x: False + if (x[0] > width or x[1] > height or x[0] > 256 or x[1] > 256) + else True, + sizes, + ) + sizes = list(sizes) + fp.write(struct.pack("=8bpp) + "reserved": s[3], + "planes": i16(s, 4), + "bpp": i16(s, 6), + "size": i32(s, 8), + "offset": i32(s, 12), + } + + # See Wikipedia + for j in ("width", "height"): + if not icon_header[j]: + icon_header[j] = 256 + + # See Wikipedia notes about color depth. + # We need this just to differ images with equal sizes + icon_header["color_depth"] = ( + icon_header["bpp"] + or ( + icon_header["nb_color"] != 0 + and ceil(log(icon_header["nb_color"], 2)) + ) + or 256 + ) + + icon_header["dim"] = (icon_header["width"], icon_header["height"]) + icon_header["square"] = icon_header["width"] * icon_header["height"] + + self.entry.append(icon_header) + + self.entry = sorted(self.entry, key=lambda x: x["color_depth"]) + # ICO images are usually squares + # self.entry = sorted(self.entry, key=lambda x: x['width']) + self.entry = sorted(self.entry, key=lambda x: x["square"]) + self.entry.reverse() + + def sizes(self): + """ + Get a list of all available icon sizes and color depths. + """ + return {(h["width"], h["height"]) for h in self.entry} + + def getentryindex(self, size, bpp=False): + for (i, h) in enumerate(self.entry): + if size == h["dim"] and (bpp is False or bpp == h["color_depth"]): + return i + return 0 + + def getimage(self, size, bpp=False): + """ + Get an image from the icon + """ + return self.frame(self.getentryindex(size, bpp)) + + def frame(self, idx): + """ + Get an image from frame idx + """ + + header = self.entry[idx] + + self.buf.seek(header["offset"]) + data = self.buf.read(8) + self.buf.seek(header["offset"]) + + if data[:8] == PngImagePlugin._MAGIC: + # png frame + im = PngImagePlugin.PngImageFile(self.buf) + else: + # XOR + AND mask bmp frame + im = BmpImagePlugin.DibImageFile(self.buf) + Image._decompression_bomb_check(im.size) + + # change tile dimension to only encompass XOR image + im._size = (im.size[0], int(im.size[1] / 2)) + d, e, o, a = im.tile[0] + im.tile[0] = d, (0, 0) + im.size, o, a + + # figure out where AND mask image starts + mode = a[0] + bpp = 8 + for k, v in BmpImagePlugin.BIT2MODE.items(): + if mode == v[1]: + bpp = k + break + + if 32 == bpp: + # 32-bit color depth icon image allows semitransparent areas + # PIL's DIB format ignores transparency bits, recover them. + # The DIB is packed in BGRX byte order where X is the alpha + # channel. + + # Back up to start of bmp data + self.buf.seek(o) + # extract every 4th byte (eg. 3,7,11,15,...) 
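+                # (offset 3 of every BGRX quad, i.e. the alpha/X byte)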
+ alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4] + + # convert to an 8bpp grayscale image + mask = Image.frombuffer( + "L", # 8bpp + im.size, # (w, h) + alpha_bytes, # source chars + "raw", # raw decoder + ("L", 0, -1), # 8bpp inverted, unpadded, reversed + ) + else: + # get AND image from end of bitmap + w = im.size[0] + if (w % 32) > 0: + # bitmap row data is aligned to word boundaries + w += 32 - (im.size[0] % 32) + + # the total mask data is + # padded row size * height / bits per char + + and_mask_offset = o + int(im.size[0] * im.size[1] * (bpp / 8.0)) + total_bytes = int((w * im.size[1]) / 8) + + self.buf.seek(and_mask_offset) + mask_data = self.buf.read(total_bytes) + + # convert raw data to image + mask = Image.frombuffer( + "1", # 1 bpp + im.size, # (w, h) + mask_data, # source chars + "raw", # raw decoder + ("1;I", int(w / 8), -1), # 1bpp inverted, padded, reversed + ) + + # now we have two images, im is XOR image and mask is AND image + + # apply mask image as alpha channel + im = im.convert("RGBA") + im.putalpha(mask) + + return im + + +## +# Image plugin for Windows Icon files. + + +class IcoImageFile(ImageFile.ImageFile): + """ + PIL read-only image support for Microsoft Windows .ico files. + + By default the largest resolution image in the file will be loaded. This + can be changed by altering the 'size' attribute before calling 'load'. + + The info dictionary has a key 'sizes' that is a list of the sizes available + in the icon file. + + Handles classic, XP and Vista icon formats. + + When saving, PNG compression is used. Support for this was only added in + Windows Vista. + + This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis + . + https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki + """ + + format = "ICO" + format_description = "Windows Icon" + + def _open(self): + self.ico = IcoFile(self.fp) + self.info["sizes"] = self.ico.sizes() + self.size = self.ico.entry[0]["dim"] + self.load() + + @property + def size(self): + return self._size + + @size.setter + def size(self, value): + if value not in self.info["sizes"]: + raise ValueError("This is not one of the allowed sizes of this image") + self._size = value + + def load(self): + if self.im and self.im.size == self.size: + # Already loaded + return + im = self.ico.getimage(self.size) + # if tile is PNG, it won't really be loaded yet + im.load() + self.im = im.im + self.mode = im.mode + if im.size != self.size: + warnings.warn("Image was not the expected size") + + index = self.ico.getentryindex(self.size) + sizes = list(self.info["sizes"]) + sizes[index] = im.size + self.info["sizes"] = set(sizes) + + self.size = im.size + + def load_seek(self): + # Flag the ImageFile.Parser so that it + # just does all the decode at the end. + pass + + +# +# -------------------------------------------------------------------- + + +Image.register_open(IcoImageFile.format, IcoImageFile, _accept) +Image.register_save(IcoImageFile.format, _save) +Image.register_extension(IcoImageFile.format, ".ico") + +Image.register_mime(IcoImageFile.format, "image/x-icon") diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/ImImagePlugin.py new file mode 100644 index 0000000..1dfc808 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImImagePlugin.py @@ -0,0 +1,376 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IFUNC IM file handling for PIL +# +# history: +# 1995-09-01 fl Created. 
+# 1997-01-03 fl Save palette images +# 1997-01-08 fl Added sequence support +# 1997-01-23 fl Added P and RGB save support +# 1997-05-31 fl Read floating point images +# 1997-06-22 fl Save floating point images +# 1997-08-27 fl Read and save 1-bit images +# 1998-06-25 fl Added support for RGB+LUT images +# 1998-07-02 fl Added support for YCC images +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 1998-12-29 fl Added I;16 support +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# 2003-09-26 fl Added LA/PA support +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +import os +import re + +from . import Image, ImageFile, ImagePalette + +# -------------------------------------------------------------------- +# Standard tags + +COMMENT = "Comment" +DATE = "Date" +EQUIPMENT = "Digitalization equipment" +FRAMES = "File size (no of images)" +LUT = "Lut" +NAME = "Name" +SCALE = "Scale (x,y)" +SIZE = "Image size (x*y)" +MODE = "Image type" + +TAGS = { + COMMENT: 0, + DATE: 0, + EQUIPMENT: 0, + FRAMES: 0, + LUT: 0, + NAME: 0, + SCALE: 0, + SIZE: 0, + MODE: 0, +} + +OPEN = { + # ifunc93/p3cfunc formats + "0 1 image": ("1", "1"), + "L 1 image": ("1", "1"), + "Greyscale image": ("L", "L"), + "Grayscale image": ("L", "L"), + "RGB image": ("RGB", "RGB;L"), + "RLB image": ("RGB", "RLB"), + "RYB image": ("RGB", "RLB"), + "B1 image": ("1", "1"), + "B2 image": ("P", "P;2"), + "B4 image": ("P", "P;4"), + "X 24 image": ("RGB", "RGB"), + "L 32 S image": ("I", "I;32"), + "L 32 F image": ("F", "F;32"), + # old p3cfunc formats + "RGB3 image": ("RGB", "RGB;T"), + "RYB3 image": ("RGB", "RYB;T"), + # extensions + "LA image": ("LA", "LA;L"), + "PA image": ("LA", "PA;L"), + "RGBA image": ("RGBA", "RGBA;L"), + "RGBX image": ("RGBX", "RGBX;L"), + "CMYK image": ("CMYK", "CMYK;L"), + "YCC image": ("YCbCr", "YCbCr;L"), +} + +# ifunc95 extensions +for i in ["8", "8S", "16", "16S", "32", "32F"]: + OPEN[f"L {i} image"] = ("F", f"F;{i}") + OPEN[f"L*{i} image"] = ("F", f"F;{i}") +for i in ["16", "16L", "16B"]: + OPEN[f"L {i} image"] = (f"I;{i}", f"I;{i}") + OPEN[f"L*{i} image"] = (f"I;{i}", f"I;{i}") +for i in ["32S"]: + OPEN[f"L {i} image"] = ("I", f"I;{i}") + OPEN[f"L*{i} image"] = ("I", f"I;{i}") +for i in range(2, 33): + OPEN[f"L*{i} image"] = ("F", f"F;{i}") + + +# -------------------------------------------------------------------- +# Read IM directory + +split = re.compile(br"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$") + + +def number(s): + try: + return int(s) + except ValueError: + return float(s) + + +## +# Image plugin for the IFUNC IM file format. + + +class ImImageFile(ImageFile.ImageFile): + + format = "IM" + format_description = "IFUNC Image Memory" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # Quick rejection: if there's not an LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + n = 0 + + # Default values + self.info[MODE] = "L" + self.info[SIZE] = (512, 512) + self.info[FRAMES] = 1 + + self.rawmode = "L" + + while True: + + s = self.fp.read(1) + + # Some versions of IFUNC uses \n\r instead of \r\n... 
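+            # so tolerate a stray CR here and just read the next byte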
+ if s == b"\r": + continue + + if not s or s == b"\0" or s == b"\x1A": + break + + # FIXME: this may read whole file if not a text file + s = s + self.fp.readline() + + if len(s) > 100: + raise SyntaxError("not an IM file") + + if s[-2:] == b"\r\n": + s = s[:-2] + elif s[-1:] == b"\n": + s = s[:-1] + + try: + m = split.match(s) + except re.error as e: + raise SyntaxError("not an IM file") from e + + if m: + + k, v = m.group(1, 2) + + # Don't know if this is the correct encoding, + # but a decent guess (I guess) + k = k.decode("latin-1", "replace") + v = v.decode("latin-1", "replace") + + # Convert value as appropriate + if k in [FRAMES, SCALE, SIZE]: + v = v.replace("*", ",") + v = tuple(map(number, v.split(","))) + if len(v) == 1: + v = v[0] + elif k == MODE and v in OPEN: + v, self.rawmode = OPEN[v] + + # Add to dictionary. Note that COMMENT tags are + # combined into a list of strings. + if k == COMMENT: + if k in self.info: + self.info[k].append(v) + else: + self.info[k] = [v] + else: + self.info[k] = v + + if k in TAGS: + n += 1 + + else: + + raise SyntaxError( + "Syntax error in IM header: " + s.decode("ascii", "replace") + ) + + if not n: + raise SyntaxError("Not an IM file") + + # Basic attributes + self._size = self.info[SIZE] + self.mode = self.info[MODE] + + # Skip forward to start of image data + while s and s[0:1] != b"\x1A": + s = self.fp.read(1) + if not s: + raise SyntaxError("File truncated") + + if LUT in self.info: + # convert lookup table to palette or lut attribute + palette = self.fp.read(768) + greyscale = 1 # greyscale palette + linear = 1 # linear greyscale palette + for i in range(256): + if palette[i] == palette[i + 256] == palette[i + 512]: + if palette[i] != i: + linear = 0 + else: + greyscale = 0 + if self.mode in ["L", "LA", "P", "PA"]: + if greyscale: + if not linear: + self.lut = list(palette[:256]) + else: + if self.mode in ["L", "P"]: + self.mode = self.rawmode = "P" + elif self.mode in ["LA", "PA"]: + self.mode = "PA" + self.rawmode = "PA;L" + self.palette = ImagePalette.raw("RGB;L", palette) + elif self.mode == "RGB": + if not greyscale or not linear: + self.lut = list(palette) + + self.frame = 0 + + self.__offset = offs = self.fp.tell() + + self.__fp = self.fp # FIXME: hack + + if self.rawmode[:2] == "F;": + + # ifunc95 formats + try: + # use bit decoder (if necessary) + bits = int(self.rawmode[2:]) + if bits not in [8, 16, 32]: + self.tile = [("bit", (0, 0) + self.size, offs, (bits, 8, 3, 0, -1))] + return + except ValueError: + pass + + if self.rawmode in ["RGB;T", "RYB;T"]: + # Old LabEye/3PC files. 
Would be very surprised if anyone + # ever stumbled upon such a file ;-) + size = self.size[0] * self.size[1] + self.tile = [ + ("raw", (0, 0) + self.size, offs, ("G", 0, -1)), + ("raw", (0, 0) + self.size, offs + size, ("R", 0, -1)), + ("raw", (0, 0) + self.size, offs + 2 * size, ("B", 0, -1)), + ] + else: + # LabEye/IFUNC files + self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))] + + @property + def n_frames(self): + return self.info[FRAMES] + + @property + def is_animated(self): + return self.info[FRAMES] > 1 + + def seek(self, frame): + if not self._seek_check(frame): + return + + self.frame = frame + + if self.mode == "1": + bits = 1 + else: + bits = 8 * len(self.mode) + + size = ((self.size[0] * bits + 7) // 8) * self.size[1] + offs = self.__offset + frame * size + + self.fp = self.__fp + + self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))] + + def tell(self): + return self.frame + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + +# +# -------------------------------------------------------------------- +# Save IM files + + +SAVE = { + # mode: (im type, raw mode) + "1": ("0 1", "1"), + "L": ("Greyscale", "L"), + "LA": ("LA", "LA;L"), + "P": ("Greyscale", "P"), + "PA": ("LA", "PA;L"), + "I": ("L 32S", "I;32S"), + "I;16": ("L 16", "I;16"), + "I;16L": ("L 16L", "I;16L"), + "I;16B": ("L 16B", "I;16B"), + "F": ("L 32F", "F;32F"), + "RGB": ("RGB", "RGB;L"), + "RGBA": ("RGBA", "RGBA;L"), + "RGBX": ("RGBX", "RGBX;L"), + "CMYK": ("CMYK", "CMYK;L"), + "YCbCr": ("YCC", "YCbCr;L"), +} + + +def _save(im, fp, filename): + + try: + image_type, rawmode = SAVE[im.mode] + except KeyError as e: + raise ValueError(f"Cannot save {im.mode} images as IM") from e + + frames = im.encoderinfo.get("frames", 1) + + fp.write(f"Image type: {image_type} image\r\n".encode("ascii")) + if filename: + # Each line must be 100 characters or less, + # or: SyntaxError("not an IM file") + # 8 characters are used for "Name: " and "\r\n" + # Keep just the filename, ditch the potentially overlong path + name, ext = os.path.splitext(os.path.basename(filename)) + name = "".join([name[: 92 - len(ext)], ext]) + + fp.write(f"Name: {name}\r\n".encode("ascii")) + fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode("ascii")) + fp.write(f"File size (no of images): {frames}\r\n".encode("ascii")) + if im.mode in ["P", "PA"]: + fp.write(b"Lut: 1\r\n") + fp.write(b"\000" * (511 - fp.tell()) + b"\032") + if im.mode in ["P", "PA"]: + fp.write(im.im.getpalette("RGB", "RGB;L")) # 768 bytes + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, -1))]) + + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(ImImageFile.format, ImImageFile) +Image.register_save(ImImageFile.format, _save) + +Image.register_extension(ImImageFile.format, ".im") diff --git a/minor_project/lib/python3.6/site-packages/PIL/Image.py b/minor_project/lib/python3.6/site-packages/PIL/Image.py new file mode 100644 index 0000000..e2540a2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/Image.py @@ -0,0 +1,3491 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# the Image class wrapper +# +# partial release history: +# 1995-09-09 fl Created +# 1996-03-11 fl PIL release 0.0 (proof of concept) +# 1996-04-30 fl PIL release 0.1b1 +# 1999-07-28 fl PIL release 1.0 final +# 2000-06-07 fl PIL release 1.1 +# 2000-10-20 fl PIL release 1.1.1 +# 2001-05-07 fl PIL release 1.1.2 +# 2002-03-15 fl PIL release 1.1.3 +# 2003-05-10 fl PIL release 1.1.4 +# 2005-03-28 fl PIL release 1.1.5 +# 2006-12-02 fl PIL release 1.1.6 +# 2009-11-15 fl PIL release 1.1.7 +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import atexit +import builtins +import io +import logging +import math +import numbers +import os +import struct +import sys +import tempfile +import warnings +import xml.etree.ElementTree +from collections.abc import Callable, MutableMapping +from pathlib import Path + +# VERSION was removed in Pillow 6.0.0. +# PILLOW_VERSION is deprecated and will be removed in a future release. +# Use __version__ instead. +from . import ( + ImageMode, + TiffTags, + UnidentifiedImageError, + __version__, + _plugins, + _raise_version_warning, +) +from ._binary import i32le +from ._util import deferred_error, isPath + +if sys.version_info >= (3, 7): + + def __getattr__(name): + if name == "PILLOW_VERSION": + _raise_version_warning() + return __version__ + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +else: + + from . import PILLOW_VERSION + + # Silence warning + assert PILLOW_VERSION + + +logger = logging.getLogger(__name__) + + +class DecompressionBombWarning(RuntimeWarning): + pass + + +class DecompressionBombError(Exception): + pass + + +# Limit to around a quarter gigabyte for a 24-bit (3 bpp) image +MAX_IMAGE_PIXELS = int(1024 * 1024 * 1024 // 4 // 3) + + +try: + # If the _imaging C module is not present, Pillow will not load. + # Note that other modules should not refer to _imaging directly; + # import Image and use the Image.core variable instead. + # Also note that Image.core is not a publicly documented interface, + # and should be considered private and subject to change. + from . import _imaging as core + + if __version__ != getattr(core, "PILLOW_VERSION", None): + raise ImportError( + "The _imaging extension was built for another version of Pillow or PIL:\n" + f"Core version: {getattr(core, 'PILLOW_VERSION', None)}\n" + f"Pillow version: {__version__}" + ) + +except ImportError as v: + core = deferred_error(ImportError("The _imaging C module is not installed.")) + # Explanations for ways that we know we might have an import error + if str(v).startswith("Module use of python"): + # The _imaging C module is present, but not compiled for + # the right version (windows only). Print a warning, if + # possible. + warnings.warn( + "The _imaging extension was built for another version of Python.", + RuntimeWarning, + ) + elif str(v).startswith("The _imaging extension"): + warnings.warn(str(v), RuntimeWarning) + # Fail here anyway. Don't let people run with a mostly broken Pillow. + # see docs/porting.rst + raise + + +# works everywhere, win for pypy, not cpython +USE_CFFI_ACCESS = hasattr(sys, "pypy_version_info") +try: + import cffi +except ImportError: + cffi = None + + +def isImageType(t): + """ + Checks if an object is an image object. + + .. warning:: + + This function is for internal use only. 
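# Illustrative sketch of the decompression-bomb guard defined above: the
# default MAX_IMAGE_PIXELS works out to roughly 89.5 million pixels; opening
# a larger image warns, and at twice the limit DecompressionBombError is
# raised. Raising or disabling the limit is a deliberate opt-in.
from PIL import Image

print(Image.MAX_IMAGE_PIXELS)         # 1024 * 1024 * 1024 // 4 // 3 == 89478485
Image.MAX_IMAGE_PIXELS = 200_000_000  # trust larger images from a known source
# Image.MAX_IMAGE_PIXELS = None       # disables the check entirely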
+
+ :param t: object to check if it's an image
+ :returns: True if the object is an image
+ """
+ return hasattr(t, "im")
+
+
+#
+# Constants
+
+NONE = 0
+
+# transpose
+FLIP_LEFT_RIGHT = 0
+FLIP_TOP_BOTTOM = 1
+ROTATE_90 = 2
+ROTATE_180 = 3
+ROTATE_270 = 4
+TRANSPOSE = 5
+TRANSVERSE = 6
+
+# transforms (also defined in Imaging.h)
+AFFINE = 0
+EXTENT = 1
+PERSPECTIVE = 2
+QUAD = 3
+MESH = 4
+
+# resampling filters (also defined in Imaging.h)
+NEAREST = NONE = 0
+BOX = 4
+BILINEAR = LINEAR = 2
+HAMMING = 5
+BICUBIC = CUBIC = 3
+LANCZOS = ANTIALIAS = 1
+
+_filters_support = {BOX: 0.5, BILINEAR: 1.0, HAMMING: 1.0, BICUBIC: 2.0, LANCZOS: 3.0}
+
+
+# dithers
+NEAREST = NONE = 0
+ORDERED = 1 # Not yet implemented
+RASTERIZE = 2 # Not yet implemented
+FLOYDSTEINBERG = 3 # default
+
+# palettes/quantizers
+WEB = 0
+ADAPTIVE = 1
+
+MEDIANCUT = 0
+MAXCOVERAGE = 1
+FASTOCTREE = 2
+LIBIMAGEQUANT = 3
+
+# categories
+NORMAL = 0
+SEQUENCE = 1
+CONTAINER = 2
+
+if hasattr(core, "DEFAULT_STRATEGY"):
+ DEFAULT_STRATEGY = core.DEFAULT_STRATEGY
+ FILTERED = core.FILTERED
+ HUFFMAN_ONLY = core.HUFFMAN_ONLY
+ RLE = core.RLE
+ FIXED = core.FIXED
+
+
+# --------------------------------------------------------------------
+# Registries
+
+ID = []
+OPEN = {}
+MIME = {}
+SAVE = {}
+SAVE_ALL = {}
+EXTENSION = {}
+DECODERS = {}
+ENCODERS = {}
+
+# --------------------------------------------------------------------
+# Modes supported by this version
+
+_MODEINFO = {
+ # NOTE: this table will be removed in future versions. use
+ # getmode* functions or ImageMode descriptors instead.
+ # official modes
+ "1": ("L", "L", ("1",)),
+ "L": ("L", "L", ("L",)),
+ "I": ("L", "I", ("I",)),
+ "F": ("L", "F", ("F",)),
+ "P": ("P", "L", ("P",)),
+ "RGB": ("RGB", "L", ("R", "G", "B")),
+ "RGBX": ("RGB", "L", ("R", "G", "B", "X")),
+ "RGBA": ("RGB", "L", ("R", "G", "B", "A")),
+ "CMYK": ("RGB", "L", ("C", "M", "Y", "K")),
+ "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr")),
+ "LAB": ("RGB", "L", ("L", "A", "B")),
+ "HSV": ("RGB", "L", ("H", "S", "V")),
+ # Experimental modes include I;16, I;16L, I;16B, RGBa, BGR;15, and
+ # BGR;24. Use these modes only if you know exactly what you're
+ # doing...
+}
+
+if sys.byteorder == "little":
+ _ENDIAN = "<"
+else:
+ _ENDIAN = ">"
+
+_MODE_CONV = {
+ # official modes
+ "1": ("|b1", None), # Bits need to be extended to bytes
+ "L": ("|u1", None),
+ "LA": ("|u1", 2),
+ "I": (_ENDIAN + "i4", None),
+ "F": (_ENDIAN + "f4", None),
+ "P": ("|u1", None),
+ "RGB": ("|u1", 3),
+ "RGBX": ("|u1", 4),
+ "RGBA": ("|u1", 4),
+ "CMYK": ("|u1", 4),
+ "YCbCr": ("|u1", 3),
+ "LAB": ("|u1", 3), # UNDONE - unsigned |u1i1i1
+ "HSV": ("|u1", 3),
+ # I;16 == I;16L, and I;32 == I;32L
+ "I;16": ("<u2", None),
+ "I;16B": (">u2", None),
+ "I;16L": ("<u2", None),
+ "I;16S": ("<i2", None),
+ "I;16BS": (">i2", None),
+ "I;16LS": ("<i2", None),
+ "I;32": ("<u4", None),
+ "I;32B": (">u4", None),
+ "I;32L": ("<u4", None),
+ "I;32S": ("<i4", None),
+ "I;32BS": (">i4", None),
+ "I;32LS": ("<i4", None),
+}
+
+
+def _conv_type_shape(im):
+ typ, extra = _MODE_CONV[im.mode]
+ if extra is None:
+ return (im.size[1], im.size[0]), typ
+ else:
+ return (im.size[1], im.size[0], extra), typ
+
+
+MODES = sorted(_MODEINFO)
+
+# raw modes that may be memory mapped. NOTE: if you change this, you
+# may have to modify the stride calculation in map.c too!
+_MAPMODES = ("L", "P", "RGBX", "RGBA", "CMYK", "I;16", "I;16L", "I;16B")
+
+
+def getmodebase(mode):
+ """
+ Gets the "base" mode for given mode. This function returns "L" for
+ images that contain grayscale data, and "RGB" for images that
+ contain color data.
+
+ :param mode: Input mode.
+ :returns: "L" or "RGB".
+ :exception KeyError: If the input mode was not a standard mode.
+ """
+ return ImageMode.getmode(mode).basemode
+
+
+def getmodetype(mode):
+ """
+ Gets the storage type mode. Given a mode, this function returns a
+ single-layer mode suitable for storing individual bands.
+
+ :param mode: Input mode.
+ :returns: "L", "I", or "F".
+ :exception KeyError: If the input mode was not a standard mode.
+ """
+ return ImageMode.getmode(mode).basetype
+
+
+def getmodebandnames(mode):
+ """
+ Gets a list of individual band names. Given a mode, this function
+ returns a tuple containing the names of individual bands.
+
+ :param mode: Input mode.
+ :returns: A tuple containing band names. The length of the tuple
+ gives the number of bands in an image of the given mode.
+ :exception KeyError: If the input mode was not a standard mode.
+ """
+ return ImageMode.getmode(mode).bands
+
+
+def getmodebands(mode):
+ """
+ Gets the number of individual bands for this mode.
+
+ :param mode: Input mode.
+ :returns: The number of bands in this mode.
+ :exception KeyError: If the input mode was not a standard mode.
+ """
+ return len(ImageMode.getmode(mode).bands)
+
+
+# --------------------------------------------------------------------
+# Helpers
+
+_initialized = 0
+
+
+def preinit():
+ """Explicitly load standard file format drivers."""
+
+ global _initialized
+ if _initialized >= 1:
+ return
+
+ try:
+ from . import BmpImagePlugin
+
+ assert BmpImagePlugin
+ except ImportError:
+ pass
+ try:
+ from . import GifImagePlugin
+
+ assert GifImagePlugin
+ except ImportError:
+ pass
+ try:
+ from . import JpegImagePlugin
+
+ assert JpegImagePlugin
+ except ImportError:
+ pass
+ try:
+ from . import PpmImagePlugin
+
+ assert PpmImagePlugin
+ except ImportError:
+ pass
+ try:
+ from . import PngImagePlugin
+
+ assert PngImagePlugin
+ except ImportError:
+ pass
+ # try:
+ # import TiffImagePlugin
+ # assert TiffImagePlugin
+ # except ImportError:
+ # pass
+
+ _initialized = 1
+
+
+def init():
+ """
+ Explicitly initializes the Python Imaging Library. This function
+ loads all available file format drivers.
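# Rough sketch of how the registries above are used (illustrative): init()
# imports the modules listed in PIL._plugins, and each plugin calls
# register_open()/register_save()/register_extension() to populate OPEN,
# SAVE and EXTENSION, after which lookups are plain dict accesses.
from PIL import Image

Image.init()
print(Image.EXTENSION[".png"])        # "PNG", assuming the PNG plugin imported cleanly
print("JPEG" in Image.OPEN)           # True on a standard install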
+ """ + + global _initialized + if _initialized >= 2: + return 0 + + for plugin in _plugins: + try: + logger.debug("Importing %s", plugin) + __import__(f"PIL.{plugin}", globals(), locals(), []) + except ImportError as e: + logger.debug("Image: failed to import %s: %s", plugin, e) + + if OPEN or SAVE: + _initialized = 2 + return 1 + + +# -------------------------------------------------------------------- +# Codec factories (used by tobytes/frombytes and ImageFile.load) + + +def _getdecoder(mode, decoder_name, args, extra=()): + + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + decoder = DECODERS[decoder_name] + except KeyError: + pass + else: + return decoder(mode, *args + extra) + + try: + # get decoder + decoder = getattr(core, decoder_name + "_decoder") + except AttributeError as e: + raise OSError(f"decoder {decoder_name} not available") from e + return decoder(mode, *args + extra) + + +def _getencoder(mode, encoder_name, args, extra=()): + + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + encoder = ENCODERS[encoder_name] + except KeyError: + pass + else: + return encoder(mode, *args + extra) + + try: + # get encoder + encoder = getattr(core, encoder_name + "_encoder") + except AttributeError as e: + raise OSError(f"encoder {encoder_name} not available") from e + return encoder(mode, *args + extra) + + +# -------------------------------------------------------------------- +# Simple expression analyzer + + +def coerce_e(value): + return value if isinstance(value, _E) else _E(value) + + +class _E: + def __init__(self, data): + self.data = data + + def __add__(self, other): + return _E((self.data, "__add__", coerce_e(other).data)) + + def __mul__(self, other): + return _E((self.data, "__mul__", coerce_e(other).data)) + + +def _getscaleoffset(expr): + stub = ["stub"] + data = expr(_E(stub)).data + try: + (a, b, c) = data # simplified syntax + if a is stub and b == "__mul__" and isinstance(c, numbers.Number): + return c, 0.0 + if a is stub and b == "__add__" and isinstance(c, numbers.Number): + return 1.0, c + except TypeError: + pass + try: + ((a, b, c), d, e) = data # full syntax + if ( + a is stub + and b == "__mul__" + and isinstance(c, numbers.Number) + and d == "__add__" + and isinstance(e, numbers.Number) + ): + return c, e + except TypeError: + pass + raise ValueError("illegal expression") + + +# -------------------------------------------------------------------- +# Implementation wrapper + + +class Image: + """ + This class represents an image object. To create + :py:class:`~PIL.Image.Image` objects, use the appropriate factory + functions. There's hardly ever any reason to call the Image constructor + directly. + + * :py:func:`~PIL.Image.open` + * :py:func:`~PIL.Image.new` + * :py:func:`~PIL.Image.frombytes` + """ + + format = None + format_description = None + _close_exclusive_fp_after_loading = True + + def __init__(self): + # FIXME: take "new" parameters / other image? + # FIXME: turn mode and size into delegating properties? 
+ self.im = None + self.mode = "" + self._size = (0, 0) + self.palette = None + self.info = {} + self.category = NORMAL + self.readonly = 0 + self.pyaccess = None + self._exif = None + + @property + def width(self): + return self.size[0] + + @property + def height(self): + return self.size[1] + + @property + def size(self): + return self._size + + def _new(self, im): + new = Image() + new.im = im + new.mode = im.mode + new._size = im.size + if im.mode in ("P", "PA"): + if self.palette: + new.palette = self.palette.copy() + else: + from . import ImagePalette + + new.palette = ImagePalette.ImagePalette() + new.info = self.info.copy() + return new + + # Context manager support + def __enter__(self): + return self + + def __exit__(self, *args): + if hasattr(self, "fp") and getattr(self, "_exclusive_fp", False): + if hasattr(self, "_close__fp"): + self._close__fp() + if self.fp: + self.fp.close() + self.fp = None + + def close(self): + """ + Closes the file pointer, if possible. + + This operation will destroy the image core and release its memory. + The image data will be unusable afterward. + + This function is only required to close images that have not + had their file read and closed by the + :py:meth:`~PIL.Image.Image.load` method. See + :ref:`file-handling` for more information. + """ + try: + if hasattr(self, "_close__fp"): + self._close__fp() + if self.fp: + self.fp.close() + self.fp = None + except Exception as msg: + logger.debug("Error closing: %s", msg) + + if getattr(self, "map", None): + self.map = None + + # Instead of simply setting to None, we're setting up a + # deferred error that will better explain that the core image + # object is gone. + self.im = deferred_error(ValueError("Operation on closed image")) + + def _copy(self): + self.load() + self.im = self.im.copy() + self.pyaccess = None + self.readonly = 0 + + def _ensure_mutable(self): + if self.readonly: + self._copy() + else: + self.load() + + def _dump(self, file=None, format=None, **options): + suffix = "" + if format: + suffix = "." 
+ format + + if not file: + f, filename = tempfile.mkstemp(suffix) + os.close(f) + else: + filename = file + if not filename.endswith(suffix): + filename = filename + suffix + + self.load() + + if not format or format == "PPM": + self.im.save_ppm(filename) + else: + self.save(filename, format, **options) + + return filename + + def __eq__(self, other): + return ( + self.__class__ is other.__class__ + and self.mode == other.mode + and self.size == other.size + and self.info == other.info + and self.category == other.category + and self.readonly == other.readonly + and self.getpalette() == other.getpalette() + and self.tobytes() == other.tobytes() + ) + + def __repr__(self): + return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % ( + self.__class__.__module__, + self.__class__.__name__, + self.mode, + self.size[0], + self.size[1], + id(self), + ) + + def _repr_png_(self): + """iPython display hook support + + :returns: png version of the image as bytes + """ + b = io.BytesIO() + try: + self.save(b, "PNG") + except Exception as e: + raise ValueError("Could not save to PNG for display") from e + return b.getvalue() + + @property + def __array_interface__(self): + # numpy array interface support + new = {} + shape, typestr = _conv_type_shape(self) + new["shape"] = shape + new["typestr"] = typestr + new["version"] = 3 + if self.mode == "1": + # Binary images need to be extended from bits to bytes + # See: https://github.com/python-pillow/Pillow/issues/350 + new["data"] = self.tobytes("raw", "L") + else: + new["data"] = self.tobytes() + return new + + def __getstate__(self): + return [self.info, self.mode, self.size, self.getpalette(), self.tobytes()] + + def __setstate__(self, state): + Image.__init__(self) + self.tile = [] + info, mode, size, palette, data = state + self.info = info + self.mode = mode + self._size = size + self.im = core.new(mode, size) + if mode in ("L", "LA", "P", "PA") and palette: + self.putpalette(palette) + self.frombytes(data) + + def tobytes(self, encoder_name="raw", *args): + """ + Return image as a bytes object. + + .. warning:: + + This method returns the raw image data from the internal + storage. For compressed image data (e.g. PNG, JPEG) use + :meth:`~.save`, with a BytesIO parameter for in-memory + data. + + :param encoder_name: What encoder to use. The default is to + use the standard "raw" encoder. + :param args: Extra arguments to the encoder. + :returns: A :py:class:`bytes` object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if encoder_name == "raw" and args == (): + args = self.mode + + self.load() + + # unpack data + e = _getencoder(self.mode, encoder_name, args) + e.setimage(self.im) + + bufsize = max(65536, self.size[0] * 4) # see RawEncode.c + + data = [] + while True: + l, s, d = e.encode(bufsize) + data.append(d) + if s: + break + if s < 0: + raise RuntimeError(f"encoder error {s} in tobytes") + + return b"".join(data) + + def tobitmap(self, name="image"): + """ + Returns the image converted to an X11 bitmap. + + .. note:: This method only works for mode "1" images. + + :param name: The name prefix to use for the bitmap variables. + :returns: A string containing an X11 bitmap. 
+ :raises ValueError: If the mode is not "1" + """ + + self.load() + if self.mode != "1": + raise ValueError("not a bitmap") + data = self.tobytes("xbm") + return b"".join( + [ + f"#define {name}_width {self.size[0]}\n".encode("ascii"), + f"#define {name}_height {self.size[1]}\n".encode("ascii"), + f"static char {name}_bits[] = {{\n".encode("ascii"), + data, + b"};", + ] + ) + + def frombytes(self, data, decoder_name="raw", *args): + """ + Loads this image with pixel data from a bytes object. + + This method is similar to the :py:func:`~PIL.Image.frombytes` function, + but loads data into this image instead of creating a new image object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + # default format + if decoder_name == "raw" and args == (): + args = self.mode + + # unpack data + d = _getdecoder(self.mode, decoder_name, args) + d.setimage(self.im) + s = d.decode(data) + + if s[0] >= 0: + raise ValueError("not enough image data") + if s[1] != 0: + raise ValueError("cannot decode image data") + + def load(self): + """ + Allocates storage for the image and loads the pixel data. In + normal cases, you don't need to call this method, since the + Image class automatically loads an opened image when it is + accessed for the first time. + + If the file associated with the image was opened by Pillow, then this + method will close it. The exception to this is if the image has + multiple frames, in which case the file will be left open for seek + operations. See :ref:`file-handling` for more information. + + :returns: An image access object. + :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess` + """ + if self.im and self.palette and self.palette.dirty: + # realize palette + mode, arr = self.palette.getdata() + if mode == "RGBA": + mode = "RGB" + self.info["transparency"] = arr[3::4] + arr = bytes( + value for (index, value) in enumerate(arr) if index % 4 != 3 + ) + self.im.putpalette(mode, arr) + self.palette.dirty = 0 + self.palette.rawmode = None + if "transparency" in self.info: + if isinstance(self.info["transparency"], int): + self.im.putpalettealpha(self.info["transparency"], 0) + else: + self.im.putpalettealphas(self.info["transparency"]) + self.palette.mode = "RGBA" + else: + self.palette.mode = "RGB" + + if self.im: + if cffi and USE_CFFI_ACCESS: + if self.pyaccess: + return self.pyaccess + from . import PyAccess + + self.pyaccess = PyAccess.new(self, self.readonly) + if self.pyaccess: + return self.pyaccess + return self.im.pixel_access(self.readonly) + + def verify(self): + """ + Verifies the contents of a file. For data read from a file, this + method attempts to determine if the file is broken, without + actually decoding the image data. If this method finds any + problems, it raises suitable exceptions. If you need to load + the image after using this method, you must reopen the image + file. + """ + pass + + def convert(self, mode=None, matrix=None, dither=None, palette=WEB, colors=256): + """ + Returns a converted copy of this image. For the "P" mode, this + method translates pixels through the palette. If mode is + omitted, a mode is chosen so that all information in the image + and the palette can be represented without a palette. + + The current version supports all possible conversions between + "L", "RGB" and "CMYK." The ``matrix`` argument only supports "L" + and "RGB". 
+ + When translating a color image to greyscale (mode "L"), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + + The default method of converting a greyscale ("L") or "RGB" + image into a bilevel (mode "1") image uses Floyd-Steinberg + dither to approximate the original image luminosity levels. If + dither is :data:`NONE`, all values larger than 128 are set to 255 (white), + all other values to 0 (black). To use other thresholds, use the + :py:meth:`~PIL.Image.Image.point` method. + + When converting from "RGBA" to "P" without a ``matrix`` argument, + this passes the operation to :py:meth:`~PIL.Image.Image.quantize`, + and ``dither`` and ``palette`` are ignored. + + :param mode: The requested mode. See: :ref:`concept-modes`. + :param matrix: An optional conversion matrix. If given, this + should be 4- or 12-tuple containing floating point values. + :param dither: Dithering method, used when converting from + mode "RGB" to "P" or from "RGB" or "L" to "1". + Available methods are :data:`NONE` or :data:`FLOYDSTEINBERG` (default). + Note that this is not used when ``matrix`` is supplied. + :param palette: Palette to use when converting from mode "RGB" + to "P". Available palettes are :data:`WEB` or :data:`ADAPTIVE`. + :param colors: Number of colors to use for the :data:`ADAPTIVE` palette. + Defaults to 256. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + + if not mode and self.mode == "P": + # determine default mode + if self.palette: + mode = self.palette.mode + else: + mode = "RGB" + if not mode or (mode == self.mode and not matrix): + return self.copy() + + has_transparency = self.info.get("transparency") is not None + if matrix: + # matrix conversion + if mode not in ("L", "RGB"): + raise ValueError("illegal conversion") + im = self.im.convert_matrix(mode, matrix) + new = self._new(im) + if has_transparency and self.im.bands == 3: + transparency = new.info["transparency"] + + def convert_transparency(m, v): + v = m[0] * v[0] + m[1] * v[1] + m[2] * v[2] + m[3] * 0.5 + return max(0, min(255, int(v))) + + if mode == "L": + transparency = convert_transparency(matrix, transparency) + elif len(mode) == 3: + transparency = tuple( + [ + convert_transparency( + matrix[i * 4 : i * 4 + 4], transparency + ) + for i in range(0, len(transparency)) + ] + ) + new.info["transparency"] = transparency + return new + + if mode == "P" and self.mode == "RGBA": + return self.quantize(colors) + + trns = None + delete_trns = False + # transparency handling + if has_transparency: + if self.mode in ("1", "L", "I", "RGB") and mode == "RGBA": + # Use transparent conversion to promote from transparent + # color to an alpha channel. + new_im = self._new( + self.im.convert_transparent(mode, self.info["transparency"]) + ) + del new_im.info["transparency"] + return new_im + elif self.mode in ("L", "RGB", "P") and mode in ("L", "RGB", "P"): + t = self.info["transparency"] + if isinstance(t, bytes): + # Dragons. This can't be represented by a single color + warnings.warn( + "Palette images with Transparency expressed in bytes should be " + "converted to RGBA images" + ) + delete_trns = True + else: + # get the new transparency color. 
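# Conversion sketch for the rules documented above (illustrative): "L" uses
# the ITU-R 601-2 luma weights, "1" applies Floyd-Steinberg unless dither is
# NONE (values above 128 become white), and ADAPTIVE builds a palette from
# the image itself.
from PIL import Image

rgb = Image.new("RGB", (8, 8), (200, 50, 50))
grey = rgb.convert("L")                                    # luma transform
bilevel = grey.convert("1", dither=Image.NONE)             # plain threshold, no dithering
paletted = rgb.convert("P", palette=Image.ADAPTIVE, colors=64)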
+ # use existing conversions + trns_im = Image()._new(core.new(self.mode, (1, 1))) + if self.mode == "P": + trns_im.putpalette(self.palette) + if isinstance(t, tuple): + try: + t = trns_im.palette.getcolor(t) + except Exception as e: + raise ValueError( + "Couldn't allocate a palette color for transparency" + ) from e + trns_im.putpixel((0, 0), t) + + if mode in ("L", "RGB"): + trns_im = trns_im.convert(mode) + else: + # can't just retrieve the palette number, got to do it + # after quantization. + trns_im = trns_im.convert("RGB") + trns = trns_im.getpixel((0, 0)) + + elif self.mode == "P" and mode == "RGBA": + t = self.info["transparency"] + delete_trns = True + + if isinstance(t, bytes): + self.im.putpalettealphas(t) + elif isinstance(t, int): + self.im.putpalettealpha(t, 0) + else: + raise ValueError("Transparency for P mode should be bytes or int") + + if mode == "P" and palette == ADAPTIVE: + im = self.im.quantize(colors) + new = self._new(im) + from . import ImagePalette + + new.palette = ImagePalette.raw("RGB", new.im.getpalette("RGB")) + if delete_trns: + # This could possibly happen if we requantize to fewer colors. + # The transparency would be totally off in that case. + del new.info["transparency"] + if trns is not None: + try: + new.info["transparency"] = new.palette.getcolor(trns) + except Exception: + # if we can't make a transparent color, don't leave the old + # transparency hanging around to mess us up. + del new.info["transparency"] + warnings.warn("Couldn't allocate palette entry for transparency") + return new + + # colorspace conversion + if dither is None: + dither = FLOYDSTEINBERG + + try: + im = self.im.convert(mode, dither) + except ValueError: + try: + # normalize source image and try again + im = self.im.convert(getmodebase(self.mode)) + im = im.convert(mode, dither) + except KeyError as e: + raise ValueError("illegal conversion") from e + + new_im = self._new(im) + if delete_trns: + # crash fail if we leave a bytes transparency in an rgb/l mode. + del new_im.info["transparency"] + if trns is not None: + if new_im.mode == "P": + try: + new_im.info["transparency"] = new_im.palette.getcolor(trns) + except Exception: + del new_im.info["transparency"] + warnings.warn("Couldn't allocate palette entry for transparency") + else: + new_im.info["transparency"] = trns + return new_im + + def quantize(self, colors=256, method=None, kmeans=0, palette=None, dither=1): + """ + Convert the image to 'P' mode with the specified number + of colors. + + :param colors: The desired number of colors, <= 256 + :param method: :data:`MEDIANCUT` (median cut), + :data:`MAXCOVERAGE` (maximum coverage), + :data:`FASTOCTREE` (fast octree), + :data:`LIBIMAGEQUANT` (libimagequant; check support using + :py:func:`PIL.features.check_feature` + with ``feature="libimagequant"``). + :param kmeans: Integer + :param palette: Quantize to the palette of given + :py:class:`PIL.Image.Image`. + :param dither: Dithering method, used when converting from + mode "RGB" to "P" or from "RGB" or "L" to "1". + Available methods are :data:`NONE` or :data:`FLOYDSTEINBERG` (default). + Default: 1 (legacy setting) + :returns: A new image + + """ + + self.load() + + if method is None: + # defaults: + method = 0 + if self.mode == "RGBA": + method = 2 + + if self.mode == "RGBA" and method not in (2, 3): + # Caller specified an invalid mode. 
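# Quantize sketch per the constraints spelled out above (illustrative):
# RGBA input must use FASTOCTREE or LIBIMAGEQUANT, while RGB/L input may
# also use MEDIANCUT or MAXCOVERAGE.
from PIL import Image

rgba = Image.new("RGBA", (16, 16), (255, 0, 0, 128))
pal = rgba.quantize(colors=64, method=Image.FASTOCTREE)
print(pal.mode)                       # "P"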
+ raise ValueError( + "Fast Octree (method == 2) and libimagequant (method == 3) " + "are the only valid methods for quantizing RGBA images" + ) + + if palette: + # use palette from reference image + palette.load() + if palette.mode != "P": + raise ValueError("bad mode for palette image") + if self.mode != "RGB" and self.mode != "L": + raise ValueError( + "only RGB or L mode images can be quantized to a palette" + ) + im = self.im.convert("P", dither, palette.im) + return self._new(im) + + im = self._new(self.im.quantize(colors, method, kmeans)) + + from . import ImagePalette + + mode = im.im.getpalettemode() + im.palette = ImagePalette.ImagePalette(mode, im.im.getpalette(mode, mode)) + + return im + + def copy(self): + """ + Copies this image. Use this method if you wish to paste things + into an image, but still retain the original. + + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + self.load() + return self._new(self.im.copy()) + + __copy__ = copy + + def crop(self, box=None): + """ + Returns a rectangular region from this image. The box is a + 4-tuple defining the left, upper, right, and lower pixel + coordinate. See :ref:`coordinate-system`. + + Note: Prior to Pillow 3.4.0, this was a lazy operation. + + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if box is None: + return self.copy() + + self.load() + return self._new(self._crop(self.im, box)) + + def _crop(self, im, box): + """ + Returns a rectangular region from the core image object im. + + This is equivalent to calling im.crop((x0, y0, x1, y1)), but + includes additional sanity checks. + + :param im: a core image object + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :returns: A core image object. + """ + + x0, y0, x1, y1 = map(int, map(round, box)) + + absolute_values = (abs(x1 - x0), abs(y1 - y0)) + + _decompression_bomb_check(absolute_values) + + return im.crop((x0, y0, x1, y1)) + + def draft(self, mode, size): + """ + Configures the image file loader so it returns a version of the + image that as closely as possible matches the given mode and + size. For example, you can use this method to convert a color + JPEG to greyscale while loading it. + + If any changes are made, returns a tuple with the chosen ``mode`` and + ``box`` with coordinates of the original image within the altered one. + + Note that this method modifies the :py:class:`~PIL.Image.Image` object + in place. If the image has already been loaded, this method has no + effect. + + Note: This method is not implemented for most images. It is + currently implemented only for JPEG and MPO images. + + :param mode: The requested mode. + :param size: The requested size. + """ + pass + + def _expand(self, xmargin, ymargin=None): + if ymargin is None: + ymargin = xmargin + self.load() + return self._new(self.im.expand(xmargin, ymargin, 0)) + + def filter(self, filter): + """ + Filters this image using the given filter. For a list of + available filters, see the :py:mod:`~PIL.ImageFilter` module. + + :param filter: Filter kernel. + :returns: An :py:class:`~PIL.Image.Image` object.""" + + from . 
import ImageFilter + + self.load() + + if isinstance(filter, Callable): + filter = filter() + if not hasattr(filter, "filter"): + raise TypeError( + "filter argument should be ImageFilter.Filter instance or class" + ) + + multiband = isinstance(filter, ImageFilter.MultibandFilter) + if self.im.bands == 1 or multiband: + return self._new(filter.filter(self.im)) + + ims = [] + for c in range(self.im.bands): + ims.append(self._new(filter.filter(self.im.getband(c)))) + return merge(self.mode, ims) + + def getbands(self): + """ + Returns a tuple containing the name of each band in this image. + For example, ``getbands`` on an RGB image returns ("R", "G", "B"). + + :returns: A tuple containing band names. + :rtype: tuple + """ + return ImageMode.getmode(self.mode).bands + + def getbbox(self): + """ + Calculates the bounding box of the non-zero regions in the + image. + + :returns: The bounding box is returned as a 4-tuple defining the + left, upper, right, and lower pixel coordinate. See + :ref:`coordinate-system`. If the image is completely empty, this + method returns None. + + """ + + self.load() + return self.im.getbbox() + + def getcolors(self, maxcolors=256): + """ + Returns a list of colors used in this image. + + :param maxcolors: Maximum number of colors. If this number is + exceeded, this method returns None. The default limit is + 256 colors. + :returns: An unsorted list of (count, pixel) values. + """ + + self.load() + if self.mode in ("1", "L", "P"): + h = self.im.histogram() + out = [] + for i in range(256): + if h[i]: + out.append((h[i], i)) + if len(out) > maxcolors: + return None + return out + return self.im.getcolors(maxcolors) + + def getdata(self, band=None): + """ + Returns the contents of this image as a sequence object + containing pixel values. The sequence object is flattened, so + that values for line one follow directly after the values of + line zero, and so on. + + Note that the sequence object returned by this method is an + internal PIL data type, which only supports certain sequence + operations. To convert it to an ordinary sequence (e.g. for + printing), use ``list(im.getdata())``. + + :param band: What band to return. The default is to return + all bands. To return a single band, pass in the index + value (e.g. 0 to get the "R" band from an "RGB" image). + :returns: A sequence-like object. + """ + + self.load() + if band is not None: + return self.im.getband(band) + return self.im # could be abused + + def getextrema(self): + """ + Gets the the minimum and maximum pixel values for each band in + the image. + + :returns: For a single-band image, a 2-tuple containing the + minimum and maximum pixel value. For a multi-band image, + a tuple containing one 2-tuple for each band. 
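# Inspection and filter sketch for the methods described above (illustrative;
# no external files needed).
from PIL import Image, ImageFilter

im = Image.new("RGB", (32, 32))
im.paste((255, 255, 0), (8, 8, 24, 24))      # paint a block so the stats are non-trivial
print(im.getbbox())                          # (8, 8, 24, 24): bounds of the non-zero region
print(im.getextrema())                       # ((0, 255), (0, 255), (0, 0))
blurred = im.filter(ImageFilter.GaussianBlur(radius=2))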
+ """ + + self.load() + if self.im.bands > 1: + extrema = [] + for i in range(self.im.bands): + extrema.append(self.im.getband(i).getextrema()) + return tuple(extrema) + return self.im.getextrema() + + def getexif(self): + if self._exif is None: + self._exif = Exif() + + exif_info = self.info.get("exif") + if exif_info is None and "Raw profile type exif" in self.info: + exif_info = bytes.fromhex( + "".join(self.info["Raw profile type exif"].split("\n")[3:]) + ) + self._exif.load(exif_info) + + # XMP tags + if 0x0112 not in self._exif: + xmp_tags = self.info.get("XML:com.adobe.xmp") + if xmp_tags: + root = xml.etree.ElementTree.fromstring(xmp_tags) + for elem in root.iter(): + if elem.tag.endswith("}Description"): + orientation = elem.attrib.get( + "{http://ns.adobe.com/tiff/1.0/}Orientation" + ) + if orientation: + self._exif[0x0112] = int(orientation) + break + + return self._exif + + def getim(self): + """ + Returns a capsule that points to the internal image memory. + + :returns: A capsule object. + """ + + self.load() + return self.im.ptr + + def getpalette(self): + """ + Returns the image palette as a list. + + :returns: A list of color values [r, g, b, ...], or None if the + image has no palette. + """ + + self.load() + try: + return list(self.im.getpalette()) + except ValueError: + return None # no palette + + def getpixel(self, xy): + """ + Returns the pixel value at a given position. + + :param xy: The coordinate, given as (x, y). See + :ref:`coordinate-system`. + :returns: The pixel value. If the image is a multi-layer image, + this method returns a tuple. + """ + + self.load() + if self.pyaccess: + return self.pyaccess.getpixel(xy) + return self.im.getpixel(xy) + + def getprojection(self): + """ + Get projection to x and y axes + + :returns: Two sequences, indicating where there are non-zero + pixels along the X-axis and the Y-axis, respectively. + """ + + self.load() + x, y = self.im.getprojection() + return list(x), list(y) + + def histogram(self, mask=None, extrema=None): + """ + Returns a histogram for the image. The histogram is returned as + a list of pixel counts, one for each pixel value in the source + image. If the image has more than one band, the histograms for + all bands are concatenated (for example, the histogram for an + "RGB" image contains 768 values). + + A bilevel image (mode "1") is treated as a greyscale ("L") image + by this method. + + If a mask is provided, the method returns a histogram for those + parts of the image where the mask image is non-zero. The mask + image must have the same size as the image, and be either a + bi-level image (mode "1") or a greyscale image ("L"). + + :param mask: An optional mask. + :param extrema: An optional tuple of manually-specified extrema. + :returns: A list containing pixel counts. + """ + self.load() + if mask: + mask.load() + return self.im.histogram((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.histogram(extrema) + return self.im.histogram() + + def entropy(self, mask=None, extrema=None): + """ + Calculates and returns the entropy for the image. + + A bilevel image (mode "1") is treated as a greyscale ("L") + image by this method. + + If a mask is provided, the method employs the histogram for + those parts of the image where the mask image is non-zero. + The mask image must have the same size as the image, and be + either a bi-level image (mode "1") or a greyscale image ("L"). + + :param mask: An optional mask. 
+ :param extrema: An optional tuple of manually-specified extrema. + :returns: A float value representing the image entropy + """ + self.load() + if mask: + mask.load() + return self.im.entropy((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.entropy(extrema) + return self.im.entropy() + + def paste(self, im, box=None, mask=None): + """ + Pastes another image into this image. The box argument is either + a 2-tuple giving the upper left corner, a 4-tuple defining the + left, upper, right, and lower pixel coordinate, or None (same as + (0, 0)). See :ref:`coordinate-system`. If a 4-tuple is given, the size + of the pasted image must match the size of the region. + + If the modes don't match, the pasted image is converted to the mode of + this image (see the :py:meth:`~PIL.Image.Image.convert` method for + details). + + Instead of an image, the source can be a integer or tuple + containing pixel values. The method then fills the region + with the given color. When creating RGB images, you can + also use color strings as supported by the ImageColor module. + + If a mask is given, this method updates only the regions + indicated by the mask. You can use either "1", "L" or "RGBA" + images (in the latter case, the alpha band is used as mask). + Where the mask is 255, the given image is copied as is. Where + the mask is 0, the current value is preserved. Intermediate + values will mix the two images together, including their alpha + channels if they have them. + + See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to + combine images with respect to their alpha channels. + + :param im: Source image or pixel value (integer or tuple). + :param box: An optional 4-tuple giving the region to paste into. + If a 2-tuple is used instead, it's treated as the upper left + corner. If omitted or None, the source is pasted into the + upper left corner. + + If an image is given as the second argument and there is no + third, the box defaults to (0, 0), and the second argument + is interpreted as a mask image. + :param mask: An optional mask image. + """ + + if isImageType(box) and mask is None: + # abbreviated paste(im, mask) syntax + mask = box + box = None + + if box is None: + box = (0, 0) + + if len(box) == 2: + # upper left corner given; get size from image or mask + if isImageType(im): + size = im.size + elif isImageType(mask): + size = mask.size + else: + # FIXME: use self.size here? + raise ValueError("cannot determine region size; use 4-item box") + box += (box[0] + size[0], box[1] + size[1]) + + if isinstance(im, str): + from . import ImageColor + + im = ImageColor.getcolor(im, self.mode) + + elif isImageType(im): + im.load() + if self.mode != im.mode: + if self.mode != "RGB" or im.mode not in ("RGBA", "RGBa"): + # should use an adapter for this! + im = im.convert(self.mode) + im = im.im + + self._ensure_mutable() + + if mask: + mask.load() + self.im.paste(im, box, mask.im) + else: + self.im.paste(im, box) + + def alpha_composite(self, im, dest=(0, 0), source=(0, 0)): + """'In-place' analog of Image.alpha_composite. Composites an image + onto this image. + + :param im: image to composite over this one + :param dest: Optional 2 tuple (left, top) specifying the upper + left corner in this (destination) image. 
+ :param source: Optional 2 (left, top) tuple for the upper left + corner in the overlay source image, or 4 tuple (left, top, right, + bottom) for the bounds of the source rectangle + + Performance Note: Not currently implemented in-place in the core layer. + """ + + if not isinstance(source, (list, tuple)): + raise ValueError("Source must be a tuple") + if not isinstance(dest, (list, tuple)): + raise ValueError("Destination must be a tuple") + if not len(source) in (2, 4): + raise ValueError("Source must be a 2 or 4-tuple") + if not len(dest) == 2: + raise ValueError("Destination must be a 2-tuple") + if min(source) < 0: + raise ValueError("Source must be non-negative") + if min(dest) < 0: + raise ValueError("Destination must be non-negative") + + if len(source) == 2: + source = source + im.size + + # over image, crop if it's not the whole thing. + if source == (0, 0) + im.size: + overlay = im + else: + overlay = im.crop(source) + + # target for the paste + box = dest + (dest[0] + overlay.width, dest[1] + overlay.height) + + # destination image. don't copy if we're using the whole image. + if box == (0, 0) + self.size: + background = self + else: + background = self.crop(box) + + result = alpha_composite(background, overlay) + self.paste(result, box) + + def point(self, lut, mode=None): + """ + Maps this image through a lookup table or function. + + :param lut: A lookup table, containing 256 (or 65536 if + self.mode=="I" and mode == "L") values per band in the + image. A function can be used instead, it should take a + single argument. The function is called once for each + possible pixel value, and the resulting table is applied to + all bands of the image. + + It may also be an :py:class:`~PIL.Image.ImagePointHandler` + object:: + + class Example(Image.ImagePointHandler): + def point(self, data): + # Return result + :param mode: Output mode (default is same as input). In the + current version, this can only be used if the source image + has mode "L" or "P", and the output has mode "1" or the + source image mode is "I" and the output mode is "L". + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + + if isinstance(lut, ImagePointHandler): + return lut.point(self) + + if callable(lut): + # if it isn't a list, it should be a function + if self.mode in ("I", "I;16", "F"): + # check if the function can be used with point_transform + # UNDONE wiredfool -- I think this prevents us from ever doing + # a gamma function point transform on > 8bit images. + scale, offset = _getscaleoffset(lut) + return self._new(self.im.point_transform(scale, offset)) + # for other modes, convert the function to a table + lut = [lut(i) for i in range(256)] * self.im.bands + + if self.mode == "F": + # FIXME: _imaging returns a confusing error message for this case + raise ValueError("point operation not supported for this mode") + + return self._new(self.im.point(lut, mode)) + + def putalpha(self, alpha): + """ + Adds or replaces the alpha layer in this image. If the image + does not have an alpha layer, it's converted to "LA" or "RGBA". + The new layer must be either "L" or "1". + + :param alpha: The new alpha layer. This can either be an "L" or "1" + image having the same size as this image, or an integer or + other color value. 
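# Alpha sketch for the two methods above (illustrative): putalpha() promotes
# the image in place, and alpha_composite() blends another RGBA image onto it.
from PIL import Image

base = Image.new("RGB", (32, 32), (0, 0, 0))
base.putalpha(255)                    # base is now mode "RGBA"
overlay = Image.new("RGBA", (16, 16), (255, 0, 0, 128))
base.alpha_composite(overlay, dest=(8, 8))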
+ """ + + self._ensure_mutable() + + if self.mode not in ("LA", "PA", "RGBA"): + # attempt to promote self to a matching alpha mode + try: + mode = getmodebase(self.mode) + "A" + try: + self.im.setmode(mode) + except (AttributeError, ValueError) as e: + # do things the hard way + im = self.im.convert(mode) + if im.mode not in ("LA", "PA", "RGBA"): + raise ValueError from e # sanity check + self.im = im + self.pyaccess = None + self.mode = self.im.mode + except KeyError as e: + raise ValueError("illegal image mode") from e + + if self.mode in ("LA", "PA"): + band = 1 + else: + band = 3 + + if isImageType(alpha): + # alpha layer + if alpha.mode not in ("1", "L"): + raise ValueError("illegal image mode") + alpha.load() + if alpha.mode == "1": + alpha = alpha.convert("L") + else: + # constant alpha + try: + self.im.fillband(band, alpha) + except (AttributeError, ValueError): + # do things the hard way + alpha = new("L", self.size, alpha) + else: + return + + self.im.putband(alpha.im, band) + + def putdata(self, data, scale=1.0, offset=0.0): + """ + Copies pixel data to this image. This method copies data from a + sequence object into the image, starting at the upper left + corner (0, 0), and continuing until either the image or the + sequence ends. The scale and offset values are used to adjust + the sequence values: **pixel = value*scale + offset**. + + :param data: A sequence object. + :param scale: An optional scale value. The default is 1.0. + :param offset: An optional offset value. The default is 0.0. + """ + + self._ensure_mutable() + + self.im.putdata(data, scale, offset) + + def putpalette(self, data, rawmode="RGB"): + """ + Attaches a palette to this image. The image must be a "P", "PA", "L" + or "LA" image. + + The palette sequence must contain either 768 integer values, or 1024 + integer values if alpha is included. Each group of values represents + the red, green, blue (and alpha if included) values for the + corresponding pixel index. Instead of an integer sequence, you can use + an 8-bit string. + + :param data: A palette sequence (either a list or a string). + :param rawmode: The raw mode of the palette. + """ + from . import ImagePalette + + if self.mode not in ("L", "LA", "P", "PA"): + raise ValueError("illegal image mode") + self.load() + if isinstance(data, ImagePalette.ImagePalette): + palette = ImagePalette.raw(data.rawmode, data.palette) + else: + if not isinstance(data, bytes): + data = bytes(data) + palette = ImagePalette.raw(rawmode, data) + self.mode = "PA" if "A" in self.mode else "P" + self.palette = palette + self.palette.mode = "RGB" + self.load() # install new palette + + def putpixel(self, xy, value): + """ + Modifies the pixel at the given position. The color is given as + a single numerical value for single-band images, and a tuple for + multi-band images. In addition to this, RGB and RGBA tuples are + accepted for P images. + + Note that this method is relatively slow. For more extensive changes, + use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw` + module instead. + + See: + + * :py:meth:`~PIL.Image.Image.paste` + * :py:meth:`~PIL.Image.Image.putdata` + * :py:mod:`~PIL.ImageDraw` + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :param value: The pixel value. 
+ """ + + if self.readonly: + self._copy() + self.load() + + if self.pyaccess: + return self.pyaccess.putpixel(xy, value) + + if ( + self.mode == "P" + and isinstance(value, (list, tuple)) + and len(value) in [3, 4] + ): + # RGB or RGBA value for a P image + value = self.palette.getcolor(value) + return self.im.putpixel(xy, value) + + def remap_palette(self, dest_map, source_palette=None): + """ + Rewrites the image to reorder the palette. + + :param dest_map: A list of indexes into the original palette. + e.g. ``[1,0]`` would swap a two item palette, and ``list(range(256))`` + is the identity transform. + :param source_palette: Bytes or None. + :returns: An :py:class:`~PIL.Image.Image` object. + + """ + from . import ImagePalette + + if self.mode not in ("L", "P"): + raise ValueError("illegal image mode") + + if source_palette is None: + if self.mode == "P": + real_source_palette = self.im.getpalette("RGB")[:768] + else: # L-mode + real_source_palette = bytearray(i // 3 for i in range(768)) + else: + real_source_palette = source_palette + + palette_bytes = b"" + new_positions = [0] * 256 + + # pick only the used colors from the palette + for i, oldPosition in enumerate(dest_map): + palette_bytes += real_source_palette[oldPosition * 3 : oldPosition * 3 + 3] + new_positions[oldPosition] = i + + # replace the palette color id of all pixel with the new id + + # Palette images are [0..255], mapped through a 1 or 3 + # byte/color map. We need to remap the whole image + # from palette 1 to palette 2. New_positions is + # an array of indexes into palette 1. Palette 2 is + # palette 1 with any holes removed. + + # We're going to leverage the convert mechanism to use the + # C code to remap the image from palette 1 to palette 2, + # by forcing the source image into 'L' mode and adding a + # mapping 'L' mode palette, then converting back to 'L' + # sans palette thus converting the image bytes, then + # assigning the optimized RGB palette. + + # perf reference, 9500x4000 gif, w/~135 colors + # 14 sec prepatch, 1 sec postpatch with optimization forced. + + mapping_palette = bytearray(new_positions) + + m_im = self.copy() + m_im.mode = "P" + + m_im.palette = ImagePalette.ImagePalette( + "RGB", palette=mapping_palette * 3, size=768 + ) + # possibly set palette dirty, then + # m_im.putpalette(mapping_palette, 'L') # converts to 'P' + # or just force it. + # UNDONE -- this is part of the general issue with palettes + m_im.im.putpalette(*m_im.palette.getdata()) + + m_im = m_im.convert("L") + + # Internally, we require 768 bytes for a palette. + new_palette_bytes = palette_bytes + (768 - len(palette_bytes)) * b"\x00" + m_im.putpalette(new_palette_bytes) + m_im.palette = ImagePalette.ImagePalette( + "RGB", palette=palette_bytes, size=len(palette_bytes) + ) + + return m_im + + def _get_safe_box(self, size, resample, box): + """Expands the box so it includes adjacent pixels + that may be used by resampling with the given resampling filter. + """ + filter_support = _filters_support[resample] - 0.5 + scale_x = (box[2] - box[0]) / size[0] + scale_y = (box[3] - box[1]) / size[1] + support_x = filter_support * scale_x + support_y = filter_support * scale_y + + return ( + max(0, int(box[0] - support_x)), + max(0, int(box[1] - support_y)), + min(self.size[0], math.ceil(box[2] + support_x)), + min(self.size[1], math.ceil(box[3] + support_y)), + ) + + def resize(self, size, resample=BICUBIC, box=None, reducing_gap=None): + """ + Returns a resized copy of this image. 
+ + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param resample: An optional resampling filter. This can be + one of :py:data:`PIL.Image.NEAREST`, :py:data:`PIL.Image.BOX`, + :py:data:`PIL.Image.BILINEAR`, :py:data:`PIL.Image.HAMMING`, + :py:data:`PIL.Image.BICUBIC` or :py:data:`PIL.Image.LANCZOS`. + Default filter is :py:data:`PIL.Image.BICUBIC`. + If the image has mode "1" or "P", it is + always set to :py:data:`PIL.Image.NEAREST`. + See: :ref:`concept-filters`. + :param box: An optional 4-tuple of floats providing + the source image region to be scaled. + The values must be within (0, 0, width, height) rectangle. + If omitted or None, the entire source is used. + :param reducing_gap: Apply optimization by resizing the image + in two steps. First, reducing the image by integer times + using :py:meth:`~PIL.Image.Image.reduce`. + Second, resizing using regular resampling. The last step + changes size no less than by ``reducing_gap`` times. + ``reducing_gap`` may be None (no first step is performed) + or should be greater than 1.0. The bigger ``reducing_gap``, + the closer the result to the fair resampling. + The smaller ``reducing_gap``, the faster resizing. + With ``reducing_gap`` greater or equal to 3.0, the result is + indistinguishable from fair resampling in most cases. + The default value is None (no optimization). + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if resample not in (NEAREST, BILINEAR, BICUBIC, LANCZOS, BOX, HAMMING): + message = f"Unknown resampling filter ({resample})." + + filters = [ + "{} ({})".format(filter[1], filter[0]) + for filter in ( + (NEAREST, "Image.NEAREST"), + (LANCZOS, "Image.LANCZOS"), + (BILINEAR, "Image.BILINEAR"), + (BICUBIC, "Image.BICUBIC"), + (BOX, "Image.BOX"), + (HAMMING, "Image.HAMMING"), + ) + ] + raise ValueError( + message + " Use " + ", ".join(filters[:-1]) + " or " + filters[-1] + ) + + if reducing_gap is not None and reducing_gap < 1.0: + raise ValueError("reducing_gap must be 1.0 or greater") + + size = tuple(size) + + if box is None: + box = (0, 0) + self.size + else: + box = tuple(box) + + if self.size == size and box == (0, 0) + self.size: + return self.copy() + + if self.mode in ("1", "P"): + resample = NEAREST + + if self.mode in ["LA", "RGBA"]: + im = self.convert(self.mode[:-1] + "a") + im = im.resize(size, resample, box) + return im.convert(self.mode) + + self.load() + + if reducing_gap is not None and resample != NEAREST: + factor_x = int((box[2] - box[0]) / size[0] / reducing_gap) or 1 + factor_y = int((box[3] - box[1]) / size[1] / reducing_gap) or 1 + if factor_x > 1 or factor_y > 1: + reduce_box = self._get_safe_box(size, resample, box) + factor = (factor_x, factor_y) + if callable(self.reduce): + self = self.reduce(factor, box=reduce_box) + else: + self = Image.reduce(self, factor, box=reduce_box) + box = ( + (box[0] - reduce_box[0]) / factor_x, + (box[1] - reduce_box[1]) / factor_y, + (box[2] - reduce_box[0]) / factor_x, + (box[3] - reduce_box[1]) / factor_y, + ) + + return self._new(self.im.resize(size, resample, box)) + + def reduce(self, factor, box=None): + """ + Returns a copy of the image reduced ``factor`` times. + If the size of the image is not dividable by ``factor``, + the resulting size will be rounded up. + + :param factor: A greater than 0 integer or tuple of two integers + for width and height separately. + :param box: An optional 4-tuple of ints providing + the source image region to be reduced. 
+ The values must be within ``(0, 0, width, height)`` rectangle. + If omitted or ``None``, the entire source is used. + """ + if not isinstance(factor, (list, tuple)): + factor = (factor, factor) + + if box is None: + box = (0, 0) + self.size + else: + box = tuple(box) + + if factor == (1, 1) and box == (0, 0) + self.size: + return self.copy() + + if self.mode in ["LA", "RGBA"]: + im = self.convert(self.mode[:-1] + "a") + im = im.reduce(factor, box) + return im.convert(self.mode) + + self.load() + + return self._new(self.im.reduce(factor, box)) + + def rotate( + self, + angle, + resample=NEAREST, + expand=0, + center=None, + translate=None, + fillcolor=None, + ): + """ + Returns a rotated copy of this image. This method returns a + copy of this image, rotated the given number of degrees counter + clockwise around its centre. + + :param angle: In degrees counter clockwise. + :param resample: An optional resampling filter. This can be + one of :py:data:`PIL.Image.NEAREST` (use nearest neighbour), + :py:data:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:data:`PIL.Image.BICUBIC` + (cubic spline interpolation in a 4x4 environment). + If omitted, or if the image has mode "1" or "P", it is + set to :py:data:`PIL.Image.NEAREST`. See :ref:`concept-filters`. + :param expand: Optional expansion flag. If true, expands the output + image to make it large enough to hold the entire rotated image. + If false or omitted, make the output image the same size as the + input image. Note that the expand flag assumes rotation around + the center and no translation. + :param center: Optional center of rotation (a 2-tuple). Origin is + the upper left corner. Default is the center of the image. + :param translate: An optional post-rotate translation (a 2-tuple). + :param fillcolor: An optional color for area outside the rotated image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + angle = angle % 360.0 + + # Fast paths regardless of filter, as long as we're not + # translating or changing the center. + if not (center or translate): + if angle == 0: + return self.copy() + if angle == 180: + return self.transpose(ROTATE_180) + if angle == 90 and expand: + return self.transpose(ROTATE_90) + if angle == 270 and expand: + return self.transpose(ROTATE_270) + + # Calculate the affine matrix. Note that this is the reverse + # transformation (from destination image to source) because we + # want to interpolate the (discrete) destination pixel from + # the local area around the (floating) source pixel. + + # The matrix we actually want (note that it operates from the right): + # (1, 0, tx) (1, 0, cx) ( cos a, sin a, 0) (1, 0, -cx) + # (0, 1, ty) * (0, 1, cy) * (-sin a, cos a, 0) * (0, 1, -cy) + # (0, 0, 1) (0, 0, 1) ( 0, 0, 1) (0, 0, 1) + + # The reverse matrix is thus: + # (1, 0, cx) ( cos -a, sin -a, 0) (1, 0, -cx) (1, 0, -tx) + # (0, 1, cy) * (-sin -a, cos -a, 0) * (0, 1, -cy) * (0, 1, -ty) + # (0, 0, 1) ( 0, 0, 1) (0, 0, 1) (0, 0, 1) + + # In any case, the final translation may be updated at the end to + # compensate for the expand flag. + + w, h = self.size + + if translate is None: + post_trans = (0, 0) + else: + post_trans = translate + if center is None: + # FIXME These should be rounded to ints? 
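# Geometry sketch for resize() and rotate() as documented above (illustrative).
from PIL import Image

im = Image.new("RGB", (200, 100), (0, 128, 0))
small = im.resize((100, 50), resample=Image.LANCZOS)
tilted = im.rotate(30, resample=Image.BICUBIC, expand=True, fillcolor=(255, 255, 255))
print(small.size, tilted.size)        # (100, 50) and a box large enough for the rotated image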
+ rotn_center = (w / 2.0, h / 2.0) + else: + rotn_center = center + + angle = -math.radians(angle) + matrix = [ + round(math.cos(angle), 15), + round(math.sin(angle), 15), + 0.0, + round(-math.sin(angle), 15), + round(math.cos(angle), 15), + 0.0, + ] + + def transform(x, y, matrix): + (a, b, c, d, e, f) = matrix + return a * x + b * y + c, d * x + e * y + f + + matrix[2], matrix[5] = transform( + -rotn_center[0] - post_trans[0], -rotn_center[1] - post_trans[1], matrix + ) + matrix[2] += rotn_center[0] + matrix[5] += rotn_center[1] + + if expand: + # calculate output size + xx = [] + yy = [] + for x, y in ((0, 0), (w, 0), (w, h), (0, h)): + x, y = transform(x, y, matrix) + xx.append(x) + yy.append(y) + nw = math.ceil(max(xx)) - math.floor(min(xx)) + nh = math.ceil(max(yy)) - math.floor(min(yy)) + + # We multiply a translation matrix from the right. Because of its + # special form, this is the same as taking the image of the + # translation vector as new translation vector. + matrix[2], matrix[5] = transform(-(nw - w) / 2.0, -(nh - h) / 2.0, matrix) + w, h = nw, nh + + return self.transform((w, h), AFFINE, matrix, resample, fillcolor=fillcolor) + + def save(self, fp, format=None, **params): + """ + Saves this image under the given filename. If no format is + specified, the format to use is determined from the filename + extension, if possible. + + Keyword options can be used to provide additional instructions + to the writer. If a writer doesn't recognise an option, it is + silently ignored. The available options are described in the + :doc:`image format documentation + <../handbook/image-file-formats>` for each writer. + + You can use a file object instead of a filename. In this case, + you must always specify the format. The file object must + implement the ``seek``, ``tell``, and ``write`` + methods, and be opened in binary mode. + + :param fp: A filename (string), pathlib.Path object or file object. + :param format: Optional format override. If omitted, the + format to use is determined from the filename extension. + If a file object was used instead of a filename, this + parameter should always be used. + :param params: Extra parameters to the image writer. + :returns: None + :exception ValueError: If the output format could not be determined + from the file name. Use the format option to solve this. + :exception OSError: If the file could not be written. The file + may have been created, and may contain partial data. + """ + + filename = "" + open_fp = False + if isPath(fp): + filename = fp + open_fp = True + elif isinstance(fp, Path): + filename = str(fp) + open_fp = True + if not filename and hasattr(fp, "name") and isPath(fp.name): + # only set the name for metadata purposes + filename = fp.name + + # may mutate self! + self._ensure_mutable() + + save_all = params.pop("save_all", False) + self.encoderinfo = params + self.encoderconfig = () + + preinit() + + ext = os.path.splitext(filename)[1].lower() + + if not format: + if ext not in EXTENSION: + init() + try: + format = EXTENSION[ext] + except KeyError as e: + raise ValueError(f"unknown file extension: {ext}") from e + + if format.upper() not in SAVE: + init() + if save_all: + save_handler = SAVE_ALL[format.upper()] + else: + save_handler = SAVE[format.upper()] + + if open_fp: + if params.get("append", False): + # Open also for reading ("+"), because TIFF save_all + # writer needs to go back and edit the written data. 
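# Save sketch per the rules above (illustrative; "out.png" is a hypothetical
# path): with a filename the format comes from the extension, with a file
# object the format argument is mandatory.
import io
from PIL import Image

im = Image.new("RGB", (32, 32), (10, 20, 30))
im.save("out.png")                    # format inferred from ".png"
buf = io.BytesIO()
im.save(buf, format="JPEG", quality=90)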
+ fp = builtins.open(filename, "r+b") + else: + fp = builtins.open(filename, "w+b") + + try: + save_handler(self, fp, filename) + finally: + # do what we can to clean up + if open_fp: + fp.close() + + def seek(self, frame): + """ + Seeks to the given frame in this sequence file. If you seek + beyond the end of the sequence, the method raises an + ``EOFError`` exception. When a sequence file is opened, the + library automatically seeks to frame 0. + + See :py:meth:`~PIL.Image.Image.tell`. + + If defined, :attr:`~PIL.Image.Image.n_frames` refers to the + number of available frames. + + :param frame: Frame number, starting at 0. + :exception EOFError: If the call attempts to seek beyond the end + of the sequence. + """ + + # overridden by file handlers + if frame != 0: + raise EOFError + + def show(self, title=None, command=None): + """ + Displays this image. This method is mainly intended for debugging purposes. + + This method calls :py:func:`PIL.ImageShow.show` internally. You can use + :py:func:`PIL.ImageShow.register` to override its default behaviour. + + The image is first saved to a temporary file. By default, it will be in + PNG format. + + On Unix, the image is then opened using the **display**, **eog** or + **xv** utility, depending on which one can be found. + + On macOS, the image is opened with the native Preview application. + + On Windows, the image is opened with the standard PNG display utility. + + :param title: Optional title to use for the image window, where possible. + """ + + if command is not None: + warnings.warn( + "The command parameter is deprecated and will be removed in Pillow 9 " + "(2022-01-02). Use a subclass of ImageShow.Viewer instead.", + DeprecationWarning, + ) + + _show(self, title=title, command=command) + + def split(self): + """ + Split this image into individual bands. This method returns a + tuple of individual image bands from an image. For example, + splitting an "RGB" image creates three new images each + containing a copy of one of the original bands (red, green, + blue). + + If you need only one band, :py:meth:`~PIL.Image.Image.getchannel` + method can be more convenient and faster. + + :returns: A tuple containing bands. + """ + + self.load() + if self.im.bands == 1: + ims = [self.copy()] + else: + ims = map(self._new, self.im.split()) + return tuple(ims) + + def getchannel(self, channel): + """ + Returns an image containing a single channel of the source image. + + :param channel: What channel to return. Could be index + (0 for "R" channel of "RGB") or channel name + ("A" for alpha channel of "RGBA"). + :returns: An image in "L" mode. + + .. versionadded:: 4.3.0 + """ + self.load() + + if isinstance(channel, str): + try: + channel = self.getbands().index(channel) + except ValueError as e: + raise ValueError(f'The image has no channel "{channel}"') from e + + return self._new(self.im.getband(channel)) + + def tell(self): + """ + Returns the current frame number. See :py:meth:`~PIL.Image.Image.seek`. + + If defined, :attr:`~PIL.Image.Image.n_frames` refers to the + number of available frames. + + :returns: Frame number, starting with 0. + """ + return 0 + + def thumbnail(self, size, resample=BICUBIC, reducing_gap=2.0): + """ + Make this image into a thumbnail. This method modifies the + image to contain a thumbnail version of itself, no larger than + the given size. 
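# Illustrative usage sketch for Image.save() above (not part of the vendored
# Pillow source; output paths are placeholders). The format is inferred from the
# filename extension, but must be given explicitly for file objects.
import io
from PIL import Image

im = Image.new("RGB", (64, 64), "red")
im.save("out.png")                  # format inferred from ".png"
im.save("out.jpg", quality=90)      # extra keyword passed on to the JPEG writer

buffer = io.BytesIO()
im.save(buffer, format="PNG")       # file object: format is required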
This method calculates an appropriate thumbnail + size to preserve the aspect of the image, calls the + :py:meth:`~PIL.Image.Image.draft` method to configure the file reader + (where applicable), and finally resizes the image. + + Note that this function modifies the :py:class:`~PIL.Image.Image` + object in place. If you need to use the full resolution image as well, + apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original + image. + + :param size: Requested size. + :param resample: Optional resampling filter. This can be one + of :py:data:`PIL.Image.NEAREST`, :py:data:`PIL.Image.BOX`, + :py:data:`PIL.Image.BILINEAR`, :py:data:`PIL.Image.HAMMING`, + :py:data:`PIL.Image.BICUBIC` or :py:data:`PIL.Image.LANCZOS`. + If omitted, it defaults to :py:data:`PIL.Image.BICUBIC`. + (was :py:data:`PIL.Image.NEAREST` prior to version 2.5.0). + See: :ref:`concept-filters`. + :param reducing_gap: Apply optimization by resizing the image + in two steps. First, reducing the image by integer times + using :py:meth:`~PIL.Image.Image.reduce` or + :py:meth:`~PIL.Image.Image.draft` for JPEG images. + Second, resizing using regular resampling. The last step + changes size no less than by ``reducing_gap`` times. + ``reducing_gap`` may be None (no first step is performed) + or should be greater than 1.0. The bigger ``reducing_gap``, + the closer the result to the fair resampling. + The smaller ``reducing_gap``, the faster resizing. + With ``reducing_gap`` greater or equal to 3.0, the result is + indistinguishable from fair resampling in most cases. + The default value is 2.0 (very close to fair resampling + while still being faster in many cases). + :returns: None + """ + + x, y = map(math.floor, size) + if x >= self.width and y >= self.height: + return + + def round_aspect(number, key): + return max(min(math.floor(number), math.ceil(number), key=key), 1) + + # preserve aspect ratio + aspect = self.width / self.height + if x / y >= aspect: + x = round_aspect(y * aspect, key=lambda n: abs(aspect - n / y)) + else: + y = round_aspect( + x / aspect, key=lambda n: 0 if n == 0 else abs(aspect - x / n) + ) + size = (x, y) + + box = None + if reducing_gap is not None: + res = self.draft(None, (size[0] * reducing_gap, size[1] * reducing_gap)) + if res is not None: + box = res[1] + + if self.size != size: + im = self.resize(size, resample, box=box, reducing_gap=reducing_gap) + + self.im = im.im + self._size = size + self.mode = self.im.mode + + self.readonly = 0 + self.pyaccess = None + + # FIXME: the different transform methods need further explanation + # instead of bloating the method docs, add a separate chapter. + def transform( + self, size, method, data=None, resample=NEAREST, fill=1, fillcolor=None + ): + """ + Transforms this image. This method creates a new image with the + given size, and the same mode as the original, and copies data + to the new image using the given transform. + + :param size: The output size. + :param method: The transformation method. This is one of + :py:data:`PIL.Image.EXTENT` (cut out a rectangular subregion), + :py:data:`PIL.Image.AFFINE` (affine transform), + :py:data:`PIL.Image.PERSPECTIVE` (perspective transform), + :py:data:`PIL.Image.QUAD` (map a quadrilateral to a rectangle), or + :py:data:`PIL.Image.MESH` (map a number of source quadrilaterals + in one operation). 
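# Illustrative usage sketch for Image.thumbnail() above (not part of the vendored
# Pillow source; "photo.jpg" is a placeholder). thumbnail() works in place, so
# operate on a copy if the full-resolution image is still needed.
from PIL import Image

with Image.open("photo.jpg") as im:
    preview = im.copy()
    preview.thumbnail((128, 128))   # aspect ratio is preserved
    preview.save("photo_thumb.png")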
+ + It may also be an :py:class:`~PIL.Image.ImageTransformHandler` + object:: + + class Example(Image.ImageTransformHandler): + def transform(self, size, data, resample, fill=1): + # Return result + + It may also be an object with a ``method.getdata`` method + that returns a tuple supplying new ``method`` and ``data`` values:: + + class Example: + def getdata(self): + method = Image.EXTENT + data = (0, 0, 100, 100) + return method, data + :param data: Extra data to the transformation method. + :param resample: Optional resampling filter. It can be one of + :py:data:`PIL.Image.NEAREST` (use nearest neighbour), + :py:data:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:data:`PIL.Image.BICUBIC` (cubic spline + interpolation in a 4x4 environment). If omitted, or if the image + has mode "1" or "P", it is set to :py:data:`PIL.Image.NEAREST`. + See: :ref:`concept-filters`. + :param fill: If ``method`` is an + :py:class:`~PIL.Image.ImageTransformHandler` object, this is one of + the arguments passed to it. Otherwise, it is unused. + :param fillcolor: Optional fill color for the area outside the + transform in the output image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if self.mode == "LA": + return ( + self.convert("La") + .transform(size, method, data, resample, fill, fillcolor) + .convert("LA") + ) + + if self.mode == "RGBA": + return ( + self.convert("RGBa") + .transform(size, method, data, resample, fill, fillcolor) + .convert("RGBA") + ) + + if isinstance(method, ImageTransformHandler): + return method.transform(size, self, resample=resample, fill=fill) + + if hasattr(method, "getdata"): + # compatibility w. old-style transform objects + method, data = method.getdata() + + if data is None: + raise ValueError("missing method data") + + im = new(self.mode, size, fillcolor) + im.info = self.info.copy() + if method == MESH: + # list of quads + for box, quad in data: + im.__transformer(box, self, QUAD, quad, resample, fillcolor is None) + else: + im.__transformer( + (0, 0) + size, self, method, data, resample, fillcolor is None + ) + + return im + + def __transformer(self, box, image, method, data, resample=NEAREST, fill=1): + w = box[2] - box[0] + h = box[3] - box[1] + + if method == AFFINE: + data = data[0:6] + + elif method == EXTENT: + # convert extent to an affine transform + x0, y0, x1, y1 = data + xs = (x1 - x0) / w + ys = (y1 - y0) / h + method = AFFINE + data = (xs, 0, x0, 0, ys, y0) + + elif method == PERSPECTIVE: + data = data[0:8] + + elif method == QUAD: + # quadrilateral warp. data specifies the four corners + # given as NW, SW, SE, and NE. + nw = data[0:2] + sw = data[2:4] + se = data[4:6] + ne = data[6:8] + x0, y0 = nw + As = 1.0 / w + At = 1.0 / h + data = ( + x0, + (ne[0] - x0) * As, + (sw[0] - x0) * At, + (se[0] - sw[0] - ne[0] + x0) * As * At, + y0, + (ne[1] - y0) * As, + (sw[1] - y0) * At, + (se[1] - sw[1] - ne[1] + y0) * As * At, + ) + + else: + raise ValueError("unknown transformation method") + + if resample not in (NEAREST, BILINEAR, BICUBIC): + if resample in (BOX, HAMMING, LANCZOS): + message = { + BOX: "Image.BOX", + HAMMING: "Image.HAMMING", + LANCZOS: "Image.LANCZOS/Image.ANTIALIAS", + }[resample] + f" ({resample}) cannot be used." + else: + message = f"Unknown resampling filter ({resample})." 
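# Illustrative usage sketch for Image.transform() above (not part of the vendored
# Pillow source). EXTENT crops/stretches a source rectangle; AFFINE maps each
# destination pixel back to a source coordinate via the 6-tuple (a, b, c, d, e, f).
from PIL import Image

im = Image.new("RGB", (200, 200), "white")
# Take the lower-right 100x100 quarter and scale it into a 50x50 output image.
quarter = im.transform((50, 50), Image.EXTENT, (100, 100, 200, 200))
# Identity scale with an inverse-mapping offset: output (x, y) samples
# source (x + 10, y + 20), so the content shifts up and to the left.
shifted = im.transform(im.size, Image.AFFINE, (1, 0, 10, 0, 1, 20), resample=Image.BILINEAR)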
+ + filters = [ + "{} ({})".format(filter[1], filter[0]) + for filter in ( + (NEAREST, "Image.NEAREST"), + (BILINEAR, "Image.BILINEAR"), + (BICUBIC, "Image.BICUBIC"), + ) + ] + raise ValueError( + message + " Use " + ", ".join(filters[:-1]) + " or " + filters[-1] + ) + + image.load() + + self.load() + + if image.mode in ("1", "P"): + resample = NEAREST + + self.im.transform2(box, image.im, method, data, resample, fill) + + def transpose(self, method): + """ + Transpose image (flip or rotate in 90 degree steps) + + :param method: One of :py:data:`PIL.Image.FLIP_LEFT_RIGHT`, + :py:data:`PIL.Image.FLIP_TOP_BOTTOM`, :py:data:`PIL.Image.ROTATE_90`, + :py:data:`PIL.Image.ROTATE_180`, :py:data:`PIL.Image.ROTATE_270`, + :py:data:`PIL.Image.TRANSPOSE` or :py:data:`PIL.Image.TRANSVERSE`. + :returns: Returns a flipped or rotated copy of this image. + """ + + self.load() + return self._new(self.im.transpose(method)) + + def effect_spread(self, distance): + """ + Randomly spread pixels in an image. + + :param distance: Distance to spread pixels. + """ + self.load() + return self._new(self.im.effect_spread(distance)) + + def toqimage(self): + """Returns a QImage copy of this image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqimage(self) + + def toqpixmap(self): + """Returns a QPixmap copy of this image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqpixmap(self) + + +# -------------------------------------------------------------------- +# Abstract handlers. + + +class ImagePointHandler: + """ + Used as a mixin by point transforms + (for use with :py:meth:`~PIL.Image.Image.point`) + """ + + pass + + +class ImageTransformHandler: + """ + Used as a mixin by geometry transforms + (for use with :py:meth:`~PIL.Image.Image.transform`) + """ + + pass + + +# -------------------------------------------------------------------- +# Factories + +# +# Debugging + + +def _wedge(): + """Create greyscale wedge (for debugging only)""" + + return Image()._new(core.wedge("L")) + + +def _check_size(size): + """ + Common check to enforce type and sanity check on size tuples + + :param size: Should be a 2 tuple of (width, height) + :returns: True, or raises a ValueError + """ + + if not isinstance(size, (list, tuple)): + raise ValueError("Size must be a tuple") + if len(size) != 2: + raise ValueError("Size must be a tuple of length 2") + if size[0] < 0 or size[1] < 0: + raise ValueError("Width and height must be >= 0") + + return True + + +def new(mode, size, color=0): + """ + Creates a new image with the given mode and size. + + :param mode: The mode to use for the new image. See: + :ref:`concept-modes`. + :param size: A 2-tuple, containing (width, height) in pixels. + :param color: What color to use for the image. Default is black. + If given, this should be a single integer or floating point value + for single-band modes, and a tuple for multi-band modes (one value + per band). When creating RGB images, you can also use color + strings as supported by the ImageColor module. If the color is + None, the image is not initialised. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + _check_size(size) + + if color is None: + # don't initialize + return Image()._new(core.new(mode, size)) + + if isinstance(color, str): + # css3-style specifier + + from . 
import ImageColor + + color = ImageColor.getcolor(color, mode) + + im = Image() + if mode == "P" and isinstance(color, (list, tuple)) and len(color) in [3, 4]: + # RGB or RGBA value for a P image + from . import ImagePalette + + im.palette = ImagePalette.ImagePalette() + color = im.palette.getcolor(color) + return im._new(core.fill(mode, size, color)) + + +def frombytes(mode, size, data, decoder_name="raw", *args): + """ + Creates a copy of an image memory from pixel data in a buffer. + + In its simplest form, this function takes three arguments + (mode, size, and unpacked pixel data). + + You can also use any pixel decoder supported by PIL. For more + information on available decoders, see the section + :ref:`Writing Your Own File Decoder `. + + Note that this function decodes pixel data only, not entire images. + If you have an entire image in a string, wrap it in a + :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load + it. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A byte buffer containing raw data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + _check_size(size) + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw" and args == (): + args = mode + + im = new(mode, size) + im.frombytes(data, decoder_name, args) + return im + + +def frombuffer(mode, size, data, decoder_name="raw", *args): + """ + Creates an image memory referencing pixel data in a byte buffer. + + This function is similar to :py:func:`~PIL.Image.frombytes`, but uses data + in the byte buffer, where possible. This means that changes to the + original buffer object are reflected in this image). Not all modes can + share memory; supported modes include "L", "RGBX", "RGBA", and "CMYK". + + Note that this function decodes pixel data only, not entire images. + If you have an entire image file in a string, wrap it in a + :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load it. + + In the current version, the default parameters used for the "raw" decoder + differs from that used for :py:func:`~PIL.Image.frombytes`. This is a + bug, and will probably be fixed in a future release. The current release + issues a warning if you do this; to disable the warning, you should provide + the full set of parameters. See below for details. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A bytes or other buffer object containing raw + data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. For the + default encoder ("raw"), it's recommended that you provide the + full set of parameters:: + + frombuffer(mode, size, data, "raw", mode, 0, 1) + + :returns: An :py:class:`~PIL.Image.Image` object. + + .. 
versionadded:: 1.1.4 + """ + + _check_size(size) + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw": + if args == (): + args = mode, 0, 1 + if args[0] in _MAPMODES: + im = new(mode, (1, 1)) + im = im._new(core.map_buffer(data, size, decoder_name, 0, args)) + im.readonly = 1 + return im + + return frombytes(mode, size, data, decoder_name, args) + + +def fromarray(obj, mode=None): + """ + Creates an image memory from an object exporting the array interface + (using the buffer protocol). + + If ``obj`` is not contiguous, then the ``tobytes`` method is called + and :py:func:`~PIL.Image.frombuffer` is used. + + If you have an image in NumPy:: + + from PIL import Image + import numpy as np + im = Image.open('hopper.jpg') + a = np.asarray(im) + + Then this can be used to convert it to a Pillow image:: + + im = Image.fromarray(a) + + :param obj: Object with array interface + :param mode: Mode to use (will be determined from type if None) + See: :ref:`concept-modes`. + :returns: An image object. + + .. versionadded:: 1.1.6 + """ + arr = obj.__array_interface__ + shape = arr["shape"] + ndim = len(shape) + strides = arr.get("strides", None) + if mode is None: + try: + typekey = (1, 1) + shape[2:], arr["typestr"] + except KeyError as e: + raise TypeError("Cannot handle this data type") from e + try: + mode, rawmode = _fromarray_typemap[typekey] + except KeyError as e: + raise TypeError("Cannot handle this data type: %s, %s" % typekey) from e + else: + rawmode = mode + if mode in ["1", "L", "I", "P", "F"]: + ndmax = 2 + elif mode == "RGB": + ndmax = 3 + else: + ndmax = 4 + if ndim > ndmax: + raise ValueError(f"Too many dimensions: {ndim} > {ndmax}.") + + size = 1 if ndim == 1 else shape[1], shape[0] + if strides is not None: + if hasattr(obj, "tobytes"): + obj = obj.tobytes() + else: + obj = obj.tostring() + + return frombuffer(mode, size, obj, "raw", rawmode, 0, 1) + + +def fromqimage(im): + """Creates an image instance from a QImage image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqimage(im) + + +def fromqpixmap(im): + """Creates an image instance from a QPixmap image""" + from . import ImageQt + + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqpixmap(im) + + +_fromarray_typemap = { + # (shape, typestr) => mode, rawmode + # first two members of shape are set to one + ((1, 1), "|b1"): ("1", "1;8"), + ((1, 1), "|u1"): ("L", "L"), + ((1, 1), "|i1"): ("I", "I;8"), + ((1, 1), "<u2"): ("I", "I;16"), + ((1, 1), ">u2"): ("I", "I;16B"), + ((1, 1), "<i2"): ("I", "I;16S"), + ((1, 1), ">i2"): ("I", "I;16BS"), + ((1, 1), "<u4"): ("I", "I;32"), + ((1, 1), ">u4"): ("I", "I;32B"), + ((1, 1), "<i4"): ("I", "I;32S"), + ((1, 1), ">i4"): ("I", "I;32BS"), + ((1, 1), "<f4"): ("F", "F;32F"), + ((1, 1), ">f4"): ("F", "F;32BF"), + ((1, 1), "<f8"): ("F", "F;64F"), + ((1, 1), ">f8"): ("F", "F;64BF"), + ((1, 1, 2), "|u1"): ("LA", "LA"), + ((1, 1, 3), "|u1"): ("RGB", "RGB"), + ((1, 1, 4), "|u1"): ("RGBA", "RGBA"), +} + +# shortcuts +_fromarray_typemap[((1, 1), _ENDIAN + "i4")] = ("I", "I") +_fromarray_typemap[((1, 1), _ENDIAN + "f4")] = ("F", "F") + + +def _decompression_bomb_check(size): + if MAX_IMAGE_PIXELS is None: + return + + pixels = size[0] * size[1] + + if pixels > 2 * MAX_IMAGE_PIXELS: + raise DecompressionBombError( + f"Image size ({pixels} pixels) exceeds limit of {2 * MAX_IMAGE_PIXELS} " + "pixels, could be decompression bomb DOS attack."
+ ) + + if pixels > MAX_IMAGE_PIXELS: + warnings.warn( + f"Image size ({pixels} pixels) exceeds limit of {MAX_IMAGE_PIXELS} pixels, " + "could be decompression bomb DOS attack.", + DecompressionBombWarning, + ) + + +def open(fp, mode="r", formats=None): + """ + Opens and identifies the given image file. + + This is a lazy operation; this function identifies the file, but + the file remains open and the actual image data is not read from + the file until you try to process the data (or call the + :py:meth:`~PIL.Image.Image.load` method). See + :py:func:`~PIL.Image.new`. See :ref:`file-handling`. + + :param fp: A filename (string), pathlib.Path object or a file object. + The file object must implement ``file.read``, + ``file.seek``, and ``file.tell`` methods, + and be opened in binary mode. + :param mode: The mode. If given, this argument must be "r". + :param formats: A list or tuple of formats to attempt to load the file in. + This can be used to restrict the set of formats checked. + Pass ``None`` to try all supported formats. You can print the set of + available formats by running ``python -m PIL`` or using + the :py:func:`PIL.features.pilinfo` function. + :returns: An :py:class:`~PIL.Image.Image` object. + :exception FileNotFoundError: If the file cannot be found. + :exception PIL.UnidentifiedImageError: If the image cannot be opened and + identified. + :exception ValueError: If the ``mode`` is not "r", or if a ``StringIO`` + instance is used for ``fp``. + :exception TypeError: If ``formats`` is not ``None``, a list or a tuple. + """ + + if mode != "r": + raise ValueError(f"bad mode {repr(mode)}") + elif isinstance(fp, io.StringIO): + raise ValueError( + "StringIO cannot be used to open an image. " + "Binary data must be used instead." + ) + + if formats is None: + formats = ID + elif not isinstance(formats, (list, tuple)): + raise TypeError("formats must be a list or tuple") + + exclusive_fp = False + filename = "" + if isinstance(fp, Path): + filename = str(fp.resolve()) + elif isPath(fp): + filename = fp + + if filename: + fp = builtins.open(filename, "rb") + exclusive_fp = True + + try: + fp.seek(0) + except (AttributeError, io.UnsupportedOperation): + fp = io.BytesIO(fp.read()) + exclusive_fp = True + + prefix = fp.read(16) + + preinit() + + accept_warnings = [] + + def _open_core(fp, filename, prefix, formats): + for i in formats: + if i not in OPEN: + init() + try: + factory, accept = OPEN[i] + result = not accept or accept(prefix) + if type(result) in [str, bytes]: + accept_warnings.append(result) + elif result: + fp.seek(0) + im = factory(fp, filename) + _decompression_bomb_check(im.size) + return im + except (SyntaxError, IndexError, TypeError, struct.error): + # Leave disabled by default, spams the logs with image + # opening failures that are entirely expected. + # logger.debug("", exc_info=True) + continue + except BaseException: + if exclusive_fp: + fp.close() + raise + return None + + im = _open_core(fp, filename, prefix, formats) + + if im is None: + if init(): + im = _open_core(fp, filename, prefix, formats) + + if im: + im._exclusive_fp = exclusive_fp + return im + + if exclusive_fp: + fp.close() + for message in accept_warnings: + warnings.warn(message) + raise UnidentifiedImageError( + "cannot identify image file %r" % (filename if filename else fp) + ) + + +# +# Image processing. + + +def alpha_composite(im1, im2): + """ + Alpha composite im2 over im1. + + :param im1: The first image. Must have mode RGBA. + :param im2: The second image. 
Must have mode RGBA, and the same size as + the first image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.alpha_composite(im1.im, im2.im)) + + +def blend(im1, im2, alpha): + """ + Creates a new image by interpolating between two input images, using + a constant alpha.:: + + out = image1 * (1.0 - alpha) + image2 * alpha + + :param im1: The first image. + :param im2: The second image. Must have the same mode and size as + the first image. + :param alpha: The interpolation alpha factor. If alpha is 0.0, a + copy of the first image is returned. If alpha is 1.0, a copy of + the second image is returned. There are no restrictions on the + alpha value. If necessary, the result is clipped to fit into + the allowed output range. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.blend(im1.im, im2.im, alpha)) + + +def composite(image1, image2, mask): + """ + Create composite image by blending images using a transparency mask. + + :param image1: The first image. + :param image2: The second image. Must have the same mode and + size as the first image. + :param mask: A mask image. This image can have mode + "1", "L", or "RGBA", and must have the same size as the + other two images. + """ + + image = image2.copy() + image.paste(image1, None, mask) + return image + + +def eval(image, *args): + """ + Applies the function (which should take one argument) to each pixel + in the given image. If the image has more than one band, the same + function is applied to each band. Note that the function is + evaluated once for each possible pixel value, so you cannot use + random components or other generators. + + :param image: The input image. + :param function: A function object, taking one integer argument. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + return image.point(args[0]) + + +def merge(mode, bands): + """ + Merge a set of single band images into a new multiband image. + + :param mode: The mode to use for the output image. See: + :ref:`concept-modes`. + :param bands: A sequence containing one single-band image for + each band in the output image. All bands must have the + same size. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if getmodebands(mode) != len(bands) or "*" in mode: + raise ValueError("wrong number of bands") + for band in bands[1:]: + if band.mode != getmodetype(mode): + raise ValueError("mode mismatch") + if band.size != bands[0].size: + raise ValueError("size mismatch") + for band in bands: + band.load() + return bands[0]._new(core.merge(mode, *[b.im for b in bands])) + + +# -------------------------------------------------------------------- +# Plugin registry + + +def register_open(id, factory, accept=None): + """ + Register an image file plugin. This function should not be used + in application code. + + :param id: An image format identifier. + :param factory: An image file factory method. + :param accept: An optional function that can be used to quickly + reject images having another format. + """ + id = id.upper() + ID.append(id) + OPEN[id] = factory, accept + + +def register_mime(id, mimetype): + """ + Registers an image MIME type. This function should not be used + in application code. + + :param id: An image format identifier. + :param mimetype: The image MIME type for this format. + """ + MIME[id.upper()] = mimetype + + +def register_save(id, driver): + """ + Registers an image save function. 
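# Illustrative usage sketch for the module-level compositing helpers defined
# above (alpha_composite, blend, composite); not part of the vendored Pillow source.
from PIL import Image

base = Image.new("RGBA", (64, 64), (255, 0, 0, 255))
over = Image.new("RGBA", (64, 64), (0, 0, 255, 128))

out1 = Image.alpha_composite(base, over)                             # "over" on top of "base"
out2 = Image.blend(base.convert("RGB"), over.convert("RGB"), 0.25)   # 75% base, 25% over
mask = Image.new("L", (64, 64), 128)                                 # mid-grey mask
out3 = Image.composite(base, over, mask)                             # per-pixel mix via the mask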
This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE[id.upper()] = driver + + +def register_save_all(id, driver): + """ + Registers an image function to save all the frames + of a multiframe format. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE_ALL[id.upper()] = driver + + +def register_extension(id, extension): + """ + Registers an image extension. This function should not be + used in application code. + + :param id: An image format identifier. + :param extension: An extension used for this format. + """ + EXTENSION[extension.lower()] = id.upper() + + +def register_extensions(id, extensions): + """ + Registers image extensions. This function should not be + used in application code. + + :param id: An image format identifier. + :param extensions: A list of extensions used for this format. + """ + for extension in extensions: + register_extension(id, extension) + + +def registered_extensions(): + """ + Returns a dictionary containing all file extensions belonging + to registered plugins + """ + if not EXTENSION: + init() + return EXTENSION + + +def register_decoder(name, decoder): + """ + Registers an image decoder. This function should not be + used in application code. + + :param name: The name of the decoder + :param decoder: A callable(mode, args) that returns an + ImageFile.PyDecoder object + + .. versionadded:: 4.1.0 + """ + DECODERS[name] = decoder + + +def register_encoder(name, encoder): + """ + Registers an image encoder. This function should not be + used in application code. + + :param name: The name of the encoder + :param encoder: A callable(mode, args) that returns an + ImageFile.PyEncoder object + + .. versionadded:: 4.1.0 + """ + ENCODERS[name] = encoder + + +# -------------------------------------------------------------------- +# Simple display support. + + +def _show(image, **options): + options["_internal_pillow"] = True + _showxv(image, **options) + + +def _showxv(image, title=None, **options): + from . import ImageShow + + if "_internal_pillow" in options: + del options["_internal_pillow"] + else: + warnings.warn( + "_showxv is deprecated and will be removed in Pillow 9 (2022-01-02). " + "Use Image.show instead.", + DeprecationWarning, + ) + ImageShow.show(image, title, **options) + + +# -------------------------------------------------------------------- +# Effects + + +def effect_mandelbrot(size, extent, quality): + """ + Generate a Mandelbrot set covering the given extent. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param extent: The extent to cover, as a 4-tuple: + (x0, y0, x1, y2). + :param quality: Quality. + """ + return Image()._new(core.effect_mandelbrot(size, extent, quality)) + + +def effect_noise(size, sigma): + """ + Generate Gaussian noise centered around 128. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param sigma: Standard deviation of noise. + """ + return Image()._new(core.effect_noise(size, sigma)) + + +def linear_gradient(mode): + """ + Generate 256x256 linear gradient from black to white, top to bottom. + + :param mode: Input mode. + """ + return Image()._new(core.linear_gradient(mode)) + + +def radial_gradient(mode): + """ + Generate 256x256 radial gradient from black to white, centre to edge. + + :param mode: Input mode. 
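# Minimal sketch of how the registry functions above fit together when writing a
# file plugin. The "XYZ" format, its magic bytes, and its 12-byte header are
# hypothetical; this is not part of the vendored Pillow source, and a real plugin
# would live in its own module and be more defensive.
from PIL import Image, ImageFile

_MAGIC = b"XYZ1"


def _accept(prefix):
    return prefix[:4] == _MAGIC


class XyzImageFile(ImageFile.ImageFile):
    format = "XYZ"
    format_description = "hypothetical XYZ raster format"

    def _open(self):
        header = self.fp.read(12)
        if not _accept(header):
            raise SyntaxError("not an XYZ file")
        self._size = (
            int.from_bytes(header[4:8], "little"),
            int.from_bytes(header[8:12], "little"),
        )
        self.mode = "L"
        # raw greyscale pixels follow the 12-byte header
        self.tile = [("raw", (0, 0) + self.size, 12, ("L", 0, 1))]


Image.register_open(XyzImageFile.format, XyzImageFile, _accept)
Image.register_extension(XyzImageFile.format, ".xyz")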
+ """ + return Image()._new(core.radial_gradient(mode)) + + +# -------------------------------------------------------------------- +# Resources + + +def _apply_env_variables(env=None): + if env is None: + env = os.environ + + for var_name, setter in [ + ("PILLOW_ALIGNMENT", core.set_alignment), + ("PILLOW_BLOCK_SIZE", core.set_block_size), + ("PILLOW_BLOCKS_MAX", core.set_blocks_max), + ]: + if var_name not in env: + continue + + var = env[var_name].lower() + + units = 1 + for postfix, mul in [("k", 1024), ("m", 1024 * 1024)]: + if var.endswith(postfix): + units = mul + var = var[: -len(postfix)] + + try: + var = int(var) * units + except ValueError: + warnings.warn(f"{var_name} is not int") + continue + + try: + setter(var) + except ValueError as e: + warnings.warn(f"{var_name}: {e}") + + +_apply_env_variables() +atexit.register(core.clear_cache) + + +class Exif(MutableMapping): + endian = "<" + + def __init__(self): + self._data = {} + self._ifds = {} + self._info = None + self._loaded_exif = None + + def _fixup(self, value): + try: + if len(value) == 1 and isinstance(value, tuple): + return value[0] + except Exception: + pass + return value + + def _fixup_dict(self, src_dict): + # Helper function + # returns a dict with any single item tuples/lists as individual values + return {k: self._fixup(v) for k, v in src_dict.items()} + + def _get_ifd_dict(self, tag): + try: + # an offset pointer to the location of the nested embedded IFD. + # It should be a long, but may be corrupted. + self.fp.seek(self[tag]) + except (KeyError, TypeError): + pass + else: + from . import TiffImagePlugin + + info = TiffImagePlugin.ImageFileDirectory_v2(self.head) + info.load(self.fp) + return self._fixup_dict(info) + + def load(self, data): + # Extract EXIF information. This is highly experimental, + # and is likely to be replaced with something better in a future + # version. + + # The EXIF record consists of a TIFF file embedded in a JPEG + # application marker (!). + if data == self._loaded_exif: + return + self._loaded_exif = data + self._data.clear() + self._ifds.clear() + self._info = None + if not data: + return + + if data.startswith(b"Exif\x00\x00"): + data = data[6:] + self.fp = io.BytesIO(data) + self.head = self.fp.read(8) + # process dictionary + from . import TiffImagePlugin + + self._info = TiffImagePlugin.ImageFileDirectory_v2(self.head) + self.endian = self._info._endian + self.fp.seek(self._info.next) + self._info.load(self.fp) + + # get EXIF extension + ifd = self._get_ifd_dict(0x8769) + if ifd: + self._data.update(ifd) + self._ifds[0x8769] = ifd + + def tobytes(self, offset=8): + from . 
import TiffImagePlugin + + if self.endian == "<": + head = b"II\x2A\x00\x08\x00\x00\x00" + else: + head = b"MM\x00\x2A\x00\x00\x00\x08" + ifd = TiffImagePlugin.ImageFileDirectory_v2(ifh=head) + for tag, value in self.items(): + ifd[tag] = value + return b"Exif\x00\x00" + head + ifd.tobytes(offset) + + def get_ifd(self, tag): + if tag not in self._ifds and tag in self: + if tag in [0x8825, 0xA005]: + # gpsinfo, interop + self._ifds[tag] = self._get_ifd_dict(tag) + elif tag == 0x927C: # makernote + from .TiffImagePlugin import ImageFileDirectory_v2 + + if self[0x927C][:8] == b"FUJIFILM": + exif_data = self[0x927C] + ifd_offset = i32le(exif_data, 8) + ifd_data = exif_data[ifd_offset:] + + makernote = {} + for i in range(0, struct.unpack(" 4: + (offset,) = struct.unpack("H", ifd_data[:2])[0]): + ifd_tag, typ, count, data = struct.unpack( + ">HHL4s", ifd_data[i * 12 + 2 : (i + 1) * 12 + 2] + ) + if ifd_tag == 0x1101: + # CameraInfo + (offset,) = struct.unpack(">L", data) + self.fp.seek(offset) + + camerainfo = {"ModelID": self.fp.read(4)} + + self.fp.read(4) + # Seconds since 2000 + camerainfo["TimeStamp"] = i32le(self.fp.read(12)) + + self.fp.read(4) + camerainfo["InternalSerialNumber"] = self.fp.read(4) + + self.fp.read(12) + parallax = self.fp.read(4) + handler = ImageFileDirectory_v2._load_dispatch[ + TiffTags.FLOAT + ][1] + camerainfo["Parallax"] = handler( + ImageFileDirectory_v2(), parallax, False + ) + + self.fp.read(4) + camerainfo["Category"] = self.fp.read(2) + + makernote = {0x1101: dict(self._fixup_dict(camerainfo))} + self._ifds[0x927C] = makernote + return self._ifds.get(tag, {}) + + def __str__(self): + if self._info is not None: + # Load all keys into self._data + for tag in self._info.keys(): + self[tag] + + return str(self._data) + + def __len__(self): + keys = set(self._data) + if self._info is not None: + keys.update(self._info) + return len(keys) + + def __getitem__(self, tag): + if self._info is not None and tag not in self._data and tag in self._info: + self._data[tag] = self._fixup(self._info[tag]) + if tag == 0x8825: + self._data[tag] = self.get_ifd(tag) + del self._info[tag] + return self._data[tag] + + def __contains__(self, tag): + return tag in self._data or (self._info is not None and tag in self._info) + + def __setitem__(self, tag, value): + if self._info is not None and tag in self._info: + del self._info[tag] + self._data[tag] = value + + def __delitem__(self, tag): + if self._info is not None and tag in self._info: + del self._info[tag] + else: + del self._data[tag] + + def __iter__(self): + keys = set(self._data) + if self._info is not None: + keys.update(self._info) + return iter(keys) diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageChops.py b/minor_project/lib/python3.6/site-packages/PIL/ImageChops.py new file mode 100644 index 0000000..61d3a29 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageChops.py @@ -0,0 +1,328 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard channel operations +# +# History: +# 1996-03-24 fl Created +# 1996-08-13 fl Added logical operations (for "1" images) +# 2000-10-12 fl Added offset method (from Image.py) +# +# Copyright (c) 1997-2000 by Secret Labs AB +# Copyright (c) 1996-2000 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image + + +def constant(image, value): + """Fill a channel with a given grey level. 
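# Illustrative usage sketch for the Exif mapping defined above (not part of the
# vendored Pillow source; "photo.jpg" is a placeholder and may carry no EXIF data).
from PIL import Image

with Image.open("photo.jpg") as im:
    exif = im.getexif()          # an Image.Exif instance
    print(exif.get(0x0110))      # 0x0110 = camera model tag, or None if absent
    gps = exif.get_ifd(0x8825)   # nested GPS IFD, parsed on demand
    raw = exif.tobytes()         # re-serialise, e.g. to pass back to a writer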
+ + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.new("L", image.size, value) + + +def duplicate(image): + """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return image.copy() + + +def invert(image): + """ + Invert an image (channel). + + .. code-block:: python + + out = MAX - image + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image.load() + return image._new(image.im.chop_invert()) + + +def lighter(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image containing + the lighter values. + + .. code-block:: python + + out = max(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_lighter(image2.im)) + + +def darker(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image containing + the darker values. + + .. code-block:: python + + out = min(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_darker(image2.im)) + + +def difference(image1, image2): + """ + Returns the absolute value of the pixel-by-pixel difference between the two + images. + + .. code-block:: python + + out = abs(image1 - image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_difference(image2.im)) + + +def multiply(image1, image2): + """ + Superimposes two images on top of each other. + + If you multiply an image with a solid black image, the result is black. If + you multiply with a solid white image, the image is unaffected. + + .. code-block:: python + + out = image1 * image2 / MAX + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_multiply(image2.im)) + + +def screen(image1, image2): + """ + Superimposes two inverted images on top of each other. + + .. code-block:: python + + out = MAX - ((MAX - image1) * (MAX - image2) / MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_screen(image2.im)) + + +def soft_light(image1, image2): + """ + Superimposes two images on top of each other using the Soft Light algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_soft_light(image2.im)) + + +def hard_light(image1, image2): + """ + Superimposes two images on top of each other using the Hard Light algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_hard_light(image2.im)) + + +def overlay(image1, image2): + """ + Superimposes two images on top of each other using the Overlay algorithm + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_overlay(image2.im)) + + +def add(image1, image2, scale=1.0, offset=0): + """ + Adds two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 + image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add(image2.im, scale, offset)) + + +def subtract(image1, image2, scale=1.0, offset=0): + """ + Subtracts two images, dividing the result by scale and adding the offset. 
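# Illustrative usage sketch for a few ImageChops helpers (not part of the
# vendored Pillow source; the input filenames are placeholders).
from PIL import Image, ImageChops

a = Image.open("frame_a.png").convert("RGB")
b = Image.open("frame_b.png").convert("RGB")

diff = ImageChops.difference(a, b)      # abs(a - b), channel by channel
if diff.getbbox() is None:
    print("the two frames are pixel-identical")

darkest = ImageChops.darker(a, b)       # per-pixel minimum
wrapped = ImageChops.offset(a, 16, 8)   # shift with wrap-around at the edges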
+ If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 - image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract(image2.im, scale, offset)) + + +def add_modulo(image1, image2): + """Add two images, without clipping the result. + + .. code-block:: python + + out = ((image1 + image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add_modulo(image2.im)) + + +def subtract_modulo(image1, image2): + """Subtract two images, without clipping the result. + + .. code-block:: python + + out = ((image1 - image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract_modulo(image2.im)) + + +def logical_and(image1, image2): + """Logical AND between two images. + + Both of the images must have mode "1". If you would like to perform a + logical AND on an image with a mode other than "1", try + :py:meth:`~PIL.ImageChops.multiply` instead, using a black-and-white mask + as the second image. + + .. code-block:: python + + out = ((image1 and image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_and(image2.im)) + + +def logical_or(image1, image2): + """Logical OR between two images. + + Both of the images must have mode "1". + + .. code-block:: python + + out = ((image1 or image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_or(image2.im)) + + +def logical_xor(image1, image2): + """Logical XOR between two images. + + Both of the images must have mode "1". + + .. code-block:: python + + out = ((bool(image1) != bool(image2)) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_xor(image2.im)) + + +def blend(image1, image2, alpha): + """Blend images using constant transparency weight. Alias for + :py:func:`PIL.Image.blend`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.blend(image1, image2, alpha) + + +def composite(image1, image2, mask): + """Create composite using transparency mask. Alias for + :py:func:`PIL.Image.composite`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.composite(image1, image2, mask) + + +def offset(image, xoffset, yoffset=None): + """Returns a copy of the image where data has been offset by the given + distances. Data wraps around the edges. If ``yoffset`` is omitted, it + is assumed to be equal to ``xoffset``. + + :param xoffset: The horizontal distance. + :param yoffset: The vertical distance. If omitted, both + distances are set to the same value. + :rtype: :py:class:`~PIL.Image.Image` + """ + + if yoffset is None: + yoffset = xoffset + image.load() + return image._new(image.im.offset(xoffset, yoffset)) diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageCms.py b/minor_project/lib/python3.6/site-packages/PIL/ImageCms.py new file mode 100644 index 0000000..8c4740d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageCms.py @@ -0,0 +1,999 @@ +# The Python Imaging Library. +# $Id$ + +# Optional color management support, based on Kevin Cazabon's PyCMS +# library. + +# History: + +# 2009-03-08 fl Added to PIL. 
+ +# Copyright (C) 2002-2003 Kevin Cazabon +# Copyright (c) 2009 by Fredrik Lundh +# Copyright (c) 2013 by Eric Soroos + +# See the README file for information on usage and redistribution. See +# below for the original description. + +import sys + +from PIL import Image + +try: + from PIL import _imagingcms +except ImportError as ex: + # Allow error import for doc purposes, but error out when accessing + # anything in core. + from ._util import deferred_error + + _imagingcms = deferred_error(ex) + +DESCRIPTION = """ +pyCMS + + a Python / PIL interface to the littleCMS ICC Color Management System + Copyright (C) 2002-2003 Kevin Cazabon + kevin@cazabon.com + http://www.cazabon.com + + pyCMS home page: http://www.cazabon.com/pyCMS + littleCMS home page: http://www.littlecms.com + (littleCMS is Copyright (C) 1998-2001 Marti Maria) + + Originally released under LGPL. Graciously donated to PIL in + March 2009, for distribution under the standard PIL license + + The pyCMS.py module provides a "clean" interface between Python/PIL and + pyCMSdll, taking care of some of the more complex handling of the direct + pyCMSdll functions, as well as error-checking and making sure that all + relevant data is kept together. + + While it is possible to call pyCMSdll functions directly, it's not highly + recommended. + + Version History: + + 1.0.0 pil Oct 2013 Port to LCMS 2. + + 0.1.0 pil mod March 10, 2009 + + Renamed display profile to proof profile. The proof + profile is the profile of the device that is being + simulated, not the profile of the device which is + actually used to display/print the final simulation + (that'd be the output profile) - also see LCMSAPI.txt + input colorspace -> using 'renderingIntent' -> proof + colorspace -> using 'proofRenderingIntent' -> output + colorspace + + Added LCMS FLAGS support. + Added FLAGS["SOFTPROOFING"] as default flag for + buildProofTransform (otherwise the proof profile/intent + would be ignored). + + 0.1.0 pil March 2009 - added to PIL, as PIL.ImageCms + + 0.0.2 alpha Jan 6, 2002 + + Added try/except statements around type() checks of + potential CObjects... Python won't let you use type() + on them, and raises a TypeError (stupid, if you ask + me!) + + Added buildProofTransformFromOpenProfiles() function. + Additional fixes in DLL, see DLL code for details. + + 0.0.1 alpha first public release, Dec. 26, 2002 + + Known to-do list with current version (of Python interface, not pyCMSdll): + + none + +""" + +VERSION = "1.0.0 pil" + +# --------------------------------------------------------------------. 
+ +core = _imagingcms + +# +# intent/direction values + +INTENT_PERCEPTUAL = 0 +INTENT_RELATIVE_COLORIMETRIC = 1 +INTENT_SATURATION = 2 +INTENT_ABSOLUTE_COLORIMETRIC = 3 + +DIRECTION_INPUT = 0 +DIRECTION_OUTPUT = 1 +DIRECTION_PROOF = 2 + +# +# flags + +FLAGS = { + "MATRIXINPUT": 1, + "MATRIXOUTPUT": 2, + "MATRIXONLY": (1 | 2), + "NOWHITEONWHITEFIXUP": 4, # Don't hot fix scum dot + # Don't create prelinearization tables on precalculated transforms + # (internal use): + "NOPRELINEARIZATION": 16, + "GUESSDEVICECLASS": 32, # Guess device class (for transform2devicelink) + "NOTCACHE": 64, # Inhibit 1-pixel cache + "NOTPRECALC": 256, + "NULLTRANSFORM": 512, # Don't transform anyway + "HIGHRESPRECALC": 1024, # Use more memory to give better accuracy + "LOWRESPRECALC": 2048, # Use less memory to minimize resources + "WHITEBLACKCOMPENSATION": 8192, + "BLACKPOINTCOMPENSATION": 8192, + "GAMUTCHECK": 4096, # Out of Gamut alarm + "SOFTPROOFING": 16384, # Do softproofing + "PRESERVEBLACK": 32768, # Black preservation + "NODEFAULTRESOURCEDEF": 16777216, # CRD special + "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16, # Gridpoints +} + +_MAX_FLAG = 0 +for flag in FLAGS.values(): + if isinstance(flag, int): + _MAX_FLAG = _MAX_FLAG | flag + + +# --------------------------------------------------------------------. +# Experimental PIL-level API +# --------------------------------------------------------------------. + +## +# Profile. + + +class ImageCmsProfile: + def __init__(self, profile): + """ + :param profile: Either a string representing a filename, + a file like object containing a profile or a + low-level profile object + + """ + + if isinstance(profile, str): + if sys.platform == "win32": + profile_bytes_path = profile.encode() + try: + profile_bytes_path.decode("ascii") + except UnicodeDecodeError: + with open(profile, "rb") as f: + self._set(core.profile_frombytes(f.read())) + return + self._set(core.profile_open(profile), profile) + elif hasattr(profile, "read"): + self._set(core.profile_frombytes(profile.read())) + elif isinstance(profile, _imagingcms.CmsProfile): + self._set(profile) + else: + raise TypeError("Invalid type for Profile") + + def _set(self, profile, filename=None): + self.profile = profile + self.filename = filename + if profile: + self.product_name = None # profile.product_name + self.product_info = None # profile.product_info + else: + self.product_name = None + self.product_info = None + + def tobytes(self): + """ + Returns the profile in a format suitable for embedding in + saved images. + + :returns: a bytes object containing the ICC profile. + """ + + return core.profile_tobytes(self.profile) + + +class ImageCmsTransform(Image.ImagePointHandler): + + """ + Transform. This can be used with the procedural API, or with the standard + :py:func:`~PIL.Image.Image.point` method. + + Will return the output profile in the ``output.info['icc_profile']``. 
+ """ + + def __init__( + self, + input, + output, + input_mode, + output_mode, + intent=INTENT_PERCEPTUAL, + proof=None, + proof_intent=INTENT_ABSOLUTE_COLORIMETRIC, + flags=0, + ): + if proof is None: + self.transform = core.buildTransform( + input.profile, output.profile, input_mode, output_mode, intent, flags + ) + else: + self.transform = core.buildProofTransform( + input.profile, + output.profile, + proof.profile, + input_mode, + output_mode, + intent, + proof_intent, + flags, + ) + # Note: inputMode and outputMode are for pyCMS compatibility only + self.input_mode = self.inputMode = input_mode + self.output_mode = self.outputMode = output_mode + + self.output_profile = output + + def point(self, im): + return self.apply(im) + + def apply(self, im, imOut=None): + im.load() + if imOut is None: + imOut = Image.new(self.output_mode, im.size, None) + self.transform.apply(im.im.id, imOut.im.id) + imOut.info["icc_profile"] = self.output_profile.tobytes() + return imOut + + def apply_in_place(self, im): + im.load() + if im.mode != self.output_mode: + raise ValueError("mode mismatch") # wrong output mode + self.transform.apply(im.im.id, im.im.id) + im.info["icc_profile"] = self.output_profile.tobytes() + return im + + +def get_display_profile(handle=None): + """ + (experimental) Fetches the profile for the current display device. + + :returns: ``None`` if the profile is not known. + """ + + if sys.platform != "win32": + return None + + from PIL import ImageWin + + if isinstance(handle, ImageWin.HDC): + profile = core.get_display_profile_win32(handle, 1) + else: + profile = core.get_display_profile_win32(handle or 0) + if profile is None: + return None + return ImageCmsProfile(profile) + + +# --------------------------------------------------------------------. +# pyCMS compatible layer +# --------------------------------------------------------------------. + + +class PyCMSError(Exception): + + """(pyCMS) Exception class. + This is used for all errors in the pyCMS API.""" + + pass + + +def profileToProfile( + im, + inputProfile, + outputProfile, + renderingIntent=INTENT_PERCEPTUAL, + outputMode=None, + inPlace=False, + flags=0, +): + """ + (pyCMS) Applies an ICC transformation to a given image, mapping from + ``inputProfile`` to ``outputProfile``. + + If the input or output profiles specified are not valid filenames, a + :exc:`PyCMSError` will be raised. If ``inPlace`` is ``True`` and + ``outputMode != im.mode``, a :exc:`PyCMSError` will be raised. + If an error occurs during application of the profiles, + a :exc:`PyCMSError` will be raised. + If ``outputMode`` is not a mode supported by the ``outputProfile`` (or by pyCMS), + a :exc:`PyCMSError` will be raised. + + This function applies an ICC transformation to im from ``inputProfile``'s + color space to ``outputProfile``'s color space using the specified rendering + intent to decide how to handle out-of-gamut colors. + + ``outputMode`` can be used to specify that a color mode conversion is to + be done using these profiles, but the specified profiles must be able + to handle that mode. I.e., if converting im from RGB to CMYK using + profiles, the input profile must handle RGB data, and the output + profile must handle CMYK data. + + :param im: An open :py:class:`~PIL.Image.Image` object (i.e. Image.new(...) + or Image.open(...), etc.) 
+ :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this image, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this image, or a profile object + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT) + ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1 + ImageCms.INTENT_SATURATION = 2 + ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param outputMode: A valid PIL mode for the output image (i.e. "RGB", + "CMYK", etc.). Note: if rendering the image "inPlace", outputMode + MUST be the same mode as the input, or omitted completely. If + omitted, the outputMode will be the same as the mode of the input + image (im.mode) + :param inPlace: Boolean. If ``True``, the original image is modified in-place, + and ``None`` is returned. If ``False`` (default), a new + :py:class:`~PIL.Image.Image` object is returned with the transform applied. + :param flags: Integer (0-...) specifying additional flags + :returns: Either None or a new :py:class:`~PIL.Image.Image` object, depending on + the value of ``inPlace`` + :exception PyCMSError: + """ + + if outputMode is None: + outputMode = im.mode + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError("flags must be an integer between 0 and %s" + _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + transform = ImageCmsTransform( + inputProfile, + outputProfile, + im.mode, + outputMode, + renderingIntent, + flags=flags, + ) + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + return imOut + + +def getOpenProfile(profileFilename): + """ + (pyCMS) Opens an ICC profile file. + + The PyCMSProfile object can be passed back into pyCMS for use in creating + transforms and such (as in ImageCms.buildTransformFromOpenProfiles()). + + If ``profileFilename`` is not a valid filename for an ICC profile, + a :exc:`PyCMSError` will be raised. + + :param profileFilename: String, as a valid filename path to the ICC profile + you wish to open, or a file-like object. + :returns: A CmsProfile class object. + :exception PyCMSError: + """ + + try: + return ImageCmsProfile(profileFilename) + except (OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def buildTransform( + inputProfile, + outputProfile, + inMode, + outMode, + renderingIntent=INTENT_PERCEPTUAL, + flags=0, +): + """ + (pyCMS) Builds an ICC transform mapping from the ``inputProfile`` to the + ``outputProfile``. Use applyTransform to apply the transform to a given + image. + + If the input or output profiles specified are not valid filenames, a + :exc:`PyCMSError` will be raised. If an error occurs during creation + of the transform, a :exc:`PyCMSError` will be raised. + + If ``inMode`` or ``outMode`` are not a mode supported by the ``outputProfile`` + (or by pyCMS), a :exc:`PyCMSError` will be raised. 
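# Illustrative usage sketch for the one-shot profileToProfile() conversion above
# (not part of the vendored Pillow source; the image and .icc paths are placeholders).
from PIL import Image, ImageCms

im = Image.open("scan.jpg")                         # assumed to be an RGB input
src = ImageCms.getOpenProfile("scanner_rgb.icc")
dst = ImageCms.getOpenProfile("printer_cmyk.icc")
converted = ImageCms.profileToProfile(
    im, src, dst, renderingIntent=ImageCms.INTENT_PERCEPTUAL, outputMode="CMYK"
)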
+ + This function builds and returns an ICC transform from the ``inputProfile`` + to the ``outputProfile`` using the ``renderingIntent`` to determine what to do + with out-of-gamut colors. It will ONLY work for converting images that + are in ``inMode`` to images that are in ``outMode`` color format (PIL mode, + i.e. "RGB", "RGBA", "CMYK", etc.). + + Building the transform is a fair part of the overhead in + ImageCms.profileToProfile(), so if you're planning on converting multiple + images using the same input/output settings, this can save you time. + Once you have a transform object, it can be used with + ImageCms.applyProfile() to convert images without the need to re-compute + the lookup table for the transform. + + The reason pyCMS returns a class object rather than a handle directly + to the transform is that it needs to keep track of the PIL input/output + modes that the transform is meant for. These attributes are stored in + the ``inMode`` and ``outMode`` attributes of the object (which can be + manually overridden if you really want to, but I don't know of any + time that would be of use, or would even work). + + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT) + ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1 + ImageCms.INTENT_SATURATION = 2 + ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object. + :exception PyCMSError: + """ + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError("flags must be an integer between 0 and %s" + _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + return ImageCmsTransform( + inputProfile, outputProfile, inMode, outMode, renderingIntent, flags=flags + ) + except (OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def buildProofTransform( + inputProfile, + outputProfile, + proofProfile, + inMode, + outMode, + renderingIntent=INTENT_PERCEPTUAL, + proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC, + flags=FLAGS["SOFTPROOFING"], +): + """ + (pyCMS) Builds an ICC transform mapping from the ``inputProfile`` to the + ``outputProfile``, but tries to simulate the result that would be + obtained on the ``proofProfile`` device. + + If the input, output, or proof profiles specified are not valid + filenames, a :exc:`PyCMSError` will be raised. + + If an error occurs during creation of the transform, + a :exc:`PyCMSError` will be raised. 
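# Illustrative sketch of reusing one transform for many images, as the
# buildTransform() docstring above recommends (not part of the vendored Pillow
# source; file paths are placeholders; applyTransform is defined further below).
from PIL import Image, ImageCms

srgb = ImageCms.createProfile("sRGB")
cmyk = ImageCms.getOpenProfile("printer_cmyk.icc")
to_cmyk = ImageCms.buildTransform(srgb, cmyk, "RGB", "CMYK")

for name in ("a.jpg", "b.jpg", "c.jpg"):
    with Image.open(name) as im:
        out = ImageCms.applyTransform(im.convert("RGB"), to_cmyk)
        out.save(name + ".tif")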
+ + If ``inMode`` or ``outMode`` are not a mode supported by the ``outputProfile`` + (or by pyCMS), a :exc:`PyCMSError` will be raised. + + This function builds and returns an ICC transform from the ``inputProfile`` + to the ``outputProfile``, but tries to simulate the result that would be + obtained on the ``proofProfile`` device using ``renderingIntent`` and + ``proofRenderingIntent`` to determine what to do with out-of-gamut + colors. This is known as "soft-proofing". It will ONLY work for + converting images that are in ``inMode`` to images that are in outMode + color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.). + + Usage of the resulting transform object is exactly the same as with + ImageCms.buildTransform(). + + Proof profiling is generally used when using an output device to get a + good idea of what the final printed/displayed image would look like on + the ``proofProfile`` device when it's quicker and easier to use the + output device for judging color. Generally, this means that the + output device is a monitor, or a dye-sub printer (etc.), and the simulated + device is something more expensive, complicated, or time consuming + (making it difficult to make a real print for color judgement purposes). + + Soft-proofing basically functions by adjusting the colors on the + output device to match the colors of the device being simulated. However, + when the simulated device has a much wider gamut than the output + device, you may obtain marginal results. + + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + (monitor, usually) profile you wish to use for this transform, or a + profile object + :param proofProfile: String, as a valid filename path to the ICC proof + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the input->proof (simulated) transform + + ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT) + ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1 + ImageCms.INTENT_SATURATION = 2 + ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param proofRenderingIntent: Integer (0-3) specifying the rendering intent + you wish to use for proof->output transform + + ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT) + ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1 + ImageCms.INTENT_SATURATION = 2 + ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object. 
+ :exception PyCMSError: + """ + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError("flags must be an integer between 0 and %s" + _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + if not isinstance(proofProfile, ImageCmsProfile): + proofProfile = ImageCmsProfile(proofProfile) + return ImageCmsTransform( + inputProfile, + outputProfile, + inMode, + outMode, + renderingIntent, + proofProfile, + proofRenderingIntent, + flags, + ) + except (OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +buildTransformFromOpenProfiles = buildTransform +buildProofTransformFromOpenProfiles = buildProofTransform + + +def applyTransform(im, transform, inPlace=False): + """ + (pyCMS) Applies a transform to a given image. + + If ``im.mode != transform.inMode``, a :exc:`PyCMSError` is raised. + + If ``inPlace`` is ``True`` and ``transform.inMode != transform.outMode``, a + :exc:`PyCMSError` is raised. + + If ``im.mode``, ``transform.inMode`` or ``transform.outMode`` is not + supported by pyCMSdll or the profiles you used for the transform, a + :exc:`PyCMSError` is raised. + + If an error occurs while the transform is being applied, + a :exc:`PyCMSError` is raised. + + This function applies a pre-calculated transform (from + ImageCms.buildTransform() or ImageCms.buildTransformFromOpenProfiles()) + to an image. The transform can be used for multiple images, saving + considerable calculation time if doing the same conversion multiple times. + + If you want to modify im in-place instead of receiving a new image as + the return value, set ``inPlace`` to ``True``. This can only be done if + ``transform.inMode`` and ``transform.outMode`` are the same, because we can't + change the mode in-place (the buffer sizes for some modes are + different). The default behavior is to return a new :py:class:`~PIL.Image.Image` + object of the same dimensions in mode ``transform.outMode``. + + :param im: An :py:class:`~PIL.Image.Image` object, and im.mode must be the same + as the ``inMode`` supported by the transform. + :param transform: A valid CmsTransform class object + :param inPlace: Bool. If ``True``, ``im`` is modified in place and ``None`` is + returned, if ``False``, a new :py:class:`~PIL.Image.Image` object with the + transform applied is returned (and ``im`` is not changed). The default is + ``False``. + :returns: Either ``None``, or a new :py:class:`~PIL.Image.Image` object, + depending on the value of ``inPlace``. The profile will be returned in + the image's ``info['icc_profile']``. + :exception PyCMSError: + """ + + try: + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (TypeError, ValueError) as v: + raise PyCMSError(v) from v + + return imOut + + +def createProfile(colorSpace, colorTemp=-1): + """ + (pyCMS) Creates a profile. + + If colorSpace not in ``["LAB", "XYZ", "sRGB"]``, + a :exc:`PyCMSError` is raised. + + If using LAB and ``colorTemp`` is not a positive integer, + a :exc:`PyCMSError` is raised. + + If an error occurs while creating the profile, + a :exc:`PyCMSError` is raised. 
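+
+    A minimal sketch of combining this with the transform helpers
+    (illustrative only; the input filename is hypothetical)::
+
+        from PIL import Image, ImageCms
+
+        srgb = ImageCms.createProfile("sRGB")
+        lab = ImageCms.createProfile("LAB", colorTemp=6500)
+        rgb2lab = ImageCms.buildTransformFromOpenProfiles(srgb, lab, "RGB", "LAB")
+        im = Image.open("photo.jpg")
+        lab_im = ImageCms.applyTransform(im, rgb2lab)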
+ + Use this function to create common profiles on-the-fly instead of + having to supply a profile on disk and knowing the path to it. It + returns a normal CmsProfile object that can be passed to + ImageCms.buildTransformFromOpenProfiles() to create a transform to apply + to images. + + :param colorSpace: String, the color space of the profile you wish to + create. + Currently only "LAB", "XYZ", and "sRGB" are supported. + :param colorTemp: Positive integer for the white point for the profile, in + degrees Kelvin (i.e. 5000, 6500, 9600, etc.). The default is for D50 + illuminant if omitted (5000k). colorTemp is ONLY applied to LAB + profiles, and is ignored for XYZ and sRGB. + :returns: A CmsProfile class object + :exception PyCMSError: + """ + + if colorSpace not in ["LAB", "XYZ", "sRGB"]: + raise PyCMSError( + f"Color space not supported for on-the-fly profile creation ({colorSpace})" + ) + + if colorSpace == "LAB": + try: + colorTemp = float(colorTemp) + except (TypeError, ValueError) as e: + raise PyCMSError( + f'Color temperature must be numeric, "{colorTemp}" not valid' + ) from e + + try: + return core.createProfile(colorSpace, colorTemp) + except (TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileName(profile): + """ + + (pyCMS) Gets the internal product name for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, + a :exc:`PyCMSError` is raised If an error occurs while trying + to obtain the name tag, a :exc:`PyCMSError` is raised. + + Use this function to obtain the INTERNAL name of the profile (stored + in an ICC tag in the profile itself), usually the one used when the + profile was originally created. Sometimes this tag also contains + additional information supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal name of the profile as stored + in an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # do it in python, not c. + # // name was "%s - %s" (model, manufacturer) || Description , + # // but if the Model and Manufacturer were the same or the model + # // was long, Just the model, in 1.x + model = profile.profile.model + manufacturer = profile.profile.manufacturer + + if not (model or manufacturer): + return (profile.profile.profile_description or "") + "\n" + if not manufacturer or len(model) > 30: + return model + "\n" + return f"{model} - {manufacturer}\n" + + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileInfo(profile): + """ + (pyCMS) Gets the internal product information for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, + a :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the info tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + info tag. This often contains details about the profile, and how it + was created, as supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. 
+ :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # add an extra newline to preserve pyCMS compatibility + # Python, not C. the white point bits weren't working well, + # so skipping. + # info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint + description = profile.profile.profile_description + cpright = profile.profile.copyright + arr = [] + for elt in (description, cpright): + if elt: + arr.append(elt) + return "\r\n\r\n".join(arr) + "\r\n\r\n" + + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileCopyright(profile): + """ + (pyCMS) Gets the copyright for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the copyright tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + copyright tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.copyright or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileManufacturer(profile): + """ + (pyCMS) Gets the manufacturer for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the manufacturer tag, a + :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + manufacturer tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.manufacturer or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileModel(profile): + """ + (pyCMS) Gets the model for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the model tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + model tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.model or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getProfileDescription(profile): + """ + (pyCMS) Gets the description for the given profile. 
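+
+    For illustration only (the profile path below is hypothetical)::
+
+        from PIL import ImageCms
+
+        profile = ImageCms.getOpenProfile("sRGB.icc")
+        print(ImageCms.getProfileDescription(profile))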
+ + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the description tag, + a :exc:`PyCMSError` is raised. + + Use this function to obtain the information stored in the profile's + description tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in an + ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return (profile.profile.profile_description or "") + "\n" + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def getDefaultIntent(profile): + """ + (pyCMS) Gets the default intent name for the given profile. + + If ``profile`` isn't a valid CmsProfile object or filename to a profile, a + :exc:`PyCMSError` is raised. + + If an error occurs while trying to obtain the default intent, a + :exc:`PyCMSError` is raised. + + Use this function to determine the default (and usually best optimized) + rendering intent for this profile. Most profiles support multiple + rendering intents, but are intended mostly for one type of conversion. + If you wish to use a different intent than returned, use + ImageCms.isIntentSupported() to verify it will work first. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: Integer 0-3 specifying the default rendering intent for this + profile. + + ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT) + ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1 + ImageCms.INTENT_SATURATION = 2 + ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.rendering_intent + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def isIntentSupported(profile, intent, direction): + """ + (pyCMS) Checks if a given intent is supported. + + Use this function to verify that you can use your desired + ``intent`` with ``profile``, and that ``profile`` can be used for the + input/output/proof profile as you desire. + + Some profiles are created specifically for one "direction", can cannot + be used for others. Some profiles can only be used for certain + rendering intents, so it's best to either verify this before trying + to create a transform with them (using this function), or catch the + potential :exc:`PyCMSError` that will occur if they don't + support the modes you select. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :param intent: Integer (0-3) specifying the rendering intent you wish to + use with this profile + + ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT) + ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1 + ImageCms.INTENT_SATURATION = 2 + ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. 
+ :param direction: Integer specifying if the profile is to be used for + input, output, or proof + + INPUT = 0 (or use ImageCms.DIRECTION_INPUT) + OUTPUT = 1 (or use ImageCms.DIRECTION_OUTPUT) + PROOF = 2 (or use ImageCms.DIRECTION_PROOF) + + :returns: 1 if the intent/direction are supported, -1 if they are not. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # FIXME: I get different results for the same data w. different + # compilers. Bug in LittleCMS or in the binding? + if profile.profile.is_intent_supported(intent, direction): + return 1 + else: + return -1 + except (AttributeError, OSError, TypeError, ValueError) as v: + raise PyCMSError(v) from v + + +def versions(): + """ + (pyCMS) Fetches versions. + """ + + return (VERSION, core.littlecms_version, sys.version.split()[0], Image.__version__) diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageColor.py b/minor_project/lib/python3.6/site-packages/PIL/ImageColor.py new file mode 100644 index 0000000..9091174 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageColor.py @@ -0,0 +1,300 @@ +# +# The Python Imaging Library +# $Id$ +# +# map CSS3-style colour description strings to RGB +# +# History: +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-15 fl Added RGBA support +# 2004-03-27 fl Fixed remaining int() problems for Python 1.5.2 +# 2004-07-19 fl Fixed gray/grey spelling issues +# 2009-03-05 fl Fixed rounding error in grayscale calculation +# +# Copyright (c) 2002-2004 by Secret Labs AB +# Copyright (c) 2002-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import re + +from . import Image + + +def getrgb(color): + """ + Convert a color string to an RGB tuple. If the string cannot be parsed, + this function raises a :py:exc:`ValueError` exception. + + .. 
versionadded:: 1.1.4 + + :param color: A color string + :return: ``(red, green, blue[, alpha])`` + """ + color = color.lower() + + rgb = colormap.get(color, None) + if rgb: + if isinstance(rgb, tuple): + return rgb + colormap[color] = rgb = getrgb(rgb) + return rgb + + # check for known string formats + if re.match("#[a-f0-9]{3}$", color): + return (int(color[1] * 2, 16), int(color[2] * 2, 16), int(color[3] * 2, 16)) + + if re.match("#[a-f0-9]{4}$", color): + return ( + int(color[1] * 2, 16), + int(color[2] * 2, 16), + int(color[3] * 2, 16), + int(color[4] * 2, 16), + ) + + if re.match("#[a-f0-9]{6}$", color): + return (int(color[1:3], 16), int(color[3:5], 16), int(color[5:7], 16)) + + if re.match("#[a-f0-9]{8}$", color): + return ( + int(color[1:3], 16), + int(color[3:5], 16), + int(color[5:7], 16), + int(color[7:9], 16), + ) + + m = re.match(r"rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + if m: + return (int(m.group(1)), int(m.group(2)), int(m.group(3))) + + m = re.match(r"rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) + if m: + return ( + int((int(m.group(1)) * 255) / 100.0 + 0.5), + int((int(m.group(2)) * 255) / 100.0 + 0.5), + int((int(m.group(3)) * 255) / 100.0 + 0.5), + ) + + m = re.match( + r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color + ) + if m: + from colorsys import hls_to_rgb + + rgb = hls_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(3)) / 100.0, + float(m.group(2)) / 100.0, + ) + return ( + int(rgb[0] * 255 + 0.5), + int(rgb[1] * 255 + 0.5), + int(rgb[2] * 255 + 0.5), + ) + + m = re.match( + r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color + ) + if m: + from colorsys import hsv_to_rgb + + rgb = hsv_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(2)) / 100.0, + float(m.group(3)) / 100.0, + ) + return ( + int(rgb[0] * 255 + 0.5), + int(rgb[1] * 255 + 0.5), + int(rgb[2] * 255 + 0.5), + ) + + m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + if m: + return (int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))) + raise ValueError(f"unknown color specifier: {repr(color)}") + + +def getcolor(color, mode): + """ + Same as :py:func:`~PIL.ImageColor.getrgb`, but converts the RGB value to a + greyscale value if the mode is not color or a palette image. If the string + cannot be parsed, this function raises a :py:exc:`ValueError` exception. + + .. versionadded:: 1.1.4 + + :param color: A color string + :return: ``(graylevel [, alpha]) or (red, green, blue[, alpha])`` + """ + # same as getrgb, but converts the result to the given mode + color, alpha = getrgb(color), 255 + if len(color) == 4: + color, alpha = color[0:3], color[3] + + if Image.getmodebase(mode) == "L": + r, g, b = color + # ITU-R Recommendation 601-2 for nonlinear RGB + # scaled to 24 bits to match the convert's implementation. + color = (r * 19595 + g * 38470 + b * 7471 + 0x8000) >> 16 + if mode[-1] == "A": + return (color, alpha) + else: + if mode[-1] == "A": + return color + (alpha,) + return color + + +colormap = { + # X11 colour table from https://drafts.csswg.org/css-color-4/, with + # gray/grey spelling issues fixed. This is a superset of HTML 4.0 + # colour names used in CSS 1. 
+ "aliceblue": "#f0f8ff", + "antiquewhite": "#faebd7", + "aqua": "#00ffff", + "aquamarine": "#7fffd4", + "azure": "#f0ffff", + "beige": "#f5f5dc", + "bisque": "#ffe4c4", + "black": "#000000", + "blanchedalmond": "#ffebcd", + "blue": "#0000ff", + "blueviolet": "#8a2be2", + "brown": "#a52a2a", + "burlywood": "#deb887", + "cadetblue": "#5f9ea0", + "chartreuse": "#7fff00", + "chocolate": "#d2691e", + "coral": "#ff7f50", + "cornflowerblue": "#6495ed", + "cornsilk": "#fff8dc", + "crimson": "#dc143c", + "cyan": "#00ffff", + "darkblue": "#00008b", + "darkcyan": "#008b8b", + "darkgoldenrod": "#b8860b", + "darkgray": "#a9a9a9", + "darkgrey": "#a9a9a9", + "darkgreen": "#006400", + "darkkhaki": "#bdb76b", + "darkmagenta": "#8b008b", + "darkolivegreen": "#556b2f", + "darkorange": "#ff8c00", + "darkorchid": "#9932cc", + "darkred": "#8b0000", + "darksalmon": "#e9967a", + "darkseagreen": "#8fbc8f", + "darkslateblue": "#483d8b", + "darkslategray": "#2f4f4f", + "darkslategrey": "#2f4f4f", + "darkturquoise": "#00ced1", + "darkviolet": "#9400d3", + "deeppink": "#ff1493", + "deepskyblue": "#00bfff", + "dimgray": "#696969", + "dimgrey": "#696969", + "dodgerblue": "#1e90ff", + "firebrick": "#b22222", + "floralwhite": "#fffaf0", + "forestgreen": "#228b22", + "fuchsia": "#ff00ff", + "gainsboro": "#dcdcdc", + "ghostwhite": "#f8f8ff", + "gold": "#ffd700", + "goldenrod": "#daa520", + "gray": "#808080", + "grey": "#808080", + "green": "#008000", + "greenyellow": "#adff2f", + "honeydew": "#f0fff0", + "hotpink": "#ff69b4", + "indianred": "#cd5c5c", + "indigo": "#4b0082", + "ivory": "#fffff0", + "khaki": "#f0e68c", + "lavender": "#e6e6fa", + "lavenderblush": "#fff0f5", + "lawngreen": "#7cfc00", + "lemonchiffon": "#fffacd", + "lightblue": "#add8e6", + "lightcoral": "#f08080", + "lightcyan": "#e0ffff", + "lightgoldenrodyellow": "#fafad2", + "lightgreen": "#90ee90", + "lightgray": "#d3d3d3", + "lightgrey": "#d3d3d3", + "lightpink": "#ffb6c1", + "lightsalmon": "#ffa07a", + "lightseagreen": "#20b2aa", + "lightskyblue": "#87cefa", + "lightslategray": "#778899", + "lightslategrey": "#778899", + "lightsteelblue": "#b0c4de", + "lightyellow": "#ffffe0", + "lime": "#00ff00", + "limegreen": "#32cd32", + "linen": "#faf0e6", + "magenta": "#ff00ff", + "maroon": "#800000", + "mediumaquamarine": "#66cdaa", + "mediumblue": "#0000cd", + "mediumorchid": "#ba55d3", + "mediumpurple": "#9370db", + "mediumseagreen": "#3cb371", + "mediumslateblue": "#7b68ee", + "mediumspringgreen": "#00fa9a", + "mediumturquoise": "#48d1cc", + "mediumvioletred": "#c71585", + "midnightblue": "#191970", + "mintcream": "#f5fffa", + "mistyrose": "#ffe4e1", + "moccasin": "#ffe4b5", + "navajowhite": "#ffdead", + "navy": "#000080", + "oldlace": "#fdf5e6", + "olive": "#808000", + "olivedrab": "#6b8e23", + "orange": "#ffa500", + "orangered": "#ff4500", + "orchid": "#da70d6", + "palegoldenrod": "#eee8aa", + "palegreen": "#98fb98", + "paleturquoise": "#afeeee", + "palevioletred": "#db7093", + "papayawhip": "#ffefd5", + "peachpuff": "#ffdab9", + "peru": "#cd853f", + "pink": "#ffc0cb", + "plum": "#dda0dd", + "powderblue": "#b0e0e6", + "purple": "#800080", + "rebeccapurple": "#663399", + "red": "#ff0000", + "rosybrown": "#bc8f8f", + "royalblue": "#4169e1", + "saddlebrown": "#8b4513", + "salmon": "#fa8072", + "sandybrown": "#f4a460", + "seagreen": "#2e8b57", + "seashell": "#fff5ee", + "sienna": "#a0522d", + "silver": "#c0c0c0", + "skyblue": "#87ceeb", + "slateblue": "#6a5acd", + "slategray": "#708090", + "slategrey": "#708090", + "snow": "#fffafa", + "springgreen": "#00ff7f", + 
"steelblue": "#4682b4", + "tan": "#d2b48c", + "teal": "#008080", + "thistle": "#d8bfd8", + "tomato": "#ff6347", + "turquoise": "#40e0d0", + "violet": "#ee82ee", + "wheat": "#f5deb3", + "white": "#ffffff", + "whitesmoke": "#f5f5f5", + "yellow": "#ffff00", + "yellowgreen": "#9acd32", +} diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageDraw.py b/minor_project/lib/python3.6/site-packages/PIL/ImageDraw.py new file mode 100644 index 0000000..b823be9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageDraw.py @@ -0,0 +1,898 @@ +# +# The Python Imaging Library +# $Id$ +# +# drawing interface operations +# +# History: +# 1996-04-13 fl Created (experimental) +# 1996-08-07 fl Filled polygons, ellipses. +# 1996-08-13 fl Added text support +# 1998-06-28 fl Handle I and F images +# 1998-12-29 fl Added arc; use arc primitive to draw ellipses +# 1999-01-10 fl Added shape stuff (experimental) +# 1999-02-06 fl Added bitmap support +# 1999-02-11 fl Changed all primitives to take options +# 1999-02-20 fl Fixed backwards compatibility +# 2000-10-12 fl Copy on write, when necessary +# 2001-02-18 fl Use default ink for bitmap/text also in fill mode +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-10 fl Added experimental support for RGBA-on-RGB drawing +# 2002-12-11 fl Refactored low-level drawing API (work in progress) +# 2004-08-26 fl Made Draw() a factory function, added getdraw() support +# 2004-09-04 fl Added width support to line primitive +# 2004-09-10 fl Added font mode handling +# 2006-06-19 fl Added font bearing support (getmask2) +# +# Copyright (c) 1997-2006 by Secret Labs AB +# Copyright (c) 1996-2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import math +import numbers + +from . import Image, ImageColor + +""" +A simple 2D drawing interface for PIL images. +

+Application code should use the Draw factory, instead of +directly. +""" + + +class ImageDraw: + def __init__(self, im, mode=None): + """ + Create a drawing instance. + + :param im: The image to draw in. + :param mode: Optional mode to use for color values. For RGB + images, this argument can be RGB or RGBA (to blend the + drawing into the image). For all other modes, this argument + must be the same as the image mode. If omitted, the mode + defaults to the mode of the image. + """ + im.load() + if im.readonly: + im._copy() # make it writeable + blend = 0 + if mode is None: + mode = im.mode + if mode != im.mode: + if mode == "RGBA" and im.mode == "RGB": + blend = 1 + else: + raise ValueError("mode mismatch") + if mode == "P": + self.palette = im.palette + else: + self.palette = None + self.im = im.im + self.draw = Image.core.draw(self.im, blend) + self.mode = mode + if mode in ("I", "F"): + self.ink = self.draw.draw_ink(1) + else: + self.ink = self.draw.draw_ink(-1) + if mode in ("1", "P", "I", "F"): + # FIXME: fix Fill2 to properly support matte for I+F images + self.fontmode = "1" + else: + self.fontmode = "L" # aliasing is okay for other modes + self.fill = 0 + self.font = None + + def getfont(self): + """ + Get the current default font. + + :returns: An image font.""" + if not self.font: + # FIXME: should add a font repository + from . import ImageFont + + self.font = ImageFont.load_default() + return self.font + + def _getink(self, ink, fill=None): + if ink is None and fill is None: + if self.fill: + fill = self.ink + else: + ink = self.ink + else: + if ink is not None: + if isinstance(ink, str): + ink = ImageColor.getcolor(ink, self.mode) + if self.palette and not isinstance(ink, numbers.Number): + ink = self.palette.getcolor(ink) + ink = self.draw.draw_ink(ink) + if fill is not None: + if isinstance(fill, str): + fill = ImageColor.getcolor(fill, self.mode) + if self.palette and not isinstance(fill, numbers.Number): + fill = self.palette.getcolor(fill) + fill = self.draw.draw_ink(fill) + return ink, fill + + def arc(self, xy, start, end, fill=None, width=1): + """Draw an arc.""" + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_arc(xy, start, end, ink, width) + + def bitmap(self, xy, bitmap, fill=None): + """Draw a bitmap.""" + bitmap.load() + ink, fill = self._getink(fill) + if ink is None: + ink = fill + if ink is not None: + self.draw.draw_bitmap(xy, bitmap.im, ink) + + def chord(self, xy, start, end, fill=None, outline=None, width=1): + """Draw a chord.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_chord(xy, start, end, fill, 1) + if ink is not None and ink != fill and width != 0: + self.draw.draw_chord(xy, start, end, ink, 0, width) + + def ellipse(self, xy, fill=None, outline=None, width=1): + """Draw an ellipse.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_ellipse(xy, fill, 1) + if ink is not None and ink != fill and width != 0: + self.draw.draw_ellipse(xy, ink, 0, width) + + def line(self, xy, fill=None, width=0, joint=None): + """Draw a line, or a connected sequence of line segments.""" + ink = self._getink(fill)[0] + if ink is not None: + self.draw.draw_lines(xy, ink, width) + if joint == "curve" and width > 4: + if not isinstance(xy[0], (list, tuple)): + xy = [tuple(xy[i : i + 2]) for i in range(0, len(xy), 2)] + for i in range(1, len(xy) - 1): + point = xy[i] + angles = [ + math.degrees(math.atan2(end[0] - start[0], start[1] - end[1])) + % 360 + for start, end in ((xy[i - 
1], point), (point, xy[i + 1])) + ] + if angles[0] == angles[1]: + # This is a straight line, so no joint is required + continue + + def coord_at_angle(coord, angle): + x, y = coord + angle -= 90 + distance = width / 2 - 1 + return tuple( + [ + p + (math.floor(p_d) if p_d > 0 else math.ceil(p_d)) + for p, p_d in ( + (x, distance * math.cos(math.radians(angle))), + (y, distance * math.sin(math.radians(angle))), + ) + ] + ) + + flipped = ( + angles[1] > angles[0] and angles[1] - 180 > angles[0] + ) or (angles[1] < angles[0] and angles[1] + 180 > angles[0]) + coords = [ + (point[0] - width / 2 + 1, point[1] - width / 2 + 1), + (point[0] + width / 2 - 1, point[1] + width / 2 - 1), + ] + if flipped: + start, end = (angles[1] + 90, angles[0] + 90) + else: + start, end = (angles[0] - 90, angles[1] - 90) + self.pieslice(coords, start - 90, end - 90, fill) + + if width > 8: + # Cover potential gaps between the line and the joint + if flipped: + gapCoords = [ + coord_at_angle(point, angles[0] + 90), + point, + coord_at_angle(point, angles[1] + 90), + ] + else: + gapCoords = [ + coord_at_angle(point, angles[0] - 90), + point, + coord_at_angle(point, angles[1] - 90), + ] + self.line(gapCoords, fill, width=3) + + def shape(self, shape, fill=None, outline=None): + """(Experimental) Draw a shape.""" + shape.close() + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_outline(shape, fill, 1) + if ink is not None and ink != fill: + self.draw.draw_outline(shape, ink, 0) + + def pieslice(self, xy, start, end, fill=None, outline=None, width=1): + """Draw a pieslice.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_pieslice(xy, start, end, fill, 1) + if ink is not None and ink != fill and width != 0: + self.draw.draw_pieslice(xy, start, end, ink, 0, width) + + def point(self, xy, fill=None): + """Draw one or more individual pixels.""" + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_points(xy, ink) + + def polygon(self, xy, fill=None, outline=None): + """Draw a polygon.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_polygon(xy, fill, 1) + if ink is not None and ink != fill: + self.draw.draw_polygon(xy, ink, 0) + + def regular_polygon( + self, bounding_circle, n_sides, rotation=0, fill=None, outline=None + ): + """Draw a regular polygon.""" + xy = _compute_regular_polygon_vertices(bounding_circle, n_sides, rotation) + self.polygon(xy, fill, outline) + + def rectangle(self, xy, fill=None, outline=None, width=1): + """Draw a rectangle.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_rectangle(xy, fill, 1) + if ink is not None and ink != fill and width != 0: + self.draw.draw_rectangle(xy, ink, 0, width) + + def _multiline_check(self, text): + """Draw text.""" + split_character = "\n" if isinstance(text, str) else b"\n" + + return split_character in text + + def _multiline_split(self, text): + split_character = "\n" if isinstance(text, str) else b"\n" + + return text.split(split_character) + + def text( + self, + xy, + text, + fill=None, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + stroke_fill=None, + embedded_color=False, + *args, + **kwargs, + ): + if self._multiline_check(text): + return self.multiline_text( + xy, + text, + fill, + font, + anchor, + spacing, + align, + direction, + features, + language, + stroke_width, + stroke_fill, + embedded_color, + ) + + if 
embedded_color and self.mode not in ("RGB", "RGBA"): + raise ValueError("Embedded color supported only in RGB and RGBA modes") + + if font is None: + font = self.getfont() + + def getink(fill): + ink, fill = self._getink(fill) + if ink is None: + return fill + return ink + + def draw_text(ink, stroke_width=0, stroke_offset=None): + mode = self.fontmode + if stroke_width == 0 and embedded_color: + mode = "RGBA" + coord = xy + try: + mask, offset = font.getmask2( + text, + mode, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + anchor=anchor, + ink=ink, + *args, + **kwargs, + ) + coord = coord[0] + offset[0], coord[1] + offset[1] + except AttributeError: + try: + mask = font.getmask( + text, + mode, + direction, + features, + language, + stroke_width, + anchor, + ink, + *args, + **kwargs, + ) + except TypeError: + mask = font.getmask(text) + if stroke_offset: + coord = coord[0] + stroke_offset[0], coord[1] + stroke_offset[1] + if mode == "RGBA": + # font.getmask2(mode="RGBA") returns color in RGB bands and mask in A + # extract mask and set text alpha + color, mask = mask, mask.getband(3) + color.fillband(3, (ink >> 24) & 0xFF) + coord2 = coord[0] + mask.size[0], coord[1] + mask.size[1] + self.im.paste(color, coord + coord2, mask) + else: + self.draw.draw_bitmap(coord, mask, ink) + + ink = getink(fill) + if ink is not None: + stroke_ink = None + if stroke_width: + stroke_ink = getink(stroke_fill) if stroke_fill is not None else ink + + if stroke_ink is not None: + # Draw stroked text + draw_text(stroke_ink, stroke_width) + + # Draw normal text + draw_text(ink, 0) + else: + # Only draw normal text + draw_text(ink) + + def multiline_text( + self, + xy, + text, + fill=None, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + stroke_fill=None, + embedded_color=False, + ): + if direction == "ttb": + raise ValueError("ttb direction is unsupported for multiline text") + + if anchor is None: + anchor = "la" + elif len(anchor) != 2: + raise ValueError("anchor must be a 2 character string") + elif anchor[1] in "tb": + raise ValueError("anchor not supported for multiline text") + + widths = [] + max_width = 0 + lines = self._multiline_split(text) + line_spacing = ( + self.textsize("A", font=font, stroke_width=stroke_width)[1] + spacing + ) + for line in lines: + line_width = self.textlength( + line, font, direction=direction, features=features, language=language + ) + widths.append(line_width) + max_width = max(max_width, line_width) + + top = xy[1] + if anchor[1] == "m": + top -= (len(lines) - 1) * line_spacing / 2.0 + elif anchor[1] == "d": + top -= (len(lines) - 1) * line_spacing + + for idx, line in enumerate(lines): + left = xy[0] + width_difference = max_width - widths[idx] + + # first align left by anchor + if anchor[0] == "m": + left -= width_difference / 2.0 + elif anchor[0] == "r": + left -= width_difference + + # then align by align parameter + if align == "left": + pass + elif align == "center": + left += width_difference / 2.0 + elif align == "right": + left += width_difference + else: + raise ValueError('align must be "left", "center" or "right"') + + self.text( + (left, top), + line, + fill, + font, + anchor, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + stroke_fill=stroke_fill, + embedded_color=embedded_color, + ) + top += line_spacing + + def textsize( + self, + text, + font=None, + spacing=4, + direction=None, + 
features=None, + language=None, + stroke_width=0, + ): + """Get the size of a given string, in pixels.""" + if self._multiline_check(text): + return self.multiline_textsize( + text, font, spacing, direction, features, language, stroke_width + ) + + if font is None: + font = self.getfont() + return font.getsize(text, direction, features, language, stroke_width) + + def multiline_textsize( + self, + text, + font=None, + spacing=4, + direction=None, + features=None, + language=None, + stroke_width=0, + ): + max_width = 0 + lines = self._multiline_split(text) + line_spacing = ( + self.textsize("A", font=font, stroke_width=stroke_width)[1] + spacing + ) + for line in lines: + line_width, line_height = self.textsize( + line, font, spacing, direction, features, language, stroke_width + ) + max_width = max(max_width, line_width) + return max_width, len(lines) * line_spacing - spacing + + def textlength( + self, + text, + font=None, + direction=None, + features=None, + language=None, + embedded_color=False, + ): + """Get the length of a given string, in pixels with 1/64 precision.""" + if self._multiline_check(text): + raise ValueError("can't measure length of multiline text") + if embedded_color and self.mode not in ("RGB", "RGBA"): + raise ValueError("Embedded color supported only in RGB and RGBA modes") + + if font is None: + font = self.getfont() + mode = "RGBA" if embedded_color else self.fontmode + try: + return font.getlength(text, mode, direction, features, language) + except AttributeError: + size = self.textsize( + text, font, direction=direction, features=features, language=language + ) + if direction == "ttb": + return size[1] + return size[0] + + def textbbox( + self, + xy, + text, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + embedded_color=False, + ): + """Get the bounding box of a given string, in pixels.""" + if embedded_color and self.mode not in ("RGB", "RGBA"): + raise ValueError("Embedded color supported only in RGB and RGBA modes") + + if self._multiline_check(text): + return self.multiline_textbbox( + xy, + text, + font, + anchor, + spacing, + align, + direction, + features, + language, + stroke_width, + embedded_color, + ) + + if font is None: + font = self.getfont() + mode = "RGBA" if embedded_color else self.fontmode + bbox = font.getbbox( + text, mode, direction, features, language, stroke_width, anchor + ) + return bbox[0] + xy[0], bbox[1] + xy[1], bbox[2] + xy[0], bbox[3] + xy[1] + + def multiline_textbbox( + self, + xy, + text, + font=None, + anchor=None, + spacing=4, + align="left", + direction=None, + features=None, + language=None, + stroke_width=0, + embedded_color=False, + ): + if direction == "ttb": + raise ValueError("ttb direction is unsupported for multiline text") + + if anchor is None: + anchor = "la" + elif len(anchor) != 2: + raise ValueError("anchor must be a 2 character string") + elif anchor[1] in "tb": + raise ValueError("anchor not supported for multiline text") + + widths = [] + max_width = 0 + lines = self._multiline_split(text) + line_spacing = ( + self.textsize("A", font=font, stroke_width=stroke_width)[1] + spacing + ) + for line in lines: + line_width = self.textlength( + line, + font, + direction=direction, + features=features, + language=language, + embedded_color=embedded_color, + ) + widths.append(line_width) + max_width = max(max_width, line_width) + + top = xy[1] + if anchor[1] == "m": + top -= (len(lines) - 1) * line_spacing / 2.0 + elif anchor[1] == "d": + 
top -= (len(lines) - 1) * line_spacing + + bbox = None + + for idx, line in enumerate(lines): + left = xy[0] + width_difference = max_width - widths[idx] + + # first align left by anchor + if anchor[0] == "m": + left -= width_difference / 2.0 + elif anchor[0] == "r": + left -= width_difference + + # then align by align parameter + if align == "left": + pass + elif align == "center": + left += width_difference / 2.0 + elif align == "right": + left += width_difference + else: + raise ValueError('align must be "left", "center" or "right"') + + bbox_line = self.textbbox( + (left, top), + line, + font, + anchor, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + embedded_color=embedded_color, + ) + if bbox is None: + bbox = bbox_line + else: + bbox = ( + min(bbox[0], bbox_line[0]), + min(bbox[1], bbox_line[1]), + max(bbox[2], bbox_line[2]), + max(bbox[3], bbox_line[3]), + ) + + top += line_spacing + + if bbox is None: + return xy[0], xy[1], xy[0], xy[1] + return bbox + + +def Draw(im, mode=None): + """ + A simple 2D drawing interface for PIL images. + + :param im: The image to draw in. + :param mode: Optional mode to use for color values. For RGB + images, this argument can be RGB or RGBA (to blend the + drawing into the image). For all other modes, this argument + must be the same as the image mode. If omitted, the mode + defaults to the mode of the image. + """ + try: + return im.getdraw(mode) + except AttributeError: + return ImageDraw(im, mode) + + +# experimental access to the outline API +try: + Outline = Image.core.outline +except AttributeError: + Outline = None + + +def getdraw(im=None, hints=None): + """ + (Experimental) A more advanced 2D drawing interface for PIL images, + based on the WCK interface. + + :param im: The image to draw in. + :param hints: An optional list of hints. + :returns: A (drawing context, drawing resource factory) tuple. + """ + # FIXME: this needs more work! + # FIXME: come up with a better 'hints' scheme. + handler = None + if not hints or "nicest" in hints: + try: + from . import _imagingagg as handler + except ImportError: + pass + if handler is None: + from . import ImageDraw2 as handler + if im: + im = handler.Draw(im) + return im, handler + + +def floodfill(image, xy, value, border=None, thresh=0): + """ + (experimental) Fills a bounded region with a given color. + + :param image: Target image. + :param xy: Seed position (a 2-item coordinate tuple). See + :ref:`coordinate-system`. + :param value: Fill color. + :param border: Optional border value. If given, the region consists of + pixels with a color different from the border color. If not given, + the region consists of pixels having the same color as the seed + pixel. + :param thresh: Optional threshold value which specifies a maximum + tolerable difference of a pixel value from the 'background' in + order for it to be replaced. Useful for filling regions of + non-homogeneous, but similar, colors. + """ + # based on an implementation by Eric S. 
Raymond + # amended by yo1995 @20180806 + pixel = image.load() + x, y = xy + try: + background = pixel[x, y] + if _color_diff(value, background) <= thresh: + return # seed point already has fill color + pixel[x, y] = value + except (ValueError, IndexError): + return # seed point outside image + edge = {(x, y)} + # use a set to keep record of current and previous edge pixels + # to reduce memory consumption + full_edge = set() + while edge: + new_edge = set() + for (x, y) in edge: # 4 adjacent method + for (s, t) in ((x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)): + # If already processed, or if a coordinate is negative, skip + if (s, t) in full_edge or s < 0 or t < 0: + continue + try: + p = pixel[s, t] + except (ValueError, IndexError): + pass + else: + full_edge.add((s, t)) + if border is None: + fill = _color_diff(p, background) <= thresh + else: + fill = p != value and p != border + if fill: + pixel[s, t] = value + new_edge.add((s, t)) + full_edge = edge # discard pixels processed + edge = new_edge + + +def _compute_regular_polygon_vertices(bounding_circle, n_sides, rotation): + """ + Generate a list of vertices for a 2D regular polygon. + + :param bounding_circle: The bounding circle is a tuple defined + by a point and radius. The polygon is inscribed in this circle. + (e.g. ``bounding_circle=(x, y, r)`` or ``((x, y), r)``) + :param n_sides: Number of sides + (e.g. ``n_sides=3`` for a triangle, ``6`` for a hexagon) + :param rotation: Apply an arbitrary rotation to the polygon + (e.g. ``rotation=90``, applies a 90 degree rotation) + :return: List of regular polygon vertices + (e.g. ``[(25, 50), (50, 50), (50, 25), (25, 25)]``) + + How are the vertices computed? + 1. Compute the following variables + - theta: Angle between the apothem & the nearest polygon vertex + - side_length: Length of each polygon edge + - centroid: Center of bounding circle (1st, 2nd elements of bounding_circle) + - polygon_radius: Polygon radius (last element of bounding_circle) + - angles: Location of each polygon vertex in polar grid + (e.g. A square with 0 degree rotation => [225.0, 315.0, 45.0, 135.0]) + + 2. For each angle in angles, get the polygon vertex at that angle + The vertex is computed using the equation below. + X= xcos(φ) + ysin(φ) + Y= −xsin(φ) + ycos(φ) + + Note: + φ = angle in degrees + x = 0 + y = polygon_radius + + The formula above assumes rotation around the origin. + In our case, we are rotating around the centroid. + To account for this, we use the formula below + X = xcos(φ) + ysin(φ) + centroid_x + Y = −xsin(φ) + ycos(φ) + centroid_y + """ + # 1. Error Handling + # 1.1 Check `n_sides` has an appropriate value + if not isinstance(n_sides, int): + raise TypeError("n_sides should be an int") + if n_sides < 3: + raise ValueError("n_sides should be an int > 2") + + # 1.2 Check `bounding_circle` has an appropriate value + if not isinstance(bounding_circle, (list, tuple)): + raise TypeError("bounding_circle should be a tuple") + + if len(bounding_circle) == 3: + *centroid, polygon_radius = bounding_circle + elif len(bounding_circle) == 2: + centroid, polygon_radius = bounding_circle + else: + raise ValueError( + "bounding_circle should contain 2D coordinates " + "and a radius (e.g. (x, y, r) or ((x, y), r) )" + ) + + if not all(isinstance(i, (int, float)) for i in (*centroid, polygon_radius)): + raise ValueError("bounding_circle should only contain numeric data") + + if not len(centroid) == 2: + raise ValueError( + "bounding_circle centre should contain 2D coordinates (e.g. 
(x, y))" + ) + + if polygon_radius <= 0: + raise ValueError("bounding_circle radius should be > 0") + + # 1.3 Check `rotation` has an appropriate value + if not isinstance(rotation, (int, float)): + raise ValueError("rotation should be an int or float") + + # 2. Define Helper Functions + def _apply_rotation(point, degrees, centroid): + return ( + round( + point[0] * math.cos(math.radians(360 - degrees)) + - point[1] * math.sin(math.radians(360 - degrees)) + + centroid[0], + 2, + ), + round( + point[1] * math.cos(math.radians(360 - degrees)) + + point[0] * math.sin(math.radians(360 - degrees)) + + centroid[1], + 2, + ), + ) + + def _compute_polygon_vertex(centroid, polygon_radius, angle): + start_point = [polygon_radius, 0] + return _apply_rotation(start_point, angle, centroid) + + def _get_angles(n_sides, rotation): + angles = [] + degrees = 360 / n_sides + # Start with the bottom left polygon vertex + current_angle = (270 - 0.5 * degrees) + rotation + for _ in range(0, n_sides): + angles.append(current_angle) + current_angle += degrees + if current_angle > 360: + current_angle -= 360 + return angles + + # 3. Variable Declarations + angles = _get_angles(n_sides, rotation) + + # 4. Compute Vertices + return [ + _compute_polygon_vertex(centroid, polygon_radius, angle) for angle in angles + ] + + +def _color_diff(color1, color2): + """ + Uses 1-norm distance to calculate difference between two values. + """ + if isinstance(color2, tuple): + return sum([abs(color1[i] - color2[i]) for i in range(0, len(color2))]) + else: + return abs(color1 - color2) diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageDraw2.py b/minor_project/lib/python3.6/site-packages/PIL/ImageDraw2.py new file mode 100644 index 0000000..1f63110 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageDraw2.py @@ -0,0 +1,179 @@ +# +# The Python Imaging Library +# $Id$ +# +# WCK-style drawing interface operations +# +# History: +# 2003-12-07 fl created +# 2005-05-15 fl updated; added to PIL as ImageDraw2 +# 2005-05-15 fl added text support +# 2005-05-20 fl added arc/chord/pieslice support +# +# Copyright (c) 2003-2005 by Secret Labs AB +# Copyright (c) 2003-2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +""" +(Experimental) WCK-style drawing interface operations + +.. seealso:: :py:mod:`PIL.ImageDraw` +""" + + +from . 
import Image, ImageColor, ImageDraw, ImageFont, ImagePath + + +class Pen: + """Stores an outline color and width.""" + + def __init__(self, color, width=1, opacity=255): + self.color = ImageColor.getrgb(color) + self.width = width + + +class Brush: + """Stores a fill color""" + + def __init__(self, color, opacity=255): + self.color = ImageColor.getrgb(color) + + +class Font: + """Stores a TrueType font and color""" + + def __init__(self, color, file, size=12): + # FIXME: add support for bitmap fonts + self.color = ImageColor.getrgb(color) + self.font = ImageFont.truetype(file, size) + + +class Draw: + """ + (Experimental) WCK-style drawing interface + """ + + def __init__(self, image, size=None, color=None): + if not hasattr(image, "im"): + image = Image.new(image, size, color) + self.draw = ImageDraw.Draw(image) + self.image = image + self.transform = None + + def flush(self): + return self.image + + def render(self, op, xy, pen, brush=None): + # handle color arguments + outline = fill = None + width = 1 + if isinstance(pen, Pen): + outline = pen.color + width = pen.width + elif isinstance(brush, Pen): + outline = brush.color + width = brush.width + if isinstance(brush, Brush): + fill = brush.color + elif isinstance(pen, Brush): + fill = pen.color + # handle transformation + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + # render the item + if op == "line": + self.draw.line(xy, fill=outline, width=width) + else: + getattr(self.draw, op)(xy, fill=fill, outline=outline) + + def settransform(self, offset): + """Sets a transformation offset.""" + (xoffset, yoffset) = offset + self.transform = (1, 0, xoffset, 0, 1, yoffset) + + def arc(self, xy, start, end, *options): + """ + Draws an arc (a portion of a circle outline) between the start and end + angles, inside the given bounding box. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.arc` + """ + self.render("arc", xy, start, end, *options) + + def chord(self, xy, start, end, *options): + """ + Same as :py:meth:`~PIL.ImageDraw2.Draw.arc`, but connects the end points + with a straight line. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.chord` + """ + self.render("chord", xy, start, end, *options) + + def ellipse(self, xy, *options): + """ + Draws an ellipse inside the given bounding box. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.ellipse` + """ + self.render("ellipse", xy, *options) + + def line(self, xy, *options): + """ + Draws a line between the coordinates in the ``xy`` list. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.line` + """ + self.render("line", xy, *options) + + def pieslice(self, xy, start, end, *options): + """ + Same as arc, but also draws straight lines between the end points and the + center of the bounding box. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.pieslice` + """ + self.render("pieslice", xy, start, end, *options) + + def polygon(self, xy, *options): + """ + Draws a polygon. + + The polygon outline consists of straight lines between the given + coordinates, plus a straight line between the last and the first + coordinate. + + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.polygon` + """ + self.render("polygon", xy, *options) + + def rectangle(self, xy, *options): + """ + Draws a rectangle. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.rectangle` + """ + self.render("rectangle", xy, *options) + + def text(self, xy, text, font): + """ + Draws the string at the given position. + + .. 
seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.text` + """ + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + self.draw.text(xy, text, font=font.font, fill=font.color) + + def textsize(self, text, font): + """ + Return the size of the given string, in pixels. + + .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textsize` + """ + return self.draw.textsize(text, font=font.font) diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageEnhance.py b/minor_project/lib/python3.6/site-packages/PIL/ImageEnhance.py new file mode 100644 index 0000000..3b79d5c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageEnhance.py @@ -0,0 +1,103 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image enhancement classes +# +# For a background, see "Image Processing By Interpolation and +# Extrapolation", Paul Haeberli and Douglas Voorhies. Available +# at http://www.graficaobscura.com/interp/index.html +# +# History: +# 1996-03-23 fl Created +# 2009-06-16 fl Fixed mean calculation +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFilter, ImageStat + + +class _Enhance: + def enhance(self, factor): + """ + Returns an enhanced image. + + :param factor: A floating point value controlling the enhancement. + Factor 1.0 always returns a copy of the original image, + lower factors mean less color (brightness, contrast, + etc), and higher values more. There are no restrictions + on this value. + :rtype: :py:class:`~PIL.Image.Image` + """ + return Image.blend(self.degenerate, self.image, factor) + + +class Color(_Enhance): + """Adjust image color balance. + + This class can be used to adjust the colour balance of an image, in + a manner similar to the controls on a colour TV set. An enhancement + factor of 0.0 gives a black and white image. A factor of 1.0 gives + the original image. + """ + + def __init__(self, image): + self.image = image + self.intermediate_mode = "L" + if "A" in image.getbands(): + self.intermediate_mode = "LA" + + self.degenerate = image.convert(self.intermediate_mode).convert(image.mode) + + +class Contrast(_Enhance): + """Adjust image contrast. + + This class can be used to control the contrast of an image, similar + to the contrast control on a TV set. An enhancement factor of 0.0 + gives a solid grey image. A factor of 1.0 gives the original image. + """ + + def __init__(self, image): + self.image = image + mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) + self.degenerate = Image.new("L", image.size, mean).convert(image.mode) + + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) + + +class Brightness(_Enhance): + """Adjust image brightness. + + This class can be used to control the brightness of an image. An + enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the + original image. + """ + + def __init__(self, image): + self.image = image + self.degenerate = Image.new(image.mode, image.size, 0) + + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) + + +class Sharpness(_Enhance): + """Adjust image sharpness. + + This class can be used to adjust the sharpness of an image. An + enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the + original image, and a factor of 2.0 gives a sharpened image. 
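+
+    An illustrative sketch of typical usage (the input filename is
+    hypothetical)::
+
+        from PIL import Image, ImageEnhance
+
+        im = Image.open("photo.jpg")
+        sharper = ImageEnhance.Sharpness(im).enhance(2.0)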
+ """ + + def __init__(self, image): + self.image = image + self.degenerate = image.filter(ImageFilter.SMOOTH) + + if "A" in image.getbands(): + self.degenerate.putalpha(image.getchannel("A")) diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageFile.py b/minor_project/lib/python3.6/site-packages/PIL/ImageFile.py new file mode 100644 index 0000000..f2a55cb --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageFile.py @@ -0,0 +1,697 @@ +# +# The Python Imaging Library. +# $Id$ +# +# base class for image file handlers +# +# history: +# 1995-09-09 fl Created +# 1996-03-11 fl Fixed load mechanism. +# 1996-04-15 fl Added pcx/xbm decoders. +# 1996-04-30 fl Added encoders. +# 1996-12-14 fl Added load helpers +# 1997-01-11 fl Use encode_to_file where possible +# 1997-08-27 fl Flush output in _save +# 1998-03-05 fl Use memory mapping for some modes +# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B" +# 1999-05-31 fl Added image parser +# 2000-10-12 fl Set readonly flag on memory-mapped images +# 2002-03-20 fl Use better messages for common decoder errors +# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available +# 2003-10-30 fl Added StubImageFile class +# 2004-02-25 fl Made incremental parser more robust +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import io +import struct +import sys +import warnings + +from . import Image +from ._util import isPath + +MAXBLOCK = 65536 + +SAFEBLOCK = 1024 * 1024 + +LOAD_TRUNCATED_IMAGES = False +"""Whether or not to load truncated image files. User code may change this.""" + +ERRORS = { + -1: "image buffer overrun error", + -2: "decoding error", + -3: "unknown error", + -8: "bad configuration", + -9: "out of memory error", +} +"""Dict of known error codes returned from :meth:`.PyDecoder.decode`.""" + + +# +# -------------------------------------------------------------------- +# Helpers + + +def raise_oserror(error): + try: + message = Image.core.getcodecstatus(error) + except AttributeError: + message = ERRORS.get(error) + if not message: + message = f"decoder error {error}" + raise OSError(message + " when reading image file") + + +def raise_ioerror(error): + warnings.warn( + "raise_ioerror is deprecated and will be removed in Pillow 9 (2022-01-02). 
" + "Use raise_oserror instead.", + DeprecationWarning, + ) + return raise_oserror(error) + + +def _tilesort(t): + # sort on offset + return t[2] + + +# +# -------------------------------------------------------------------- +# ImageFile base class + + +class ImageFile(Image.Image): + """Base class for image file format handlers.""" + + def __init__(self, fp=None, filename=None): + super().__init__() + + self._min_frame = 0 + + self.custom_mimetype = None + + self.tile = None + """ A list of tile descriptors, or ``None`` """ + + self.readonly = 1 # until we know better + + self.decoderconfig = () + self.decodermaxblock = MAXBLOCK + + if isPath(fp): + # filename + self.fp = open(fp, "rb") + self.filename = fp + self._exclusive_fp = True + else: + # stream + self.fp = fp + self.filename = filename + # can be overridden + self._exclusive_fp = None + + try: + try: + self._open() + except ( + IndexError, # end of data + TypeError, # end of data (ord) + KeyError, # unsupported mode + EOFError, # got header but not the first frame + struct.error, + ) as v: + raise SyntaxError(v) from v + + if not self.mode or self.size[0] <= 0: + raise SyntaxError("not identified by this driver") + except BaseException: + # close the file only if we have opened it this constructor + if self._exclusive_fp: + self.fp.close() + raise + + def get_format_mimetype(self): + if self.custom_mimetype: + return self.custom_mimetype + if self.format is not None: + return Image.MIME.get(self.format.upper()) + + def verify(self): + """Check file integrity""" + + # raise exception if something's wrong. must be called + # directly after open, and closes file when finished. + if self._exclusive_fp: + self.fp.close() + self.fp = None + + def load(self): + """Load image data based on tile list""" + + if self.tile is None: + raise OSError("cannot load this image") + + pixel = Image.Image.load(self) + if not self.tile: + return pixel + + self.map = None + use_mmap = self.filename and len(self.tile) == 1 + # As of pypy 2.1.0, memory mapping was failing here. + use_mmap = use_mmap and not hasattr(sys, "pypy_version_info") + + readonly = 0 + + # look for read/seek overrides + try: + read = self.load_read + # don't use mmap if there are custom read/seek functions + use_mmap = False + except AttributeError: + read = self.fp.read + + try: + seek = self.load_seek + use_mmap = False + except AttributeError: + seek = self.fp.seek + + if use_mmap: + # try memory mapping + decoder_name, extents, offset, args = self.tile[0] + if ( + decoder_name == "raw" + and len(args) >= 3 + and args[0] == self.mode + and args[0] in Image._MAPMODES + ): + try: + if hasattr(Image.core, "map"): + # use built-in mapper WIN32 only + self.map = Image.core.map(self.filename) + self.map.seek(offset) + self.im = self.map.readimage( + self.mode, self.size, args[1], args[2] + ) + else: + # use mmap, if possible + import mmap + + with open(self.filename) as fp: + self.map = mmap.mmap( + fp.fileno(), 0, access=mmap.ACCESS_READ + ) + self.im = Image.core.map_buffer( + self.map, self.size, decoder_name, offset, args + ) + readonly = 1 + # After trashing self.im, + # we might need to reload the palette data. + if self.palette: + self.palette.dirty = 1 + except (AttributeError, OSError, ImportError): + self.map = None + + self.load_prepare() + err_code = -3 # initialize to unknown error + if not self.map: + # sort tiles in file order + self.tile.sort(key=_tilesort) + + try: + # FIXME: This is a hack to handle TIFF's JpegTables tag. 
+ prefix = self.tile_prefix + except AttributeError: + prefix = b"" + + for decoder_name, extents, offset, args in self.tile: + decoder = Image._getdecoder( + self.mode, decoder_name, args, self.decoderconfig + ) + try: + seek(offset) + decoder.setimage(self.im, extents) + if decoder.pulls_fd: + decoder.setfd(self.fp) + status, err_code = decoder.decode(b"") + else: + b = prefix + while True: + try: + s = read(self.decodermaxblock) + except (IndexError, struct.error) as e: + # truncated png/gif + if LOAD_TRUNCATED_IMAGES: + break + else: + raise OSError("image file is truncated") from e + + if not s: # truncated jpeg + if LOAD_TRUNCATED_IMAGES: + break + else: + raise OSError( + "image file is truncated " + f"({len(b)} bytes not processed)" + ) + + b = b + s + n, err_code = decoder.decode(b) + if n < 0: + break + b = b[n:] + finally: + # Need to cleanup here to prevent leaks + decoder.cleanup() + + self.tile = [] + self.readonly = readonly + + self.load_end() + + if self._exclusive_fp and self._close_exclusive_fp_after_loading: + self.fp.close() + self.fp = None + + if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0: + # still raised if decoder fails to return anything + raise_oserror(err_code) + + return Image.Image.load(self) + + def load_prepare(self): + # create image memory if necessary + if not self.im or self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.new(self.mode, self.size) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + def load_end(self): + # may be overridden + pass + + # may be defined for contained formats + # def load_seek(self, pos): + # pass + + # may be defined for blocked formats (e.g. PNG) + # def load_read(self, bytes): + # pass + + def _seek_check(self, frame): + if ( + frame < self._min_frame + # Only check upper limit on frames if additional seek operations + # are not required to do so + or ( + not (hasattr(self, "_n_frames") and self._n_frames is None) + and frame >= self.n_frames + self._min_frame + ) + ): + raise EOFError("attempt to seek outside sequence") + + return self.tell() != frame + + +class StubImageFile(ImageFile): + """ + Base class for stub image loaders. + + A stub loader is an image loader that can identify files of a + certain format, but relies on external code to load the file. + """ + + def _open(self): + raise NotImplementedError("StubImageFile subclass must implement _open") + + def load(self): + loader = self._load() + if loader is None: + raise OSError(f"cannot find loader for this {self.format} file") + image = loader.load(self) + assert image is not None + # become the other object (!) + self.__class__ = image.__class__ + self.__dict__ = image.__dict__ + + def _load(self): + """(Hook) Find actual image loader.""" + raise NotImplementedError("StubImageFile subclass must implement _load") + + +class Parser: + """ + Incremental image parser. This class implements the standard + feed/close consumer interface. + """ + + incremental = None + image = None + data = None + decoder = None + offset = 0 + finished = 0 + + def reset(self): + """ + (Consumer) Reset the parser. Note that you can only call this + method immediately after you've created a parser; parser + instances cannot be reused. + """ + assert self.data is None, "cannot reuse parsers" + + def feed(self, data): + """ + (Consumer) Feed data to the parser. + + :param data: A string buffer. + :exception OSError: If the parser failed to parse the image file. 
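# Hedged sketch of the incremental feed()/close() consumer interface
# documented above; "stream.jpg" and the 1024-byte chunk size are
# illustrative only.
from PIL import ImageFile

parser = ImageFile.Parser()
with open("stream.jpg", "rb") as fp:
    while True:
        chunk = fp.read(1024)
        if not chunk:
            break
        parser.feed(chunk)
im = parser.close()          # raises OSError if the image cannot be parsed
print(im.mode, im.size)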
+ """ + # collect data + + if self.finished: + return + + if self.data is None: + self.data = data + else: + self.data = self.data + data + + # parse what we have + if self.decoder: + + if self.offset > 0: + # skip header + skip = min(len(self.data), self.offset) + self.data = self.data[skip:] + self.offset = self.offset - skip + if self.offset > 0 or not self.data: + return + + n, e = self.decoder.decode(self.data) + + if n < 0: + # end of stream + self.data = None + self.finished = 1 + if e < 0: + # decoding error + self.image = None + raise_oserror(e) + else: + # end of image + return + self.data = self.data[n:] + + elif self.image: + + # if we end up here with no decoder, this file cannot + # be incrementally parsed. wait until we've gotten all + # available data + pass + + else: + + # attempt to open this file + try: + with io.BytesIO(self.data) as fp: + im = Image.open(fp) + except OSError: + # traceback.print_exc() + pass # not enough data + else: + flag = hasattr(im, "load_seek") or hasattr(im, "load_read") + if flag or len(im.tile) != 1: + # custom load code, or multiple tiles + self.decode = None + else: + # initialize decoder + im.load_prepare() + d, e, o, a = im.tile[0] + im.tile = [] + self.decoder = Image._getdecoder(im.mode, d, a, im.decoderconfig) + self.decoder.setimage(im.im, e) + + # calculate decoder offset + self.offset = o + if self.offset <= len(self.data): + self.data = self.data[self.offset :] + self.offset = 0 + + self.image = im + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + """ + (Consumer) Close the stream. + + :returns: An image object. + :exception OSError: If the parser failed to parse the image file either + because it cannot be identified or cannot be + decoded. + """ + # finish decoding + if self.decoder: + # get rid of what's left in the buffers + self.feed(b"") + self.data = self.decoder = None + if not self.finished: + raise OSError("image was incomplete") + if not self.image: + raise OSError("cannot parse this image") + if self.data: + # incremental parsing not possible; reopen the file + # not that we have all data + with io.BytesIO(self.data) as fp: + try: + self.image = Image.open(fp) + finally: + self.image.load() + return self.image + + +# -------------------------------------------------------------------- + + +def _save(im, fp, tile, bufsize=0): + """Helper to save image based on tile list + + :param im: Image object. + :param fp: File object. + :param tile: Tile list. + :param bufsize: Optional buffer size + """ + + im.load() + if not hasattr(im, "encoderconfig"): + im.encoderconfig = () + tile.sort(key=_tilesort) + # FIXME: make MAXBLOCK a configuration parameter + # It would be great if we could have the encoder specify what it needs + # But, it would need at least the image size in most cases. RawEncode is + # a tricky case. 
+ bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c + if fp == sys.stdout: + fp.flush() + return + try: + fh = fp.fileno() + fp.flush() + except (AttributeError, io.UnsupportedOperation) as exc: + # compress to Python file-compatible object + for e, b, o, a in tile: + e = Image._getencoder(im.mode, e, a, im.encoderconfig) + if o > 0: + fp.seek(o) + e.setimage(im.im, b) + if e.pushes_fd: + e.setfd(fp) + l, s = e.encode_to_pyfd() + else: + while True: + l, s, d = e.encode(bufsize) + fp.write(d) + if s: + break + if s < 0: + raise OSError(f"encoder error {s} when writing image file") from exc + e.cleanup() + else: + # slight speedup: compress to real file object + for e, b, o, a in tile: + e = Image._getencoder(im.mode, e, a, im.encoderconfig) + if o > 0: + fp.seek(o) + e.setimage(im.im, b) + if e.pushes_fd: + e.setfd(fp) + l, s = e.encode_to_pyfd() + else: + s = e.encode_to_file(fh, bufsize) + if s < 0: + raise OSError(f"encoder error {s} when writing image file") + e.cleanup() + if hasattr(fp, "flush"): + fp.flush() + + +def _safe_read(fp, size): + """ + Reads large blocks in a safe way. Unlike fp.read(n), this function + doesn't trust the user. If the requested size is larger than + SAFEBLOCK, the file is read block by block. + + :param fp: File handle. Must implement a read method. + :param size: Number of bytes to read. + :returns: A string containing up to size bytes of data. + """ + if size <= 0: + return b"" + if size <= SAFEBLOCK: + return fp.read(size) + data = [] + while size > 0: + block = fp.read(min(size, SAFEBLOCK)) + if not block: + break + data.append(block) + size -= len(block) + return b"".join(data) + + +class PyCodecState: + def __init__(self): + self.xsize = 0 + self.ysize = 0 + self.xoff = 0 + self.yoff = 0 + + def extents(self): + return (self.xoff, self.yoff, self.xoff + self.xsize, self.yoff + self.ysize) + + +class PyDecoder: + """ + Python implementation of a format decoder. Override this class and + add the decoding logic in the :meth:`decode` method. + + See :ref:`Writing Your Own File Decoder in Python` + """ + + _pulls_fd = False + + def __init__(self, mode, *args): + self.im = None + self.state = PyCodecState() + self.fd = None + self.mode = mode + self.init(args) + + def init(self, args): + """ + Override to perform decoder specific initialization + + :param args: Array of args items from the tile entry + :returns: None + """ + self.args = args + + @property + def pulls_fd(self): + return self._pulls_fd + + def decode(self, buffer): + """ + Override to perform the decoding process. + + :param buffer: A bytes object with the data to be decoded. + :returns: A tuple of ``(bytes consumed, errcode)``. + If finished with decoding return <0 for the bytes consumed. + Err codes are from :data:`.ImageFile.ERRORS`. 
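# A hedged sketch of a custom decoder built on PyDecoder, per the docstring
# above: it reads the remaining payload as raw 8-bit greyscale. "ToyDecoder"
# and the "TOY" registration name are hypothetical.
from PIL import Image, ImageFile

class ToyDecoder(ImageFile.PyDecoder):
    _pulls_fd = True                           # decode() reads from self.fd itself

    def decode(self, buffer):
        self.set_as_raw(self.fd.read(), "L")   # hand the bytes to the raw codec
        return -1, 0                           # -1 = finished, 0 = no error

Image.register_decoder("TOY", ToyDecoder)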
+ """ + raise NotImplementedError() + + def cleanup(self): + """ + Override to perform decoder specific cleanup + + :returns: None + """ + pass + + def setfd(self, fd): + """ + Called from ImageFile to set the python file-like object + + :param fd: A python file-like object + :returns: None + """ + self.fd = fd + + def setimage(self, im, extents=None): + """ + Called from ImageFile to set the core output image for the decoder + + :param im: A core image object + :param extents: a 4 tuple of (x0, y0, x1, y1) defining the rectangle + for this tile + :returns: None + """ + + # following c code + self.im = im + + if extents: + (x0, y0, x1, y1) = extents + else: + (x0, y0, x1, y1) = (0, 0, 0, 0) + + if x0 == 0 and x1 == 0: + self.state.xsize, self.state.ysize = self.im.size + else: + self.state.xoff = x0 + self.state.yoff = y0 + self.state.xsize = x1 - x0 + self.state.ysize = y1 - y0 + + if self.state.xsize <= 0 or self.state.ysize <= 0: + raise ValueError("Size cannot be negative") + + if ( + self.state.xsize + self.state.xoff > self.im.size[0] + or self.state.ysize + self.state.yoff > self.im.size[1] + ): + raise ValueError("Tile cannot extend outside image") + + def set_as_raw(self, data, rawmode=None): + """ + Convenience method to set the internal image from a stream of raw data + + :param data: Bytes to be set + :param rawmode: The rawmode to be used for the decoder. + If not specified, it will default to the mode of the image + :returns: None + """ + + if not rawmode: + rawmode = self.mode + d = Image._getdecoder(self.mode, "raw", (rawmode)) + d.setimage(self.im, self.state.extents()) + s = d.decode(data) + + if s[0] >= 0: + raise ValueError("not enough image data") + if s[1] != 0: + raise ValueError("cannot decode image data") diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageFilter.py b/minor_project/lib/python3.6/site-packages/PIL/ImageFilter.py new file mode 100644 index 0000000..9ca17d9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageFilter.py @@ -0,0 +1,534 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard filters +# +# History: +# 1995-11-27 fl Created +# 2002-06-08 fl Added rank and mode filters +# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2002 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +import functools + +try: + import numpy +except ImportError: # pragma: no cover + numpy = None + + +class Filter: + pass + + +class MultibandFilter(Filter): + pass + + +class BuiltinFilter(MultibandFilter): + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + return image.filter(*self.filterargs) + + +class Kernel(BuiltinFilter): + """ + Create a convolution kernel. The current version only + supports 3x3 and 5x5 integer and floating point kernels. + + In the current version, kernels can only be applied to + "L" and "RGB" images. + + :param size: Kernel size, given as (width, height). In the current + version, this must be (3,3) or (5,5). + :param kernel: A sequence containing kernel weights. + :param scale: Scale factor. If given, the result for each pixel is + divided by this value. The default is the sum of the + kernel weights. + :param offset: Offset. If given, this value is added to the result, + after it has been divided by the scale factor. 
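# A short sketch of the Kernel convolution filter described above;
# "example.jpg" is a placeholder. When scale is omitted it defaults to the
# sum of the kernel weights.
from PIL import Image, ImageFilter

im = Image.open("example.jpg").convert("RGB")
soften = ImageFilter.Kernel((3, 3), [1, 1, 1, 1, 1, 1, 1, 1, 1])              # scale = 9
edges = ImageFilter.Kernel((3, 3), [-1, -1, -1, -1, 8, -1, -1, -1, -1], scale=1)
im.filter(soften).save("soft.jpg")
im.filter(edges).save("edges.jpg")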
+ """ + + name = "Kernel" + + def __init__(self, size, kernel, scale=None, offset=0): + if scale is None: + # default scale is sum of kernel + scale = functools.reduce(lambda a, b: a + b, kernel) + if size[0] * size[1] != len(kernel): + raise ValueError("not enough coefficients in kernel") + self.filterargs = size, scale, offset, kernel + + +class RankFilter(Filter): + """ + Create a rank filter. The rank filter sorts all pixels in + a window of the given size, and returns the ``rank``'th value. + + :param size: The kernel size, in pixels. + :param rank: What pixel value to pick. Use 0 for a min filter, + ``size * size / 2`` for a median filter, ``size * size - 1`` + for a max filter, etc. + """ + + name = "Rank" + + def __init__(self, size, rank): + self.size = size + self.rank = rank + + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + image = image.expand(self.size // 2, self.size // 2) + return image.rankfilter(self.size, self.rank) + + +class MedianFilter(RankFilter): + """ + Create a median filter. Picks the median pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + + name = "Median" + + def __init__(self, size=3): + self.size = size + self.rank = size * size // 2 + + +class MinFilter(RankFilter): + """ + Create a min filter. Picks the lowest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + + name = "Min" + + def __init__(self, size=3): + self.size = size + self.rank = 0 + + +class MaxFilter(RankFilter): + """ + Create a max filter. Picks the largest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + + name = "Max" + + def __init__(self, size=3): + self.size = size + self.rank = size * size - 1 + + +class ModeFilter(Filter): + """ + Create a mode filter. Picks the most frequent pixel value in a box with the + given size. Pixel values that occur only once or twice are ignored; if no + pixel value occurs more than twice, the original pixel value is preserved. + + :param size: The kernel size, in pixels. + """ + + name = "Mode" + + def __init__(self, size=3): + self.size = size + + def filter(self, image): + return image.modefilter(self.size) + + +class GaussianBlur(MultibandFilter): + """Gaussian blur filter. + + :param radius: Blur radius. + """ + + name = "GaussianBlur" + + def __init__(self, radius=2): + self.radius = radius + + def filter(self, image): + return image.gaussian_blur(self.radius) + + +class BoxBlur(MultibandFilter): + """Blurs the image by setting each pixel to the average value of the pixels + in a square box extending radius pixels in each direction. + Supports float radius of arbitrary size. Uses an optimized implementation + which runs in linear time relative to the size of the image + for any radius value. + + :param radius: Size of the box in one direction. Radius 0 does not blur, + returns an identical image. Radius 1 takes 1 pixel + in each direction, i.e. 9 pixels in total. + """ + + name = "BoxBlur" + + def __init__(self, radius): + self.radius = radius + + def filter(self, image): + return image.box_blur(self.radius) + + +class UnsharpMask(MultibandFilter): + """Unsharp mask filter. + + See Wikipedia's entry on `digital unsharp masking`_ for an explanation of + the parameters. + + :param radius: Blur Radius + :param percent: Unsharp strength, in percent + :param threshold: Threshold controls the minimum brightness change that + will be sharpened + + .. 
_digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking + + """ # noqa: E501 + + name = "UnsharpMask" + + def __init__(self, radius=2, percent=150, threshold=3): + self.radius = radius + self.percent = percent + self.threshold = threshold + + def filter(self, image): + return image.unsharp_mask(self.radius, self.percent, self.threshold) + + +class BLUR(BuiltinFilter): + name = "Blur" + # fmt: off + filterargs = (5, 5), 16, 0, ( + 1, 1, 1, 1, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 1, 1, 1, 1, + ) + # fmt: on + + +class CONTOUR(BuiltinFilter): + name = "Contour" + # fmt: off + filterargs = (3, 3), 1, 255, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1, + ) + # fmt: on + + +class DETAIL(BuiltinFilter): + name = "Detail" + # fmt: off + filterargs = (3, 3), 6, 0, ( + 0, -1, 0, + -1, 10, -1, + 0, -1, 0, + ) + # fmt: on + + +class EDGE_ENHANCE(BuiltinFilter): + name = "Edge-enhance" + # fmt: off + filterargs = (3, 3), 2, 0, ( + -1, -1, -1, + -1, 10, -1, + -1, -1, -1, + ) + # fmt: on + + +class EDGE_ENHANCE_MORE(BuiltinFilter): + name = "Edge-enhance More" + # fmt: off + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 9, -1, + -1, -1, -1, + ) + # fmt: on + + +class EMBOSS(BuiltinFilter): + name = "Emboss" + # fmt: off + filterargs = (3, 3), 1, 128, ( + -1, 0, 0, + 0, 1, 0, + 0, 0, 0, + ) + # fmt: on + + +class FIND_EDGES(BuiltinFilter): + name = "Find Edges" + # fmt: off + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1, + ) + # fmt: on + + +class SHARPEN(BuiltinFilter): + name = "Sharpen" + # fmt: off + filterargs = (3, 3), 16, 0, ( + -2, -2, -2, + -2, 32, -2, + -2, -2, -2, + ) + # fmt: on + + +class SMOOTH(BuiltinFilter): + name = "Smooth" + # fmt: off + filterargs = (3, 3), 13, 0, ( + 1, 1, 1, + 1, 5, 1, + 1, 1, 1, + ) + # fmt: on + + +class SMOOTH_MORE(BuiltinFilter): + name = "Smooth More" + # fmt: off + filterargs = (5, 5), 100, 0, ( + 1, 1, 1, 1, 1, + 1, 5, 5, 5, 1, + 1, 5, 44, 5, 1, + 1, 5, 5, 5, 1, + 1, 1, 1, 1, 1, + ) + # fmt: on + + +class Color3DLUT(MultibandFilter): + """Three-dimensional color lookup table. + + Transforms 3-channel pixels using the values of the channels as coordinates + in the 3D lookup table and interpolating the nearest elements. + + This method allows you to apply almost any color transformation + in constant time by using pre-calculated decimated tables. + + .. versionadded:: 5.2.0 + + :param size: Size of the table. One int or tuple of (int, int, int). + Minimal size in any dimension is 2, maximum is 65. + :param table: Flat lookup table. A list of ``channels * size**3`` + float elements or a list of ``size**3`` channels-sized + tuples with floats. Channels are changed first, + then first dimension, then second, then third. + Value 0.0 corresponds lowest value of output, 1.0 highest. + :param channels: Number of channels in the table. Could be 3 or 4. + Default is 3. + :param target_mode: A mode for the result image. Should have not less + than ``channels`` channels. Default is ``None``, + which means that mode wouldn't be changed. + """ + + name = "Color 3D LUT" + + def __init__(self, size, table, channels=3, target_mode=None, **kwargs): + if channels not in (3, 4): + raise ValueError("Only 3 or 4 output channels are supported") + self.size = size = self._check_size(size) + self.channels = channels + self.mode = target_mode + + # Hidden flag `_copy_table=False` could be used to avoid extra copying + # of the table if the table is specially made for the constructor. 
+ copy_table = kwargs.get("_copy_table", True) + items = size[0] * size[1] * size[2] + wrong_size = False + + if numpy and isinstance(table, numpy.ndarray): + if copy_table: + table = table.copy() + + if table.shape in [ + (items * channels,), + (items, channels), + (size[2], size[1], size[0], channels), + ]: + table = table.reshape(items * channels) + else: + wrong_size = True + + else: + if copy_table: + table = list(table) + + # Convert to a flat list + if table and isinstance(table[0], (list, tuple)): + table, raw_table = [], table + for pixel in raw_table: + if len(pixel) != channels: + raise ValueError( + "The elements of the table should " + "have a length of {}.".format(channels) + ) + table.extend(pixel) + + if wrong_size or len(table) != items * channels: + raise ValueError( + "The table should have either channels * size**3 float items " + "or size**3 items of channels-sized tuples with floats. " + f"Table should be: {channels}x{size[0]}x{size[1]}x{size[2]}. " + f"Actual length: {len(table)}" + ) + self.table = table + + @staticmethod + def _check_size(size): + try: + _, _, _ = size + except ValueError as e: + raise ValueError( + "Size should be either an integer or a tuple of three integers." + ) from e + except TypeError: + size = (size, size, size) + size = [int(x) for x in size] + for size1D in size: + if not 2 <= size1D <= 65: + raise ValueError("Size should be in [2, 65] range.") + return size + + @classmethod + def generate(cls, size, callback, channels=3, target_mode=None): + """Generates new LUT using provided callback. + + :param size: Size of the table. Passed to the constructor. + :param callback: Function with three parameters which correspond + three color channels. Will be called ``size**3`` + times with values from 0.0 to 1.0 and should return + a tuple with ``channels`` elements. + :param channels: The number of channels which should return callback. + :param target_mode: Passed to the constructor of the resulting + lookup table. + """ + size1D, size2D, size3D = cls._check_size(size) + if channels not in (3, 4): + raise ValueError("Only 3 or 4 output channels are supported") + + table = [0] * (size1D * size2D * size3D * channels) + idx_out = 0 + for b in range(size3D): + for g in range(size2D): + for r in range(size1D): + table[idx_out : idx_out + channels] = callback( + r / (size1D - 1), g / (size2D - 1), b / (size3D - 1) + ) + idx_out += channels + + return cls( + (size1D, size2D, size3D), + table, + channels=channels, + target_mode=target_mode, + _copy_table=False, + ) + + def transform(self, callback, with_normals=False, channels=None, target_mode=None): + """Transforms the table values using provided callback and returns + a new LUT with altered values. + + :param callback: A function which takes old lookup table values + and returns a new set of values. The number + of arguments which function should take is + ``self.channels`` or ``3 + self.channels`` + if ``with_normals`` flag is set. + Should return a tuple of ``self.channels`` or + ``channels`` elements if it is set. + :param with_normals: If true, ``callback`` will be called with + coordinates in the color cube as the first + three arguments. Otherwise, ``callback`` + will be called only with actual color values. + :param channels: The number of channels in the resulting lookup table. + :param target_mode: Passed to the constructor of the resulting + lookup table. 
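# Hedged sketch of Color3DLUT.generate() and filter() as documented above;
# the table size 17 and the red/blue channel swap are arbitrary illustrations.
from PIL import Image, ImageFilter

swap_rb = ImageFilter.Color3DLUT.generate(17, lambda r, g, b: (b, g, r))
im = Image.open("example.jpg").convert("RGB")
im.filter(swap_rb).save("swapped.jpg")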
+ """ + if channels not in (None, 3, 4): + raise ValueError("Only 3 or 4 output channels are supported") + ch_in = self.channels + ch_out = channels or ch_in + size1D, size2D, size3D = self.size + + table = [0] * (size1D * size2D * size3D * ch_out) + idx_in = 0 + idx_out = 0 + for b in range(size3D): + for g in range(size2D): + for r in range(size1D): + values = self.table[idx_in : idx_in + ch_in] + if with_normals: + values = callback( + r / (size1D - 1), + g / (size2D - 1), + b / (size3D - 1), + *values, + ) + else: + values = callback(*values) + table[idx_out : idx_out + ch_out] = values + idx_in += ch_in + idx_out += ch_out + + return type(self)( + self.size, + table, + channels=ch_out, + target_mode=target_mode or self.mode, + _copy_table=False, + ) + + def __repr__(self): + r = [ + f"{self.__class__.__name__} from {self.table.__class__.__name__}", + "size={:d}x{:d}x{:d}".format(*self.size), + f"channels={self.channels:d}", + ] + if self.mode: + r.append(f"target_mode={self.mode}") + return "<{}>".format(" ".join(r)) + + def filter(self, image): + from . import Image + + return image.color_lut_3d( + self.mode or image.mode, + Image.LINEAR, + self.channels, + self.size[0], + self.size[1], + self.size[2], + self.table, + ) diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageFont.py b/minor_project/lib/python3.6/site-packages/PIL/ImageFont.py new file mode 100644 index 0000000..c48d898 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageFont.py @@ -0,0 +1,1057 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIL raster font management +# +# History: +# 1996-08-07 fl created (experimental) +# 1997-08-25 fl minor adjustments to handle fonts from pilfont 0.3 +# 1999-02-06 fl rewrote most font management stuff in C +# 1999-03-17 fl take pth files into account in load_path (from Richard Jones) +# 2001-02-17 fl added freetype support +# 2001-05-09 fl added TransposedFont wrapper class +# 2002-03-04 fl make sure we have a "L" or "1" font +# 2002-12-04 fl skip non-directory entries in the system path +# 2003-04-29 fl add embedded default font +# 2003-09-27 fl added support for truetype charmap encodings +# +# Todo: +# Adapt to PILFONT2 format (16-bit fonts, compressed, single file) +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import base64 +import os +import sys +import warnings +from io import BytesIO + +from . import Image, features +from ._util import isDirectory, isPath + +LAYOUT_BASIC = 0 +LAYOUT_RAQM = 1 + + +class _imagingft_not_installed: + # module placeholder + def __getattr__(self, id): + raise ImportError("The _imagingft C module is not installed") + + +try: + from . import _imagingft as core +except ImportError: + core = _imagingft_not_installed() + + +# FIXME: add support for pilfont2 format (see FontFile.py) + +# -------------------------------------------------------------------- +# Font metrics format: +# "PILfont" LF +# fontdescriptor LF +# (optional) key=value... LF +# "DATA" LF +# binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox) +# +# To place a character, cut out srcbox and paste at dstbox, +# relative to the character position. Then move the character +# position according to dx, dy. 
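# Hedged sketch of loading a bitmap font in the PILfont format sketched above;
# "courB08.pil" (with its glyph image alongside it) is an assumed file.
from PIL import ImageFont

font = ImageFont.load("courB08.pil")
width, height = font.getsize("Hello")
mask = font.getmask("Hello")      # internal PIL storage, mode "1" or "L"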
+# -------------------------------------------------------------------- + + +class ImageFont: + "PIL font wrapper" + + def _load_pilfont(self, filename): + + with open(filename, "rb") as fp: + image = None + for ext in (".png", ".gif", ".pbm"): + if image: + image.close() + try: + fullname = os.path.splitext(filename)[0] + ext + image = Image.open(fullname) + except Exception: + pass + else: + if image and image.mode in ("1", "L"): + break + else: + if image: + image.close() + raise OSError("cannot find glyph data file") + + self.file = fullname + + self._load_pilfont_data(fp, image) + image.close() + + def _load_pilfont_data(self, file, image): + + # read PILfont header + if file.readline() != b"PILfont\n": + raise SyntaxError("Not a PILfont file") + file.readline().split(b";") + self.info = [] # FIXME: should be a dictionary + while True: + s = file.readline() + if not s or s == b"DATA\n": + break + self.info.append(s) + + # read PILfont metrics + data = file.read(256 * 20) + + # check image + if image.mode not in ("1", "L"): + raise TypeError("invalid font image mode") + + image.load() + + self.font = Image.core.font(image.im, data) + + def getsize(self, text, *args, **kwargs): + """ + Returns width and height (in pixels) of given text. + + :param text: Text to measure. + + :return: (width, height) + """ + return self.font.getsize(text) + + def getmask(self, text, mode="", *args, **kwargs): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :return: An internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module. + """ + return self.font.getmask(text, mode) + + +## +# Wrapper for FreeType fonts. Application code should use the +# truetype factory function to create font objects. + + +class FreeTypeFont: + "FreeType font wrapper (requires _imagingft service)" + + def __init__(self, font=None, size=10, index=0, encoding="", layout_engine=None): + # FIXME: use service provider instead + + self.path = font + self.size = size + self.index = index + self.encoding = encoding + + try: + from packaging.version import parse as parse_version + except ImportError: + pass + else: + freetype_version = parse_version(features.version_module("freetype2")) + if freetype_version < parse_version("2.8"): + warnings.warn( + "Support for FreeType 2.7 is deprecated and will be removed" + " in Pillow 9 (2022-01-02). 
Please upgrade to FreeType 2.8 " + "or newer, preferably FreeType 2.10.4 which fixes " + "CVE-2020-15999.", + DeprecationWarning, + ) + + if layout_engine not in (LAYOUT_BASIC, LAYOUT_RAQM): + layout_engine = LAYOUT_BASIC + if core.HAVE_RAQM: + layout_engine = LAYOUT_RAQM + elif layout_engine == LAYOUT_RAQM and not core.HAVE_RAQM: + layout_engine = LAYOUT_BASIC + + self.layout_engine = layout_engine + + def load_from_bytes(f): + self.font_bytes = f.read() + self.font = core.getfont( + "", size, index, encoding, self.font_bytes, layout_engine + ) + + if isPath(font): + if sys.platform == "win32": + font_bytes_path = font if isinstance(font, bytes) else font.encode() + try: + font_bytes_path.decode("ascii") + except UnicodeDecodeError: + # FreeType cannot load fonts with non-ASCII characters on Windows + # So load it into memory first + with open(font, "rb") as f: + load_from_bytes(f) + return + self.font = core.getfont( + font, size, index, encoding, layout_engine=layout_engine + ) + else: + load_from_bytes(font) + + def _multiline_split(self, text): + split_character = "\n" if isinstance(text, str) else b"\n" + return text.split(split_character) + + def getname(self): + """ + :return: A tuple of the font family (e.g. Helvetica) and the font style + (e.g. Bold) + """ + return self.font.family, self.font.style + + def getmetrics(self): + """ + :return: A tuple of the font ascent (the distance from the baseline to + the highest outline point) and descent (the distance from the + baseline to the lowest outline point, a negative value) + """ + return self.font.ascent, self.font.descent + + def getlength(self, text, mode="", direction=None, features=None, language=None): + """ + Returns length (in pixels with 1/64 precision) of given text when rendered + in font with provided direction, features, and language. + + This is the amount by which following text should be offset. + Text bounding box may extend past the length in some fonts, + e.g. when using italics or accents. + + The result is returned as a float; it is a whole number if using basic layout. + + Note that the sum of two lengths may not equal the length of a concatenated + string due to kerning. If you need to adjust for kerning, include the following + character and subtract its length. + + For example, instead of + + .. code-block:: python + + hello = font.getlength("Hello") + world = font.getlength("World") + hello_world = hello + world # not adjusted for kerning + assert hello_world == font.getlength("HelloWorld") # may fail + + use + + .. code-block:: python + + hello = font.getlength("HelloW") - font.getlength("W") # adjusted for kerning + world = font.getlength("World") + hello_world = hello + world # adjusted for kerning + assert hello_world == font.getlength("HelloWorld") # True + + or disable kerning with (requires libraqm) + + .. code-block:: python + + hello = draw.textlength("Hello", font, features=["-kern"]) + world = draw.textlength("World", font, features=["-kern"]) + hello_world = hello + world # kerning is disabled, no need to adjust + assert hello_world == draw.textlength("HelloWorld", font, features=["-kern"]) + + .. versionadded:: 8.0.0 + + :param text: Text to measure. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). 
+ Requires libraqm. + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + :return: Width for horizontal, height for vertical text. + """ + return self.font.getlength(text, mode, direction, features, language) / 64 + + def getbbox( + self, + text, + mode="", + direction=None, + features=None, + language=None, + stroke_width=0, + anchor=None, + ): + """ + Returns bounding box (in pixels) of given text relative to given anchor + when rendered in font with provided direction, features, and language. + + Use :py:meth:`getlength()` to get the offset of following text with + 1/64 pixel precision. The bounding box includes extra margins for + some fonts, e.g. italics or accents. + + .. versionadded:: 8.0.0 + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + :param stroke_width: The width of the text stroke. + + :param anchor: The text anchor alignment. Determines the relative location of + the anchor to the text. The default alignment is top left. + See :ref:`text-anchors` for valid values. + + :return: ``(left, top, right, bottom)`` bounding box + """ + size, offset = self.font.getsize( + text, mode, direction, features, language, anchor + ) + left, top = offset[0] - stroke_width, offset[1] - stroke_width + width, height = size[0] + 2 * stroke_width, size[1] + 2 * stroke_width + return left, top, left + width, top + height + + def getsize( + self, text, direction=None, features=None, language=None, stroke_width=0 + ): + """ + Returns width and height (in pixels) of given text if rendered in font with + provided direction, features, and language. + + Use :py:meth:`getlength()` to measure the offset of following text with + 1/64 pixel precision. 
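# A minimal sketch of the measuring methods documented above;
# "DejaVuSans.ttf" is an assumed font file available on the system.
from PIL import ImageFont

font = ImageFont.truetype("DejaVuSans.ttf", 24)
advance = font.getlength("Hello")                 # float, 1/64 px precision
left, top, right, bottom = font.getbbox("Hello")  # box relative to the anchor
print(advance, (right - left, bottom - top))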
+ Use :py:meth:`getbbox()` to get the exact bounding box based on an anchor. + + .. note:: For historical reasons this function measures text height from + the ascender line instead of the top, see :ref:`text-anchors`. + If you wish to measure text height from the top, it is recommended + to use the bottom value of :meth:`getbbox` with ``anchor='lt'`` instead. + + :param text: Text to measure. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :return: (width, height) + """ + # vertical offset is added for historical reasons + # see https://github.com/python-pillow/Pillow/pull/4910#discussion_r486682929 + size, offset = self.font.getsize(text, "L", direction, features, language) + return ( + size[0] + stroke_width * 2, + size[1] + stroke_width * 2 + offset[1], + ) + + def getsize_multiline( + self, + text, + direction=None, + spacing=4, + features=None, + language=None, + stroke_width=0, + ): + """ + Returns width and height (in pixels) of given text if rendered in font + with provided direction, features, and language, while respecting + newline characters. + + :param text: Text to measure. + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + :param spacing: The vertical gap between lines, defaulting to 4 pixels. + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. 
versionadded:: 6.2.0 + + :return: (width, height) + """ + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self.getsize("A", stroke_width=stroke_width)[1] + spacing + for line in lines: + line_width, line_height = self.getsize( + line, direction, features, language, stroke_width + ) + max_width = max(max_width, line_width) + + return max_width, len(lines) * line_spacing - spacing + + def getoffset(self, text): + """ + Returns the offset of given text. This is the gap between the + starting coordinate and the first marking. Note that this gap is + included in the result of :py:func:`~PIL.ImageFont.FreeTypeFont.getsize`. + + :param text: Text to measure. + + :return: A tuple of the x and y offset + """ + return self.font.getsize(text)[1] + + def getmask( + self, + text, + mode="", + direction=None, + features=None, + language=None, + stroke_width=0, + anchor=None, + ink=0, + ): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. If the font has embedded color data, the bitmap + should have mode ``RGBA``. Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :param anchor: The text anchor alignment. Determines the relative location of + the anchor to the text. The default alignment is top left. + See :ref:`text-anchors` for valid values. + + .. versionadded:: 8.0.0 + + :param ink: Foreground ink for rendering in RGBA mode. + + .. versionadded:: 8.0.0 + + :return: An internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module. + """ + return self.getmask2( + text, + mode, + direction=direction, + features=features, + language=language, + stroke_width=stroke_width, + anchor=anchor, + ink=ink, + )[0] + + def getmask2( + self, + text, + mode="", + fill=Image.core.fill, + direction=None, + features=None, + language=None, + stroke_width=0, + anchor=None, + ink=0, + *args, + **kwargs, + ): + """ + Create a bitmap for the text. + + If the font uses antialiasing, the bitmap should have mode ``L`` and use a + maximum value of 255. If the font has embedded color data, the bitmap + should have mode ``RGBA``. 
Otherwise, it should have mode ``1``. + + :param text: Text to render. + :param mode: Used by some graphics drivers to indicate what mode the + driver prefers; if empty, the renderer may return either + mode. Note that the mode is always a string, to simplify + C-level implementations. + + .. versionadded:: 1.1.5 + + :param direction: Direction of the text. It can be 'rtl' (right to + left), 'ltr' (left to right) or 'ttb' (top to bottom). + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param features: A list of OpenType font features to be used during text + layout. This is usually used to turn on optional + font features that are not enabled by default, + for example 'dlig' or 'ss01', but can be also + used to turn off default font features for + example '-liga' to disable ligatures or '-kern' + to disable kerning. To get all supported + features, see + https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist + Requires libraqm. + + .. versionadded:: 4.2.0 + + :param language: Language of the text. Different languages may use + different glyph shapes or ligatures. This parameter tells + the font which language the text is in, and to apply the + correct substitutions as appropriate, if available. + It should be a `BCP 47 language code + `_ + Requires libraqm. + + .. versionadded:: 6.0.0 + + :param stroke_width: The width of the text stroke. + + .. versionadded:: 6.2.0 + + :param anchor: The text anchor alignment. Determines the relative location of + the anchor to the text. The default alignment is top left. + See :ref:`text-anchors` for valid values. + + .. versionadded:: 8.0.0 + + :param ink: Foreground ink for rendering in RGBA mode. + + .. versionadded:: 8.0.0 + + :return: A tuple of an internal PIL storage memory instance as defined by the + :py:mod:`PIL.Image.core` interface module, and the text offset, the + gap between the starting coordinate and the first marking + """ + size, offset = self.font.getsize( + text, mode, direction, features, language, anchor + ) + size = size[0] + stroke_width * 2, size[1] + stroke_width * 2 + offset = offset[0] - stroke_width, offset[1] - stroke_width + im = fill("RGBA" if mode == "RGBA" else "L", size, 0) + self.font.render( + text, im.id, mode, direction, features, language, stroke_width, ink + ) + return im, offset + + def font_variant( + self, font=None, size=None, index=None, encoding=None, layout_engine=None + ): + """ + Create a copy of this FreeTypeFont object, + using any specified arguments to override the settings. + + Parameters are identical to the parameters used to initialize this + object. + + :return: A FreeTypeFont object. + """ + return FreeTypeFont( + font=self.path if font is None else font, + size=self.size if size is None else size, + index=self.index if index is None else index, + encoding=self.encoding if encoding is None else encoding, + layout_engine=layout_engine or self.layout_engine, + ) + + def get_variation_names(self): + """ + :returns: A list of the named styles in a variation font. + :exception OSError: If the font is not a variation font. + """ + try: + names = self.font.getvarnames() + except AttributeError as e: + raise NotImplementedError("FreeType 2.9.1 or greater is required") from e + return [name.replace(b"\x00", b"") for name in names] + + def set_variation_by_name(self, name): + """ + :param name: The name of the style. + :exception OSError: If the font is not a variation font. 
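# Hedged sketch of the variation-font helpers above; "AdobeVFPrototype.ttf"
# is an assumed variable font, and both calls need FreeType 2.9.1 or newer.
from PIL import ImageFont

font = ImageFont.truetype("AdobeVFPrototype.ttf", 36)
print(font.get_variation_names())      # e.g. [b'Bold', b'Black', ...]
font.set_variation_by_name("Bold")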
+ """ + names = self.get_variation_names() + if not isinstance(name, bytes): + name = name.encode() + index = names.index(name) + + if index == getattr(self, "_last_variation_index", None): + # When the same name is set twice in a row, + # there is an 'unknown freetype error' + # https://savannah.nongnu.org/bugs/?56186 + return + self._last_variation_index = index + + self.font.setvarname(index) + + def get_variation_axes(self): + """ + :returns: A list of the axes in a variation font. + :exception OSError: If the font is not a variation font. + """ + try: + axes = self.font.getvaraxes() + except AttributeError as e: + raise NotImplementedError("FreeType 2.9.1 or greater is required") from e + for axis in axes: + axis["name"] = axis["name"].replace(b"\x00", b"") + return axes + + def set_variation_by_axes(self, axes): + """ + :param axes: A list of values for each axis. + :exception OSError: If the font is not a variation font. + """ + try: + self.font.setvaraxes(axes) + except AttributeError as e: + raise NotImplementedError("FreeType 2.9.1 or greater is required") from e + + +class TransposedFont: + "Wrapper for writing rotated or mirrored text" + + def __init__(self, font, orientation=None): + """ + Wrapper that creates a transposed font from any existing font + object. + + :param font: A font object. + :param orientation: An optional orientation. If given, this should + be one of Image.FLIP_LEFT_RIGHT, Image.FLIP_TOP_BOTTOM, + Image.ROTATE_90, Image.ROTATE_180, or Image.ROTATE_270. + """ + self.font = font + self.orientation = orientation # any 'transpose' argument, or None + + def getsize(self, text, *args, **kwargs): + w, h = self.font.getsize(text) + if self.orientation in (Image.ROTATE_90, Image.ROTATE_270): + return h, w + return w, h + + def getmask(self, text, mode="", *args, **kwargs): + im = self.font.getmask(text, mode, *args, **kwargs) + if self.orientation is not None: + return im.transpose(self.orientation) + return im + + +def load(filename): + """ + Load a font file. This function loads a font object from the given + bitmap font file, and returns the corresponding font object. + + :param filename: Name of font file. + :return: A font object. + :exception OSError: If the file could not be read. + """ + f = ImageFont() + f._load_pilfont(filename) + return f + + +def truetype(font=None, size=10, index=0, encoding="", layout_engine=None): + """ + Load a TrueType or OpenType font from a file or file-like object, + and create a font object. + This function loads a font object from the given file or file-like + object, and creates a font object for a font of the given size. + + Pillow uses FreeType to open font files. If you are opening many fonts + simultaneously on Windows, be aware that Windows limits the number of files + that can be open in C at once to 512. If you approach that limit, an + ``OSError`` may be thrown, reporting that FreeType "cannot open resource". + + This function requires the _imagingft service. + + :param font: A filename or file-like object containing a TrueType font. + If the file is not found in this filename, the loader may also + search in other directories, such as the :file:`fonts/` + directory on Windows or :file:`/Library/Fonts/`, + :file:`/System/Library/Fonts/` and :file:`~/Library/Fonts/` on + macOS. + + :param size: The requested size, in points. + :param index: Which font face to load (default is first available face). + :param encoding: Which font encoding to use (default is Unicode). 
Possible + encodings include (see the FreeType documentation for more + information): + + * "unic" (Unicode) + * "symb" (Microsoft Symbol) + * "ADOB" (Adobe Standard) + * "ADBE" (Adobe Expert) + * "ADBC" (Adobe Custom) + * "armn" (Apple Roman) + * "sjis" (Shift JIS) + * "gb " (PRC) + * "big5" + * "wans" (Extended Wansung) + * "joha" (Johab) + * "lat1" (Latin-1) + + This specifies the character set to use. It does not alter the + encoding of any text provided in subsequent operations. + :param layout_engine: Which layout engine to use, if available: + :data:`.ImageFont.LAYOUT_BASIC` or :data:`.ImageFont.LAYOUT_RAQM`. + + You can check support for Raqm layout using + :py:func:`PIL.features.check_feature` with ``feature="raqm"``. + + .. versionadded:: 4.2.0 + :return: A font object. + :exception OSError: If the file could not be read. + """ + + def freetype(font): + return FreeTypeFont(font, size, index, encoding, layout_engine) + + try: + return freetype(font) + except OSError: + if not isPath(font): + raise + ttf_filename = os.path.basename(font) + + dirs = [] + if sys.platform == "win32": + # check the windows font repository + # NOTE: must use uppercase WINDIR, to work around bugs in + # 1.5.2's os.environ.get() + windir = os.environ.get("WINDIR") + if windir: + dirs.append(os.path.join(windir, "fonts")) + elif sys.platform in ("linux", "linux2"): + lindirs = os.environ.get("XDG_DATA_DIRS", "") + if not lindirs: + # According to the freedesktop spec, XDG_DATA_DIRS should + # default to /usr/share + lindirs = "/usr/share" + dirs += [os.path.join(lindir, "fonts") for lindir in lindirs.split(":")] + elif sys.platform == "darwin": + dirs += [ + "/Library/Fonts", + "/System/Library/Fonts", + os.path.expanduser("~/Library/Fonts"), + ] + + ext = os.path.splitext(ttf_filename)[1] + first_font_with_a_different_extension = None + for directory in dirs: + for walkroot, walkdir, walkfilenames in os.walk(directory): + for walkfilename in walkfilenames: + if ext and walkfilename == ttf_filename: + return freetype(os.path.join(walkroot, walkfilename)) + elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + if os.path.splitext(fontpath)[1] == ".ttf": + return freetype(fontpath) + if not ext and first_font_with_a_different_extension is None: + first_font_with_a_different_extension = fontpath + if first_font_with_a_different_extension: + return freetype(first_font_with_a_different_extension) + raise + + +def load_path(filename): + """ + Load font file. Same as :py:func:`~PIL.ImageFont.load`, but searches for a + bitmap font along the Python path. + + :param filename: Name of font file. + :return: A font object. + :exception OSError: If the file could not be read. + """ + for directory in sys.path: + if isDirectory(directory): + if not isinstance(filename, str): + filename = filename.decode("utf-8") + try: + return load(os.path.join(directory, filename)) + except OSError: + pass + raise OSError("cannot find font file") + + +def load_default(): + """Load a "better than nothing" default font. + + .. versionadded:: 1.1.4 + + :return: A font object. 
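# A small sketch of the loader fallback pattern implied above: try a TrueType
# file first ("arial.ttf" is an assumption), else fall back to the embedded
# default bitmap font.
from PIL import ImageFont

try:
    font = ImageFont.truetype("arial.ttf", 20)
except OSError:
    font = ImageFont.load_default()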
+ """ + f = ImageFont() + f._load_pilfont_data( + # courB08 + BytesIO( + base64.b64decode( + b""" +UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA +BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL +AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA +AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB +ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A +BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB +//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA +AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH +AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA +ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv +AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/ +/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5 +AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA +AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG +AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA +BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA +AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA +2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF +AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA//// ++gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA +////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA +BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv +AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA +AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA +AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA +BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP// +//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA +AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF +AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB +mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn +AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA +AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7 +AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA 
+Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB +//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA +AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ +AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC +DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ +AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/ ++wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5 +AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/ +///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG +AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA +BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA +Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC +eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG +AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA//// ++gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA +////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA +BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT +AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A +AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA +Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA +Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP// +//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA +AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ +AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA +LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5 +AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA +AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5 +AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA +AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG +AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA +EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK +AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA +pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG 
+AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA//// ++QAGAAIAzgAKANUAEw== +""" + ) + ), + Image.open( + BytesIO( + base64.b64decode( + b""" +iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u +Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9 +M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g +LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F +IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA +Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791 +NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx +in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9 +SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/AqDxW+D3AABAAbUh/QMnbQag/gAY +AYDAAACgtgD/gOqAAAB5IA/8AAAk+n9w0AAA8AAAmFRJuPo27ciC0cD5oeW4E7KA/wD3ECMAn2tt +y8PgwH8AfAxFzC0JzeAMtratAsC/ffwAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5chkG +ABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9VeqY +lODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzAjv/H +/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsMZjk3 +AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu5/47 +c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAAAAD/ +/yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB81iSw +pEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBYM1Tv +oJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoaaGMR +evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA +AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v// +Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR +w7IkEbzhVQAAAABJRU5ErkJggg== +""" + ) + ) + ), + ) + return f diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageGrab.py b/minor_project/lib/python3.6/site-packages/PIL/ImageGrab.py new file mode 100644 index 0000000..b93ec3f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageGrab.py @@ -0,0 +1,120 @@ +# +# The Python Imaging Library +# $Id$ +# +# screen grabber +# +# History: +# 2001-04-26 fl created +# 2001-09-17 fl use builtin driver, if present +# 2002-11-19 fl added grabclipboard support +# +# Copyright (c) 2001-2002 by Secret Labs AB +# Copyright (c) 2001-2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import sys + +from . 
import Image + +if sys.platform == "darwin": + import os + import subprocess + import tempfile + + +def grab(bbox=None, include_layered_windows=False, all_screens=False, xdisplay=None): + if xdisplay is None: + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp(".png") + os.close(fh) + subprocess.call(["screencapture", "-x", filepath]) + im = Image.open(filepath) + im.load() + os.unlink(filepath) + if bbox: + im_cropped = im.crop(bbox) + im.close() + return im_cropped + return im + elif sys.platform == "win32": + offset, size, data = Image.core.grabscreen_win32( + include_layered_windows, all_screens + ) + im = Image.frombytes( + "RGB", + size, + data, + # RGB, 32-bit line padding, origin lower left corner + "raw", + "BGR", + (size[0] * 3 + 3) & -4, + -1, + ) + if bbox: + x0, y0 = offset + left, top, right, bottom = bbox + im = im.crop((left - x0, top - y0, right - x0, bottom - y0)) + return im + # use xdisplay=None for default display on non-win32/macOS systems + if not Image.core.HAVE_XCB: + raise OSError("Pillow was built without XCB support") + size, data = Image.core.grabscreen_x11(xdisplay) + im = Image.frombytes("RGB", size, data, "raw", "BGRX", size[0] * 4, 1) + if bbox: + im = im.crop(bbox) + return im + + +def grabclipboard(): + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp(".jpg") + os.close(fh) + commands = [ + 'set theFile to (open for access POSIX file "' + + filepath + + '" with write permission)', + "try", + " write (the clipboard as JPEG picture) to theFile", + "end try", + "close access theFile", + ] + script = ["osascript"] + for command in commands: + script += ["-e", command] + subprocess.call(script) + + im = None + if os.stat(filepath).st_size != 0: + im = Image.open(filepath) + im.load() + os.unlink(filepath) + return im + elif sys.platform == "win32": + fmt, data = Image.core.grabclipboard_win32() + if fmt == "file": # CF_HDROP + import struct + + o = struct.unpack_from("I", data)[0] + if data[16] != 0: + files = data[o:].decode("utf-16le").split("\0") + else: + files = data[o:].decode("mbcs").split("\0") + return files[: files.index("")] + if isinstance(data, bytes): + import io + + data = io.BytesIO(data) + if fmt == "png": + from . import PngImagePlugin + + return PngImagePlugin.PngImageFile(data) + elif fmt == "DIB": + from . import BmpImagePlugin + + return BmpImagePlugin.DibImageFile(data) + return None + else: + raise NotImplementedError("ImageGrab.grabclipboard() is macOS and Windows only") diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageMath.py b/minor_project/lib/python3.6/site-packages/PIL/ImageMath.py new file mode 100644 index 0000000..7f9c88e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageMath.py @@ -0,0 +1,253 @@ +# +# The Python Imaging Library +# $Id$ +# +# a simple math add-on for the Python Imaging Library +# +# History: +# 1999-02-15 fl Original PIL Plus release +# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6 +# 2005-09-12 fl Fixed int() and float() for Python 2.4.1 +# +# Copyright (c) 1999-2005 by Secret Labs AB +# Copyright (c) 2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import builtins + +from . 
import Image, _imagingmath + +VERBOSE = 0 + + +def _isconstant(v): + return isinstance(v, (int, float)) + + +class _Operand: + """Wraps an image operand, providing standard operators""" + + def __init__(self, im): + self.im = im + + def __fixup(self, im1): + # convert image to suitable mode + if isinstance(im1, _Operand): + # argument was an image. + if im1.im.mode in ("1", "L"): + return im1.im.convert("I") + elif im1.im.mode in ("I", "F"): + return im1.im + else: + raise ValueError(f"unsupported mode: {im1.im.mode}") + else: + # argument was a constant + if _isconstant(im1) and self.im.mode in ("1", "L", "I"): + return Image.new("I", self.im.size, im1) + else: + return Image.new("F", self.im.size, im1) + + def apply(self, op, im1, im2=None, mode=None): + im1 = self.__fixup(im1) + if im2 is None: + # unary operation + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + try: + op = getattr(_imagingmath, op + "_" + im1.mode) + except AttributeError as e: + raise TypeError(f"bad operand type for '{op}'") from e + _imagingmath.unop(op, out.im.id, im1.im.id) + else: + # binary operation + im2 = self.__fixup(im2) + if im1.mode != im2.mode: + # convert both arguments to floating point + if im1.mode != "F": + im1 = im1.convert("F") + if im2.mode != "F": + im2 = im2.convert("F") + if im1.mode != im2.mode: + raise ValueError("mode mismatch") + if im1.size != im2.size: + # crop both arguments to a common size + size = (min(im1.size[0], im2.size[0]), min(im1.size[1], im2.size[1])) + if im1.size != size: + im1 = im1.crop((0, 0) + size) + if im2.size != size: + im2 = im2.crop((0, 0) + size) + out = Image.new(mode or im1.mode, size, None) + else: + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + im2.load() + try: + op = getattr(_imagingmath, op + "_" + im1.mode) + except AttributeError as e: + raise TypeError(f"bad operand type for '{op}'") from e + _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id) + return _Operand(out) + + # unary operators + def __bool__(self): + # an image is "true" if it contains at least one non-zero pixel + return self.im.getbbox() is not None + + def __abs__(self): + return self.apply("abs", self) + + def __pos__(self): + return self + + def __neg__(self): + return self.apply("neg", self) + + # binary operators + def __add__(self, other): + return self.apply("add", self, other) + + def __radd__(self, other): + return self.apply("add", other, self) + + def __sub__(self, other): + return self.apply("sub", self, other) + + def __rsub__(self, other): + return self.apply("sub", other, self) + + def __mul__(self, other): + return self.apply("mul", self, other) + + def __rmul__(self, other): + return self.apply("mul", other, self) + + def __truediv__(self, other): + return self.apply("div", self, other) + + def __rtruediv__(self, other): + return self.apply("div", other, self) + + def __mod__(self, other): + return self.apply("mod", self, other) + + def __rmod__(self, other): + return self.apply("mod", other, self) + + def __pow__(self, other): + return self.apply("pow", self, other) + + def __rpow__(self, other): + return self.apply("pow", other, self) + + # bitwise + def __invert__(self): + return self.apply("invert", self) + + def __and__(self, other): + return self.apply("and", self, other) + + def __rand__(self, other): + return self.apply("and", other, self) + + def __or__(self, other): + return self.apply("or", self, other) + + def __ror__(self, other): + return self.apply("or", other, self) + + def __xor__(self, other): + return 
self.apply("xor", self, other) + + def __rxor__(self, other): + return self.apply("xor", other, self) + + def __lshift__(self, other): + return self.apply("lshift", self, other) + + def __rshift__(self, other): + return self.apply("rshift", self, other) + + # logical + def __eq__(self, other): + return self.apply("eq", self, other) + + def __ne__(self, other): + return self.apply("ne", self, other) + + def __lt__(self, other): + return self.apply("lt", self, other) + + def __le__(self, other): + return self.apply("le", self, other) + + def __gt__(self, other): + return self.apply("gt", self, other) + + def __ge__(self, other): + return self.apply("ge", self, other) + + +# conversions +def imagemath_int(self): + return _Operand(self.im.convert("I")) + + +def imagemath_float(self): + return _Operand(self.im.convert("F")) + + +# logical +def imagemath_equal(self, other): + return self.apply("eq", self, other, mode="I") + + +def imagemath_notequal(self, other): + return self.apply("ne", self, other, mode="I") + + +def imagemath_min(self, other): + return self.apply("min", self, other) + + +def imagemath_max(self, other): + return self.apply("max", self, other) + + +def imagemath_convert(self, mode): + return _Operand(self.im.convert(mode)) + + +ops = {} +for k, v in list(globals().items()): + if k[:10] == "imagemath_": + ops[k[10:]] = v + + +def eval(expression, _dict={}, **kw): + """ + Evaluates an image expression. + + :param expression: A string containing a Python-style expression. + :param options: Values to add to the evaluation context. You + can either use a dictionary, or one or more keyword + arguments. + :return: The evaluated expression. This is usually an image object, but can + also be an integer, a floating point value, or a pixel tuple, + depending on the expression. + """ + + # build execution namespace + args = ops.copy() + args.update(_dict) + args.update(kw) + for k, v in list(args.items()): + if hasattr(v, "im"): + args[k] = _Operand(v) + + out = builtins.eval(expression, args) + try: + return out.im + except AttributeError: + return out diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageMode.py b/minor_project/lib/python3.6/site-packages/PIL/ImageMode.py new file mode 100644 index 0000000..9882883 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageMode.py @@ -0,0 +1,64 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard mode descriptors +# +# History: +# 2006-03-20 fl Added +# +# Copyright (c) 2006 by Secret Labs AB. +# Copyright (c) 2006 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +# mode descriptor cache +_modes = None + + +class ModeDescriptor: + """Wrapper for mode strings.""" + + def __init__(self, mode, bands, basemode, basetype): + self.mode = mode + self.bands = bands + self.basemode = basemode + self.basetype = basetype + + def __str__(self): + return self.mode + + +def getmode(mode): + """Gets a mode descriptor for the given mode.""" + global _modes + if not _modes: + # initialize mode cache + + from . 
import Image + + modes = {} + # core modes + for m, (basemode, basetype, bands) in Image._MODEINFO.items(): + modes[m] = ModeDescriptor(m, bands, basemode, basetype) + # extra experimental modes + modes["RGBa"] = ModeDescriptor("RGBa", ("R", "G", "B", "a"), "RGB", "L") + modes["LA"] = ModeDescriptor("LA", ("L", "A"), "L", "L") + modes["La"] = ModeDescriptor("La", ("L", "a"), "L", "L") + modes["PA"] = ModeDescriptor("PA", ("P", "A"), "RGB", "L") + # mapping modes + for i16mode in ( + "I;16", + "I;16S", + "I;16L", + "I;16LS", + "I;16B", + "I;16BS", + "I;16N", + "I;16NS", + ): + modes[i16mode] = ModeDescriptor(i16mode, ("I",), "L", "L") + # set global mode cache atomically + _modes = modes + return _modes[mode] diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageMorph.py b/minor_project/lib/python3.6/site-packages/PIL/ImageMorph.py new file mode 100644 index 0000000..b76dfa0 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageMorph.py @@ -0,0 +1,245 @@ +# A binary morphology add-on for the Python Imaging Library +# +# History: +# 2014-06-04 Initial version. +# +# Copyright (c) 2014 Dov Grobgeld + +import re + +from . import Image, _imagingmorph + +LUT_SIZE = 1 << 9 + +# fmt: off +ROTATION_MATRIX = [ + 6, 3, 0, + 7, 4, 1, + 8, 5, 2, +] +MIRROR_MATRIX = [ + 2, 1, 0, + 5, 4, 3, + 8, 7, 6, +] +# fmt: on + + +class LutBuilder: + """A class for building a MorphLut from a descriptive language + + The input patterns is a list of a strings sequences like these:: + + 4:(... + .1. + 111)->1 + + (whitespaces including linebreaks are ignored). The option 4 + describes a series of symmetry operations (in this case a + 4-rotation), the pattern is described by: + + - . or X - Ignore + - 1 - Pixel is on + - 0 - Pixel is off + + The result of the operation is described after "->" string. + + The default is to return the current pixel value, which is + returned if no other match is found. + + Operations: + + - 4 - 4 way rotation + - N - Negate + - 1 - Dummy op for no other operation (an op must always be given) + - M - Mirroring + + Example:: + + lb = LutBuilder(patterns = ["4:(... .1. 111)->1"]) + lut = lb.build_lut() + + """ + + def __init__(self, patterns=None, op_name=None): + if patterns is not None: + self.patterns = patterns + else: + self.patterns = [] + self.lut = None + if op_name is not None: + known_patterns = { + "corner": ["1:(... ... ...)->0", "4:(00. 01. ...)->1"], + "dilation4": ["4:(... .0. .1.)->1"], + "dilation8": ["4:(... .0. .1.)->1", "4:(... .0. ..1)->1"], + "erosion4": ["4:(... .1. .0.)->0"], + "erosion8": ["4:(... .1. .0.)->0", "4:(... .1. ..0)->0"], + "edge": [ + "1:(... ... ...)->0", + "4:(.0. .1. ...)->1", + "4:(01. .1. ...)->1", + ], + } + if op_name not in known_patterns: + raise Exception("Unknown pattern " + op_name + "!") + + self.patterns = known_patterns[op_name] + + def add_patterns(self, patterns): + self.patterns += patterns + + def build_default_lut(self): + symbols = [0, 1] + m = 1 << 4 # pos of current pixel + self.lut = bytearray(symbols[(i & m) > 0] for i in range(LUT_SIZE)) + + def get_lut(self): + return self.lut + + def _string_permute(self, pattern, permutation): + """string_permute takes a pattern and a permutation and returns the + string permuted according to the permutation list. 
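+
+        A small sketch of the behaviour (added for illustration, not part
+        of the original source)::
+
+            LutBuilder()._string_permute("012345678", ROTATION_MATRIX)
+            # -> "630741852": the 3x3 neighbourhood rotated by 90 degrees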
+ """ + assert len(permutation) == 9 + return "".join(pattern[p] for p in permutation) + + def _pattern_permute(self, basic_pattern, options, basic_result): + """pattern_permute takes a basic pattern and its result and clones + the pattern according to the modifications described in the $options + parameter. It returns a list of all cloned patterns.""" + patterns = [(basic_pattern, basic_result)] + + # rotations + if "4" in options: + res = patterns[-1][1] + for i in range(4): + patterns.append( + (self._string_permute(patterns[-1][0], ROTATION_MATRIX), res) + ) + # mirror + if "M" in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + patterns.append((self._string_permute(pattern, MIRROR_MATRIX), res)) + + # negate + if "N" in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + # Swap 0 and 1 + pattern = pattern.replace("0", "Z").replace("1", "0").replace("Z", "1") + res = 1 - int(res) + patterns.append((pattern, res)) + + return patterns + + def build_lut(self): + """Compile all patterns into a morphology lut. + + TBD :Build based on (file) morphlut:modify_lut + """ + self.build_default_lut() + patterns = [] + + # Parse and create symmetries of the patterns strings + for p in self.patterns: + m = re.search(r"(\w*):?\s*\((.+?)\)\s*->\s*(\d)", p.replace("\n", "")) + if not m: + raise Exception('Syntax error in pattern "' + p + '"') + options = m.group(1) + pattern = m.group(2) + result = int(m.group(3)) + + # Get rid of spaces + pattern = pattern.replace(" ", "").replace("\n", "") + + patterns += self._pattern_permute(pattern, options, result) + + # compile the patterns into regular expressions for speed + for i, pattern in enumerate(patterns): + p = pattern[0].replace(".", "X").replace("X", "[01]") + p = re.compile(p) + patterns[i] = (p, pattern[1]) + + # Step through table and find patterns that match. + # Note that all the patterns are searched. The last one + # caught overrides + for i in range(LUT_SIZE): + # Build the bit pattern + bitpattern = bin(i)[2:] + bitpattern = ("0" * (9 - len(bitpattern)) + bitpattern)[::-1] + + for p, r in patterns: + if p.match(bitpattern): + self.lut[i] = [0, 1][r] + + return self.lut + + +class MorphOp: + """A class for binary morphological operators""" + + def __init__(self, lut=None, op_name=None, patterns=None): + """Create a binary morphological operator""" + self.lut = lut + if op_name is not None: + self.lut = LutBuilder(op_name=op_name).build_lut() + elif patterns is not None: + self.lut = LutBuilder(patterns=patterns).build_lut() + + def apply(self, image): + """Run a single morphological operation on an image + + Returns a tuple of the number of changed pixels and the + morphed image""" + if self.lut is None: + raise Exception("No operator loaded") + + if image.mode != "L": + raise Exception("Image must be binary, meaning it must use mode L") + outimage = Image.new(image.mode, image.size, None) + count = _imagingmorph.apply(bytes(self.lut), image.im.id, outimage.im.id) + return count, outimage + + def match(self, image): + """Get a list of coordinates matching the morphological operation on + an image. + + Returns a list of tuples of (x,y) coordinates + of all matching pixels. 
See :ref:`coordinate-system`.""" + if self.lut is None: + raise Exception("No operator loaded") + + if image.mode != "L": + raise Exception("Image must be binary, meaning it must use mode L") + return _imagingmorph.match(bytes(self.lut), image.im.id) + + def get_on_pixels(self, image): + """Get a list of all turned on pixels in a binary image + + Returns a list of tuples of (x,y) coordinates + of all matching pixels. See :ref:`coordinate-system`.""" + + if image.mode != "L": + raise Exception("Image must be binary, meaning it must use mode L") + return _imagingmorph.get_on_pixels(image.im.id) + + def load_lut(self, filename): + """Load an operator from an mrl file""" + with open(filename, "rb") as f: + self.lut = bytearray(f.read()) + + if len(self.lut) != LUT_SIZE: + self.lut = None + raise Exception("Wrong size operator file!") + + def save_lut(self, filename): + """Save an operator to an mrl file""" + if self.lut is None: + raise Exception("No operator loaded") + with open(filename, "wb") as f: + f.write(self.lut) + + def set_lut(self, lut): + """Set the lut from an external source""" + self.lut = lut diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageOps.py b/minor_project/lib/python3.6/site-packages/PIL/ImageOps.py new file mode 100644 index 0000000..14602a5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageOps.py @@ -0,0 +1,558 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard image operations +# +# History: +# 2001-10-20 fl Created +# 2001-10-23 fl Added autocontrast operator +# 2001-12-18 fl Added Kevin's fit operator +# 2004-03-14 fl Fixed potential division by zero in equalize +# 2005-05-05 fl Fixed equalize for low number of values +# +# Copyright (c) 2001-2004 by Secret Labs AB +# Copyright (c) 2001-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import functools +import operator + +from . import Image + +# +# helpers + + +def _border(border): + if isinstance(border, tuple): + if len(border) == 2: + left, top = right, bottom = border + elif len(border) == 4: + left, top, right, bottom = border + else: + left = top = right = bottom = border + return left, top, right, bottom + + +def _color(color, mode): + if isinstance(color, str): + from . import ImageColor + + color = ImageColor.getcolor(color, mode) + return color + + +def _lut(image, lut): + if image.mode == "P": + # FIXME: apply to lookup table, not image data + raise NotImplementedError("mode P support coming soon") + elif image.mode in ("L", "RGB"): + if image.mode == "RGB" and len(lut) == 256: + lut = lut + lut + lut + return image.point(lut) + else: + raise OSError("not supported for this image mode") + + +# +# actions + + +def autocontrast(image, cutoff=0, ignore=None, mask=None): + """ + Maximize (normalize) image contrast. This function calculates a + histogram of the input image (or mask region), removes ``cutoff`` percent of the + lightest and darkest pixels from the histogram, and remaps the image + so that the darkest pixel becomes black (0), and the lightest + becomes white (255). + + :param image: The image to process. + :param cutoff: The percent to cut off from the histogram on the low and + high ends. Either a tuple of (low, high), or a single + number for both. + :param ignore: The background pixel value (use None for no background). + :param mask: Histogram used in contrast operation is computed using pixels + within the mask. If no mask is given the entire image is used + for histogram computation. 
+ :return: An image. + """ + histogram = image.histogram(mask) + lut = [] + for layer in range(0, len(histogram), 256): + h = histogram[layer : layer + 256] + if ignore is not None: + # get rid of outliers + try: + h[ignore] = 0 + except TypeError: + # assume sequence + for ix in ignore: + h[ix] = 0 + if cutoff: + # cut off pixels from both ends of the histogram + if not isinstance(cutoff, tuple): + cutoff = (cutoff, cutoff) + # get number of pixels + n = 0 + for ix in range(256): + n = n + h[ix] + # remove cutoff% pixels from the low end + cut = n * cutoff[0] // 100 + for lo in range(256): + if cut > h[lo]: + cut = cut - h[lo] + h[lo] = 0 + else: + h[lo] -= cut + cut = 0 + if cut <= 0: + break + # remove cutoff% samples from the high end + cut = n * cutoff[1] // 100 + for hi in range(255, -1, -1): + if cut > h[hi]: + cut = cut - h[hi] + h[hi] = 0 + else: + h[hi] -= cut + cut = 0 + if cut <= 0: + break + # find lowest/highest samples after preprocessing + for lo in range(256): + if h[lo]: + break + for hi in range(255, -1, -1): + if h[hi]: + break + if hi <= lo: + # don't bother + lut.extend(list(range(256))) + else: + scale = 255.0 / (hi - lo) + offset = -lo * scale + for ix in range(256): + ix = int(ix * scale + offset) + if ix < 0: + ix = 0 + elif ix > 255: + ix = 255 + lut.append(ix) + return _lut(image, lut) + + +def colorize(image, black, white, mid=None, blackpoint=0, whitepoint=255, midpoint=127): + """ + Colorize grayscale image. + This function calculates a color wedge which maps all black pixels in + the source image to the first color and all white pixels to the + second color. If ``mid`` is specified, it uses three-color mapping. + The ``black`` and ``white`` arguments should be RGB tuples or color names; + optionally you can use three-color mapping by also specifying ``mid``. + Mapping positions for any of the colors can be specified + (e.g. ``blackpoint``), where these parameters are the integer + value corresponding to where the corresponding color should be mapped. + These parameters must have logical order, such that + ``blackpoint <= midpoint <= whitepoint`` (if ``mid`` is specified). + + :param image: The image to colorize. + :param black: The color to use for black input pixels. + :param white: The color to use for white input pixels. + :param mid: The color to use for midtone input pixels. + :param blackpoint: an int value [0, 255] for the black mapping. + :param whitepoint: an int value [0, 255] for the white mapping. + :param midpoint: an int value [0, 255] for the midtone mapping. + :return: An image. 
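+
+    A minimal usage sketch (illustrative only; the input file name is
+    hypothetical)::
+
+        from PIL import Image, ImageOps
+
+        gray = Image.open("photo.jpg").convert("L")
+        sepia = ImageOps.colorize(gray, black="#402218", white="#fff0c0")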
+ """ + + # Initial asserts + assert image.mode == "L" + if mid is None: + assert 0 <= blackpoint <= whitepoint <= 255 + else: + assert 0 <= blackpoint <= midpoint <= whitepoint <= 255 + + # Define colors from arguments + black = _color(black, "RGB") + white = _color(white, "RGB") + if mid is not None: + mid = _color(mid, "RGB") + + # Empty lists for the mapping + red = [] + green = [] + blue = [] + + # Create the low-end values + for i in range(0, blackpoint): + red.append(black[0]) + green.append(black[1]) + blue.append(black[2]) + + # Create the mapping (2-color) + if mid is None: + + range_map = range(0, whitepoint - blackpoint) + + for i in range_map: + red.append(black[0] + i * (white[0] - black[0]) // len(range_map)) + green.append(black[1] + i * (white[1] - black[1]) // len(range_map)) + blue.append(black[2] + i * (white[2] - black[2]) // len(range_map)) + + # Create the mapping (3-color) + else: + + range_map1 = range(0, midpoint - blackpoint) + range_map2 = range(0, whitepoint - midpoint) + + for i in range_map1: + red.append(black[0] + i * (mid[0] - black[0]) // len(range_map1)) + green.append(black[1] + i * (mid[1] - black[1]) // len(range_map1)) + blue.append(black[2] + i * (mid[2] - black[2]) // len(range_map1)) + for i in range_map2: + red.append(mid[0] + i * (white[0] - mid[0]) // len(range_map2)) + green.append(mid[1] + i * (white[1] - mid[1]) // len(range_map2)) + blue.append(mid[2] + i * (white[2] - mid[2]) // len(range_map2)) + + # Create the high-end values + for i in range(0, 256 - whitepoint): + red.append(white[0]) + green.append(white[1]) + blue.append(white[2]) + + # Return converted image + image = image.convert("RGB") + return _lut(image, red + green + blue) + + +def pad(image, size, method=Image.BICUBIC, color=None, centering=(0.5, 0.5)): + """ + Returns a sized and padded version of the image, expanded to fill the + requested aspect ratio and size. + + :param image: The image to size and crop. + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: What resampling method to use. Default is + :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`. + :param color: The background color of the padded image. + :param centering: Control the position of the original image within the + padded version. + + (0.5, 0.5) will keep the image centered + (0, 0) will keep the image aligned to the top left + (1, 1) will keep the image aligned to the bottom + right + :return: An image. + """ + + im_ratio = image.width / image.height + dest_ratio = size[0] / size[1] + + if im_ratio == dest_ratio: + out = image.resize(size, resample=method) + else: + out = Image.new(image.mode, size, color) + if im_ratio > dest_ratio: + new_height = int(image.height / image.width * size[0]) + if new_height != size[1]: + image = image.resize((size[0], new_height), resample=method) + + y = int((size[1] - new_height) * max(0, min(centering[1], 1))) + out.paste(image, (0, y)) + else: + new_width = int(image.width / image.height * size[1]) + if new_width != size[0]: + image = image.resize((new_width, size[1]), resample=method) + + x = int((size[0] - new_width) * max(0, min(centering[0], 1))) + out.paste(image, (x, 0)) + return out + + +def crop(image, border=0): + """ + Remove border from image. The same amount of pixels are removed + from all four sides. This function works on all image modes. + + .. seealso:: :py:meth:`~PIL.Image.Image.crop` + + :param image: The image to crop. + :param border: The number of pixels to remove. + :return: An image. 
+ """ + left, top, right, bottom = _border(border) + return image.crop((left, top, image.size[0] - right, image.size[1] - bottom)) + + +def scale(image, factor, resample=Image.BICUBIC): + """ + Returns a rescaled image by a specific factor given in parameter. + A factor greater than 1 expands the image, between 0 and 1 contracts the + image. + + :param image: The image to rescale. + :param factor: The expansion factor, as a float. + :param resample: What resampling method to use. Default is + :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + if factor == 1: + return image.copy() + elif factor <= 0: + raise ValueError("the factor must be greater than 0") + else: + size = (round(factor * image.width), round(factor * image.height)) + return image.resize(size, resample) + + +def deform(image, deformer, resample=Image.BILINEAR): + """ + Deform the image. + + :param image: The image to deform. + :param deformer: A deformer object. Any object that implements a + ``getmesh`` method can be used. + :param resample: An optional resampling filter. Same values possible as + in the PIL.Image.transform function. + :return: An image. + """ + return image.transform(image.size, Image.MESH, deformer.getmesh(image), resample) + + +def equalize(image, mask=None): + """ + Equalize the image histogram. This function applies a non-linear + mapping to the input image, in order to create a uniform + distribution of grayscale values in the output image. + + :param image: The image to equalize. + :param mask: An optional mask. If given, only the pixels selected by + the mask are included in the analysis. + :return: An image. + """ + if image.mode == "P": + image = image.convert("RGB") + h = image.histogram(mask) + lut = [] + for b in range(0, len(h), 256): + histo = [_f for _f in h[b : b + 256] if _f] + if len(histo) <= 1: + lut.extend(list(range(256))) + else: + step = (functools.reduce(operator.add, histo) - histo[-1]) // 255 + if not step: + lut.extend(list(range(256))) + else: + n = step // 2 + for i in range(256): + lut.append(n // step) + n = n + h[i + b] + return _lut(image, lut) + + +def expand(image, border=0, fill=0): + """ + Add border to the image + + :param image: The image to expand. + :param border: Border width, in pixels. + :param fill: Pixel fill value (a color value). Default is 0 (black). + :return: An image. + """ + left, top, right, bottom = _border(border) + width = left + image.size[0] + right + height = top + image.size[1] + bottom + out = Image.new(image.mode, (width, height), _color(fill, image.mode)) + out.paste(image, (left, top)) + return out + + +def fit(image, size, method=Image.BICUBIC, bleed=0.0, centering=(0.5, 0.5)): + """ + Returns a sized and cropped version of the image, cropped to the + requested aspect ratio and size. + + This function was contributed by Kevin Cazabon. + + :param image: The image to size and crop. + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: What resampling method to use. Default is + :py:attr:`PIL.Image.BICUBIC`. See :ref:`concept-filters`. + :param bleed: Remove a border around the outside of the image from all + four edges. The value is a decimal percentage (use 0.01 for + one percent). The default value is 0 (no border). + Cannot be greater than or equal to 0.5. + :param centering: Control the cropping position. Use (0.5, 0.5) for + center cropping (e.g. 
if cropping the width, take 50% off + of the left side, and therefore 50% off the right side). + (0.0, 0.0) will crop from the top left corner (i.e. if + cropping the width, take all of the crop off of the right + side, and if cropping the height, take all of it off the + bottom). (1.0, 0.0) will crop from the bottom left + corner, etc. (i.e. if cropping the width, take all of the + crop off the left side, and if cropping the height take + none from the top, and therefore all off the bottom). + :return: An image. + """ + + # by Kevin Cazabon, Feb 17/2000 + # kevin@cazabon.com + # http://www.cazabon.com + + # ensure centering is mutable + centering = list(centering) + + if not 0.0 <= centering[0] <= 1.0: + centering[0] = 0.5 + if not 0.0 <= centering[1] <= 1.0: + centering[1] = 0.5 + + if not 0.0 <= bleed < 0.5: + bleed = 0.0 + + # calculate the area to use for resizing and cropping, subtracting + # the 'bleed' around the edges + + # number of pixels to trim off on Top and Bottom, Left and Right + bleed_pixels = (bleed * image.size[0], bleed * image.size[1]) + + live_size = ( + image.size[0] - bleed_pixels[0] * 2, + image.size[1] - bleed_pixels[1] * 2, + ) + + # calculate the aspect ratio of the live_size + live_size_ratio = live_size[0] / live_size[1] + + # calculate the aspect ratio of the output image + output_ratio = size[0] / size[1] + + # figure out if the sides or top/bottom will be cropped off + if live_size_ratio == output_ratio: + # live_size is already the needed ratio + crop_width = live_size[0] + crop_height = live_size[1] + elif live_size_ratio >= output_ratio: + # live_size is wider than what's needed, crop the sides + crop_width = output_ratio * live_size[1] + crop_height = live_size[1] + else: + # live_size is taller than what's needed, crop the top and bottom + crop_width = live_size[0] + crop_height = live_size[0] / output_ratio + + # make the crop + crop_left = bleed_pixels[0] + (live_size[0] - crop_width) * centering[0] + crop_top = bleed_pixels[1] + (live_size[1] - crop_height) * centering[1] + + crop = (crop_left, crop_top, crop_left + crop_width, crop_top + crop_height) + + # resize the image and return it + return image.resize(size, method, box=crop) + + +def flip(image): + """ + Flip the image vertically (top to bottom). + + :param image: The image to flip. + :return: An image. + """ + return image.transpose(Image.FLIP_TOP_BOTTOM) + + +def grayscale(image): + """ + Convert the image to grayscale. + + :param image: The image to convert. + :return: An image. + """ + return image.convert("L") + + +def invert(image): + """ + Invert (negate) the image. + + :param image: The image to invert. + :return: An image. + """ + lut = [] + for i in range(256): + lut.append(255 - i) + return _lut(image, lut) + + +def mirror(image): + """ + Flip image horizontally (left to right). + + :param image: The image to mirror. + :return: An image. + """ + return image.transpose(Image.FLIP_LEFT_RIGHT) + + +def posterize(image, bits): + """ + Reduce the number of bits for each color channel. + + :param image: The image to posterize. + :param bits: The number of bits to keep for each channel (1-8). + :return: An image. + """ + lut = [] + mask = ~(2 ** (8 - bits) - 1) + for i in range(256): + lut.append(i & mask) + return _lut(image, lut) + + +def solarize(image, threshold=128): + """ + Invert all pixel values above a threshold. + + :param image: The image to solarize. + :param threshold: All pixels above this greyscale level are inverted. + :return: An image. 
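+
+    Rough usage sketch (illustrative only; the file name is hypothetical)::
+
+        from PIL import Image, ImageOps
+
+        im = Image.open("photo.jpg").convert("L")
+        partly_inverted = ImageOps.solarize(im, threshold=192)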
+ """ + lut = [] + for i in range(256): + if i < threshold: + lut.append(i) + else: + lut.append(255 - i) + return _lut(image, lut) + + +def exif_transpose(image): + """ + If an image has an EXIF Orientation tag, return a new image that is + transposed accordingly. Otherwise, return a copy of the image. + + :param image: The image to transpose. + :return: An image. + """ + exif = image.getexif() + orientation = exif.get(0x0112) + method = { + 2: Image.FLIP_LEFT_RIGHT, + 3: Image.ROTATE_180, + 4: Image.FLIP_TOP_BOTTOM, + 5: Image.TRANSPOSE, + 6: Image.ROTATE_270, + 7: Image.TRANSVERSE, + 8: Image.ROTATE_90, + }.get(orientation) + if method is not None: + transposed_image = image.transpose(method) + del exif[0x0112] + transposed_image.info["exif"] = exif.tobytes() + return transposed_image + return image.copy() diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImagePalette.py b/minor_project/lib/python3.6/site-packages/PIL/ImagePalette.py new file mode 100644 index 0000000..d060411 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImagePalette.py @@ -0,0 +1,221 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image palette object +# +# History: +# 1996-03-11 fl Rewritten. +# 1997-01-03 fl Up and running. +# 1997-08-23 fl Added load hack +# 2001-04-16 fl Fixed randint shadow bug in random() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import array + +from . import GimpGradientFile, GimpPaletteFile, ImageColor, PaletteFile + + +class ImagePalette: + """ + Color palette for palette mapped images + + :param mode: The mode to use for the Palette. See: + :ref:`concept-modes`. Defaults to "RGB" + :param palette: An optional palette. If given, it must be a bytearray, + an array or a list of ints between 0-255 and of length ``size`` + times the number of colors in ``mode``. The list must be aligned + by channel (All R values must be contiguous in the list before G + and B values.) Defaults to 0 through 255 per channel. + :param size: An optional palette size. If given, it cannot be equal to + or greater than 256. Defaults to 0. + """ + + def __init__(self, mode="RGB", palette=None, size=0): + self.mode = mode + self.rawmode = None # if set, palette contains raw data + self.palette = palette or bytearray(range(256)) * len(self.mode) + self.colors = {} + self.dirty = None + if (size == 0 and len(self.mode) * 256 != len(self.palette)) or ( + size != 0 and size != len(self.palette) + ): + raise ValueError("wrong palette size") + + def copy(self): + new = ImagePalette() + + new.mode = self.mode + new.rawmode = self.rawmode + if self.palette is not None: + new.palette = self.palette[:] + new.colors = self.colors.copy() + new.dirty = self.dirty + + return new + + def getdata(self): + """ + Get palette contents in format suitable for the low-level + ``im.putpalette`` primitive. + + .. warning:: This method is experimental. + """ + if self.rawmode: + return self.rawmode, self.palette + return self.mode + ";L", self.tobytes() + + def tobytes(self): + """Convert palette to bytes. + + .. warning:: This method is experimental. 
+ """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(self.palette, bytes): + return self.palette + arr = array.array("B", self.palette) + if hasattr(arr, "tobytes"): + return arr.tobytes() + return arr.tostring() + + # Declare tostring as an alias for tobytes + tostring = tobytes + + def getcolor(self, color): + """Given an rgb tuple, allocate palette entry. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(color, tuple): + try: + return self.colors[color] + except KeyError as e: + # allocate new color slot + if isinstance(self.palette, bytes): + self.palette = bytearray(self.palette) + index = len(self.colors) + if index >= 256: + raise ValueError("cannot allocate more than 256 colors") from e + self.colors[color] = index + self.palette[index] = color[0] + self.palette[index + 256] = color[1] + self.palette[index + 512] = color[2] + self.dirty = 1 + return index + else: + raise ValueError(f"unknown color specifier: {repr(color)}") + + def save(self, fp): + """Save palette to text file. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(fp, str): + fp = open(fp, "w") + fp.write("# Palette\n") + fp.write(f"# Mode: {self.mode}\n") + for i in range(256): + fp.write(f"{i}") + for j in range(i * len(self.mode), (i + 1) * len(self.mode)): + try: + fp.write(f" {self.palette[j]}") + except IndexError: + fp.write(" 0") + fp.write("\n") + fp.close() + + +# -------------------------------------------------------------------- +# Internal + + +def raw(rawmode, data): + palette = ImagePalette() + palette.rawmode = rawmode + palette.palette = data + palette.dirty = 1 + return palette + + +# -------------------------------------------------------------------- +# Factories + + +def make_linear_lut(black, white): + lut = [] + if black == 0: + for i in range(256): + lut.append(white * i // 255) + else: + raise NotImplementedError # FIXME + return lut + + +def make_gamma_lut(exp): + lut = [] + for i in range(256): + lut.append(int(((i / 255.0) ** exp) * 255.0 + 0.5)) + return lut + + +def negative(mode="RGB"): + palette = list(range(256)) + palette.reverse() + return ImagePalette(mode, palette * len(mode)) + + +def random(mode="RGB"): + from random import randint + + palette = [] + for i in range(256 * len(mode)): + palette.append(randint(0, 255)) + return ImagePalette(mode, palette) + + +def sepia(white="#fff0c0"): + r, g, b = ImageColor.getrgb(white) + r = make_linear_lut(0, r) + g = make_linear_lut(0, g) + b = make_linear_lut(0, b) + return ImagePalette("RGB", r + g + b) + + +def wedge(mode="RGB"): + return ImagePalette(mode, list(range(256)) * len(mode)) + + +def load(filename): + + # FIXME: supports GIMP gradients only + + with open(filename, "rb") as fp: + + for paletteHandler in [ + GimpPaletteFile.GimpPaletteFile, + GimpGradientFile.GimpGradientFile, + PaletteFile.PaletteFile, + ]: + try: + fp.seek(0) + lut = paletteHandler(fp).getpalette() + if lut: + break + except (SyntaxError, ValueError): + # import traceback + # traceback.print_exc() + pass + else: + raise OSError("cannot load palette") + + return lut # data, rawmode diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImagePath.py b/minor_project/lib/python3.6/site-packages/PIL/ImagePath.py new file mode 100644 index 0000000..3d3538c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImagePath.py @@ 
-0,0 +1,19 @@ +# +# The Python Imaging Library +# $Id$ +# +# path interface +# +# History: +# 1996-11-04 fl Created +# 2002-04-14 fl Added documentation stub class +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from . import Image + +Path = Image.core.path diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageQt.py b/minor_project/lib/python3.6/site-packages/PIL/ImageQt.py new file mode 100644 index 0000000..64f07be --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageQt.py @@ -0,0 +1,202 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a simple Qt image interface. +# +# history: +# 2006-06-03 fl: created +# 2006-06-04 fl: inherit from QImage instead of wrapping it +# 2006-06-05 fl: removed toimage helper; move string support to ImageQt +# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) +# +# Copyright (c) 2006 by Secret Labs AB +# Copyright (c) 2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import sys +from io import BytesIO + +from . import Image +from ._util import isPath + +qt_versions = [ + ["side6", "PySide6"], + ["5", "PyQt5"], + ["side2", "PySide2"], +] + +# If a version has already been imported, attempt it first +qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules, reverse=True) +for qt_version, qt_module in qt_versions: + try: + if qt_module == "PySide6": + from PySide6.QtCore import QBuffer, QIODevice + from PySide6.QtGui import QImage, QPixmap, qRgba + elif qt_module == "PyQt5": + from PyQt5.QtCore import QBuffer, QIODevice + from PyQt5.QtGui import QImage, QPixmap, qRgba + elif qt_module == "PySide2": + from PySide2.QtCore import QBuffer, QIODevice + from PySide2.QtGui import QImage, QPixmap, qRgba + except (ImportError, RuntimeError): + continue + qt_is_installed = True + break +else: + qt_is_installed = False + qt_version = None + + +def rgb(r, g, b, a=255): + """(Internal) Turns an RGB color into a Qt compatible color integer.""" + # use qRgb to pack the colors, and then turn the resulting long + # into a negative integer with the same bitpattern. + return qRgba(r, g, b, a) & 0xFFFFFFFF + + +def fromqimage(im): + """ + :param im: A PIL Image object, or a file name + (given either as Python string or a PyQt string object) + """ + buffer = QBuffer() + buffer.open(QIODevice.ReadWrite) + # preserve alpha channel with png + # otherwise ppm is more friendly with Image.open + if im.hasAlphaChannel(): + im.save(buffer, "png") + else: + im.save(buffer, "ppm") + + b = BytesIO() + b.write(buffer.data()) + buffer.close() + b.seek(0) + + return Image.open(b) + + +def fromqpixmap(im): + return fromqimage(im) + # buffer = QBuffer() + # buffer.open(QIODevice.ReadWrite) + # # im.save(buffer) + # # What if png doesn't support some image features like animation? 
+ # im.save(buffer, 'ppm') + # bytes_io = BytesIO() + # bytes_io.write(buffer.data()) + # buffer.close() + # bytes_io.seek(0) + # return Image.open(bytes_io) + + +def align8to32(bytes, width, mode): + """ + converts each scanline of data from 8 bit to 32 bit aligned + """ + + bits_per_pixel = {"1": 1, "L": 8, "P": 8}[mode] + + # calculate bytes per line and the extra padding if needed + bits_per_line = bits_per_pixel * width + full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8) + bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0) + + extra_padding = -bytes_per_line % 4 + + # already 32 bit aligned by luck + if not extra_padding: + return bytes + + new_data = [] + for i in range(len(bytes) // bytes_per_line): + new_data.append( + bytes[i * bytes_per_line : (i + 1) * bytes_per_line] + + b"\x00" * extra_padding + ) + + return b"".join(new_data) + + +def _toqclass_helper(im): + data = None + colortable = None + + # handle filename, if given instead of image name + if hasattr(im, "toUtf8"): + # FIXME - is this really the best way to do this? + im = str(im.toUtf8(), "utf-8") + if isPath(im): + im = Image.open(im) + + if im.mode == "1": + format = QImage.Format_Mono + elif im.mode == "L": + format = QImage.Format_Indexed8 + colortable = [] + for i in range(256): + colortable.append(rgb(i, i, i)) + elif im.mode == "P": + format = QImage.Format_Indexed8 + colortable = [] + palette = im.getpalette() + for i in range(0, len(palette), 3): + colortable.append(rgb(*palette[i : i + 3])) + elif im.mode == "RGB": + data = im.tobytes("raw", "BGRX") + format = QImage.Format_RGB32 + elif im.mode == "RGBA": + data = im.tobytes("raw", "BGRA") + format = QImage.Format_ARGB32 + else: + raise ValueError(f"unsupported image mode {repr(im.mode)}") + + __data = data or align8to32(im.tobytes(), im.size[0], im.mode) + return {"data": __data, "im": im, "format": format, "colortable": colortable} + + +if qt_is_installed: + + class ImageQt(QImage): + def __init__(self, im): + """ + An PIL image wrapper for Qt. This is a subclass of PyQt's QImage + class. + + :param im: A PIL Image object, or a file name (given either as + Python string or a PyQt string object). + """ + im_data = _toqclass_helper(im) + # must keep a reference, or Qt will crash! + # All QImage constructors that take data operate on an existing + # buffer, so this buffer has to hang on for the life of the image. + # Fixes https://github.com/python-pillow/Pillow/issues/1370 + self.__data = im_data["data"] + super().__init__( + self.__data, + im_data["im"].size[0], + im_data["im"].size[1], + im_data["format"], + ) + if im_data["colortable"]: + self.setColorTable(im_data["colortable"]) + + +def toqimage(im): + return ImageQt(im) + + +def toqpixmap(im): + # # This doesn't work. For now using a dumb approach. + # im_data = _toqclass_helper(im) + # result = QPixmap(im_data['im'].size[0], im_data['im'].size[1]) + # result.loadFromData(im_data['data']) + # Fix some strange bug that causes + if im.mode == "RGB": + im = im.convert("RGBA") + + qimage = toqimage(im) + return QPixmap.fromImage(qimage) diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageSequence.py b/minor_project/lib/python3.6/site-packages/PIL/ImageSequence.py new file mode 100644 index 0000000..9df910a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageSequence.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# sequence support classes +# +# history: +# 1997-02-20 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## + + +class Iterator: + """ + This class implements an iterator object that can be used to loop + over an image sequence. + + You can use the ``[]`` operator to access elements by index. This operator + will raise an :py:exc:`IndexError` if you try to access a nonexistent + frame. + + :param im: An image object. + """ + + def __init__(self, im): + if not hasattr(im, "seek"): + raise AttributeError("im must have seek method") + self.im = im + self.position = getattr(self.im, "_min_frame", 0) + + def __getitem__(self, ix): + try: + self.im.seek(ix) + return self.im + except EOFError as e: + raise IndexError from e # end of sequence + + def __iter__(self): + return self + + def __next__(self): + try: + self.im.seek(self.position) + self.position += 1 + return self.im + except EOFError as e: + raise StopIteration from e + + +def all_frames(im, func=None): + """ + Applies a given function to all frames in an image or a list of images. + The frames are returned as a list of separate images. + + :param im: An image, or a list of images. + :param func: The function to apply to all of the image frames. + :returns: A list of images. + """ + if not isinstance(im, list): + im = [im] + + ims = [] + for imSequence in im: + current = imSequence.tell() + + ims += [im_frame.copy() for im_frame in Iterator(imSequence)] + + imSequence.seek(current) + return [func(im) for im in ims] if func else ims diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageShow.py b/minor_project/lib/python3.6/site-packages/PIL/ImageShow.py new file mode 100644 index 0000000..1ada825 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageShow.py @@ -0,0 +1,236 @@ +# +# The Python Imaging Library. +# $Id$ +# +# im.show() drivers +# +# History: +# 2008-04-06 fl Created +# +# Copyright (c) Secret Labs AB 2008. +# +# See the README file for information on usage and redistribution. +# +import os +import shutil +import subprocess +import sys +import tempfile +from shlex import quote + +from PIL import Image + +_viewers = [] + + +def register(viewer, order=1): + """ + The :py:func:`register` function is used to register additional viewers. + + :param viewer: The viewer to be registered. + :param order: + Zero or a negative integer to prepend this viewer to the list, + a positive integer to append it. + """ + try: + if issubclass(viewer, Viewer): + viewer = viewer() + except TypeError: + pass # raised if viewer wasn't a class + if order > 0: + _viewers.append(viewer) + else: + _viewers.insert(0, viewer) + + +def show(image, title=None, **options): + r""" + Display a given image. + + :param image: An image object. + :param title: Optional title. Not all viewers can display the title. + :param \**options: Additional viewer options. + :returns: ``True`` if a suitable viewer was found, ``False`` otherwise. + """ + for viewer in _viewers: + if viewer.show(image, title=title, **options): + return 1 + return 0 + + +class Viewer: + """Base class for viewers.""" + + # main api + + def show(self, image, **options): + """ + The main function for displaying an image. + Converts the given image to the target format and displays it. 
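+
+        Typical use goes through the module-level helper rather than this
+        method directly; a hedged sketch (the file name is hypothetical)::
+
+            from PIL import Image, ImageShow
+
+            with Image.open("photo.jpg") as im:
+                ImageShow.show(im, title="preview")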
+ """ + + # save temporary image to disk + if not ( + image.mode in ("1", "RGBA") + or (self.format == "PNG" and image.mode in ("I;16", "LA")) + ): + base = Image.getmodebase(image.mode) + if image.mode != base: + image = image.convert(base) + + return self.show_image(image, **options) + + # hook methods + + format = None + """The format to convert the image into.""" + options = {} + """Additional options used to convert the image.""" + + def get_format(self, image): + """Return format name, or ``None`` to save as PGM/PPM.""" + return self.format + + def get_command(self, file, **options): + """ + Returns the command used to display the file. + Not implemented in the base class. + """ + raise NotImplementedError + + def save_image(self, image): + """Save to temporary file and return filename.""" + return image._dump(format=self.get_format(image), **self.options) + + def show_image(self, image, **options): + """Display the given image.""" + return self.show_file(self.save_image(image), **options) + + def show_file(self, file, **options): + """Display the given file.""" + os.system(self.get_command(file, **options)) + return 1 + + +# -------------------------------------------------------------------- + + +class WindowsViewer(Viewer): + """The default viewer on Windows is the default system application for PNG files.""" + + format = "PNG" + options = {"compress_level": 1} + + def get_command(self, file, **options): + return ( + f'start "Pillow" /WAIT "{file}" ' + "&& ping -n 2 127.0.0.1 >NUL " + f'&& del /f "{file}"' + ) + + +if sys.platform == "win32": + register(WindowsViewer) + + +class MacViewer(Viewer): + """The default viewer on MacOS using ``Preview.app``.""" + + format = "PNG" + options = {"compress_level": 1} + + def get_command(self, file, **options): + # on darwin open returns immediately resulting in the temp + # file removal while app is opening + command = "open -a Preview.app" + command = f"({command} {quote(file)}; sleep 20; rm -f {quote(file)})&" + return command + + def show_file(self, file, **options): + """Display given file""" + fd, path = tempfile.mkstemp() + with os.fdopen(fd, "w") as f: + f.write(file) + with open(path) as f: + subprocess.Popen( + ["im=$(cat); open -a Preview.app $im; sleep 20; rm -f $im"], + shell=True, + stdin=f, + ) + os.remove(path) + return 1 + + +if sys.platform == "darwin": + register(MacViewer) + + +class UnixViewer(Viewer): + format = "PNG" + options = {"compress_level": 1} + + def get_command(self, file, **options): + command = self.get_command_ex(file, **options)[0] + return f"({command} {quote(file)}; rm -f {quote(file)})&" + + def show_file(self, file, **options): + """Display given file""" + fd, path = tempfile.mkstemp() + with os.fdopen(fd, "w") as f: + f.write(file) + with open(path) as f: + command = self.get_command_ex(file, **options)[0] + subprocess.Popen( + ["im=$(cat);" + command + " $im; rm -f $im"], shell=True, stdin=f + ) + os.remove(path) + return 1 + + +class DisplayViewer(UnixViewer): + """The ImageMagick ``display`` command.""" + + def get_command_ex(self, file, **options): + command = executable = "display" + return command, executable + + +class EogViewer(UnixViewer): + """The GNOME Image Viewer ``eog`` command.""" + + def get_command_ex(self, file, **options): + command = executable = "eog" + return command, executable + + +class XVViewer(UnixViewer): + """ + The X Viewer ``xv`` command. + This viewer supports the ``title`` parameter. 
+ """ + + def get_command_ex(self, file, title=None, **options): + # note: xv is pretty outdated. most modern systems have + # imagemagick's display command instead. + command = executable = "xv" + if title: + command += f" -name {quote(title)}" + return command, executable + + +if sys.platform not in ("win32", "darwin"): # unixoids + if shutil.which("display"): + register(DisplayViewer) + if shutil.which("eog"): + register(EogViewer) + if shutil.which("xv"): + register(XVViewer) + +if __name__ == "__main__": + + if len(sys.argv) < 2: + print("Syntax: python ImageShow.py imagefile [title]") + sys.exit() + + with Image.open(sys.argv[1]) as im: + print(show(im, *sys.argv[2:])) diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageStat.py b/minor_project/lib/python3.6/site-packages/PIL/ImageStat.py new file mode 100644 index 0000000..50bafc9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageStat.py @@ -0,0 +1,147 @@ +# +# The Python Imaging Library. +# $Id$ +# +# global image statistics +# +# History: +# 1996-04-05 fl Created +# 1997-05-21 fl Added mask; added rms, var, stddev attributes +# 1997-08-05 fl Added median +# 1998-07-05 hk Fixed integer overflow error +# +# Notes: +# This class shows how to implement delayed evaluation of attributes. +# To get a certain value, simply access the corresponding attribute. +# The __getattr__ dispatcher takes care of the rest. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996-97. +# +# See the README file for information on usage and redistribution. +# + +import functools +import math +import operator + + +class Stat: + def __init__(self, image_or_list, mask=None): + try: + if mask: + self.h = image_or_list.histogram(mask) + else: + self.h = image_or_list.histogram() + except AttributeError: + self.h = image_or_list # assume it to be a histogram list + if not isinstance(self.h, list): + raise TypeError("first argument must be image or list") + self.bands = list(range(len(self.h) // 256)) + + def __getattr__(self, id): + """Calculate missing attribute""" + if id[:4] == "_get": + raise AttributeError(id) + # calculate missing attribute + v = getattr(self, "_get" + id)() + setattr(self, id, v) + return v + + def _getextrema(self): + """Get min/max values for each band in the image""" + + def minmax(histogram): + n = 255 + x = 0 + for i in range(256): + if histogram[i]: + n = min(n, i) + x = max(x, i) + return n, x # returns (255, 0) if there's no data in the histogram + + v = [] + for i in range(0, len(self.h), 256): + v.append(minmax(self.h[i:])) + return v + + def _getcount(self): + """Get total number of pixels in each layer""" + + v = [] + for i in range(0, len(self.h), 256): + v.append(functools.reduce(operator.add, self.h[i : i + 256])) + return v + + def _getsum(self): + """Get sum of all pixels in each layer""" + + v = [] + for i in range(0, len(self.h), 256): + layerSum = 0.0 + for j in range(256): + layerSum += j * self.h[i + j] + v.append(layerSum) + return v + + def _getsum2(self): + """Get squared sum of all pixels in each layer""" + + v = [] + for i in range(0, len(self.h), 256): + sum2 = 0.0 + for j in range(256): + sum2 += (j ** 2) * float(self.h[i + j]) + v.append(sum2) + return v + + def _getmean(self): + """Get average pixel level for each layer""" + + v = [] + for i in self.bands: + v.append(self.sum[i] / self.count[i]) + return v + + def _getmedian(self): + """Get median pixel level for each layer""" + + v = [] + for i in self.bands: + s = 0 + half = self.count[i] // 2 + b = i * 
256 + for j in range(256): + s = s + self.h[b + j] + if s > half: + break + v.append(j) + return v + + def _getrms(self): + """Get RMS for each layer""" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.sum2[i] / self.count[i])) + return v + + def _getvar(self): + """Get variance for each layer""" + + v = [] + for i in self.bands: + n = self.count[i] + v.append((self.sum2[i] - (self.sum[i] ** 2.0) / n) / n) + return v + + def _getstddev(self): + """Get standard deviation for each layer""" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.var[i])) + return v + + +Global = Stat # compatibility diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageTk.py b/minor_project/lib/python3.6/site-packages/PIL/ImageTk.py new file mode 100644 index 0000000..62db7a7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageTk.py @@ -0,0 +1,300 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a Tk display interface +# +# History: +# 96-04-08 fl Created +# 96-09-06 fl Added getimage method +# 96-11-01 fl Rewritten, removed image attribute and crop method +# 97-05-09 fl Use PyImagingPaste method instead of image type +# 97-05-12 fl Minor tweaks to match the IFUNC95 interface +# 97-05-17 fl Support the "pilbitmap" booster patch +# 97-06-05 fl Added file= and data= argument to image constructors +# 98-03-09 fl Added width and height methods to Image classes +# 98-07-02 fl Use default mode for "P" images without palette attribute +# 98-07-02 fl Explicitly destroy Tkinter image objects +# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch) +# 99-07-26 fl Automatically hook into Tkinter (if possible) +# 99-08-15 fl Hook uses _imagingtk instead of _imaging +# +# Copyright (c) 1997-1999 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import tkinter +from io import BytesIO + +from . import Image + +# -------------------------------------------------------------------- +# Check for Tkinter interface hooks + +_pilbitmap_ok = None + + +def _pilbitmap_check(): + global _pilbitmap_ok + if _pilbitmap_ok is None: + try: + im = Image.new("1", (1, 1)) + tkinter.BitmapImage(data=f"PIL:{im.im.id}") + _pilbitmap_ok = 1 + except tkinter.TclError: + _pilbitmap_ok = 0 + return _pilbitmap_ok + + +def _get_image_from_kw(kw): + source = None + if "file" in kw: + source = kw.pop("file") + elif "data" in kw: + source = BytesIO(kw.pop("data")) + if source: + return Image.open(source) + + +# -------------------------------------------------------------------- +# PhotoImage + + +class PhotoImage: + """ + A Tkinter-compatible photo image. This can be used + everywhere Tkinter expects an image object. If the image is an RGBA + image, pixels having alpha 0 are treated as transparent. + + The constructor takes either a PIL image, or a mode and a size. + Alternatively, you can use the ``file`` or ``data`` options to initialize + the photo image object. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. + :param size: If the first argument is a mode string, this defines the size + of the image. + :keyword file: A filename to load the image from (using + ``Image.open(file)``). + :keyword data: An 8-bit string containing image data (as loaded from an + image file). 
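A small example of the lazily evaluated statistics implemented by Stat above; the flat grey test image is synthetic, so the expected values can be stated exactly.

from PIL import Image, ImageStat

im = Image.new("L", (16, 16), color=128)      # uniform grey test image
stat = ImageStat.Stat(im)

print(stat.count)     # [256]   pixels per band
print(stat.mean)      # [128.0]
print(stat.stddev)    # [0.0]   a flat image has no variation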
+ """ + + def __init__(self, image=None, size=None, **kw): + + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + if hasattr(image, "mode") and hasattr(image, "size"): + # got an image instead of a mode + mode = image.mode + if mode == "P": + # palette mapped data + image.load() + try: + mode = image.palette.mode + except AttributeError: + mode = "RGB" # default + size = image.size + kw["width"], kw["height"] = size + else: + mode = image + image = None + + if mode not in ["1", "L", "RGB", "RGBA"]: + mode = Image.getmodebase(mode) + + self.__mode = mode + self.__size = size + self.__photo = tkinter.PhotoImage(**kw) + self.tk = self.__photo.tk + if image: + self.paste(image) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except Exception: + pass # ignore internal errors + + def __str__(self): + """ + Get the Tkinter photo image identifier. This method is automatically + called by Tkinter whenever a PhotoImage object is passed to a Tkinter + method. + + :return: A Tkinter photo image identifier (a string). + """ + return str(self.__photo) + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def paste(self, im, box=None): + """ + Paste a PIL image into the photo image. Note that this can + be very slow if the photo image is displayed. + + :param im: A PIL image. The size must match the target region. If the + mode does not match, the image is converted to the mode of + the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and lower pixel + coordinate. See :ref:`coordinate-system`. If None is given + instead of a tuple, all of the image is assumed. + """ + + # convert to blittable + im.load() + image = im.im + if image.isblock() and im.mode == self.__mode: + block = image + else: + block = image.new_block(self.__mode, im.size) + image.convert2(block, image) # convert directly between buffers + + tk = self.__photo.tk + + try: + tk.call("PyImagingPhoto", self.__photo, block.id) + except tkinter.TclError: + # activate Tkinter hook + try: + from . import _imagingtk + + try: + if hasattr(tk, "interp"): + # Required for PyPy, which always has CFFI installed + from cffi import FFI + + ffi = FFI() + + # PyPy is using an FFI CDATA element + # (Pdb) self.tk.interp + # + _imagingtk.tkinit(int(ffi.cast("uintptr_t", tk.interp)), 1) + else: + _imagingtk.tkinit(tk.interpaddr(), 1) + except AttributeError: + _imagingtk.tkinit(id(tk), 0) + tk.call("PyImagingPhoto", self.__photo, block.id) + except (ImportError, AttributeError, tkinter.TclError): + raise # configuration problem; cannot attach to Tkinter + + +# -------------------------------------------------------------------- +# BitmapImage + + +class BitmapImage: + """ + A Tkinter-compatible bitmap image. This can be used everywhere Tkinter + expects an image object. + + The given image must have mode "1". Pixels having value 0 are treated as + transparent. Options, if any, are passed on to Tkinter. The most commonly + used option is ``foreground``, which is used to specify the color for the + non-transparent parts. See the Tkinter documentation for information on + how to specify colours. + + :param image: A PIL image. 
+ """ + + def __init__(self, image=None, **kw): + + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + self.__mode = image.mode + self.__size = image.size + + if _pilbitmap_check(): + # fast way (requires the pilbitmap booster patch) + image.load() + kw["data"] = f"PIL:{image.im.id}" + self.__im = image # must keep a reference + else: + # slow but safe way + kw["data"] = image.tobitmap() + self.__photo = tkinter.BitmapImage(**kw) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except Exception: + pass # ignore internal errors + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def __str__(self): + """ + Get the Tkinter bitmap image identifier. This method is automatically + called by Tkinter whenever a BitmapImage object is passed to a Tkinter + method. + + :return: A Tkinter bitmap image identifier (a string). + """ + return str(self.__photo) + + +def getimage(photo): + """Copies the contents of a PhotoImage to a PIL image memory.""" + im = Image.new("RGBA", (photo.width(), photo.height())) + block = im.im + + photo.tk.call("PyImagingPhotoGet", photo, block.id) + + return im + + +def _show(image, title): + """Helper for the Image.show method.""" + + class UI(tkinter.Label): + def __init__(self, master, im): + if im.mode == "1": + self.image = BitmapImage(im, foreground="white", master=master) + else: + self.image = PhotoImage(im, master=master) + super().__init__(master, image=self.image, bg="black", bd=0) + + if not tkinter._default_root: + raise OSError("tkinter not initialized") + top = tkinter.Toplevel() + if title: + top.title(title) + UI(top, image).pack() diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageTransform.py b/minor_project/lib/python3.6/site-packages/PIL/ImageTransform.py new file mode 100644 index 0000000..77791ab --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageTransform.py @@ -0,0 +1,102 @@ +# +# The Python Imaging Library. +# $Id$ +# +# transform wrappers +# +# History: +# 2002-04-08 fl Created +# +# Copyright (c) 2002 by Secret Labs AB +# Copyright (c) 2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image + + +class Transform(Image.ImageTransformHandler): + def __init__(self, data): + self.data = data + + def getdata(self): + return self.method, self.data + + def transform(self, size, image, **options): + # can be overridden + method, data = self.getdata() + return image.transform(size, method, data, **options) + + +class AffineTransform(Transform): + """ + Define an affine image transform. + + This function takes a 6-tuple (a, b, c, d, e, f) which contain the first + two rows from an affine transform matrix. For each pixel (x, y) in the + output image, the new value is taken from a position (a x + b y + c, + d x + e y + f) in the input image, rounded to nearest pixel. + + This function can be used to scale, translate, rotate, and shear the + original image. + + See :py:meth:`~PIL.Image.Image.transform` + + :param matrix: A 6-tuple (a, b, c, d, e, f) containing the first two rows + from an affine transform matrix. + """ + + method = Image.AFFINE + + +class ExtentTransform(Transform): + """ + Define a transform to extract a subregion from an image. 
+ + Maps a rectangle (defined by two corners) from the image to a rectangle of + the given size. The resulting image will contain data sampled from between + the corners, such that (x0, y0) in the input image will end up at (0,0) in + the output image, and (x1, y1) at size. + + This method can be used to crop, stretch, shrink, or mirror an arbitrary + rectangle in the current image. It is slightly slower than crop, but about + as fast as a corresponding resize operation. + + See :py:meth:`~PIL.Image.Image.transform` + + :param bbox: A 4-tuple (x0, y0, x1, y1) which specifies two points in the + input image's coordinate system. See :ref:`coordinate-system`. + """ + + method = Image.EXTENT + + +class QuadTransform(Transform): + """ + Define a quad image transform. + + Maps a quadrilateral (a region defined by four corners) from the image to a + rectangle of the given size. + + See :py:meth:`~PIL.Image.Image.transform` + + :param xy: An 8-tuple (x0, y0, x1, y1, x2, y2, x3, y3) which contain the + upper left, lower left, lower right, and upper right corner of the + source quadrilateral. + """ + + method = Image.QUAD + + +class MeshTransform(Transform): + """ + Define a mesh image transform. A mesh transform consists of one or more + individual quad transforms. + + See :py:meth:`~PIL.Image.Image.transform` + + :param data: A list of (bbox, quad) tuples. + """ + + method = Image.MESH diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImageWin.py b/minor_project/lib/python3.6/site-packages/PIL/ImageWin.py new file mode 100644 index 0000000..ca9b14c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImageWin.py @@ -0,0 +1,230 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a Windows DIB display interface +# +# History: +# 1996-05-20 fl Created +# 1996-09-20 fl Fixed subregion exposure +# 1997-09-21 fl Added draw primitive (for tzPrint) +# 2003-05-21 fl Added experimental Window/ImageWindow classes +# 2003-09-05 fl Added fromstring/tostring methods +# +# Copyright (c) Secret Labs AB 1997-2003. +# Copyright (c) Fredrik Lundh 1996-2003. +# +# See the README file for information on usage and redistribution. +# + +from . import Image + + +class HDC: + """ + Wraps an HDC integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods. + """ + + def __init__(self, dc): + self.dc = dc + + def __int__(self): + return self.dc + + +class HWND: + """ + Wraps an HWND integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods, instead of a DC. + """ + + def __init__(self, wnd): + self.wnd = wnd + + def __int__(self): + return self.wnd + + +class Dib: + """ + A Windows bitmap with the given mode and size. The mode can be one of "1", + "L", "P", or "RGB". + + If the display requires a palette, this constructor creates a suitable + palette and associates it with the image. For an "L" image, 128 greylevels + are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together + with 20 greylevels. + + To make sure that palettes work properly under Windows, you must call the + ``palette`` method upon certain events from Windows. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. The mode can be one of "1", + "L", "P", or "RGB". + :param size: If the first argument is a mode string, this + defines the size of the image. 
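A sketch of the extent transform described above, cropping a region and resizing it in a single step; the file name and box are hypothetical.

from PIL import Image

with Image.open("photo.png") as im:           # hypothetical input file
    # sample the rectangle (10, 10)-(110, 110) and stretch it to 256x256,
    # roughly equivalent to im.crop((10, 10, 110, 110)).resize((256, 256))
    zoomed = im.transform((256, 256), Image.EXTENT, (10, 10, 110, 110))
    zoomed.save("zoomed.png")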
+ """ + + def __init__(self, image, size=None): + if hasattr(image, "mode") and hasattr(image, "size"): + mode = image.mode + size = image.size + else: + mode = image + image = None + if mode not in ["1", "L", "P", "RGB"]: + mode = Image.getmodebase(mode) + self.image = Image.core.display(mode, size) + self.mode = mode + self.size = size + if image: + self.paste(image) + + def expose(self, handle): + """ + Copy the bitmap contents to a device context. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. In PythonWin, you can use + ``CDC.GetHandleAttrib()`` to get a suitable handle. + """ + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.expose(dc) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.expose(handle) + return result + + def draw(self, handle, dst, src=None): + """ + Same as expose, but allows you to specify where to draw the image, and + what part of it to draw. + + The destination and source areas are given as 4-tuple rectangles. If + the source is omitted, the entire image is copied. If the source and + the destination have different sizes, the image is resized as + necessary. + """ + if not src: + src = (0, 0) + self.size + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.draw(dc, dst, src) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.draw(handle, dst, src) + return result + + def query_palette(self, handle): + """ + Installs the palette associated with the image in the given device + context. + + This method should be called upon **QUERYNEWPALETTE** and + **PALETTECHANGED** events from Windows. If this method returns a + non-zero value, one or more display palette entries were changed, and + the image should be redrawn. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. + :return: A true value if one or more entries were changed (this + indicates that the image should be redrawn). + """ + if isinstance(handle, HWND): + handle = self.image.getdc(handle) + try: + result = self.image.query_palette(handle) + finally: + self.image.releasedc(handle, handle) + else: + result = self.image.query_palette(handle) + return result + + def paste(self, im, box=None): + """ + Paste a PIL image into the bitmap image. + + :param im: A PIL image. The size must match the target region. + If the mode does not match, the image is converted to the + mode of the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and + lower pixel coordinate. See :ref:`coordinate-system`. If + None is given instead of a tuple, all of the image is + assumed. + """ + im.load() + if self.mode != im.mode: + im = im.convert(self.mode) + if box: + self.image.paste(im.im, box) + else: + self.image.paste(im.im) + + def frombytes(self, buffer): + """ + Load display memory contents from byte data. + + :param buffer: A buffer containing display data (usually + data returned from :py:func:`~PIL.ImageWin.Dib.tobytes`) + """ + return self.image.frombytes(buffer) + + def tobytes(self): + """ + Copy display memory contents to bytes object. + + :return: A bytes object containing display data. 
+ """ + return self.image.tobytes() + + +class Window: + """Create a Window with the given title size.""" + + def __init__(self, title="PIL", width=None, height=None): + self.hwnd = Image.core.createwindow( + title, self.__dispatcher, width or 0, height or 0 + ) + + def __dispatcher(self, action, *args): + return getattr(self, "ui_handle_" + action)(*args) + + def ui_handle_clear(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_damage(self, x0, y0, x1, y1): + pass + + def ui_handle_destroy(self): + pass + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_resize(self, width, height): + pass + + def mainloop(self): + Image.core.eventloop() + + +class ImageWindow(Window): + """Create an image window which displays the given image.""" + + def __init__(self, image, title="PIL"): + if not isinstance(image, Dib): + image = Dib(image) + self.image = image + width, height = image.size + super().__init__(title, width=width, height=height) + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + self.image.draw(dc, (x0, y0, x1, y1)) diff --git a/minor_project/lib/python3.6/site-packages/PIL/ImtImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/ImtImagePlugin.py new file mode 100644 index 0000000..21ffd74 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/ImtImagePlugin.py @@ -0,0 +1,93 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IM Tools support for PIL +# +# history: +# 1996-05-27 fl Created (read 8-bit images only) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re + +from . import Image, ImageFile + +# +# -------------------------------------------------------------------- + +field = re.compile(br"([a-z]*) ([^ \r\n]*)") + + +## +# Image plugin for IM Tools images. + + +class ImtImageFile(ImageFile.ImageFile): + + format = "IMT" + format_description = "IM Tools" + + def _open(self): + + # Quick rejection: if there's not a LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + xsize = ysize = 0 + + while True: + + s = self.fp.read(1) + if not s: + break + + if s == b"\x0C": + + # image data begins + self.tile = [ + ("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1)) + ] + + break + + else: + + # read key/value pair + # FIXME: dangerous, may read whole file + s = s + self.fp.readline() + if len(s) == 1 or len(s) > 100: + break + if s[0] == ord(b"*"): + continue # comment + + m = field.match(s) + if not m: + break + k, v = m.group(1, 2) + if k == "width": + xsize = int(v) + self._size = xsize, ysize + elif k == "height": + ysize = int(v) + self._size = xsize, ysize + elif k == "pixel" and v == "n8": + self.mode = "L" + + +# +# -------------------------------------------------------------------- + +Image.register_open(ImtImageFile.format, ImtImageFile) + +# +# no extension registered (".im" is simply too common) diff --git a/minor_project/lib/python3.6/site-packages/PIL/IptcImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/IptcImagePlugin.py new file mode 100644 index 0000000..0bbe506 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/IptcImagePlugin.py @@ -0,0 +1,230 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# IPTC/NAA file handling +# +# history: +# 1995-10-01 fl Created +# 1998-03-09 fl Cleaned up and added to PIL +# 2002-06-18 fl Added getiptcinfo helper +# +# Copyright (c) Secret Labs AB 1997-2002. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# +import os +import tempfile + +from . import Image, ImageFile +from ._binary import i8 +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import o8 + +COMPRESSION = {1: "raw", 5: "jpeg"} + +PAD = o8(0) * 4 + + +# +# Helpers + + +def i(c): + return i32((PAD + c)[-4:]) + + +def dump(c): + for i in c: + print("%02x" % i8(i), end=" ") + print() + + +## +# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields +# from TIFF and JPEG files, use the getiptcinfo function. + + +class IptcImageFile(ImageFile.ImageFile): + + format = "IPTC" + format_description = "IPTC/NAA" + + def getint(self, key): + return i(self.info[key]) + + def field(self): + # + # get a IPTC field header + s = self.fp.read(5) + if not len(s): + return None, 0 + + tag = s[1], s[2] + + # syntax + if s[0] != 0x1C or tag[0] < 1 or tag[0] > 9: + raise SyntaxError("invalid IPTC/NAA file") + + # field size + size = s[3] + if size > 132: + raise OSError("illegal field length in IPTC/NAA file") + elif size == 128: + size = 0 + elif size > 128: + size = i(self.fp.read(size - 128)) + else: + size = i16(s, 3) + + return tag, size + + def _open(self): + + # load descriptive fields + while True: + offset = self.fp.tell() + tag, size = self.field() + if not tag or tag == (8, 10): + break + if size: + tagdata = self.fp.read(size) + else: + tagdata = None + if tag in self.info: + if isinstance(self.info[tag], list): + self.info[tag].append(tagdata) + else: + self.info[tag] = [self.info[tag], tagdata] + else: + self.info[tag] = tagdata + + # mode + layers = i8(self.info[(3, 60)][0]) + component = i8(self.info[(3, 60)][1]) + if (3, 65) in self.info: + id = i8(self.info[(3, 65)][0]) - 1 + else: + id = 0 + if layers == 1 and not component: + self.mode = "L" + elif layers == 3 and component: + self.mode = "RGB"[id] + elif layers == 4 and component: + self.mode = "CMYK"[id] + + # size + self._size = self.getint((3, 20)), self.getint((3, 30)) + + # compression + try: + compression = COMPRESSION[self.getint((3, 120))] + except KeyError as e: + raise OSError("Unknown IPTC image compression") from e + + # tile + if tag == (8, 10): + self.tile = [ + ("iptc", (compression, offset), (0, 0, self.size[0], self.size[1])) + ] + + def load(self): + + if len(self.tile) != 1 or self.tile[0][0] != "iptc": + return ImageFile.ImageFile.load(self) + + type, tile, box = self.tile[0] + + encoding, offset = tile + + self.fp.seek(offset) + + # Copy image data to temporary file + o_fd, outfile = tempfile.mkstemp(text=False) + o = os.fdopen(o_fd) + if encoding == "raw": + # To simplify access to the extracted file, + # prepend a PPM header + o.write("P5\n%d %d\n255\n" % self.size) + while True: + type, size = self.field() + if type != (8, 10): + break + while size > 0: + s = self.fp.read(min(size, 8192)) + if not s: + break + o.write(s) + size -= len(s) + o.close() + + try: + with Image.open(outfile) as _im: + _im.load() + self.im = _im.im + finally: + try: + os.unlink(outfile) + except OSError: + pass + + +Image.register_open(IptcImageFile.format, IptcImageFile) + +Image.register_extension(IptcImageFile.format, ".iim") + + +def getiptcinfo(im): + """ + Get IPTC information from TIFF, JPEG, or IPTC file. 
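A short sketch of getiptcinfo in use; the file name is hypothetical, and the (2, 120) caption and (2, 25) keywords record numbers are the conventional IPTC IIM datasets, assumed (not guaranteed) to be present in the file.

from PIL import Image
from PIL.IptcImagePlugin import getiptcinfo

with Image.open("photo.jpg") as im:           # hypothetical JPEG carrying IPTC data
    iptc = getiptcinfo(im)

if iptc:
    print(iptc.get((2, 120)))                 # caption/abstract, if present
    print(iptc.get((2, 25)))                  # keywords, if present
else:
    print("no IPTC block found")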
+ + :param im: An image containing IPTC data. + :returns: A dictionary containing IPTC information, or None if + no IPTC information block was found. + """ + import io + + from . import JpegImagePlugin, TiffImagePlugin + + data = None + + if isinstance(im, IptcImageFile): + # return info dictionary right away + return im.info + + elif isinstance(im, JpegImagePlugin.JpegImageFile): + # extract the IPTC/NAA resource + photoshop = im.info.get("photoshop") + if photoshop: + data = photoshop.get(0x0404) + + elif isinstance(im, TiffImagePlugin.TiffImageFile): + # get raw data from the IPTC/NAA tag (PhotoShop tags the data + # as 4-byte integers, so we cannot use the get method...) + try: + data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK] + except (AttributeError, KeyError): + pass + + if data is None: + return None # no properties + + # create an IptcImagePlugin object without initializing it + class FakeImage: + pass + + im = FakeImage() + im.__class__ = IptcImageFile + + # parse the IPTC information chunk + im.info = {} + im.fp = io.BytesIO(data) + + try: + im._open() + except (IndexError, KeyError): + pass # expected failure + + return im.info diff --git a/minor_project/lib/python3.6/site-packages/PIL/Jpeg2KImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/Jpeg2KImagePlugin.py new file mode 100644 index 0000000..0b0d433 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/Jpeg2KImagePlugin.py @@ -0,0 +1,314 @@ +# +# The Python Imaging Library +# $Id$ +# +# JPEG2000 file handling +# +# History: +# 2014-03-12 ajh Created +# +# Copyright (c) 2014 Coriolis Systems Limited +# Copyright (c) 2014 Alastair Houghton +# +# See the README file for information on usage and redistribution. +# +import io +import os +import struct + +from . 
import Image, ImageFile + + +def _parse_codestream(fp): + """Parse the JPEG 2000 codestream to extract the size and component + count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" + + hdr = fp.read(2) + lsiz = struct.unpack(">H", hdr)[0] + siz = hdr + fp.read(lsiz - 2) + lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, _, _, _, _, csiz = struct.unpack_from( + ">HHIIIIIIIIH", siz + ) + ssiz = [None] * csiz + xrsiz = [None] * csiz + yrsiz = [None] * csiz + for i in range(csiz): + ssiz[i], xrsiz[i], yrsiz[i] = struct.unpack_from(">BBB", siz, 36 + 3 * i) + + size = (xsiz - xosiz, ysiz - yosiz) + if csiz == 1: + if (yrsiz[0] & 0x7F) > 8: + mode = "I;16" + else: + mode = "L" + elif csiz == 2: + mode = "LA" + elif csiz == 3: + mode = "RGB" + elif csiz == 4: + mode = "RGBA" + else: + mode = None + + return (size, mode) + + +def _parse_jp2_header(fp): + """Parse the JP2 header box to extract size, component count and + color space information, returning a (size, mode, mimetype) tuple.""" + + # Find the JP2 header box + header = None + mimetype = None + while True: + lbox, tbox = struct.unpack(">I4s", fp.read(8)) + if lbox == 1: + lbox = struct.unpack(">Q", fp.read(8))[0] + hlen = 16 + else: + hlen = 8 + + if lbox < hlen: + raise SyntaxError("Invalid JP2 header length") + + if tbox == b"jp2h": + header = fp.read(lbox - hlen) + break + elif tbox == b"ftyp": + if fp.read(4) == b"jpx ": + mimetype = "image/jpx" + fp.seek(lbox - hlen - 4, os.SEEK_CUR) + else: + fp.seek(lbox - hlen, os.SEEK_CUR) + + if header is None: + raise SyntaxError("could not find JP2 header") + + size = None + mode = None + bpc = None + nc = None + + hio = io.BytesIO(header) + while True: + lbox, tbox = struct.unpack(">I4s", hio.read(8)) + if lbox == 1: + lbox = struct.unpack(">Q", hio.read(8))[0] + hlen = 16 + else: + hlen = 8 + + content = hio.read(lbox - hlen) + + if tbox == b"ihdr": + height, width, nc, bpc, c, unkc, ipr = struct.unpack(">IIHBBBB", content) + size = (width, height) + if unkc: + if nc == 1 and (bpc & 0x7F) > 8: + mode = "I;16" + elif nc == 1: + mode = "L" + elif nc == 2: + mode = "LA" + elif nc == 3: + mode = "RGB" + elif nc == 4: + mode = "RGBA" + break + elif tbox == b"colr": + meth, prec, approx = struct.unpack_from(">BBB", content) + if meth == 1: + cs = struct.unpack_from(">I", content, 3)[0] + if cs == 16: # sRGB + if nc == 1 and (bpc & 0x7F) > 8: + mode = "I;16" + elif nc == 1: + mode = "L" + elif nc == 3: + mode = "RGB" + elif nc == 4: + mode = "RGBA" + break + elif cs == 17: # grayscale + if nc == 1 and (bpc & 0x7F) > 8: + mode = "I;16" + elif nc == 1: + mode = "L" + elif nc == 2: + mode = "LA" + break + elif cs == 18: # sYCC + if nc == 3: + mode = "RGB" + elif nc == 4: + mode = "RGBA" + break + + if size is None or mode is None: + raise SyntaxError("Malformed jp2 header") + + return (size, mode, mimetype) + + +## +# Image plugin for JPEG2000 images. 
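A small sketch that distinguishes a raw JPEG 2000 codestream from a JP2 container by their signatures, mirroring the two parsing helpers above; the file name is hypothetical.

J2K_SIG = b"\xff\x4f\xff\x51"                            # raw codestream (SOC marker)
JP2_SIG = b"\x00\x00\x00\x0cjP\x20\x20\x0d\x0a\x87\x0a"  # JP2 signature box

def sniff_jpeg2000(path):
    """Return "j2k", "jp2" or None for the given file (illustrative helper)."""
    with open(path, "rb") as f:
        head = f.read(12)
    if head[:4] == J2K_SIG:
        return "j2k"
    if head == JP2_SIG:
        return "jp2"
    return None

print(sniff_jpeg2000("picture.jp2"))          # hypothetical input file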
+ + +class Jpeg2KImageFile(ImageFile.ImageFile): + format = "JPEG2000" + format_description = "JPEG 2000 (ISO 15444)" + + def _open(self): + sig = self.fp.read(4) + if sig == b"\xff\x4f\xff\x51": + self.codec = "j2k" + self._size, self.mode = _parse_codestream(self.fp) + else: + sig = sig + self.fp.read(8) + + if sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a": + self.codec = "jp2" + header = _parse_jp2_header(self.fp) + self._size, self.mode, self.custom_mimetype = header + else: + raise SyntaxError("not a JPEG 2000 file") + + if self.size is None or self.mode is None: + raise SyntaxError("unable to determine size/mode") + + self._reduce = 0 + self.layers = 0 + + fd = -1 + length = -1 + + try: + fd = self.fp.fileno() + length = os.fstat(fd).st_size + except Exception: + fd = -1 + try: + pos = self.fp.tell() + self.fp.seek(0, io.SEEK_END) + length = self.fp.tell() + self.fp.seek(pos) + except Exception: + length = -1 + + self.tile = [ + ( + "jpeg2k", + (0, 0) + self.size, + 0, + (self.codec, self._reduce, self.layers, fd, length), + ) + ] + + @property + def reduce(self): + # https://github.com/python-pillow/Pillow/issues/4343 found that the + # new Image 'reduce' method was shadowed by this plugin's 'reduce' + # property. This attempts to allow for both scenarios + return self._reduce or super().reduce + + @reduce.setter + def reduce(self, value): + self._reduce = value + + def load(self): + if self.tile and self._reduce: + power = 1 << self._reduce + adjust = power >> 1 + self._size = ( + int((self.size[0] + adjust) / power), + int((self.size[1] + adjust) / power), + ) + + # Update the reduce and layers settings + t = self.tile[0] + t3 = (t[3][0], self._reduce, self.layers, t[3][3], t[3][4]) + self.tile = [(t[0], (0, 0) + self.size, t[2], t3)] + + return ImageFile.ImageFile.load(self) + + +def _accept(prefix): + return ( + prefix[:4] == b"\xff\x4f\xff\x51" + or prefix[:12] == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a" + ) + + +# ------------------------------------------------------------ +# Save support + + +def _save(im, fp, filename): + if filename.endswith(".j2k"): + kind = "j2k" + else: + kind = "jp2" + + # Get the keyword arguments + info = im.encoderinfo + + offset = info.get("offset", None) + tile_offset = info.get("tile_offset", None) + tile_size = info.get("tile_size", None) + quality_mode = info.get("quality_mode", "rates") + quality_layers = info.get("quality_layers", None) + if quality_layers is not None and not ( + isinstance(quality_layers, (list, tuple)) + and all( + [ + isinstance(quality_layer, (int, float)) + for quality_layer in quality_layers + ] + ) + ): + raise ValueError("quality_layers must be a sequence of numbers") + + num_resolutions = info.get("num_resolutions", 0) + cblk_size = info.get("codeblock_size", None) + precinct_size = info.get("precinct_size", None) + irreversible = info.get("irreversible", False) + progression = info.get("progression", "LRCP") + cinema_mode = info.get("cinema_mode", "no") + fd = -1 + + if hasattr(fp, "fileno"): + try: + fd = fp.fileno() + except Exception: + fd = -1 + + im.encoderconfig = ( + offset, + tile_offset, + tile_size, + quality_mode, + quality_layers, + num_resolutions, + cblk_size, + precinct_size, + irreversible, + progression, + cinema_mode, + fd, + ) + + ImageFile._save(im, fp, [("jpeg2k", (0, 0) + im.size, 0, kind)]) + + +# ------------------------------------------------------------ +# Registry stuff + + +Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) +Image.register_save(Jpeg2KImageFile.format, 
_save) + +Image.register_extensions( + Jpeg2KImageFile.format, [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"] +) + +Image.register_mime(Jpeg2KImageFile.format, "image/jp2") diff --git a/minor_project/lib/python3.6/site-packages/PIL/JpegImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/JpegImagePlugin.py new file mode 100644 index 0000000..054495e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/JpegImagePlugin.py @@ -0,0 +1,805 @@ +# +# The Python Imaging Library. +# $Id$ +# +# JPEG (JFIF) file handling +# +# See "Digital Compression and Coding of Continuous-Tone Still Images, +# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1) +# +# History: +# 1995-09-09 fl Created +# 1995-09-13 fl Added full parser +# 1996-03-25 fl Added hack to use the IJG command line utilities +# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug +# 1996-05-28 fl Added draft support, JFIF version (0.1) +# 1996-12-30 fl Added encoder options, added progression property (0.2) +# 1997-08-27 fl Save mode 1 images as BW (0.3) +# 1998-07-12 fl Added YCbCr to draft and save methods (0.4) +# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1) +# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2) +# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3) +# 2003-04-25 fl Added experimental EXIF decoder (0.5) +# 2003-06-06 fl Added experimental EXIF GPSinfo decoder +# 2003-09-13 fl Extract COM markers +# 2009-09-06 fl Added icc_profile support (from Florian Hoech) +# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6) +# 2009-03-08 fl Added subsampling support (from Justin Huff). +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# +import array +import io +import os +import struct +import subprocess +import sys +import tempfile +import warnings + +from . import Image, ImageFile, TiffImagePlugin +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import o8 +from .JpegPresets import presets + +# +# Parser + + +def Skip(self, marker): + n = i16(self.fp.read(2)) - 2 + ImageFile._safe_read(self.fp, n) + + +def APP(self, marker): + # + # Application marker. Store these in the APP dictionary. + # Also look for well-known application markers. + + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + + app = "APP%d" % (marker & 15) + + self.app[app] = s # compatibility + self.applist.append((app, s)) + + if marker == 0xFFE0 and s[:4] == b"JFIF": + # extract JFIF information + self.info["jfif"] = version = i16(s, 5) # version + self.info["jfif_version"] = divmod(version, 256) + # extract JFIF properties + try: + jfif_unit = s[7] + jfif_density = i16(s, 8), i16(s, 10) + except Exception: + pass + else: + if jfif_unit == 1: + self.info["dpi"] = jfif_density + self.info["jfif_unit"] = jfif_unit + self.info["jfif_density"] = jfif_density + elif marker == 0xFFE1 and s[:5] == b"Exif\0": + if "exif" not in self.info: + # extract EXIF information (incomplete) + self.info["exif"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:5] == b"FPXR\0": + # extract FlashPix information (incomplete) + self.info["flashpix"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0": + # Since an ICC profile can be larger than the maximum size of + # a JPEG marker (64K), we need provisions to split it into + # multiple markers. 
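A standalone sketch of the marker walk the APP handler above relies on: after SOI, each header segment carries a two-byte big-endian length that includes the length field itself. The reader below is deliberately simplified (it assumes a well-formed header with no fill bytes), and the file name is hypothetical.

import struct

def list_app_segments(path):
    """Yield (marker, first payload bytes) for APPn segments before the scan."""
    with open(path, "rb") as f:
        assert f.read(2) == b"\xff\xd8"                 # SOI
        while True:
            marker, = struct.unpack(">H", f.read(2))
            if marker == 0xFFDA:                        # SOS: compressed data follows
                break
            size, = struct.unpack(">H", f.read(2))      # includes the 2 length bytes
            payload = f.read(size - 2)
            if 0xFFE0 <= marker <= 0xFFEF:
                yield marker, payload[:12]

for marker, head in list_app_segments("photo.jpg"):     # hypothetical input file
    print(hex(marker), head)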
The format defined by the ICC specifies + # one or more APP2 markers containing the following data: + # Identifying string ASCII "ICC_PROFILE\0" (12 bytes) + # Marker sequence number 1, 2, etc (1 byte) + # Number of markers Total of APP2's used (1 byte) + # Profile data (remainder of APP2 data) + # Decoders should use the marker sequence numbers to + # reassemble the profile, rather than assuming that the APP2 + # markers appear in the correct sequence. + self.icclist.append(s) + elif marker == 0xFFED and s[:14] == b"Photoshop 3.0\x00": + # parse the image resource block + offset = 14 + photoshop = self.info.setdefault("photoshop", {}) + while s[offset : offset + 4] == b"8BIM": + try: + offset += 4 + # resource code + code = i16(s, offset) + offset += 2 + # resource name (usually empty) + name_len = s[offset] + # name = s[offset+1:offset+1+name_len] + offset += 1 + name_len + offset += offset & 1 # align + # resource data block + size = i32(s, offset) + offset += 4 + data = s[offset : offset + size] + if code == 0x03ED: # ResolutionInfo + data = { + "XResolution": i32(data, 0) / 65536, + "DisplayedUnitsX": i16(data, 4), + "YResolution": i32(data, 8) / 65536, + "DisplayedUnitsY": i16(data, 12), + } + photoshop[code] = data + offset += size + offset += offset & 1 # align + except struct.error: + break # insufficient data + + elif marker == 0xFFEE and s[:5] == b"Adobe": + self.info["adobe"] = i16(s, 5) + # extract Adobe custom properties + try: + adobe_transform = s[1] + except Exception: + pass + else: + self.info["adobe_transform"] = adobe_transform + elif marker == 0xFFE2 and s[:4] == b"MPF\0": + # extract MPO information + self.info["mp"] = s[4:] + # offset is current location minus buffer size + # plus constant header size + self.info["mpoffset"] = self.fp.tell() - n + 4 + + # If DPI isn't in JPEG header, fetch from EXIF + if "dpi" not in self.info and "exif" in self.info: + try: + exif = self.getexif() + resolution_unit = exif[0x0128] + x_resolution = exif[0x011A] + try: + dpi = float(x_resolution[0]) / x_resolution[1] + except TypeError: + dpi = x_resolution + if resolution_unit == 3: # cm + # 1 dpcm = 2.54 dpi + dpi *= 2.54 + self.info["dpi"] = int(dpi + 0.5), int(dpi + 0.5) + except (KeyError, SyntaxError, ValueError, ZeroDivisionError): + # SyntaxError for invalid/unreadable EXIF + # KeyError for dpi not included + # ZeroDivisionError for invalid dpi rational value + # ValueError for x_resolution[0] being an invalid float + self.info["dpi"] = 72, 72 + + +def COM(self, marker): + # + # Comment marker. Store these in the APP dictionary. + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + + self.info["comment"] = s + self.app["COM"] = s # compatibility + self.applist.append(("COM", s)) + + +def SOF(self, marker): + # + # Start of frame marker. Defines the size and mode of the + # image. JPEG is colour blind, so we use some simple + # heuristics to map the number of layers to an appropriate + # mode. Note that this could be made a bit brighter, by + # looking for JFIF and Adobe APP markers. 
+ + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + self._size = i16(s, 3), i16(s, 1) + + self.bits = s[0] + if self.bits != 8: + raise SyntaxError(f"cannot handle {self.bits}-bit layers") + + self.layers = s[5] + if self.layers == 1: + self.mode = "L" + elif self.layers == 3: + self.mode = "RGB" + elif self.layers == 4: + self.mode = "CMYK" + else: + raise SyntaxError(f"cannot handle {self.layers}-layer images") + + if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]: + self.info["progressive"] = self.info["progression"] = 1 + + if self.icclist: + # fixup icc profile + self.icclist.sort() # sort by sequence number + if self.icclist[0][13] == len(self.icclist): + profile = [] + for p in self.icclist: + profile.append(p[14:]) + icc_profile = b"".join(profile) + else: + icc_profile = None # wrong number of fragments + self.info["icc_profile"] = icc_profile + self.icclist = [] + + for i in range(6, len(s), 3): + t = s[i : i + 3] + # 4-tuples: id, vsamp, hsamp, qtable + self.layer.append((t[0], t[1] // 16, t[1] & 15, t[2])) + + +def DQT(self, marker): + # + # Define quantization table. Note that there might be more + # than one table in each marker. + + # FIXME: The quantization tables can be used to estimate the + # compression quality. + + n = i16(self.fp.read(2)) - 2 + s = ImageFile._safe_read(self.fp, n) + while len(s): + v = s[0] + precision = 1 if (v // 16 == 0) else 2 # in bytes + qt_length = 1 + precision * 64 + if len(s) < qt_length: + raise SyntaxError("bad quantization table marker") + data = array.array("B" if precision == 1 else "H", s[1:qt_length]) + if sys.byteorder == "little" and precision > 1: + data.byteswap() # the values are always big-endian + self.quantization[v & 15] = data + s = s[qt_length:] + + +# +# JPEG marker table + +MARKER = { + 0xFFC0: ("SOF0", "Baseline DCT", SOF), + 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF), + 0xFFC2: ("SOF2", "Progressive DCT", SOF), + 0xFFC3: ("SOF3", "Spatial lossless", SOF), + 0xFFC4: ("DHT", "Define Huffman table", Skip), + 0xFFC5: ("SOF5", "Differential sequential DCT", SOF), + 0xFFC6: ("SOF6", "Differential progressive DCT", SOF), + 0xFFC7: ("SOF7", "Differential spatial", SOF), + 0xFFC8: ("JPG", "Extension", None), + 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF), + 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF), + 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF), + 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip), + 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF), + 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF), + 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF), + 0xFFD0: ("RST0", "Restart 0", None), + 0xFFD1: ("RST1", "Restart 1", None), + 0xFFD2: ("RST2", "Restart 2", None), + 0xFFD3: ("RST3", "Restart 3", None), + 0xFFD4: ("RST4", "Restart 4", None), + 0xFFD5: ("RST5", "Restart 5", None), + 0xFFD6: ("RST6", "Restart 6", None), + 0xFFD7: ("RST7", "Restart 7", None), + 0xFFD8: ("SOI", "Start of image", None), + 0xFFD9: ("EOI", "End of image", None), + 0xFFDA: ("SOS", "Start of scan", Skip), + 0xFFDB: ("DQT", "Define quantization table", DQT), + 0xFFDC: ("DNL", "Define number of lines", Skip), + 0xFFDD: ("DRI", "Define restart interval", Skip), + 0xFFDE: ("DHP", "Define hierarchical progression", SOF), + 0xFFDF: ("EXP", "Expand reference component", Skip), + 0xFFE0: ("APP0", "Application segment 0", APP), + 0xFFE1: ("APP1", "Application segment 1", APP), + 0xFFE2: ("APP2", "Application segment 2", APP), + 0xFFE3: ("APP3", "Application segment 3", APP), + 
0xFFE4: ("APP4", "Application segment 4", APP), + 0xFFE5: ("APP5", "Application segment 5", APP), + 0xFFE6: ("APP6", "Application segment 6", APP), + 0xFFE7: ("APP7", "Application segment 7", APP), + 0xFFE8: ("APP8", "Application segment 8", APP), + 0xFFE9: ("APP9", "Application segment 9", APP), + 0xFFEA: ("APP10", "Application segment 10", APP), + 0xFFEB: ("APP11", "Application segment 11", APP), + 0xFFEC: ("APP12", "Application segment 12", APP), + 0xFFED: ("APP13", "Application segment 13", APP), + 0xFFEE: ("APP14", "Application segment 14", APP), + 0xFFEF: ("APP15", "Application segment 15", APP), + 0xFFF0: ("JPG0", "Extension 0", None), + 0xFFF1: ("JPG1", "Extension 1", None), + 0xFFF2: ("JPG2", "Extension 2", None), + 0xFFF3: ("JPG3", "Extension 3", None), + 0xFFF4: ("JPG4", "Extension 4", None), + 0xFFF5: ("JPG5", "Extension 5", None), + 0xFFF6: ("JPG6", "Extension 6", None), + 0xFFF7: ("JPG7", "Extension 7", None), + 0xFFF8: ("JPG8", "Extension 8", None), + 0xFFF9: ("JPG9", "Extension 9", None), + 0xFFFA: ("JPG10", "Extension 10", None), + 0xFFFB: ("JPG11", "Extension 11", None), + 0xFFFC: ("JPG12", "Extension 12", None), + 0xFFFD: ("JPG13", "Extension 13", None), + 0xFFFE: ("COM", "Comment", COM), +} + + +def _accept(prefix): + # Magic number was taken from https://en.wikipedia.org/wiki/JPEG + return prefix[0:3] == b"\xFF\xD8\xFF" + + +## +# Image plugin for JPEG and JFIF images. + + +class JpegImageFile(ImageFile.ImageFile): + + format = "JPEG" + format_description = "JPEG (ISO 10918)" + + def _open(self): + + s = self.fp.read(3) + + if not _accept(s): + raise SyntaxError("not a JPEG file") + s = b"\xFF" + + # Create attributes + self.bits = self.layers = 0 + + # JPEG specifics (internal) + self.layer = [] + self.huffman_dc = {} + self.huffman_ac = {} + self.quantization = {} + self.app = {} # compatibility + self.applist = [] + self.icclist = [] + + while True: + + i = s[0] + if i == 0xFF: + s = s + self.fp.read(1) + i = i16(s) + else: + # Skip non-0xFF junk + s = self.fp.read(1) + continue + + if i in MARKER: + name, description, handler = MARKER[i] + if handler is not None: + handler(self, i) + if i == 0xFFDA: # start of scan + rawmode = self.mode + if self.mode == "CMYK": + rawmode = "CMYK;I" # assume adobe conventions + self.tile = [("jpeg", (0, 0) + self.size, 0, (rawmode, ""))] + # self.__offset = self.fp.tell() + break + s = self.fp.read(1) + elif i == 0 or i == 0xFFFF: + # padded marker or junk; move on + s = b"\xff" + elif i == 0xFF00: # Skip extraneous data (escaped 0xFF) + s = self.fp.read(1) + else: + raise SyntaxError("no marker found") + + def load_read(self, read_bytes): + """ + internal: read more image data + For premature EOF and LOAD_TRUNCATED_IMAGES adds EOI marker + so libjpeg can finish decoding + """ + s = self.fp.read(read_bytes) + + if not s and ImageFile.LOAD_TRUNCATED_IMAGES: + # Premature EOF. 
+ # Pretend file is finished adding EOI marker + return b"\xFF\xD9" + + return s + + def draft(self, mode, size): + + if len(self.tile) != 1: + return + + # Protect from second call + if self.decoderconfig: + return + + d, e, o, a = self.tile[0] + scale = 1 + original_size = self.size + + if a[0] == "RGB" and mode in ["L", "YCbCr"]: + self.mode = mode + a = mode, "" + + if size: + scale = min(self.size[0] // size[0], self.size[1] // size[1]) + for s in [8, 4, 2, 1]: + if scale >= s: + break + e = ( + e[0], + e[1], + (e[2] - e[0] + s - 1) // s + e[0], + (e[3] - e[1] + s - 1) // s + e[1], + ) + self._size = ((self.size[0] + s - 1) // s, (self.size[1] + s - 1) // s) + scale = s + + self.tile = [(d, e, o, a)] + self.decoderconfig = (scale, 0) + + box = (0, 0, original_size[0] / scale, original_size[1] / scale) + return (self.mode, box) + + def load_djpeg(self): + + # ALTERNATIVE: handle JPEGs via the IJG command line utilities + + f, path = tempfile.mkstemp() + os.close(f) + if os.path.exists(self.filename): + subprocess.check_call(["djpeg", "-outfile", path, self.filename]) + else: + raise ValueError("Invalid Filename") + + try: + with Image.open(path) as _im: + _im.load() + self.im = _im.im + finally: + try: + os.unlink(path) + except OSError: + pass + + self.mode = self.im.mode + self._size = self.im.size + + self.tile = [] + + def _getexif(self): + return _getexif(self) + + def _getmp(self): + return _getmp(self) + + +def _getexif(self): + if "exif" not in self.info: + return None + return dict(self.getexif()) + + +def _getmp(self): + # Extract MP information. This method was inspired by the "highly + # experimental" _getexif version that's been in use for years now, + # itself based on the ImageFileDirectory class in the TIFF plugin. + + # The MP record essentially consists of a TIFF file embedded in a JPEG + # application marker. 
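The draft() method above lets libjpeg decode at 1/2, 1/4 or 1/8 scale before any pixels are loaded, which is far cheaper than resizing afterwards; a usage sketch with a hypothetical file:

from PIL import Image

with Image.open("large_photo.jpg") as im:     # hypothetical input file
    im.draft("RGB", (640, 480))               # request a suitable reduced scale
    im.load()
    print(im.size)                            # at least the requested size
    im.save("preview.jpg")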
+ try: + data = self.info["mp"] + except KeyError: + return None + file_contents = io.BytesIO(data) + head = file_contents.read(8) + endianness = ">" if head[:4] == b"\x4d\x4d\x00\x2a" else "<" + # process dictionary + try: + info = TiffImagePlugin.ImageFileDirectory_v2(head) + file_contents.seek(info.next) + info.load(file_contents) + mp = dict(info) + except Exception as e: + raise SyntaxError("malformed MP Index (unreadable directory)") from e + # it's an error not to have a number of images + try: + quant = mp[0xB001] + except KeyError as e: + raise SyntaxError("malformed MP Index (no number of images)") from e + # get MP entries + mpentries = [] + try: + rawmpentries = mp[0xB002] + for entrynum in range(0, quant): + unpackedentry = struct.unpack_from( + f"{endianness}LLLHH", rawmpentries, entrynum * 16 + ) + labels = ("Attribute", "Size", "DataOffset", "EntryNo1", "EntryNo2") + mpentry = dict(zip(labels, unpackedentry)) + mpentryattr = { + "DependentParentImageFlag": bool(mpentry["Attribute"] & (1 << 31)), + "DependentChildImageFlag": bool(mpentry["Attribute"] & (1 << 30)), + "RepresentativeImageFlag": bool(mpentry["Attribute"] & (1 << 29)), + "Reserved": (mpentry["Attribute"] & (3 << 27)) >> 27, + "ImageDataFormat": (mpentry["Attribute"] & (7 << 24)) >> 24, + "MPType": mpentry["Attribute"] & 0x00FFFFFF, + } + if mpentryattr["ImageDataFormat"] == 0: + mpentryattr["ImageDataFormat"] = "JPEG" + else: + raise SyntaxError("unsupported picture format in MPO") + mptypemap = { + 0x000000: "Undefined", + 0x010001: "Large Thumbnail (VGA Equivalent)", + 0x010002: "Large Thumbnail (Full HD Equivalent)", + 0x020001: "Multi-Frame Image (Panorama)", + 0x020002: "Multi-Frame Image: (Disparity)", + 0x020003: "Multi-Frame Image: (Multi-Angle)", + 0x030000: "Baseline MP Primary Image", + } + mpentryattr["MPType"] = mptypemap.get(mpentryattr["MPType"], "Unknown") + mpentry["Attribute"] = mpentryattr + mpentries.append(mpentry) + mp[0xB002] = mpentries + except KeyError as e: + raise SyntaxError("malformed MP Index (bad MP Entry)") from e + # Next we should try and parse the individual image unique ID list; + # we don't because I've never seen this actually used in a real MPO + # file and so can't test it. + return mp + + +# -------------------------------------------------------------------- +# stuff to save JPEG files + +RAWMODE = { + "1": "L", + "L": "L", + "RGB": "RGB", + "RGBX": "RGB", + "CMYK": "CMYK;I", # assume adobe conventions + "YCbCr": "YCbCr", +} + +# fmt: off +zigzag_index = ( + 0, 1, 5, 6, 14, 15, 27, 28, + 2, 4, 7, 13, 16, 26, 29, 42, + 3, 8, 12, 17, 25, 30, 41, 43, + 9, 11, 18, 24, 31, 40, 44, 53, + 10, 19, 23, 32, 39, 45, 52, 54, + 20, 22, 33, 38, 46, 51, 55, 60, + 21, 34, 37, 47, 50, 56, 59, 61, + 35, 36, 48, 49, 57, 58, 62, 63, +) + +samplings = { + (1, 1, 1, 1, 1, 1): 0, + (2, 1, 1, 1, 1, 1): 1, + (2, 2, 1, 1, 1, 1): 2, +} +# fmt: on + + +def convert_dict_qtables(qtables): + qtables = [qtables[key] for key in range(len(qtables)) if key in qtables] + for idx, table in enumerate(qtables): + qtables[idx] = [table[i] for i in zigzag_index] + return qtables + + +def get_sampling(im): + # There's no subsampling when images have only 1 layer + # (grayscale images) or when they are CMYK (4 layers), + # so set subsampling to the default value. + # + # NOTE: currently Pillow can't encode JPEG to YCCK format. + # If YCCK support is added in the future, subsampling code will have + # to be updated (here and in JpegEncode.c) to deal with 4 layers. 
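A sketch of how the subsampling and quantization plumbing above surfaces in the save API: re-encoding a JPEG while keeping the source file's tables and subsampling. The file names are hypothetical.

from PIL import Image

with Image.open("original.jpg") as im:        # hypothetical JPEG input
    print(sorted(im.quantization))            # table ids collected by the DQT handler
    # "keep" reuses the original quality, subsampling and quantization tables
    im.save("recompressed.jpg", quality="keep")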
+ if not hasattr(im, "layers") or im.layers in (1, 4): + return -1 + sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] + return samplings.get(sampling, -1) + + +def _save(im, fp, filename): + + try: + rawmode = RAWMODE[im.mode] + except KeyError as e: + raise OSError(f"cannot write mode {im.mode} as JPEG") from e + + info = im.encoderinfo + + dpi = [round(x) for x in info.get("dpi", (0, 0))] + + quality = info.get("quality", -1) + subsampling = info.get("subsampling", -1) + qtables = info.get("qtables") + + if quality == "keep": + quality = -1 + subsampling = "keep" + qtables = "keep" + elif quality in presets: + preset = presets[quality] + quality = -1 + subsampling = preset.get("subsampling", -1) + qtables = preset.get("quantization") + elif not isinstance(quality, int): + raise ValueError("Invalid quality setting") + else: + if subsampling in presets: + subsampling = presets[subsampling].get("subsampling", -1) + if isinstance(qtables, str) and qtables in presets: + qtables = presets[qtables].get("quantization") + + if subsampling == "4:4:4": + subsampling = 0 + elif subsampling == "4:2:2": + subsampling = 1 + elif subsampling == "4:2:0": + subsampling = 2 + elif subsampling == "4:1:1": + # For compatibility. Before Pillow 4.3, 4:1:1 actually meant 4:2:0. + # Set 4:2:0 if someone is still using that value. + subsampling = 2 + elif subsampling == "keep": + if im.format != "JPEG": + raise ValueError("Cannot use 'keep' when original image is not a JPEG") + subsampling = get_sampling(im) + + def validate_qtables(qtables): + if qtables is None: + return qtables + if isinstance(qtables, str): + try: + lines = [ + int(num) + for line in qtables.splitlines() + for num in line.split("#", 1)[0].split() + ] + except ValueError as e: + raise ValueError("Invalid quantization table") from e + else: + qtables = [lines[s : s + 64] for s in range(0, len(lines), 64)] + if isinstance(qtables, (tuple, list, dict)): + if isinstance(qtables, dict): + qtables = convert_dict_qtables(qtables) + elif isinstance(qtables, tuple): + qtables = list(qtables) + if not (0 < len(qtables) < 5): + raise ValueError("None or too many quantization tables") + for idx, table in enumerate(qtables): + try: + if len(table) != 64: + raise TypeError + table = array.array("H", table) + except TypeError as e: + raise ValueError("Invalid quantization table") from e + else: + qtables[idx] = list(table) + return qtables + + if qtables == "keep": + if im.format != "JPEG": + raise ValueError("Cannot use 'keep' when original image is not a JPEG") + qtables = getattr(im, "quantization", None) + qtables = validate_qtables(qtables) + + extra = b"" + + icc_profile = info.get("icc_profile") + if icc_profile: + ICC_OVERHEAD_LEN = 14 + MAX_BYTES_IN_MARKER = 65533 + MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN + markers = [] + while icc_profile: + markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER]) + icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:] + i = 1 + for marker in markers: + size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker)) + extra += ( + b"\xFF\xE2" + + size + + b"ICC_PROFILE\0" + + o8(i) + + o8(len(markers)) + + marker + ) + i += 1 + + # "progressive" is the official name, but older documentation + # says "progression" + # FIXME: issue a warning if the wrong form is used (post-1.1.7) + progressive = info.get("progressive", False) or info.get("progression", False) + + optimize = info.get("optimize", False) + + exif = info.get("exif", b"") + if isinstance(exif, Image.Exif): + exif = 
exif.tobytes() + + # get keyword arguments + im.encoderconfig = ( + quality, + progressive, + info.get("smooth", 0), + optimize, + info.get("streamtype", 0), + dpi[0], + dpi[1], + subsampling, + qtables, + extra, + exif, + ) + + # if we optimize, libjpeg needs a buffer big enough to hold the whole image + # in a shot. Guessing on the size, at im.size bytes. (raw pixel size is + # channels*size, this is a value that's been used in a django patch. + # https://github.com/matthewwithanm/django-imagekit/issues/50 + bufsize = 0 + if optimize or progressive: + # CMYK can be bigger + if im.mode == "CMYK": + bufsize = 4 * im.size[0] * im.size[1] + # keep sets quality to -1, but the actual value may be high. + elif quality >= 95 or quality == -1: + bufsize = 2 * im.size[0] * im.size[1] + else: + bufsize = im.size[0] * im.size[1] + + # The EXIF info needs to be written as one block, + APP1, + one spare byte. + # Ensure that our buffer is big enough. Same with the icc_profile block. + bufsize = max(ImageFile.MAXBLOCK, bufsize, len(exif) + 5, len(extra) + 1) + + ImageFile._save(im, fp, [("jpeg", (0, 0) + im.size, 0, rawmode)], bufsize) + + +def _save_cjpeg(im, fp, filename): + # ALTERNATIVE: handle JPEGs via the IJG command line utilities. + tempfile = im._dump() + subprocess.check_call(["cjpeg", "-outfile", filename, tempfile]) + try: + os.unlink(tempfile) + except OSError: + pass + + +## +# Factory for making JPEG and MPO instances +def jpeg_factory(fp=None, filename=None): + im = JpegImageFile(fp, filename) + try: + mpheader = im._getmp() + if mpheader[45057] > 1: + # It's actually an MPO + from .MpoImagePlugin import MpoImageFile + + # Don't reload everything, just convert it. + im = MpoImageFile.adopt(im, mpheader) + except (TypeError, IndexError): + # It is really a JPEG + pass + except SyntaxError: + warnings.warn( + "Image appears to be a malformed MPO file, it will be " + "interpreted as a base JPEG file" + ) + return im + + +# --------------------------------------------------------------------- +# Registry stuff + +Image.register_open(JpegImageFile.format, jpeg_factory, _accept) +Image.register_save(JpegImageFile.format, _save) + +Image.register_extensions(JpegImageFile.format, [".jfif", ".jpe", ".jpg", ".jpeg"]) + +Image.register_mime(JpegImageFile.format, "image/jpeg") diff --git a/minor_project/lib/python3.6/site-packages/PIL/JpegPresets.py b/minor_project/lib/python3.6/site-packages/PIL/JpegPresets.py new file mode 100644 index 0000000..79d10eb --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/JpegPresets.py @@ -0,0 +1,248 @@ +""" +JPEG quality settings equivalent to the Photoshop settings. +Can be used when saving JPEG files. + +The following presets are available by default: +``web_low``, ``web_medium``, ``web_high``, ``web_very_high``, ``web_maximum``, +``low``, ``medium``, ``high``, ``maximum``. +More presets can be added to the :py:data:`presets` dict if needed. + +To apply the preset, specify:: + + quality="preset_name" + +To apply only the quantization table:: + + qtables="preset_name" + +To apply only the subsampling setting:: + + subsampling="preset_name" + +Example:: + + im.save("image_name.jpg", quality="web_high") + +Subsampling +----------- + +Subsampling is the practice of encoding images by implementing less resolution +for chroma information than for luma information. +(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling) + +Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and +4:2:0. 
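Beyond the named presets described in this docstring, the save path in `JpegImagePlugin._save` above also accepts explicit per-call options. A hedged sketch of the keywords it parses; the file names are hypothetical:

```python
from PIL import Image

with Image.open("input.png") as im:      # hypothetical source image
    rgb = im.convert("RGB")
    rgb.save(
        "output.jpg",
        quality=85,              # an integer, or a preset name such as "web_high"
        subsampling="4:2:0",     # mapped to the numeric value 2 by _save
        progressive=True,        # "progression" is accepted as a legacy alias
        optimize=True,           # enlarges the encoder buffer, see the bufsize logic above
        dpi=(300, 300),
    )
```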
+ +You can get the subsampling of a JPEG with the +:func:`.JpegImagePlugin.get_sampling` function. + +In JPEG compressed data a JPEG marker is used instead of an EXIF tag. +(ref.: https://www.exiv2.org/tags.html) + + +Quantization tables +------------------- + +They are values use by the DCT (Discrete cosine transform) to remove +*unnecessary* information from the image (the lossy part of the compression). +(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices, +https://en.wikipedia.org/wiki/JPEG#Quantization) + +You can get the quantization tables of a JPEG with:: + + im.quantization + +This will return a dict with a number of arrays. You can pass this dict +directly as the qtables argument when saving a JPEG. + +The tables format between im.quantization and quantization in presets differ in +3 ways: + +1. The base container of the preset is a list with sublists instead of dict. + dict[0] -> list[0], dict[1] -> list[1], ... +2. Each table in a preset is a list instead of an array. +3. The zigzag order is remove in the preset (needed by libjpeg >= 6a). + +You can convert the dict format to the preset format with the +:func:`.JpegImagePlugin.convert_dict_qtables()` function. + +Libjpeg ref.: +https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html + +""" + +# fmt: off +presets = { + 'web_low': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [20, 16, 25, 39, 50, 46, 62, 68, + 16, 18, 23, 38, 38, 53, 65, 68, + 25, 23, 31, 38, 53, 65, 68, 68, + 39, 38, 38, 53, 65, 68, 68, 68, + 50, 38, 53, 65, 68, 68, 68, 68, + 46, 53, 65, 68, 68, 68, 68, 68, + 62, 65, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68], + [21, 25, 32, 38, 54, 68, 68, 68, + 25, 28, 24, 38, 54, 68, 68, 68, + 32, 24, 32, 43, 66, 68, 68, 68, + 38, 38, 43, 53, 68, 68, 68, 68, + 54, 54, 66, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68] + ]}, + 'web_medium': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [16, 11, 11, 16, 23, 27, 31, 30, + 11, 12, 12, 15, 20, 23, 23, 30, + 11, 12, 13, 16, 23, 26, 35, 47, + 16, 15, 16, 23, 26, 37, 47, 64, + 23, 20, 23, 26, 39, 51, 64, 64, + 27, 23, 26, 37, 51, 64, 64, 64, + 31, 23, 35, 47, 64, 64, 64, 64, + 30, 30, 47, 64, 64, 64, 64, 64], + [17, 15, 17, 21, 20, 26, 38, 48, + 15, 19, 18, 17, 20, 26, 35, 43, + 17, 18, 20, 22, 26, 30, 46, 53, + 21, 17, 22, 28, 30, 39, 53, 64, + 20, 20, 26, 30, 39, 48, 64, 64, + 26, 26, 30, 39, 48, 63, 64, 64, + 38, 35, 46, 53, 64, 64, 64, 64, + 48, 43, 53, 64, 64, 64, 64, 64] + ]}, + 'web_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 14, 19, + 6, 6, 6, 11, 12, 15, 19, 28, + 9, 8, 10, 12, 16, 20, 27, 31, + 11, 10, 12, 15, 20, 27, 31, 31, + 12, 12, 14, 19, 27, 31, 31, 31, + 16, 12, 19, 28, 31, 31, 31, 31], + [7, 7, 13, 24, 26, 31, 31, 31, + 7, 12, 16, 21, 31, 31, 31, 31, + 13, 16, 17, 31, 31, 31, 31, 31, + 24, 21, 31, 31, 31, 31, 31, 31, + 26, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31] + ]}, + 'web_very_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 11, 14, 12, 12, 12, + 5, 6, 
9, 14, 12, 12, 12, 12, + 9, 11, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'web_maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 2, + 1, 1, 1, 1, 1, 1, 2, 2, + 1, 1, 1, 1, 1, 2, 2, 3, + 1, 1, 1, 1, 2, 2, 3, 3, + 1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 2, 2, 3, 3, 3, 3], + [1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 1, 2, 3, 3, 3, 3, + 1, 1, 1, 3, 3, 3, 3, 3, + 2, 2, 3, 3, 3, 3, 3, 3, + 2, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3] + ]}, + 'low': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [18, 14, 14, 21, 30, 35, 34, 17, + 14, 16, 16, 19, 26, 23, 12, 12, + 14, 16, 17, 21, 23, 12, 12, 12, + 21, 19, 21, 23, 12, 12, 12, 12, + 30, 26, 23, 12, 12, 12, 12, 12, + 35, 23, 12, 12, 12, 12, 12, 12, + 34, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [20, 19, 22, 27, 20, 20, 17, 17, + 19, 25, 23, 14, 14, 12, 12, 12, + 22, 23, 14, 14, 12, 12, 12, 12, + 27, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'medium': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [12, 8, 8, 12, 17, 21, 24, 17, + 8, 9, 9, 11, 15, 19, 12, 12, + 8, 9, 10, 12, 19, 12, 12, 12, + 12, 11, 12, 21, 12, 12, 12, 12, + 17, 15, 19, 12, 12, 12, 12, 12, + 21, 19, 12, 12, 12, 12, 12, 12, + 24, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [13, 11, 13, 16, 20, 20, 17, 17, + 11, 14, 14, 14, 14, 12, 12, 12, + 13, 14, 14, 14, 12, 12, 12, 12, + 16, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 12, 12, + 6, 6, 6, 11, 12, 12, 12, 12, + 9, 8, 10, 12, 12, 12, 12, 12, + 11, 10, 12, 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, 12, 12, 12, + 16, 12, 12, 12, 12, 12, 12, 12], + [7, 7, 13, 24, 20, 20, 17, 17, + 7, 12, 16, 14, 14, 12, 12, 12, + 13, 16, 14, 14, 12, 12, 12, 12, + 24, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 10, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 10, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, +} +# fmt: on diff --git a/minor_project/lib/python3.6/site-packages/PIL/McIdasImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/McIdasImagePlugin.py new file mode 100644 index 0000000..cd047fe --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/McIdasImagePlugin.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Basic McIdas support for PIL +# +# History: +# 1997-05-05 fl Created (8-bit images only) +# 2009-03-08 fl Added 16/32-bit support. 
+# +# Thanks to Richard Jones and Craig Swank for specs and samples. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +import struct + +from . import Image, ImageFile + + +def _accept(s): + return s[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" + + +## +# Image plugin for McIdas area images. + + +class McIdasImageFile(ImageFile.ImageFile): + + format = "MCIDAS" + format_description = "McIdas area file" + + def _open(self): + + # parse area file directory + s = self.fp.read(256) + if not _accept(s) or len(s) != 256: + raise SyntaxError("not an McIdas area file") + + self.area_descriptor_raw = s + self.area_descriptor = w = [0] + list(struct.unpack("!64i", s)) + + # get mode + if w[11] == 1: + mode = rawmode = "L" + elif w[11] == 2: + # FIXME: add memory map support + mode = "I" + rawmode = "I;16B" + elif w[11] == 4: + # FIXME: add memory map support + mode = "I" + rawmode = "I;32B" + else: + raise SyntaxError("unsupported McIdas format") + + self.mode = mode + self._size = w[10], w[9] + + offset = w[34] + w[15] + stride = w[15] + w[10] * w[11] * w[14] + + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] + + +# -------------------------------------------------------------------- +# registry + +Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept) + +# no default extension diff --git a/minor_project/lib/python3.6/site-packages/PIL/MicImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/MicImagePlugin.py new file mode 100644 index 0000000..2aed260 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/MicImagePlugin.py @@ -0,0 +1,107 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Microsoft Image Composer support for PIL +# +# Notes: +# uses TiffImagePlugin.py to read the actual image streams +# +# History: +# 97-01-20 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + + +import olefile + +from . import Image, TiffImagePlugin + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:8] == olefile.MAGIC + + +## +# Image plugin for Microsoft's Image Composer file format. + + +class MicImageFile(TiffImagePlugin.TiffImageFile): + + format = "MIC" + format_description = "Microsoft Image Composer" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # read the OLE directory and see if this is a likely + # to be a Microsoft Image Composer file + + try: + self.ole = olefile.OleFileIO(self.fp) + except OSError as e: + raise SyntaxError("not an MIC file; invalid OLE file") from e + + # find ACI subfiles with Image members (maybe not the + # best way to identify MIC files, but what the... ;-) + + self.images = [] + for path in self.ole.listdir(): + if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image": + self.images.append(path) + + # if we didn't find any images, this is probably not + # an MIC file. 
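The loop above identifies frames by looking for OLE storages named `*.ACI` that contain an `Image` stream. That test can be exercised on its own against `olefile.OleFileIO.listdir()`-style paths; the storage names below are made up for illustration:

```python
def is_mic_image_stream(path):
    # an ".ACI" storage whose member stream is called "Image", as in _open above
    return bool(path[1:]) and path[0][-4:] == ".ACI" and path[1] == "Image"

listing = [                        # hypothetical listdir() output
    ["Sprite 1.ACI", "Image"],
    ["Sprite 1.ACI", "Thumbnail"],
    ["\x05SummaryInformation"],
]
print([p for p in listing if is_mic_image_stream(p)])
# [['Sprite 1.ACI', 'Image']]
```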
+ if not self.images: + raise SyntaxError("not an MIC file; no image entries") + + self.__fp = self.fp + self.frame = None + self._n_frames = len(self.images) + self.is_animated = self._n_frames > 1 + + if len(self.images) > 1: + self.category = Image.CONTAINER + + self.seek(0) + + def seek(self, frame): + if not self._seek_check(frame): + return + try: + filename = self.images[frame] + except IndexError as e: + raise EOFError("no such frame") from e + + self.fp = self.ole.openstream(filename) + + TiffImagePlugin.TiffImageFile._open(self) + + self.frame = frame + + def tell(self): + return self.frame + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + +# +# -------------------------------------------------------------------- + +Image.register_open(MicImageFile.format, MicImageFile, _accept) + +Image.register_extension(MicImageFile.format, ".mic") diff --git a/minor_project/lib/python3.6/site-packages/PIL/MpegImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/MpegImagePlugin.py new file mode 100644 index 0000000..a358dfd --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/MpegImagePlugin.py @@ -0,0 +1,83 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPEG file handling +# +# History: +# 95-09-09 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile +from ._binary import i8 + +# +# Bitstream parser + + +class BitStream: + def __init__(self, fp): + self.fp = fp + self.bits = 0 + self.bitbuffer = 0 + + def next(self): + return i8(self.fp.read(1)) + + def peek(self, bits): + while self.bits < bits: + c = self.next() + if c < 0: + self.bits = 0 + continue + self.bitbuffer = (self.bitbuffer << 8) + c + self.bits += 8 + return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1 + + def skip(self, bits): + while self.bits < bits: + self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1)) + self.bits += 8 + self.bits = self.bits - bits + + def read(self, bits): + v = self.peek(bits) + self.bits = self.bits - bits + return v + + +## +# Image plugin for MPEG streams. This plugin can identify a stream, +# but it cannot read it. + + +class MpegImageFile(ImageFile.ImageFile): + + format = "MPEG" + format_description = "MPEG" + + def _open(self): + + s = BitStream(self.fp) + + if s.read(32) != 0x1B3: + raise SyntaxError("not an MPEG file") + + self.mode = "RGB" + self._size = s.read(12), s.read(12) + + +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(MpegImageFile.format, MpegImageFile) + +Image.register_extensions(MpegImageFile.format, [".mpg", ".mpeg"]) + +Image.register_mime(MpegImageFile.format, "video/mpeg") diff --git a/minor_project/lib/python3.6/site-packages/PIL/MpoImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/MpoImagePlugin.py new file mode 100644 index 0000000..575cc9c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/MpoImagePlugin.py @@ -0,0 +1,134 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPO file handling +# +# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the +# Camera & Imaging Products Association) +# +# The multi-picture object combines multiple JPEG images (with a modified EXIF +# data format) into a single file. 
While it can theoretically be used much like +# a GIF animation, it is commonly used to represent 3D photographs and is (as +# of this writing) the most commonly used format by 3D cameras. +# +# History: +# 2014-03-13 Feneric Created +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile, JpegImagePlugin +from ._binary import i16be as i16 + + +def _accept(prefix): + return JpegImagePlugin._accept(prefix) + + +def _save(im, fp, filename): + # Note that we can only save the current frame at present + return JpegImagePlugin._save(im, fp, filename) + + +## +# Image plugin for MPO images. + + +class MpoImageFile(JpegImagePlugin.JpegImageFile): + + format = "MPO" + format_description = "MPO (CIPA DC-007)" + _close_exclusive_fp_after_loading = False + + def _open(self): + self.fp.seek(0) # prep the fp in order to pass the JPEG test + JpegImagePlugin.JpegImageFile._open(self) + self._after_jpeg_open() + + def _after_jpeg_open(self, mpheader=None): + self.mpinfo = mpheader if mpheader is not None else self._getmp() + self.n_frames = self.mpinfo[0xB001] + self.__mpoffsets = [ + mpent["DataOffset"] + self.info["mpoffset"] for mpent in self.mpinfo[0xB002] + ] + self.__mpoffsets[0] = 0 + # Note that the following assertion will only be invalid if something + # gets broken within JpegImagePlugin. + assert self.n_frames == len(self.__mpoffsets) + del self.info["mpoffset"] # no longer needed + self.is_animated = self.n_frames > 1 + self.__fp = self.fp # FIXME: hack + self.__fp.seek(self.__mpoffsets[0]) # get ready to read first frame + self.__frame = 0 + self.offset = 0 + # for now we can only handle reading and individual frame extraction + self.readonly = 1 + + def load_seek(self, pos): + self.__fp.seek(pos) + + def seek(self, frame): + if not self._seek_check(frame): + return + self.fp = self.__fp + self.offset = self.__mpoffsets[frame] + + self.fp.seek(self.offset + 2) # skip SOI marker + segment = self.fp.read(2) + if not segment: + raise ValueError("No data found for frame") + if i16(segment) == 0xFFE1: # APP1 + n = i16(self.fp.read(2)) - 2 + self.info["exif"] = ImageFile._safe_read(self.fp, n) + + exif = self.getexif() + if 40962 in exif and 40963 in exif: + self._size = (exif[40962], exif[40963]) + elif "exif" in self.info: + del self.info["exif"] + + self.tile = [("jpeg", (0, 0) + self.size, self.offset, (self.mode, ""))] + self.__frame = frame + + def tell(self): + return self.__frame + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + @staticmethod + def adopt(jpeg_instance, mpheader=None): + """ + Transform the instance of JpegImageFile into + an instance of MpoImageFile. + After the call, the JpegImageFile is extended + to be an MpoImageFile. + + This is essentially useful when opening a JPEG + file that reveals itself as an MPO, to avoid + double call to _open. + """ + jpeg_instance.__class__ = MpoImageFile + jpeg_instance._after_jpeg_open(mpheader) + return jpeg_instance + + +# --------------------------------------------------------------------- +# Registry stuff + +# Note that since MPO shares a factory with JPEG, we do not need to do a +# separate registration for it here. 
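Because `jpeg_factory()` (earlier in this diff) adopts a `JpegImageFile` into `MpoImageFile` whenever the MP index reports more than one image, callers just use the ordinary multi-frame API. A small sketch; the file name is hypothetical:

```python
from PIL import Image

with Image.open("stereo.mpo") as im:      # hypothetical MPO file
    print(type(im).__name__, im.n_frames, im.is_animated)
    for frame in range(im.n_frames):
        im.seek(frame)                    # repositions fp at the frame's SOI, as above
        im.load()
        print(frame, im.size)
```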
+# Image.register_open(MpoImageFile.format, +# JpegImagePlugin.jpeg_factory, _accept) +Image.register_save(MpoImageFile.format, _save) + +Image.register_extension(MpoImageFile.format, ".mpo") + +Image.register_mime(MpoImageFile.format, "image/mpo") diff --git a/minor_project/lib/python3.6/site-packages/PIL/MspImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/MspImagePlugin.py new file mode 100644 index 0000000..e1fdc1f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/MspImagePlugin.py @@ -0,0 +1,194 @@ +# +# The Python Imaging Library. +# +# MSP file handling +# +# This is the format used by the Paint program in Windows 1 and 2. +# +# History: +# 95-09-05 fl Created +# 97-01-03 fl Read/write MSP images +# 17-02-21 es Fixed RLE interpretation +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-97. +# Copyright (c) Eric Soroos 2017. +# +# See the README file for information on usage and redistribution. +# +# More info on this format: https://archive.org/details/gg243631 +# Page 313: +# Figure 205. Windows Paint Version 1: "DanM" Format +# Figure 206. Windows Paint Version 2: "LinS" Format. Used in Windows V2.03 +# +# See also: http://www.fileformat.info/format/mspaint/egff.htm + +import io +import struct + +from . import Image, ImageFile +from ._binary import i16le as i16 +from ._binary import o16le as o16 + +# +# read MSP files + + +def _accept(prefix): + return prefix[:4] in [b"DanM", b"LinS"] + + +## +# Image plugin for Windows MSP images. This plugin supports both +# uncompressed (Windows 1.0). + + +class MspImageFile(ImageFile.ImageFile): + + format = "MSP" + format_description = "Windows Paint" + + def _open(self): + + # Header + s = self.fp.read(32) + if not _accept(s): + raise SyntaxError("not an MSP file") + + # Header checksum + checksum = 0 + for i in range(0, 32, 2): + checksum = checksum ^ i16(s, i) + if checksum != 0: + raise SyntaxError("bad MSP checksum") + + self.mode = "1" + self._size = i16(s, 4), i16(s, 6) + + if s[:4] == b"DanM": + self.tile = [("raw", (0, 0) + self.size, 32, ("1", 0, 1))] + else: + self.tile = [("MSP", (0, 0) + self.size, 32, None)] + + +class MspDecoder(ImageFile.PyDecoder): + # The algo for the MSP decoder is from + # http://www.fileformat.info/format/mspaint/egff.htm + # cc-by-attribution -- That page references is taken from the + # Encyclopedia of Graphics File Formats and is licensed by + # O'Reilly under the Creative Common/Attribution license + # + # For RLE encoded files, the 32byte header is followed by a scan + # line map, encoded as one 16bit word of encoded byte length per + # line. + # + # NOTE: the encoded length of the line can be 0. This was not + # handled in the previous version of this encoder, and there's no + # mention of how to handle it in the documentation. From the few + # examples I've seen, I've assumed that it is a fill of the + # background color, in this case, white. 
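A standalone sketch of the run-length scheme just described (and spelled out in the pseudocode that follows): a runtype of 0 introduces a (count, value) run, any other value is a literal byte count. It reproduces the worked example given in the comment below.

```python
def decode_msp_row(row: bytes) -> bytes:
    out = bytearray()
    idx = 0
    while idx < len(row):
        runtype = row[idx]
        idx += 1
        if runtype == 0:                       # RLE run: next bytes are count, value
            runcount, runval = row[idx], row[idx + 1]
            out += bytes([runval]) * runcount
            idx += 2
        else:                                  # literal run of `runtype` bytes
            out += row[idx : idx + runtype]
            idx += runtype
    return bytes(out)


assert decode_msp_row(bytes([0x00, 0x03, 0xFF, 0x05, 0x00, 0x01, 0x02, 0x03, 0x04])) == bytes(
    [0xFF, 0xFF, 0xFF, 0x00, 0x01, 0x02, 0x03, 0x04]
)
```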
+ # + # + # Pseudocode of the decoder: + # Read a BYTE value as the RunType + # If the RunType value is zero + # Read next byte as the RunCount + # Read the next byte as the RunValue + # Write the RunValue byte RunCount times + # If the RunType value is non-zero + # Use this value as the RunCount + # Read and write the next RunCount bytes literally + # + # e.g.: + # 0x00 03 ff 05 00 01 02 03 04 + # would yield the bytes: + # 0xff ff ff 00 01 02 03 04 + # + # which are then interpreted as a bit packed mode '1' image + + _pulls_fd = True + + def decode(self, buffer): + + img = io.BytesIO() + blank_line = bytearray((0xFF,) * ((self.state.xsize + 7) // 8)) + try: + self.fd.seek(32) + rowmap = struct.unpack_from( + f"<{self.state.ysize}H", self.fd.read(self.state.ysize * 2) + ) + except struct.error as e: + raise OSError("Truncated MSP file in row map") from e + + for x, rowlen in enumerate(rowmap): + try: + if rowlen == 0: + img.write(blank_line) + continue + row = self.fd.read(rowlen) + if len(row) != rowlen: + raise OSError( + "Truncated MSP file, expected %d bytes on row %s", (rowlen, x) + ) + idx = 0 + while idx < rowlen: + runtype = row[idx] + idx += 1 + if runtype == 0: + (runcount, runval) = struct.unpack_from("Bc", row, idx) + img.write(runval * runcount) + idx += 2 + else: + runcount = runtype + img.write(row[idx : idx + runcount]) + idx += runcount + + except struct.error as e: + raise OSError(f"Corrupted MSP file in row {x}") from e + + self.set_as_raw(img.getvalue(), ("1", 0, 1)) + + return 0, 0 + + +Image.register_decoder("MSP", MspDecoder) + + +# +# write MSP files (uncompressed only) + + +def _save(im, fp, filename): + + if im.mode != "1": + raise OSError(f"cannot write mode {im.mode} as MSP") + + # create MSP header + header = [0] * 16 + + header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 + header[2], header[3] = im.size + header[4], header[5] = 1, 1 + header[6], header[7] = 1, 1 + header[8], header[9] = im.size + + checksum = 0 + for h in header: + checksum = checksum ^ h + header[12] = checksum # FIXME: is this the right field? + + # header + for h in header: + fp.write(o16(h)) + + # image body + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 32, ("1", 0, 1))]) + + +# +# registry + +Image.register_open(MspImageFile.format, MspImageFile, _accept) +Image.register_save(MspImageFile.format, _save) + +Image.register_extension(MspImageFile.format, ".msp") diff --git a/minor_project/lib/python3.6/site-packages/PIL/PSDraw.py b/minor_project/lib/python3.6/site-packages/PIL/PSDraw.py new file mode 100644 index 0000000..c1bd933 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PSDraw.py @@ -0,0 +1,235 @@ +# +# The Python Imaging Library +# $Id$ +# +# Simple PostScript graphics interface +# +# History: +# 1996-04-20 fl Created +# 1999-01-10 fl Added gsave/grestore to image method +# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge) +# +# Copyright (c) 1997-2005 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import sys + +from . import EpsImagePlugin + +## +# Simple PostScript graphics interface. + + +class PSDraw: + """ + Sets up printing to the given file. If ``fp`` is omitted, + :py:data:`sys.stdout` is assumed. 
+ """ + + def __init__(self, fp=None): + if not fp: + fp = sys.stdout + self.fp = fp + + def _fp_write(self, to_write): + if self.fp == sys.stdout: + self.fp.write(to_write) + else: + self.fp.write(bytes(to_write, "UTF-8")) + + def begin_document(self, id=None): + """Set up printing of a document. (Write PostScript DSC header.)""" + # FIXME: incomplete + self._fp_write( + "%!PS-Adobe-3.0\n" + "save\n" + "/showpage { } def\n" + "%%EndComments\n" + "%%BeginDocument\n" + ) + # self._fp_write(ERROR_PS) # debugging! + self._fp_write(EDROFF_PS) + self._fp_write(VDI_PS) + self._fp_write("%%EndProlog\n") + self.isofont = {} + + def end_document(self): + """Ends printing. (Write PostScript DSC footer.)""" + self._fp_write("%%EndDocument\nrestore showpage\n%%End\n") + if hasattr(self.fp, "flush"): + self.fp.flush() + + def setfont(self, font, size): + """ + Selects which font to use. + + :param font: A PostScript font name + :param size: Size in points. + """ + if font not in self.isofont: + # reencode font + self._fp_write(f"/PSDraw-{font} ISOLatin1Encoding /{font} E\n") + self.isofont[font] = 1 + # rough + self._fp_write(f"/F0 {size} /PSDraw-{font} F\n") + + def line(self, xy0, xy1): + """ + Draws a line between the two points. Coordinates are given in + PostScript point coordinates (72 points per inch, (0, 0) is the lower + left corner of the page). + """ + self._fp_write("%d %d %d %d Vl\n" % (*xy0, *xy1)) + + def rectangle(self, box): + """ + Draws a rectangle. + + :param box: A 4-tuple of integers whose order and function is currently + undocumented. + + Hint: the tuple is passed into this format string: + + .. code-block:: python + + %d %d M %d %d 0 Vr\n + """ + self._fp_write("%d %d M %d %d 0 Vr\n" % box) + + def text(self, xy, text): + """ + Draws text at the given position. You must use + :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method. + """ + text = "\\(".join(text.split("(")) + text = "\\)".join(text.split(")")) + self._fp_write(f"{xy[0]} {xy[1]} M ({text}) S\n") + + def image(self, box, im, dpi=None): + """Draw a PIL image, centered in the given box.""" + # default resolution depends on mode + if not dpi: + if im.mode == "1": + dpi = 200 # fax + else: + dpi = 100 # greyscale + # image size (on paper) + x = im.size[0] * 72 / dpi + y = im.size[1] * 72 / dpi + # max allowed size + xmax = float(box[2] - box[0]) + ymax = float(box[3] - box[1]) + if x > xmax: + y = y * xmax / x + x = xmax + if y > ymax: + x = x * ymax / y + y = ymax + dx = (xmax - x) / 2 + box[0] + dy = (ymax - y) / 2 + box[1] + self._fp_write(f"gsave\n{dx:f} {dy:f} translate\n") + if (x, y) != im.size: + # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) + sx = x / im.size[0] + sy = y / im.size[1] + self._fp_write(f"{sx:f} {sy:f} scale\n") + EpsImagePlugin._save(im, self.fp, None, 0) + self._fp_write("\ngrestore\n") + + +# -------------------------------------------------------------------- +# PostScript driver + +# +# EDROFF.PS -- PostScript driver for Edroff 2 +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. 
+# + + +EDROFF_PS = """\ +/S { show } bind def +/P { moveto show } bind def +/M { moveto } bind def +/X { 0 rmoveto } bind def +/Y { 0 exch rmoveto } bind def +/E { findfont + dup maxlength dict begin + { + 1 index /FID ne { def } { pop pop } ifelse + } forall + /Encoding exch def + dup /FontName exch def + currentdict end definefont pop +} bind def +/F { findfont exch scalefont dup setfont + [ exch /setfont cvx ] cvx bind def +} bind def +""" + +# +# VDI.PS -- PostScript driver for VDI meta commands +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. +# + +VDI_PS = """\ +/Vm { moveto } bind def +/Va { newpath arcn stroke } bind def +/Vl { moveto lineto stroke } bind def +/Vc { newpath 0 360 arc closepath } bind def +/Vr { exch dup 0 rlineto + exch dup neg 0 exch rlineto + exch neg 0 rlineto + 0 exch rlineto + 100 div setgray fill 0 setgray } bind def +/Tm matrix def +/Ve { Tm currentmatrix pop + translate scale newpath 0 0 .5 0 360 arc closepath + Tm setmatrix +} bind def +/Vf { currentgray exch setgray fill setgray } bind def +""" + +# +# ERROR.PS -- Error handler +# +# History: +# 89-11-21 fl: created (pslist 1.10) +# + +ERROR_PS = """\ +/landscape false def +/errorBUF 200 string def +/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def +errordict begin /handleerror { + initmatrix /Courier findfont 10 scalefont setfont + newpath 72 720 moveto $error begin /newerror false def + (PostScript Error) show errorNL errorNL + (Error: ) show + /errorname load errorBUF cvs show errorNL errorNL + (Command: ) show + /command load dup type /stringtype ne { errorBUF cvs } if show + errorNL errorNL + (VMstatus: ) show + vmstatus errorBUF cvs show ( bytes available, ) show + errorBUF cvs show ( bytes used at level ) show + errorBUF cvs show errorNL errorNL + (Operand stargck: ) show errorNL /ostargck load { + dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL + } forall errorNL + (Execution stargck: ) show errorNL /estargck load { + dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL + } forall + end showpage +} def end +""" diff --git a/minor_project/lib/python3.6/site-packages/PIL/PaletteFile.py b/minor_project/lib/python3.6/site-packages/PIL/PaletteFile.py new file mode 100644 index 0000000..6ccaa1f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PaletteFile.py @@ -0,0 +1,53 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read simple, teragon-style palette files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. 
+# + +from ._binary import o8 + + +class PaletteFile: + """File handler for Teragon-style palette files.""" + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [(i, i, i) for i in range(256)] + + while True: + + s = fp.readline() + + if not s: + break + if s[0:1] == b"#": + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = [int(x) for x in s.split()] + try: + [i, r, g, b] = v + except ValueError: + [i, r] = v + g = b = r + + if 0 <= i <= 255: + self.palette[i] = o8(r) + o8(g) + o8(b) + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/minor_project/lib/python3.6/site-packages/PIL/PalmImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/PalmImagePlugin.py new file mode 100644 index 0000000..700f10e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PalmImagePlugin.py @@ -0,0 +1,227 @@ +# +# The Python Imaging Library. +# $Id$ +# + +## +# Image plugin for Palm pixmap images (output only). +## + +from . import Image, ImageFile +from ._binary import o8 +from ._binary import o16be as o16b + +# fmt: off +_Palm8BitColormapValues = ( + (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255), + (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204), + (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204), + (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153), + (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255), + (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255), + (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204), + (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153), + (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153), + (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255), + (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204), + (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204), + (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153), + (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255), + (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255), + (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204), + (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153), + (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153), + (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255), + (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204), + (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204), + (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153), + (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255), + (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255), + (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204), + (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153), + (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153), + (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102), + (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51), + (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51), + (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), + (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102), + (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102), + (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51), + (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0), + (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0), + (153, 255, 102), 
(153, 204, 102), (153, 153, 102), (153, 102, 102), + (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51), + (153, 153, 51), (153, 102, 51), (153, 51, 51), (153, 0, 51), + (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0), + (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102), + (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102), + (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51), + (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0), + (102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0), + (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102), + (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51), + (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51), + (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0), + (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102), + (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102), + (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51), + (0, 51, 51), (0, 0, 51), (0, 255, 0), (0, 204, 0), + (0, 153, 0), (0, 102, 0), (0, 51, 0), (17, 17, 17), + (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119), + (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221), + (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128), + (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)) +# fmt: on + + +# so build a prototype image to be used for palette resampling +def build_prototype_image(): + image = Image.new("L", (1, len(_Palm8BitColormapValues))) + image.putdata(list(range(len(_Palm8BitColormapValues)))) + palettedata = () + for colormapValue in _Palm8BitColormapValues: + palettedata += colormapValue + palettedata += (0, 0, 0) * (256 - len(_Palm8BitColormapValues)) + image.putpalette(palettedata) + return image + + +Palm8BitColormapImage = build_prototype_image() + +# OK, we now have in Palm8BitColormapImage, +# a "P"-mode image with the right palette +# +# -------------------------------------------------------------------- + +_FLAGS = {"custom-colormap": 0x4000, "is-compressed": 0x8000, "has-transparent": 0x2000} + +_COMPRESSION_TYPES = {"none": 0xFF, "rle": 0x01, "scanline": 0x00} + + +# +# -------------------------------------------------------------------- + +## +# (Internal) Image save plugin for the Palm format. + + +def _save(im, fp, filename): + + if im.mode == "P": + + # we assume this is a color Palm image with the standard colormap, + # unless the "info" dict has a "custom-colormap" field + + rawmode = "P" + bpp = 8 + version = 1 + + elif im.mode == "L": + if im.encoderinfo.get("bpp") in (1, 2, 4): + # this is 8-bit grayscale, so we shift it to get the high-order bits, + # and invert it because + # Palm does greyscale from white (0) to black (1) + bpp = im.encoderinfo["bpp"] + im = im.point( + lambda x, shift=8 - bpp, maxval=(1 << bpp) - 1: maxval - (x >> shift) + ) + elif im.info.get("bpp") in (1, 2, 4): + # here we assume that even though the inherent mode is 8-bit grayscale, + # only the lower bpp bits are significant. + # We invert them to match the Palm. 
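The `point()` lambdas above map 8-bit grayscale onto inverted bpp-bit values, since Palm grayscale runs from 0 (white) up to the maximum value (black). The same arithmetic in isolation, with bpp standing in for the value taken from `encoderinfo` in the branch above:

```python
bpp = 4
shift, maxval = 8 - bpp, (1 << bpp) - 1

def to_palm(x):
    # keep the high-order bits, then invert: white (255) -> 0, black (0) -> 15
    return maxval - (x >> shift)

assert to_palm(255) == 0
assert to_palm(0) == 15
assert to_palm(128) == 7
```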
+ bpp = im.info["bpp"] + im = im.point(lambda x, maxval=(1 << bpp) - 1: maxval - (x & maxval)) + else: + raise OSError(f"cannot write mode {im.mode} as Palm") + + # we ignore the palette here + im.mode = "P" + rawmode = "P;" + str(bpp) + version = 1 + + elif im.mode == "1": + + # monochrome -- write it inverted, as is the Palm standard + rawmode = "1;I" + bpp = 1 + version = 0 + + else: + + raise OSError(f"cannot write mode {im.mode} as Palm") + + # + # make sure image data is available + im.load() + + # write header + + cols = im.size[0] + rows = im.size[1] + + rowbytes = int((cols + (16 // bpp - 1)) / (16 // bpp)) * 2 + transparent_index = 0 + compression_type = _COMPRESSION_TYPES["none"] + + flags = 0 + if im.mode == "P" and "custom-colormap" in im.info: + flags = flags & _FLAGS["custom-colormap"] + colormapsize = 4 * 256 + 2 + colormapmode = im.palette.mode + colormap = im.getdata().getpalette() + else: + colormapsize = 0 + + if "offset" in im.info: + offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4 + else: + offset = 0 + + fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags)) + fp.write(o8(bpp)) + fp.write(o8(version)) + fp.write(o16b(offset)) + fp.write(o8(transparent_index)) + fp.write(o8(compression_type)) + fp.write(o16b(0)) # reserved by Palm + + # now write colormap if necessary + + if colormapsize > 0: + fp.write(o16b(256)) + for i in range(256): + fp.write(o8(i)) + if colormapmode == "RGB": + fp.write( + o8(colormap[3 * i]) + + o8(colormap[3 * i + 1]) + + o8(colormap[3 * i + 2]) + ) + elif colormapmode == "RGBA": + fp.write( + o8(colormap[4 * i]) + + o8(colormap[4 * i + 1]) + + o8(colormap[4 * i + 2]) + ) + + # now convert data to raw form + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, rowbytes, 1))]) + + if hasattr(fp, "flush"): + fp.flush() + + +# +# -------------------------------------------------------------------- + +Image.register_save("Palm", _save) + +Image.register_extension("Palm", ".palm") + +Image.register_mime("Palm", "image/palm") diff --git a/minor_project/lib/python3.6/site-packages/PIL/PcdImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/PcdImagePlugin.py new file mode 100644 index 0000000..38caf5c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PcdImagePlugin.py @@ -0,0 +1,63 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCD file handling +# +# History: +# 96-05-10 fl Created +# 96-05-27 fl Added draft mode (128x192, 256x384) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile + +## +# Image plugin for PhotoCD images. This plugin only reads the 768x512 +# image from the file; higher resolutions are encoded in a proprietary +# encoding. + + +class PcdImageFile(ImageFile.ImageFile): + + format = "PCD" + format_description = "Kodak PhotoCD" + + def _open(self): + + # rough + self.fp.seek(2048) + s = self.fp.read(2048) + + if s[:4] != b"PCD_": + raise SyntaxError("not a PCD file") + + orientation = s[1538] & 3 + self.tile_post_rotate = None + if orientation == 1: + self.tile_post_rotate = 90 + elif orientation == 3: + self.tile_post_rotate = -90 + + self.mode = "RGB" + self._size = 768, 512 # FIXME: not correct for rotated images! 
+ self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)] + + def load_end(self): + if self.tile_post_rotate: + # Handle rotated PCDs + self.im = self.im.rotate(self.tile_post_rotate) + self._size = self.im.size + + +# +# registry + +Image.register_open(PcdImageFile.format, PcdImageFile) + +Image.register_extension(PcdImageFile.format, ".pcd") diff --git a/minor_project/lib/python3.6/site-packages/PIL/PcfFontFile.py b/minor_project/lib/python3.6/site-packages/PIL/PcfFontFile.py new file mode 100644 index 0000000..6a4eb22 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PcfFontFile.py @@ -0,0 +1,248 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library +# $Id$ +# +# portable compiled font file parser +# +# history: +# 1997-08-19 fl created +# 2003-09-13 fl fixed loading of unicode fonts +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import io + +from . import FontFile, Image +from ._binary import i8 +from ._binary import i16be as b16 +from ._binary import i16le as l16 +from ._binary import i32be as b32 +from ._binary import i32le as l32 + +# -------------------------------------------------------------------- +# declarations + +PCF_MAGIC = 0x70636601 # "\x01fcp" + +PCF_PROPERTIES = 1 << 0 +PCF_ACCELERATORS = 1 << 1 +PCF_METRICS = 1 << 2 +PCF_BITMAPS = 1 << 3 +PCF_INK_METRICS = 1 << 4 +PCF_BDF_ENCODINGS = 1 << 5 +PCF_SWIDTHS = 1 << 6 +PCF_GLYPH_NAMES = 1 << 7 +PCF_BDF_ACCELERATORS = 1 << 8 + +BYTES_PER_ROW = [ + lambda bits: ((bits + 7) >> 3), + lambda bits: ((bits + 15) >> 3) & ~1, + lambda bits: ((bits + 31) >> 3) & ~3, + lambda bits: ((bits + 63) >> 3) & ~7, +] + + +def sz(s, o): + return s[o : s.index(b"\0", o)] + + +class PcfFontFile(FontFile.FontFile): + """Font file plugin for the X11 PCF format.""" + + name = "name" + + def __init__(self, fp, charset_encoding="iso8859-1"): + + self.charset_encoding = charset_encoding + + magic = l32(fp.read(4)) + if magic != PCF_MAGIC: + raise SyntaxError("not a PCF file") + + super().__init__() + + count = l32(fp.read(4)) + self.toc = {} + for i in range(count): + type = l32(fp.read(4)) + self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4)) + + self.fp = fp + + self.info = self._load_properties() + + metrics = self._load_metrics() + bitmaps = self._load_bitmaps(metrics) + encoding = self._load_encoding() + + # + # create glyph structure + + for ch in range(256): + ix = encoding[ch] + if ix is not None: + x, y, l, r, w, a, d, f = metrics[ix] + glyph = (w, 0), (l, d - y, x + l, d), (0, 0, x, y), bitmaps[ix] + self.glyph[ch] = glyph + + def _getformat(self, tag): + + format, size, offset = self.toc[tag] + + fp = self.fp + fp.seek(offset) + + format = l32(fp.read(4)) + + if format & 4: + i16, i32 = b16, b32 + else: + i16, i32 = l16, l32 + + return fp, format, i16, i32 + + def _load_properties(self): + + # + # font properties + + properties = {} + + fp, format, i16, i32 = self._getformat(PCF_PROPERTIES) + + nprops = i32(fp.read(4)) + + # read property description + p = [] + for i in range(nprops): + p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4)))) + if nprops & 3: + fp.seek(4 - (nprops & 3), io.SEEK_CUR) # pad + + data = fp.read(i32(fp.read(4))) + + for k, s, v in p: + k = sz(data, k) + if s: + v = sz(data, v) + properties[k] = v + + return properties + + def _load_metrics(self): + + # + # font metrics + + metrics = [] + + fp, format, i16, i32 = self._getformat(PCF_METRICS) 
+ + append = metrics.append + + if (format & 0xFF00) == 0x100: + + # "compressed" metrics + for i in range(i16(fp.read(2))): + left = i8(fp.read(1)) - 128 + right = i8(fp.read(1)) - 128 + width = i8(fp.read(1)) - 128 + ascent = i8(fp.read(1)) - 128 + descent = i8(fp.read(1)) - 128 + xsize = right - left + ysize = ascent + descent + append((xsize, ysize, left, right, width, ascent, descent, 0)) + + else: + + # "jumbo" metrics + for i in range(i32(fp.read(4))): + left = i16(fp.read(2)) + right = i16(fp.read(2)) + width = i16(fp.read(2)) + ascent = i16(fp.read(2)) + descent = i16(fp.read(2)) + attributes = i16(fp.read(2)) + xsize = right - left + ysize = ascent + descent + append((xsize, ysize, left, right, width, ascent, descent, attributes)) + + return metrics + + def _load_bitmaps(self, metrics): + + # + # bitmap data + + bitmaps = [] + + fp, format, i16, i32 = self._getformat(PCF_BITMAPS) + + nbitmaps = i32(fp.read(4)) + + if nbitmaps != len(metrics): + raise OSError("Wrong number of bitmaps") + + offsets = [] + for i in range(nbitmaps): + offsets.append(i32(fp.read(4))) + + bitmapSizes = [] + for i in range(4): + bitmapSizes.append(i32(fp.read(4))) + + # byteorder = format & 4 # non-zero => MSB + bitorder = format & 8 # non-zero => MSB + padindex = format & 3 + + bitmapsize = bitmapSizes[padindex] + offsets.append(bitmapsize) + + data = fp.read(bitmapsize) + + pad = BYTES_PER_ROW[padindex] + mode = "1;R" + if bitorder: + mode = "1" + + for i in range(nbitmaps): + x, y, l, r, w, a, d, f = metrics[i] + b, e = offsets[i], offsets[i + 1] + bitmaps.append(Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x))) + + return bitmaps + + def _load_encoding(self): + + # map character code to bitmap index + encoding = [None] * 256 + + fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS) + + firstCol, lastCol = i16(fp.read(2)), i16(fp.read(2)) + firstRow, lastRow = i16(fp.read(2)), i16(fp.read(2)) + + i16(fp.read(2)) # default + + nencoding = (lastCol - firstCol + 1) * (lastRow - firstRow + 1) + + encodingOffsets = [i16(fp.read(2)) for _ in range(nencoding)] + + for i in range(firstCol, len(encoding)): + try: + encodingOffset = encodingOffsets[ + ord(bytearray([i]).decode(self.charset_encoding)) + ] + if encodingOffset != 0xFFFF: + encoding[i] = encodingOffset + except UnicodeDecodeError: + # character is not supported in selected encoding + pass + + return encoding diff --git a/minor_project/lib/python3.6/site-packages/PIL/PcxImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/PcxImagePlugin.py new file mode 100644 index 0000000..a24d44b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PcxImagePlugin.py @@ -0,0 +1,213 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCX file handling +# +# This format was originally used by ZSoft's popular PaintBrush +# program for the IBM PC. It is also supported by many MS-DOS and +# Windows applications, including the Windows PaintBrush program in +# Windows 3. +# +# history: +# 1995-09-01 fl Created +# 1996-05-20 fl Fixed RGB support +# 1997-01-03 fl Fixed 2-bit and 4-bit support +# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1) +# 1999-02-07 fl Added write support +# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust +# 2002-07-30 fl Seek from to current position, not beginning of file +# 2003-06-03 fl Extract DPI settings (info["dpi"]) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. 
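The PCF parser above is normally driven through `FontFile.save()`, which compiles the decoded glyphs into the `.pil`/`.pbm` pair that PIL's bitmap font loader expects (historically the job of the pilfont utility). A hedged sketch; the font path is hypothetical:

```python
from PIL import PcfFontFile

with open("terminus.pcf", "rb") as fp:        # hypothetical PCF font file
    font = PcfFontFile.PcfFontFile(fp)        # charset_encoding defaults to iso8859-1
    font.save("terminus")                     # writes terminus.pil and terminus.pbm
```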
+# + +import io +import logging + +from . import Image, ImageFile, ImagePalette +from ._binary import i16le as i16 +from ._binary import o8 +from ._binary import o16le as o16 + +logger = logging.getLogger(__name__) + + +def _accept(prefix): + return prefix[0] == 10 and prefix[1] in [0, 2, 3, 5] + + +## +# Image plugin for Paintbrush images. + + +class PcxImageFile(ImageFile.ImageFile): + + format = "PCX" + format_description = "Paintbrush" + + def _open(self): + + # header + s = self.fp.read(128) + if not _accept(s): + raise SyntaxError("not a PCX file") + + # image + bbox = i16(s, 4), i16(s, 6), i16(s, 8) + 1, i16(s, 10) + 1 + if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]: + raise SyntaxError("bad PCX image size") + logger.debug("BBox: %s %s %s %s", *bbox) + + # format + version = s[1] + bits = s[3] + planes = s[65] + ignored_stride = i16(s, 66) + logger.debug( + "PCX version %s, bits %s, planes %s, stride %s", + version, + bits, + planes, + ignored_stride, + ) + + self.info["dpi"] = i16(s, 12), i16(s, 14) + + if bits == 1 and planes == 1: + mode = rawmode = "1" + + elif bits == 1 and planes in (2, 4): + mode = "P" + rawmode = "P;%dL" % planes + self.palette = ImagePalette.raw("RGB", s[16:64]) + + elif version == 5 and bits == 8 and planes == 1: + mode = rawmode = "L" + # FIXME: hey, this doesn't work with the incremental loader !!! + self.fp.seek(-769, io.SEEK_END) + s = self.fp.read(769) + if len(s) == 769 and s[0] == 12: + # check if the palette is linear greyscale + for i in range(256): + if s[i * 3 + 1 : i * 3 + 4] != o8(i) * 3: + mode = rawmode = "P" + break + if mode == "P": + self.palette = ImagePalette.raw("RGB", s[1:]) + self.fp.seek(128) + + elif version == 5 and bits == 8 and planes == 3: + mode = "RGB" + rawmode = "RGB;L" + + else: + raise OSError("unknown PCX mode") + + self.mode = mode + self._size = bbox[2] - bbox[0], bbox[3] - bbox[1] + + # don't trust the passed in stride. Calculate for ourselves. + # CVE-2020-35655 + stride = (self._size[0] * bits + 7) // 8 + stride += stride % 2 + + bbox = (0, 0) + self.size + logger.debug("size: %sx%s", *self.size) + + self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))] + + +# -------------------------------------------------------------------- +# save PCX files + + +SAVE = { + # mode: (version, bits, planes, raw mode) + "1": (2, 1, 1, "1"), + "L": (5, 8, 1, "L"), + "P": (5, 8, 1, "P"), + "RGB": (5, 8, 3, "RGB;L"), +} + + +def _save(im, fp, filename): + + try: + version, bits, planes, rawmode = SAVE[im.mode] + except KeyError as e: + raise ValueError(f"Cannot save {im.mode} images as PCX") from e + + # bytes per plane + stride = (im.size[0] * bits + 7) // 8 + # stride should be even + stride += stride % 2 + # Stride needs to be kept in sync with the PcxEncode.c version. + # Ideally it should be passed in in the state, but the bytes value + # gets overwritten. + + logger.debug( + "PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", + im.size[0], + bits, + stride, + ) + + # under windows, we could determine the current screen size with + # "Image.core.display_mode()[1]", but I think that's overkill... 
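Both `_open` and `_save` above recompute the row stride rather than trusting the value stored in the header (CVE-2020-35655): a row is rounded up to whole bytes and then to an even byte count. The arithmetic on its own:

```python
def pcx_stride(width: int, bits: int) -> int:
    # bytes per plane per row: round up to whole bytes, then to an even count
    stride = (width * bits + 7) // 8
    return stride + stride % 2

assert pcx_stride(101, 8) == 102   # an odd byte count is padded to even
assert pcx_stride(13, 1) == 2      # 13 one-bit pixels still occupy 2 bytes
assert pcx_stride(640, 8) == 640
```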
+ + screen = im.size + + dpi = 100, 100 + + # PCX header + fp.write( + o8(10) + + o8(version) + + o8(1) + + o8(bits) + + o16(0) + + o16(0) + + o16(im.size[0] - 1) + + o16(im.size[1] - 1) + + o16(dpi[0]) + + o16(dpi[1]) + + b"\0" * 24 + + b"\xFF" * 24 + + b"\0" + + o8(planes) + + o16(stride) + + o16(1) + + o16(screen[0]) + + o16(screen[1]) + + b"\0" * 54 + ) + + assert fp.tell() == 128 + + ImageFile._save(im, fp, [("pcx", (0, 0) + im.size, 0, (rawmode, bits * planes))]) + + if im.mode == "P": + # colour palette + fp.write(o8(12)) + fp.write(im.im.getpalette("RGB", "RGB")) # 768 bytes + elif im.mode == "L": + # greyscale palette + fp.write(o8(12)) + for i in range(256): + fp.write(o8(i) * 3) + + +# -------------------------------------------------------------------- +# registry + + +Image.register_open(PcxImageFile.format, PcxImageFile, _accept) +Image.register_save(PcxImageFile.format, _save) + +Image.register_extension(PcxImageFile.format, ".pcx") + +Image.register_mime(PcxImageFile.format, "image/x-pcx") diff --git a/minor_project/lib/python3.6/site-packages/PIL/PdfImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/PdfImagePlugin.py new file mode 100644 index 0000000..36c8fb8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PdfImagePlugin.py @@ -0,0 +1,246 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PDF (Acrobat) file handling +# +# History: +# 1996-07-16 fl Created +# 1997-01-18 fl Fixed header +# 2004-02-21 fl Fixes for 1/L/CMYK images, etc. +# 2004-02-24 fl Fixes for 1 and P images. +# +# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996-1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## +# Image plugin for PDF images (output only). +## + +import io +import os +import time + +from . import Image, ImageFile, ImageSequence, PdfParser, __version__ + +# +# -------------------------------------------------------------------- + +# object ids: +# 1. catalogue +# 2. pages +# 3. image +# 4. page +# 5. page contents + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +## +# (Internal) Image save plugin for the PDF format. 
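The usual entry point for the code below is `Image.save()` with a `.pdf` filename: `save_all` routes through `_save_all` above, and the remaining keywords are read from `encoderinfo` inside `_save`. A sketch with hypothetical input files:

```python
from PIL import Image

pages = [Image.open(p).convert("RGB") for p in ("scan1.png", "scan2.png")]  # hypothetical
pages[0].save(
    "notes.pdf",
    save_all=True,            # append_images become additional pages
    append_images=pages[1:],
    resolution=100.0,         # used for the MediaBox / page-contents scaling below
    title="Scanned notes",    # one of the document-info keys collected in _save
)
```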
+ + +def _save(im, fp, filename, save_all=False): + is_appending = im.encoderinfo.get("append", False) + if is_appending: + existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="r+b") + else: + existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b") + + resolution = im.encoderinfo.get("resolution", 72.0) + + info = { + "title": None + if is_appending + else os.path.splitext(os.path.basename(filename))[0], + "author": None, + "subject": None, + "keywords": None, + "creator": None, + "producer": None, + "creationDate": None if is_appending else time.gmtime(), + "modDate": None if is_appending else time.gmtime(), + } + for k, default in info.items(): + v = im.encoderinfo.get(k) if k in im.encoderinfo else default + if v: + existing_pdf.info[k[0].upper() + k[1:]] = v + + # + # make sure image data is available + im.load() + + existing_pdf.start_writing() + existing_pdf.write_header() + existing_pdf.write_comment(f"created by Pillow {__version__} PDF driver") + + # + # pages + ims = [im] + if save_all: + append_images = im.encoderinfo.get("append_images", []) + for append_im in append_images: + append_im.encoderinfo = im.encoderinfo.copy() + ims.append(append_im) + numberOfPages = 0 + image_refs = [] + page_refs = [] + contents_refs = [] + for im in ims: + im_numberOfPages = 1 + if save_all: + try: + im_numberOfPages = im.n_frames + except AttributeError: + # Image format does not have n_frames. + # It is a single frame image + pass + numberOfPages += im_numberOfPages + for i in range(im_numberOfPages): + image_refs.append(existing_pdf.next_object_id(0)) + page_refs.append(existing_pdf.next_object_id(0)) + contents_refs.append(existing_pdf.next_object_id(0)) + existing_pdf.pages.append(page_refs[-1]) + + # + # catalog and list of pages + existing_pdf.write_catalog() + + pageNumber = 0 + for imSequence in ims: + im_pages = ImageSequence.Iterator(imSequence) if save_all else [imSequence] + for im in im_pages: + # FIXME: Should replace ASCIIHexDecode with RunLengthDecode + # (packbits) or LZWDecode (tiff/lzw compression). Note that + # PDF 1.2 also supports Flatedecode (zip compression). + + bits = 8 + params = None + decode = None + + if im.mode == "1": + filter = "ASCIIHexDecode" + colorspace = PdfParser.PdfName("DeviceGray") + procset = "ImageB" # grayscale + bits = 1 + elif im.mode == "L": + filter = "DCTDecode" + # params = f"<< /Predictor 15 /Columns {width-2} >>" + colorspace = PdfParser.PdfName("DeviceGray") + procset = "ImageB" # grayscale + elif im.mode == "P": + filter = "ASCIIHexDecode" + palette = im.im.getpalette("RGB") + colorspace = [ + PdfParser.PdfName("Indexed"), + PdfParser.PdfName("DeviceRGB"), + 255, + PdfParser.PdfBinary(palette), + ] + procset = "ImageI" # indexed color + elif im.mode == "RGB": + filter = "DCTDecode" + colorspace = PdfParser.PdfName("DeviceRGB") + procset = "ImageC" # color images + elif im.mode == "CMYK": + filter = "DCTDecode" + colorspace = PdfParser.PdfName("DeviceCMYK") + procset = "ImageC" # color images + decode = [1, 0, 1, 0, 1, 0, 1, 0] + else: + raise ValueError(f"cannot save mode {im.mode}") + + # + # image + + op = io.BytesIO() + + if filter == "ASCIIHexDecode": + if bits == 1: + # FIXME: the hex encoder doesn't support packed 1-bit + # images; do things the hard way... 
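+ # Work around that limitation by copying the packed 1-bit bytes into a
+ # temporary 8-bit "L" image and running that image through the hex encoder.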
+ data = im.tobytes("raw", "1") + im = Image.new("L", im.size) + im.putdata(data) + ImageFile._save(im, op, [("hex", (0, 0) + im.size, 0, im.mode)]) + elif filter == "DCTDecode": + Image.SAVE["JPEG"](im, op, filename) + elif filter == "FlateDecode": + ImageFile._save(im, op, [("zip", (0, 0) + im.size, 0, im.mode)]) + elif filter == "RunLengthDecode": + ImageFile._save(im, op, [("packbits", (0, 0) + im.size, 0, im.mode)]) + else: + raise ValueError(f"unsupported PDF filter ({filter})") + + # + # Get image characteristics + + width, height = im.size + + existing_pdf.write_obj( + image_refs[pageNumber], + stream=op.getvalue(), + Type=PdfParser.PdfName("XObject"), + Subtype=PdfParser.PdfName("Image"), + Width=width, # * 72.0 / resolution, + Height=height, # * 72.0 / resolution, + Filter=PdfParser.PdfName(filter), + BitsPerComponent=bits, + Decode=decode, + DecodeParams=params, + ColorSpace=colorspace, + ) + + # + # page + + existing_pdf.write_page( + page_refs[pageNumber], + Resources=PdfParser.PdfDict( + ProcSet=[PdfParser.PdfName("PDF"), PdfParser.PdfName(procset)], + XObject=PdfParser.PdfDict(image=image_refs[pageNumber]), + ), + MediaBox=[ + 0, + 0, + int(width * 72.0 / resolution), + int(height * 72.0 / resolution), + ], + Contents=contents_refs[pageNumber], + ) + + # + # page contents + + page_contents = b"q %d 0 0 %d 0 0 cm /image Do Q\n" % ( + int(width * 72.0 / resolution), + int(height * 72.0 / resolution), + ) + + existing_pdf.write_obj(contents_refs[pageNumber], stream=page_contents) + + pageNumber += 1 + + # + # trailer + existing_pdf.write_xref_and_trailer() + if hasattr(fp, "flush"): + fp.flush() + existing_pdf.close() + + +# +# -------------------------------------------------------------------- + + +Image.register_save("PDF", _save) +Image.register_save_all("PDF", _save_all) + +Image.register_extension("PDF", ".pdf") + +Image.register_mime("PDF", "application/pdf") diff --git a/minor_project/lib/python3.6/site-packages/PIL/PdfParser.py b/minor_project/lib/python3.6/site-packages/PIL/PdfParser.py new file mode 100644 index 0000000..975905f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PdfParser.py @@ -0,0 +1,994 @@ +import calendar +import codecs +import collections +import mmap +import os +import re +import time +import zlib + + +# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set +# on page 656 +def encode_text(s): + return codecs.BOM_UTF16_BE + s.encode("utf_16_be") + + +PDFDocEncoding = { + 0x16: "\u0017", + 0x18: "\u02D8", + 0x19: "\u02C7", + 0x1A: "\u02C6", + 0x1B: "\u02D9", + 0x1C: "\u02DD", + 0x1D: "\u02DB", + 0x1E: "\u02DA", + 0x1F: "\u02DC", + 0x80: "\u2022", + 0x81: "\u2020", + 0x82: "\u2021", + 0x83: "\u2026", + 0x84: "\u2014", + 0x85: "\u2013", + 0x86: "\u0192", + 0x87: "\u2044", + 0x88: "\u2039", + 0x89: "\u203A", + 0x8A: "\u2212", + 0x8B: "\u2030", + 0x8C: "\u201E", + 0x8D: "\u201C", + 0x8E: "\u201D", + 0x8F: "\u2018", + 0x90: "\u2019", + 0x91: "\u201A", + 0x92: "\u2122", + 0x93: "\uFB01", + 0x94: "\uFB02", + 0x95: "\u0141", + 0x96: "\u0152", + 0x97: "\u0160", + 0x98: "\u0178", + 0x99: "\u017D", + 0x9A: "\u0131", + 0x9B: "\u0142", + 0x9C: "\u0153", + 0x9D: "\u0161", + 0x9E: "\u017E", + 0xA0: "\u20AC", +} + + +def decode_text(b): + if b[: len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE: + return b[len(codecs.BOM_UTF16_BE) :].decode("utf_16_be") + else: + return "".join(PDFDocEncoding.get(byte, chr(byte)) for byte in b) + + +class PdfFormatError(RuntimeError): + """An error that probably indicates a syntactic or 
semantic error in the + PDF file structure""" + + pass + + +def check_format_condition(condition, error_message): + if not condition: + raise PdfFormatError(error_message) + + +class IndirectReference( + collections.namedtuple("IndirectReferenceTuple", ["object_id", "generation"]) +): + def __str__(self): + return "%s %s R" % self + + def __bytes__(self): + return self.__str__().encode("us-ascii") + + def __eq__(self, other): + return ( + other.__class__ is self.__class__ + and other.object_id == self.object_id + and other.generation == self.generation + ) + + def __ne__(self, other): + return not (self == other) + + def __hash__(self): + return hash((self.object_id, self.generation)) + + +class IndirectObjectDef(IndirectReference): + def __str__(self): + return "%s %s obj" % self + + +class XrefTable: + def __init__(self): + self.existing_entries = {} # object ID => (offset, generation) + self.new_entries = {} # object ID => (offset, generation) + self.deleted_entries = {0: 65536} # object ID => generation + self.reading_finished = False + + def __setitem__(self, key, value): + if self.reading_finished: + self.new_entries[key] = value + else: + self.existing_entries[key] = value + if key in self.deleted_entries: + del self.deleted_entries[key] + + def __getitem__(self, key): + try: + return self.new_entries[key] + except KeyError: + return self.existing_entries[key] + + def __delitem__(self, key): + if key in self.new_entries: + generation = self.new_entries[key][1] + 1 + del self.new_entries[key] + self.deleted_entries[key] = generation + elif key in self.existing_entries: + generation = self.existing_entries[key][1] + 1 + self.deleted_entries[key] = generation + elif key in self.deleted_entries: + generation = self.deleted_entries[key] + else: + raise IndexError( + "object ID " + str(key) + " cannot be deleted because it doesn't exist" + ) + + def __contains__(self, key): + return key in self.existing_entries or key in self.new_entries + + def __len__(self): + return len( + set(self.existing_entries.keys()) + | set(self.new_entries.keys()) + | set(self.deleted_entries.keys()) + ) + + def keys(self): + return ( + set(self.existing_entries.keys()) - set(self.deleted_entries.keys()) + ) | set(self.new_entries.keys()) + + def write(self, f): + keys = sorted(set(self.new_entries.keys()) | set(self.deleted_entries.keys())) + deleted_keys = sorted(set(self.deleted_entries.keys())) + startxref = f.tell() + f.write(b"xref\n") + while keys: + # find a contiguous sequence of object IDs + prev = None + for index, key in enumerate(keys): + if prev is None or prev + 1 == key: + prev = key + else: + contiguous_keys = keys[:index] + keys = keys[index:] + break + else: + contiguous_keys = keys + keys = None + f.write(b"%d %d\n" % (contiguous_keys[0], len(contiguous_keys))) + for object_id in contiguous_keys: + if object_id in self.new_entries: + f.write(b"%010d %05d n \n" % self.new_entries[object_id]) + else: + this_deleted_object_id = deleted_keys.pop(0) + check_format_condition( + object_id == this_deleted_object_id, + f"expected the next deleted object ID to be {object_id}, " + f"instead found {this_deleted_object_id}", + ) + try: + next_in_linked_list = deleted_keys[0] + except IndexError: + next_in_linked_list = 0 + f.write( + b"%010d %05d f \n" + % (next_in_linked_list, self.deleted_entries[object_id]) + ) + return startxref + + +class PdfName: + def __init__(self, name): + if isinstance(name, PdfName): + self.name = name.name + elif isinstance(name, bytes): + self.name = name + else: + self.name 
= name.encode("us-ascii") + + def name_as_str(self): + return self.name.decode("us-ascii") + + def __eq__(self, other): + return ( + isinstance(other, PdfName) and other.name == self.name + ) or other == self.name + + def __hash__(self): + return hash(self.name) + + def __repr__(self): + return f"PdfName({repr(self.name)})" + + @classmethod + def from_pdf_stream(cls, data): + return cls(PdfParser.interpret_name(data)) + + allowed_chars = set(range(33, 127)) - {ord(c) for c in "#%/()<>[]{}"} + + def __bytes__(self): + result = bytearray(b"/") + for b in self.name: + if b in self.allowed_chars: + result.append(b) + else: + result.extend(b"#%02X" % b) + return bytes(result) + + +class PdfArray(list): + def __bytes__(self): + return b"[ " + b" ".join(pdf_repr(x) for x in self) + b" ]" + + +class PdfDict(collections.UserDict): + def __setattr__(self, key, value): + if key == "data": + collections.UserDict.__setattr__(self, key, value) + else: + self[key.encode("us-ascii")] = value + + def __getattr__(self, key): + try: + value = self[key.encode("us-ascii")] + except KeyError as e: + raise AttributeError(key) from e + if isinstance(value, bytes): + value = decode_text(value) + if key.endswith("Date"): + if value.startswith("D:"): + value = value[2:] + + relationship = "Z" + if len(value) > 17: + relationship = value[14] + offset = int(value[15:17]) * 60 + if len(value) > 20: + offset += int(value[18:20]) + + format = "%Y%m%d%H%M%S"[: len(value) - 2] + value = time.strptime(value[: len(format) + 2], format) + if relationship in ["+", "-"]: + offset *= 60 + if relationship == "+": + offset *= -1 + value = time.gmtime(calendar.timegm(value) + offset) + return value + + def __bytes__(self): + out = bytearray(b"<<") + for key, value in self.items(): + if value is None: + continue + value = pdf_repr(value) + out.extend(b"\n") + out.extend(bytes(PdfName(key))) + out.extend(b" ") + out.extend(value) + out.extend(b"\n>>") + return bytes(out) + + +class PdfBinary: + def __init__(self, data): + self.data = data + + def __bytes__(self): + return b"<%s>" % b"".join(b"%02X" % b for b in self.data) + + +class PdfStream: + def __init__(self, dictionary, buf): + self.dictionary = dictionary + self.buf = buf + + def decode(self): + try: + filter = self.dictionary.Filter + except AttributeError: + return self.buf + if filter == b"FlateDecode": + try: + expected_length = self.dictionary.DL + except AttributeError: + expected_length = self.dictionary.Length + return zlib.decompress(self.buf, bufsize=int(expected_length)) + else: + raise NotImplementedError( + f"stream filter {repr(self.dictionary.Filter)} unknown/unsupported" + ) + + +def pdf_repr(x): + if x is True: + return b"true" + elif x is False: + return b"false" + elif x is None: + return b"null" + elif isinstance(x, (PdfName, PdfDict, PdfArray, PdfBinary)): + return bytes(x) + elif isinstance(x, int): + return str(x).encode("us-ascii") + elif isinstance(x, time.struct_time): + return b"(D:" + time.strftime("%Y%m%d%H%M%SZ", x).encode("us-ascii") + b")" + elif isinstance(x, dict): + return bytes(PdfDict(x)) + elif isinstance(x, list): + return bytes(PdfArray(x)) + elif isinstance(x, str): + return pdf_repr(encode_text(x)) + elif isinstance(x, bytes): + # XXX escape more chars? 
handle binary garbage + x = x.replace(b"\\", b"\\\\") + x = x.replace(b"(", b"\\(") + x = x.replace(b")", b"\\)") + return b"(" + x + b")" + else: + return bytes(x) + + +class PdfParser: + """Based on + https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf + Supports PDF up to 1.4 + """ + + def __init__(self, filename=None, f=None, buf=None, start_offset=0, mode="rb"): + if buf and f: + raise RuntimeError("specify buf or f or filename, but not both buf and f") + self.filename = filename + self.buf = buf + self.f = f + self.start_offset = start_offset + self.should_close_buf = False + self.should_close_file = False + if filename is not None and f is None: + self.f = f = open(filename, mode) + self.should_close_file = True + if f is not None: + self.buf = buf = self.get_buf_from_file(f) + self.should_close_buf = True + if not filename and hasattr(f, "name"): + self.filename = f.name + self.cached_objects = {} + if buf: + self.read_pdf_info() + else: + self.file_size_total = self.file_size_this = 0 + self.root = PdfDict() + self.root_ref = None + self.info = PdfDict() + self.info_ref = None + self.page_tree_root = {} + self.pages = [] + self.orig_pages = [] + self.pages_ref = None + self.last_xref_section_offset = None + self.trailer_dict = {} + self.xref_table = XrefTable() + self.xref_table.reading_finished = True + if f: + self.seek_end() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + return False # do not suppress exceptions + + def start_writing(self): + self.close_buf() + self.seek_end() + + def close_buf(self): + try: + self.buf.close() + except AttributeError: + pass + self.buf = None + + def close(self): + if self.should_close_buf: + self.close_buf() + if self.f is not None and self.should_close_file: + self.f.close() + self.f = None + + def seek_end(self): + self.f.seek(0, os.SEEK_END) + + def write_header(self): + self.f.write(b"%PDF-1.4\n") + + def write_comment(self, s): + self.f.write(f"% {s}\n".encode("utf-8")) + + def write_catalog(self): + self.del_root() + self.root_ref = self.next_object_id(self.f.tell()) + self.pages_ref = self.next_object_id(0) + self.rewrite_pages() + self.write_obj(self.root_ref, Type=PdfName(b"Catalog"), Pages=self.pages_ref) + self.write_obj( + self.pages_ref, + Type=PdfName(b"Pages"), + Count=len(self.pages), + Kids=self.pages, + ) + return self.root_ref + + def rewrite_pages(self): + pages_tree_nodes_to_delete = [] + for i, page_ref in enumerate(self.orig_pages): + page_info = self.cached_objects[page_ref] + del self.xref_table[page_ref.object_id] + pages_tree_nodes_to_delete.append(page_info[PdfName(b"Parent")]) + if page_ref not in self.pages: + # the page has been deleted + continue + # make dict keys into strings for passing to write_page + stringified_page_info = {} + for key, value in page_info.items(): + # key should be a PdfName + stringified_page_info[key.name_as_str()] = value + stringified_page_info["Parent"] = self.pages_ref + new_page_ref = self.write_page(None, **stringified_page_info) + for j, cur_page_ref in enumerate(self.pages): + if cur_page_ref == page_ref: + # replace the page reference with the new one + self.pages[j] = new_page_ref + # delete redundant Pages tree nodes from xref table + for pages_tree_node_ref in pages_tree_nodes_to_delete: + while pages_tree_node_ref: + pages_tree_node = self.cached_objects[pages_tree_node_ref] + if pages_tree_node_ref.object_id in self.xref_table: + del self.xref_table[pages_tree_node_ref.object_id] + 
pages_tree_node_ref = pages_tree_node.get(b"Parent", None) + self.orig_pages = [] + + def write_xref_and_trailer(self, new_root_ref=None): + if new_root_ref: + self.del_root() + self.root_ref = new_root_ref + if self.info: + self.info_ref = self.write_obj(None, self.info) + start_xref = self.xref_table.write(self.f) + num_entries = len(self.xref_table) + trailer_dict = {b"Root": self.root_ref, b"Size": num_entries} + if self.last_xref_section_offset is not None: + trailer_dict[b"Prev"] = self.last_xref_section_offset + if self.info: + trailer_dict[b"Info"] = self.info_ref + self.last_xref_section_offset = start_xref + self.f.write( + b"trailer\n" + + bytes(PdfDict(trailer_dict)) + + b"\nstartxref\n%d\n%%%%EOF" % start_xref + ) + + def write_page(self, ref, *objs, **dict_obj): + if isinstance(ref, int): + ref = self.pages[ref] + if "Type" not in dict_obj: + dict_obj["Type"] = PdfName(b"Page") + if "Parent" not in dict_obj: + dict_obj["Parent"] = self.pages_ref + return self.write_obj(ref, *objs, **dict_obj) + + def write_obj(self, ref, *objs, **dict_obj): + f = self.f + if ref is None: + ref = self.next_object_id(f.tell()) + else: + self.xref_table[ref.object_id] = (f.tell(), ref.generation) + f.write(bytes(IndirectObjectDef(*ref))) + stream = dict_obj.pop("stream", None) + if stream is not None: + dict_obj["Length"] = len(stream) + if dict_obj: + f.write(pdf_repr(dict_obj)) + for obj in objs: + f.write(pdf_repr(obj)) + if stream is not None: + f.write(b"stream\n") + f.write(stream) + f.write(b"\nendstream\n") + f.write(b"endobj\n") + return ref + + def del_root(self): + if self.root_ref is None: + return + del self.xref_table[self.root_ref.object_id] + del self.xref_table[self.root[b"Pages"].object_id] + + @staticmethod + def get_buf_from_file(f): + if hasattr(f, "getbuffer"): + return f.getbuffer() + elif hasattr(f, "getvalue"): + return f.getvalue() + else: + try: + return mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) + except ValueError: # cannot mmap an empty file + return b"" + + def read_pdf_info(self): + self.file_size_total = len(self.buf) + self.file_size_this = self.file_size_total - self.start_offset + self.read_trailer() + self.root_ref = self.trailer_dict[b"Root"] + self.info_ref = self.trailer_dict.get(b"Info", None) + self.root = PdfDict(self.read_indirect(self.root_ref)) + if self.info_ref is None: + self.info = PdfDict() + else: + self.info = PdfDict(self.read_indirect(self.info_ref)) + check_format_condition(b"Type" in self.root, "/Type missing in Root") + check_format_condition( + self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog" + ) + check_format_condition(b"Pages" in self.root, "/Pages missing in Root") + check_format_condition( + isinstance(self.root[b"Pages"], IndirectReference), + "/Pages in Root is not an indirect reference", + ) + self.pages_ref = self.root[b"Pages"] + self.page_tree_root = self.read_indirect(self.pages_ref) + self.pages = self.linearize_page_tree(self.page_tree_root) + # save the original list of page references + # in case the user modifies, adds or deletes some pages + # and we need to rewrite the pages and their list + self.orig_pages = self.pages[:] + + def next_object_id(self, offset=None): + try: + # TODO: support reuse of deleted objects + reference = IndirectReference(max(self.xref_table.keys()) + 1, 0) + except ValueError: + reference = IndirectReference(1, 0) + if offset is not None: + self.xref_table[reference.object_id] = (offset, 0) + return reference + + delimiter = br"[][()<>{}/%]" + delimiter_or_ws = 
br"[][()<>{}/%\000\011\012\014\015\040]" + whitespace = br"[\000\011\012\014\015\040]" + whitespace_or_hex = br"[\000\011\012\014\015\0400-9a-fA-F]" + whitespace_optional = whitespace + b"*" + whitespace_mandatory = whitespace + b"+" + newline_only = br"[\r\n]+" + newline = whitespace_optional + newline_only + whitespace_optional + re_trailer_end = re.compile( + whitespace_mandatory + + br"trailer" + + whitespace_optional + + br"\<\<(.*\>\>)" + + newline + + br"startxref" + + newline + + br"([0-9]+)" + + newline + + br"%%EOF" + + whitespace_optional + + br"$", + re.DOTALL, + ) + re_trailer_prev = re.compile( + whitespace_optional + + br"trailer" + + whitespace_optional + + br"\<\<(.*?\>\>)" + + newline + + br"startxref" + + newline + + br"([0-9]+)" + + newline + + br"%%EOF" + + whitespace_optional, + re.DOTALL, + ) + + def read_trailer(self): + search_start_offset = len(self.buf) - 16384 + if search_start_offset < self.start_offset: + search_start_offset = self.start_offset + m = self.re_trailer_end.search(self.buf, search_start_offset) + check_format_condition(m, "trailer end not found") + # make sure we found the LAST trailer + last_match = m + while m: + last_match = m + m = self.re_trailer_end.search(self.buf, m.start() + 16) + if not m: + m = last_match + trailer_data = m.group(1) + self.last_xref_section_offset = int(m.group(2)) + self.trailer_dict = self.interpret_trailer(trailer_data) + self.xref_table = XrefTable() + self.read_xref_table(xref_section_offset=self.last_xref_section_offset) + if b"Prev" in self.trailer_dict: + self.read_prev_trailer(self.trailer_dict[b"Prev"]) + + def read_prev_trailer(self, xref_section_offset): + trailer_offset = self.read_xref_table(xref_section_offset=xref_section_offset) + m = self.re_trailer_prev.search( + self.buf[trailer_offset : trailer_offset + 16384] + ) + check_format_condition(m, "previous trailer not found") + trailer_data = m.group(1) + check_format_condition( + int(m.group(2)) == xref_section_offset, + "xref section offset in previous trailer doesn't match what was expected", + ) + trailer_dict = self.interpret_trailer(trailer_data) + if b"Prev" in trailer_dict: + self.read_prev_trailer(trailer_dict[b"Prev"]) + + re_whitespace_optional = re.compile(whitespace_optional) + re_name = re.compile( + whitespace_optional + + br"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" + + delimiter_or_ws + + br")" + ) + re_dict_start = re.compile(whitespace_optional + br"\<\<") + re_dict_end = re.compile(whitespace_optional + br"\>\>" + whitespace_optional) + + @classmethod + def interpret_trailer(cls, trailer_data): + trailer = {} + offset = 0 + while True: + m = cls.re_name.match(trailer_data, offset) + if not m: + m = cls.re_dict_end.match(trailer_data, offset) + check_format_condition( + m and m.end() == len(trailer_data), + "name not found in trailer, remaining data: " + + repr(trailer_data[offset:]), + ) + break + key = cls.interpret_name(m.group(1)) + value, offset = cls.get_value(trailer_data, m.end()) + trailer[key] = value + check_format_condition( + b"Size" in trailer and isinstance(trailer[b"Size"], int), + "/Size not in trailer or not an integer", + ) + check_format_condition( + b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference), + "/Root not in trailer or not an indirect reference", + ) + return trailer + + re_hashes_in_name = re.compile(br"([^#]*)(#([0-9a-fA-F]{2}))?") + + @classmethod + def interpret_name(cls, raw, as_text=False): + name = b"" + for m in cls.re_hashes_in_name.finditer(raw): + if m.group(3): + name += m.group(1) + 
bytearray.fromhex(m.group(3).decode("us-ascii")) + else: + name += m.group(1) + if as_text: + return name.decode("utf-8") + else: + return bytes(name) + + re_null = re.compile(whitespace_optional + br"null(?=" + delimiter_or_ws + br")") + re_true = re.compile(whitespace_optional + br"true(?=" + delimiter_or_ws + br")") + re_false = re.compile(whitespace_optional + br"false(?=" + delimiter_or_ws + br")") + re_int = re.compile( + whitespace_optional + br"([-+]?[0-9]+)(?=" + delimiter_or_ws + br")" + ) + re_real = re.compile( + whitespace_optional + + br"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" + + delimiter_or_ws + + br")" + ) + re_array_start = re.compile(whitespace_optional + br"\[") + re_array_end = re.compile(whitespace_optional + br"]") + re_string_hex = re.compile( + whitespace_optional + br"\<(" + whitespace_or_hex + br"*)\>" + ) + re_string_lit = re.compile(whitespace_optional + br"\(") + re_indirect_reference = re.compile( + whitespace_optional + + br"([-+]?[0-9]+)" + + whitespace_mandatory + + br"([-+]?[0-9]+)" + + whitespace_mandatory + + br"R(?=" + + delimiter_or_ws + + br")" + ) + re_indirect_def_start = re.compile( + whitespace_optional + + br"([-+]?[0-9]+)" + + whitespace_mandatory + + br"([-+]?[0-9]+)" + + whitespace_mandatory + + br"obj(?=" + + delimiter_or_ws + + br")" + ) + re_indirect_def_end = re.compile( + whitespace_optional + br"endobj(?=" + delimiter_or_ws + br")" + ) + re_comment = re.compile( + br"(" + whitespace_optional + br"%[^\r\n]*" + newline + br")*" + ) + re_stream_start = re.compile(whitespace_optional + br"stream\r?\n") + re_stream_end = re.compile( + whitespace_optional + br"endstream(?=" + delimiter_or_ws + br")" + ) + + @classmethod + def get_value(cls, data, offset, expect_indirect=None, max_nesting=-1): + if max_nesting == 0: + return None, None + m = cls.re_comment.match(data, offset) + if m: + offset = m.end() + m = cls.re_indirect_def_start.match(data, offset) + if m: + check_format_condition( + int(m.group(1)) > 0, + "indirect object definition: object ID must be greater than 0", + ) + check_format_condition( + int(m.group(2)) >= 0, + "indirect object definition: generation must be non-negative", + ) + check_format_condition( + expect_indirect is None + or expect_indirect + == IndirectReference(int(m.group(1)), int(m.group(2))), + "indirect object definition different than expected", + ) + object, offset = cls.get_value(data, m.end(), max_nesting=max_nesting - 1) + if offset is None: + return object, None + m = cls.re_indirect_def_end.match(data, offset) + check_format_condition(m, "indirect object definition end not found") + return object, m.end() + check_format_condition( + not expect_indirect, "indirect object definition not found" + ) + m = cls.re_indirect_reference.match(data, offset) + if m: + check_format_condition( + int(m.group(1)) > 0, + "indirect object reference: object ID must be greater than 0", + ) + check_format_condition( + int(m.group(2)) >= 0, + "indirect object reference: generation must be non-negative", + ) + return IndirectReference(int(m.group(1)), int(m.group(2))), m.end() + m = cls.re_dict_start.match(data, offset) + if m: + offset = m.end() + result = {} + m = cls.re_dict_end.match(data, offset) + while not m: + key, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) + if offset is None: + return result, None + value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) + result[key] = value + if offset is None: + return result, None + m = cls.re_dict_end.match(data, offset) + offset = 
m.end() + m = cls.re_stream_start.match(data, offset) + if m: + try: + stream_len = int(result[b"Length"]) + except (TypeError, KeyError, ValueError) as e: + raise PdfFormatError( + "bad or missing Length in stream dict (%r)" + % result.get(b"Length", None) + ) from e + stream_data = data[m.end() : m.end() + stream_len] + m = cls.re_stream_end.match(data, m.end() + stream_len) + check_format_condition(m, "stream end not found") + offset = m.end() + result = PdfStream(PdfDict(result), stream_data) + else: + result = PdfDict(result) + return result, offset + m = cls.re_array_start.match(data, offset) + if m: + offset = m.end() + result = [] + m = cls.re_array_end.match(data, offset) + while not m: + value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1) + result.append(value) + if offset is None: + return result, None + m = cls.re_array_end.match(data, offset) + return result, m.end() + m = cls.re_null.match(data, offset) + if m: + return None, m.end() + m = cls.re_true.match(data, offset) + if m: + return True, m.end() + m = cls.re_false.match(data, offset) + if m: + return False, m.end() + m = cls.re_name.match(data, offset) + if m: + return PdfName(cls.interpret_name(m.group(1))), m.end() + m = cls.re_int.match(data, offset) + if m: + return int(m.group(1)), m.end() + m = cls.re_real.match(data, offset) + if m: + # XXX Decimal instead of float??? + return float(m.group(1)), m.end() + m = cls.re_string_hex.match(data, offset) + if m: + # filter out whitespace + hex_string = bytearray( + [b for b in m.group(1) if b in b"0123456789abcdefABCDEF"] + ) + if len(hex_string) % 2 == 1: + # append a 0 if the length is not even - yes, at the end + hex_string.append(ord(b"0")) + return bytearray.fromhex(hex_string.decode("us-ascii")), m.end() + m = cls.re_string_lit.match(data, offset) + if m: + return cls.get_literal_string(data, m.end()) + # return None, offset # fallback (only for debugging) + raise PdfFormatError("unrecognized object: " + repr(data[offset : offset + 32])) + + re_lit_str_token = re.compile( + br"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))" + ) + escaped_chars = { + b"n": b"\n", + b"r": b"\r", + b"t": b"\t", + b"b": b"\b", + b"f": b"\f", + b"(": b"(", + b")": b")", + b"\\": b"\\", + ord(b"n"): b"\n", + ord(b"r"): b"\r", + ord(b"t"): b"\t", + ord(b"b"): b"\b", + ord(b"f"): b"\f", + ord(b"("): b"(", + ord(b")"): b")", + ord(b"\\"): b"\\", + } + + @classmethod + def get_literal_string(cls, data, offset): + nesting_depth = 0 + result = bytearray() + for m in cls.re_lit_str_token.finditer(data, offset): + result.extend(data[offset : m.start()]) + if m.group(1): + result.extend(cls.escaped_chars[m.group(1)[1]]) + elif m.group(2): + result.append(int(m.group(2)[1:], 8)) + elif m.group(3): + pass + elif m.group(5): + result.extend(b"\n") + elif m.group(6): + result.extend(b"(") + nesting_depth += 1 + elif m.group(7): + if nesting_depth == 0: + return bytes(result), m.end() + result.extend(b")") + nesting_depth -= 1 + offset = m.end() + raise PdfFormatError("unfinished literal string") + + re_xref_section_start = re.compile(whitespace_optional + br"xref" + newline) + re_xref_subsection_start = re.compile( + whitespace_optional + + br"([0-9]+)" + + whitespace_mandatory + + br"([0-9]+)" + + whitespace_optional + + newline_only + ) + re_xref_entry = re.compile(br"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)") + + def read_xref_table(self, xref_section_offset): + subsection_found = False + m = self.re_xref_section_start.match( + self.buf, 
xref_section_offset + self.start_offset + ) + check_format_condition(m, "xref section start not found") + offset = m.end() + while True: + m = self.re_xref_subsection_start.match(self.buf, offset) + if not m: + check_format_condition( + subsection_found, "xref subsection start not found" + ) + break + subsection_found = True + offset = m.end() + first_object = int(m.group(1)) + num_objects = int(m.group(2)) + for i in range(first_object, first_object + num_objects): + m = self.re_xref_entry.match(self.buf, offset) + check_format_condition(m, "xref entry not found") + offset = m.end() + is_free = m.group(3) == b"f" + generation = int(m.group(2)) + if not is_free: + new_entry = (int(m.group(1)), generation) + check_format_condition( + i not in self.xref_table or self.xref_table[i] == new_entry, + "xref entry duplicated (and not identical)", + ) + self.xref_table[i] = new_entry + return offset + + def read_indirect(self, ref, max_nesting=-1): + offset, generation = self.xref_table[ref[0]] + check_format_condition( + generation == ref[1], + f"expected to find generation {ref[1]} for object ID {ref[0]} in xref " + f"table, instead found generation {generation} at offset {offset}", + ) + value = self.get_value( + self.buf, + offset + self.start_offset, + expect_indirect=IndirectReference(*ref), + max_nesting=max_nesting, + )[0] + self.cached_objects[ref] = value + return value + + def linearize_page_tree(self, node=None): + if node is None: + node = self.page_tree_root + check_format_condition( + node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages" + ) + pages = [] + for kid in node[b"Kids"]: + kid_object = self.read_indirect(kid) + if kid_object[b"Type"] == b"Page": + pages.append(kid) + else: + pages.extend(self.linearize_page_tree(node=kid_object)) + return pages diff --git a/minor_project/lib/python3.6/site-packages/PIL/PixarImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/PixarImagePlugin.py new file mode 100644 index 0000000..c4860b6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PixarImagePlugin.py @@ -0,0 +1,70 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIXAR raster support for PIL +# +# history: +# 97-01-29 fl Created +# +# notes: +# This is incomplete; it is based on a few samples created with +# Photoshop 2.5 and 3.0, and a summary description provided by +# Greg Coats . Hopefully, "L" and +# "RGBA" support will be added in future versions. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile +from ._binary import i16le as i16 + +# +# helpers + + +def _accept(prefix): + return prefix[:4] == b"\200\350\000\000" + + +## +# Image plugin for PIXAR raster images. + + +class PixarImageFile(ImageFile.ImageFile): + + format = "PIXAR" + format_description = "PIXAR raster image" + + def _open(self): + + # assuming a 4-byte magic label + s = self.fp.read(4) + if not _accept(s): + raise SyntaxError("not a PIXAR file") + + # read rest of header + s = s + self.fp.read(508) + + self._size = i16(s, 418), i16(s, 416) + + # get channel/depth descriptions + mode = i16(s, 424), i16(s, 426) + + if mode == (14, 2): + self.mode = "RGB" + # FIXME: to be continued... 
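+ # Only the (14, 2) channel/depth pair read above is recognised and mapped
+ # to RGB; the raw pixel data is expected to start at byte offset 1024.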
+ + # create tile descriptor (assuming "dumped") + self.tile = [("raw", (0, 0) + self.size, 1024, (self.mode, 0, 1))] + + +# +# -------------------------------------------------------------------- + +Image.register_open(PixarImageFile.format, PixarImageFile, _accept) + +Image.register_extension(PixarImageFile.format, ".pxr") diff --git a/minor_project/lib/python3.6/site-packages/PIL/PngImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/PngImagePlugin.py new file mode 100644 index 0000000..2d4ac76 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PngImagePlugin.py @@ -0,0 +1,1395 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PNG support code +# +# See "PNG (Portable Network Graphics) Specification, version 1.0; +# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.). +# +# history: +# 1996-05-06 fl Created (couldn't resist it) +# 1996-12-14 fl Upgraded, added read and verify support (0.2) +# 1996-12-15 fl Separate PNG stream parser +# 1996-12-29 fl Added write support, added getchunks +# 1996-12-30 fl Eliminated circular references in decoder (0.3) +# 1998-07-12 fl Read/write 16-bit images as mode I (0.4) +# 2001-02-08 fl Added transparency support (from Zircon) (0.5) +# 2001-04-16 fl Don't close data source in "open" method (0.6) +# 2004-02-24 fl Don't even pretend to support interlaced files (0.7) +# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8) +# 2004-09-20 fl Added PngInfo chunk container +# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev) +# 2008-08-13 fl Added tRNS support for RGB images +# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech) +# 2009-03-08 fl Added zTXT support (from Lowell Alleman) +# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua) +# +# Copyright (c) 1997-2009 by Secret Labs AB +# Copyright (c) 1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import itertools +import logging +import re +import struct +import warnings +import zlib + +from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence +from ._binary import i16be as i16 +from ._binary import i32be as i32 +from ._binary import o8 +from ._binary import o16be as o16 +from ._binary import o32be as o32 + +logger = logging.getLogger(__name__) + +is_cid = re.compile(br"\w\w\w\w").match + + +_MAGIC = b"\211PNG\r\n\032\n" + + +_MODES = { + # supported bits/color combinations, and corresponding modes/rawmodes + # Greyscale + (1, 0): ("1", "1"), + (2, 0): ("L", "L;2"), + (4, 0): ("L", "L;4"), + (8, 0): ("L", "L"), + (16, 0): ("I", "I;16B"), + # Truecolour + (8, 2): ("RGB", "RGB"), + (16, 2): ("RGB", "RGB;16B"), + # Indexed-colour + (1, 3): ("P", "P;1"), + (2, 3): ("P", "P;2"), + (4, 3): ("P", "P;4"), + (8, 3): ("P", "P"), + # Greyscale with alpha + (8, 4): ("LA", "LA"), + (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available + # Truecolour with alpha + (8, 6): ("RGBA", "RGBA"), + (16, 6): ("RGBA", "RGBA;16B"), +} + + +_simple_palette = re.compile(b"^\xff*\x00\xff*$") + +MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK +""" +Maximum decompressed size for a iTXt or zTXt chunk. +Eliminates decompression bombs where compressed chunks can expand 1000x. +See :ref:`Text in PNG File Format`. +""" +MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK +""" +Set the maximum total text chunk size. +See :ref:`Text in PNG File Format`. +""" + + +# APNG frame disposal modes +APNG_DISPOSE_OP_NONE = 0 +""" +No disposal is done on this frame before rendering the next frame. 
+See :ref:`Saving APNG sequences`. +""" +APNG_DISPOSE_OP_BACKGROUND = 1 +""" +This frame’s modified region is cleared to fully transparent black before rendering +the next frame. +See :ref:`Saving APNG sequences`. +""" +APNG_DISPOSE_OP_PREVIOUS = 2 +""" +This frame’s modified region is reverted to the previous frame’s contents before +rendering the next frame. +See :ref:`Saving APNG sequences`. +""" + +# APNG frame blend modes +APNG_BLEND_OP_SOURCE = 0 +""" +All color components of this frame, including alpha, overwrite the previous output +image contents. +See :ref:`Saving APNG sequences`. +""" +APNG_BLEND_OP_OVER = 1 +""" +This frame should be alpha composited with the previous output image contents. +See :ref:`Saving APNG sequences`. +""" + + +def _safe_zlib_decompress(s): + dobj = zlib.decompressobj() + plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) + if dobj.unconsumed_tail: + raise ValueError("Decompressed Data Too Large") + return plaintext + + +def _crc32(data, seed=0): + return zlib.crc32(data, seed) & 0xFFFFFFFF + + +# -------------------------------------------------------------------- +# Support classes. Suitable for PNG and related formats like MNG etc. + + +class ChunkStream: + def __init__(self, fp): + + self.fp = fp + self.queue = [] + + def read(self): + """Fetch a new chunk. Returns header information.""" + cid = None + + if self.queue: + cid, pos, length = self.queue.pop() + self.fp.seek(pos) + else: + s = self.fp.read(8) + cid = s[4:] + pos = self.fp.tell() + length = i32(s) + + if not is_cid(cid): + if not ImageFile.LOAD_TRUNCATED_IMAGES: + raise SyntaxError(f"broken PNG file (chunk {repr(cid)})") + + return cid, pos, length + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + self.queue = self.crc = self.fp = None + + def push(self, cid, pos, length): + + self.queue.append((cid, pos, length)) + + def call(self, cid, pos, length): + """Call the appropriate chunk handler""" + + logger.debug("STREAM %r %s %s", cid, pos, length) + return getattr(self, "chunk_" + cid.decode("ascii"))(pos, length) + + def crc(self, cid, data): + """Read and verify checksum""" + + # Skip CRC checks for ancillary chunks if allowed to load truncated + # images + # 5th byte of first char is 1 [specs, section 5.4] + if ImageFile.LOAD_TRUNCATED_IMAGES and (cid[0] >> 5 & 1): + self.crc_skip(cid, data) + return + + try: + crc1 = _crc32(data, _crc32(cid)) + crc2 = i32(self.fp.read(4)) + if crc1 != crc2: + raise SyntaxError( + f"broken PNG file (bad header checksum in {repr(cid)})" + ) + except struct.error as e: + raise SyntaxError( + f"broken PNG file (incomplete checksum in {repr(cid)})" + ) from e + + def crc_skip(self, cid, data): + """Read checksum. Used if the C module is not present""" + + self.fp.read(4) + + def verify(self, endchunk=b"IEND"): + + # Simple approach; just calculate checksum for all remaining + # blocks. Must be called directly after open. 
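+ # Read each remaining chunk with _safe_read(), feed it to crc(), and
+ # collect the chunk ids seen before the end chunk (IEND by default).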
+ + cids = [] + + while True: + try: + cid, pos, length = self.read() + except struct.error as e: + raise OSError("truncated PNG file") from e + + if cid == endchunk: + break + self.crc(cid, ImageFile._safe_read(self.fp, length)) + cids.append(cid) + + return cids + + +class iTXt(str): + """ + Subclass of string to allow iTXt chunks to look like strings while + keeping their extra information + + """ + + @staticmethod + def __new__(cls, text, lang=None, tkey=None): + """ + :param cls: the class to use when creating the instance + :param text: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + """ + + self = str.__new__(cls, text) + self.lang = lang + self.tkey = tkey + return self + + +class PngInfo: + """ + PNG chunk container (for use with save(pnginfo=)) + + """ + + def __init__(self): + self.chunks = [] + + def add(self, cid, data, after_idat=False): + """Appends an arbitrary chunk. Use with caution. + + :param cid: a byte string, 4 bytes long. + :param data: a byte string of the encoded data + :param after_idat: for use with private chunks. Whether the chunk + should be written after IDAT + + """ + + chunk = [cid, data] + if after_idat: + chunk.append(True) + self.chunks.append(tuple(chunk)) + + def add_itxt(self, key, value, lang="", tkey="", zip=False): + """Appends an iTXt chunk. + + :param key: latin-1 encodable text key name + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + :param zip: compression flag + + """ + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + if not isinstance(value, bytes): + value = value.encode("utf-8", "strict") + if not isinstance(lang, bytes): + lang = lang.encode("utf-8", "strict") + if not isinstance(tkey, bytes): + tkey = tkey.encode("utf-8", "strict") + + if zip: + self.add( + b"iTXt", + key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + zlib.compress(value), + ) + else: + self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + value) + + def add_text(self, key, value, zip=False): + """Appends a text chunk. 
+ + :param key: latin-1 encodable text key name + :param value: value for this key, text or an + :py:class:`PIL.PngImagePlugin.iTXt` instance + :param zip: compression flag + + """ + if isinstance(value, iTXt): + return self.add_itxt(key, value, value.lang, value.tkey, zip=zip) + + # The tEXt chunk stores latin-1 text + if not isinstance(value, bytes): + try: + value = value.encode("latin-1", "strict") + except UnicodeError: + return self.add_itxt(key, value, zip=zip) + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + + if zip: + self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) + else: + self.add(b"tEXt", key + b"\0" + value) + + +# -------------------------------------------------------------------- +# PNG image stream (IHDR/IEND) + + +class PngStream(ChunkStream): + def __init__(self, fp): + super().__init__(fp) + + # local copies of Image attributes + self.im_info = {} + self.im_text = {} + self.im_size = (0, 0) + self.im_mode = None + self.im_tile = None + self.im_palette = None + self.im_custom_mimetype = None + self.im_n_frames = None + self._seq_num = None + self.rewind_state = None + + self.text_memory = 0 + + def check_text_memory(self, chunklen): + self.text_memory += chunklen + if self.text_memory > MAX_TEXT_MEMORY: + raise ValueError( + "Too much memory used in text chunks: " + f"{self.text_memory}>MAX_TEXT_MEMORY" + ) + + def save_rewind(self): + self.rewind_state = { + "info": self.im_info.copy(), + "tile": self.im_tile, + "seq_num": self._seq_num, + } + + def rewind(self): + self.im_info = self.rewind_state["info"] + self.im_tile = self.rewind_state["tile"] + self._seq_num = self.rewind_state["seq_num"] + + def chunk_iCCP(self, pos, length): + + # ICC profile + s = ImageFile._safe_read(self.fp, length) + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + i = s.find(b"\0") + logger.debug("iCCP profile name %r", s[:i]) + logger.debug("Compression method %s", s[i]) + comp_method = s[i] + if comp_method != 0: + raise SyntaxError(f"Unknown compression method {comp_method} in iCCP chunk") + try: + icc_profile = _safe_zlib_decompress(s[i + 2 :]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + icc_profile = None + else: + raise + except zlib.error: + icc_profile = None # FIXME + self.im_info["icc_profile"] = icc_profile + return s + + def chunk_IHDR(self, pos, length): + + # image header + s = ImageFile._safe_read(self.fp, length) + self.im_size = i32(s, 0), i32(s, 4) + try: + self.im_mode, self.im_rawmode = _MODES[(s[8], s[9])] + except Exception: + pass + if s[12]: + self.im_info["interlace"] = 1 + if s[11]: + raise SyntaxError("unknown filter category") + return s + + def chunk_IDAT(self, pos, length): + + # image data + if "bbox" in self.im_info: + tile = [("zip", self.im_info["bbox"], pos, self.im_rawmode)] + else: + if self.im_n_frames is not None: + self.im_info["default_image"] = True + tile = [("zip", (0, 0) + self.im_size, pos, self.im_rawmode)] + self.im_tile = tile + self.im_idat = length + raise EOFError + + def chunk_IEND(self, pos, length): + + # end of PNG image + raise EOFError + + def chunk_PLTE(self, pos, length): + + # palette + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + self.im_palette = "RGB", s + return s + + def chunk_tRNS(self, pos, length): + + # transparency + s = ImageFile._safe_read(self.fp, 
length) + if self.im_mode == "P": + if _simple_palette.match(s): + # tRNS contains only one full-transparent entry, + # other entries are full opaque + i = s.find(b"\0") + if i >= 0: + self.im_info["transparency"] = i + else: + # otherwise, we have a byte string with one alpha value + # for each palette entry + self.im_info["transparency"] = s + elif self.im_mode in ("1", "L", "I"): + self.im_info["transparency"] = i16(s) + elif self.im_mode == "RGB": + self.im_info["transparency"] = i16(s), i16(s, 2), i16(s, 4) + return s + + def chunk_gAMA(self, pos, length): + # gamma setting + s = ImageFile._safe_read(self.fp, length) + self.im_info["gamma"] = i32(s) / 100000.0 + return s + + def chunk_cHRM(self, pos, length): + # chromaticity, 8 unsigned ints, actual value is scaled by 100,000 + # WP x,y, Red x,y, Green x,y Blue x,y + + s = ImageFile._safe_read(self.fp, length) + raw_vals = struct.unpack(">%dI" % (len(s) // 4), s) + self.im_info["chromaticity"] = tuple(elt / 100000.0 for elt in raw_vals) + return s + + def chunk_sRGB(self, pos, length): + # srgb rendering intent, 1 byte + # 0 perceptual + # 1 relative colorimetric + # 2 saturation + # 3 absolute colorimetric + + s = ImageFile._safe_read(self.fp, length) + self.im_info["srgb"] = s[0] + return s + + def chunk_pHYs(self, pos, length): + + # pixels per unit + s = ImageFile._safe_read(self.fp, length) + px, py = i32(s, 0), i32(s, 4) + unit = s[8] + if unit == 1: # meter + dpi = int(px * 0.0254 + 0.5), int(py * 0.0254 + 0.5) + self.im_info["dpi"] = dpi + elif unit == 0: + self.im_info["aspect"] = px, py + return s + + def chunk_tEXt(self, pos, length): + + # text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + # fallback for broken tEXt tags + k = s + v = b"" + if k: + k = k.decode("latin-1", "strict") + v_str = v.decode("latin-1", "replace") + + self.im_info[k] = v if k == "exif" else v_str + self.im_text[k] = v_str + self.check_text_memory(len(v_str)) + + return s + + def chunk_zTXt(self, pos, length): + + # compressed text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + k = s + v = b"" + if v: + comp_method = v[0] + else: + comp_method = 0 + if comp_method != 0: + raise SyntaxError(f"Unknown compression method {comp_method} in zTXt chunk") + try: + v = _safe_zlib_decompress(v[1:]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + v = b"" + else: + raise + except zlib.error: + v = b"" + + if k: + k = k.decode("latin-1", "strict") + v = v.decode("latin-1", "replace") + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_iTXt(self, pos, length): + + # international text + r = s = ImageFile._safe_read(self.fp, length) + try: + k, r = r.split(b"\0", 1) + except ValueError: + return s + if len(r) < 2: + return s + cf, cm, r = r[0], r[1], r[2:] + try: + lang, tk, v = r.split(b"\0", 2) + except ValueError: + return s + if cf != 0: + if cm == 0: + try: + v = _safe_zlib_decompress(v) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + else: + raise + except zlib.error: + return s + else: + return s + try: + k = k.decode("latin-1", "strict") + lang = lang.decode("utf-8", "strict") + tk = tk.decode("utf-8", "strict") + v = v.decode("utf-8", "strict") + except UnicodeError: + return s + + self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) + self.check_text_memory(len(v)) + + return s + + def chunk_eXIf(self, pos, length): + s = ImageFile._safe_read(self.fp, length) 
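+ # The eXIf chunk holds raw Exif (TIFF) data; the JPEG-style "Exif\x00\x00"
+ # identifier is prepended so the stored blob matches the "exif" info
+ # format used for other formats in Pillow.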
+ self.im_info["exif"] = b"Exif\x00\x00" + s + return s + + # APNG chunks + def chunk_acTL(self, pos, length): + s = ImageFile._safe_read(self.fp, length) + if self.im_n_frames is not None: + self.im_n_frames = None + warnings.warn("Invalid APNG, will use default PNG image if possible") + return s + n_frames = i32(s) + if n_frames == 0 or n_frames > 0x80000000: + warnings.warn("Invalid APNG, will use default PNG image if possible") + return s + self.im_n_frames = n_frames + self.im_info["loop"] = i32(s, 4) + self.im_custom_mimetype = "image/apng" + return s + + def chunk_fcTL(self, pos, length): + s = ImageFile._safe_read(self.fp, length) + seq = i32(s) + if (self._seq_num is None and seq != 0) or ( + self._seq_num is not None and self._seq_num != seq - 1 + ): + raise SyntaxError("APNG contains frame sequence errors") + self._seq_num = seq + width, height = i32(s, 4), i32(s, 8) + px, py = i32(s, 12), i32(s, 16) + im_w, im_h = self.im_size + if px + width > im_w or py + height > im_h: + raise SyntaxError("APNG contains invalid frames") + self.im_info["bbox"] = (px, py, px + width, py + height) + delay_num, delay_den = i16(s, 20), i16(s, 22) + if delay_den == 0: + delay_den = 100 + self.im_info["duration"] = float(delay_num) / float(delay_den) * 1000 + self.im_info["disposal"] = s[24] + self.im_info["blend"] = s[25] + return s + + def chunk_fdAT(self, pos, length): + s = ImageFile._safe_read(self.fp, 4) + seq = i32(s) + if self._seq_num != seq - 1: + raise SyntaxError("APNG contains frame sequence errors") + self._seq_num = seq + return self.chunk_IDAT(pos + 4, length - 4) + + +# -------------------------------------------------------------------- +# PNG reader + + +def _accept(prefix): + return prefix[:8] == _MAGIC + + +## +# Image plugin for PNG images. + + +class PngImageFile(ImageFile.ImageFile): + + format = "PNG" + format_description = "Portable network graphics" + + def _open(self): + + if not _accept(self.fp.read(8)): + raise SyntaxError("not a PNG file") + self.__fp = self.fp + self.__frame = 0 + + # + # Parse headers up to the first IDAT or fDAT chunk + + self.private_chunks = [] + self.png = PngStream(self.fp) + + while True: + + # + # get next chunk + + cid, pos, length = self.png.read() + + try: + s = self.png.call(cid, pos, length) + except EOFError: + break + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + if cid[1:2].islower(): + self.private_chunks.append((cid, s)) + + self.png.crc(cid, s) + + # + # Copy relevant attributes from the PngStream. An alternative + # would be to let the PngStream class modify these attributes + # directly, but that introduces circular references which are + # difficult to break if things go wrong in the decoder... 
+ # (believe me, I've tried ;-) + + self.mode = self.png.im_mode + self._size = self.png.im_size + self.info = self.png.im_info + self._text = None + self.tile = self.png.im_tile + self.custom_mimetype = self.png.im_custom_mimetype + self.n_frames = self.png.im_n_frames or 1 + self.default_image = self.info.get("default_image", False) + + if self.png.im_palette: + rawmode, data = self.png.im_palette + self.palette = ImagePalette.raw(rawmode, data) + + if cid == b"fdAT": + self.__prepare_idat = length - 4 + else: + self.__prepare_idat = length # used by load_prepare() + + if self.png.im_n_frames is not None: + self._close_exclusive_fp_after_loading = False + self.png.save_rewind() + self.__rewind_idat = self.__prepare_idat + self.__rewind = self.__fp.tell() + if self.default_image: + # IDAT chunk contains default image and not first animation frame + self.n_frames += 1 + self._seek(0) + self.is_animated = self.n_frames > 1 + + @property + def text(self): + # experimental + if self._text is None: + # iTxt, tEXt and zTXt chunks may appear at the end of the file + # So load the file to ensure that they are read + if self.is_animated: + frame = self.__frame + # for APNG, seek to the final frame before loading + self.seek(self.n_frames - 1) + self.load() + if self.is_animated: + self.seek(frame) + return self._text + + def verify(self): + """Verify PNG file""" + + if self.fp is None: + raise RuntimeError("verify must be called directly after open") + + # back up to beginning of IDAT block + self.fp.seek(self.tile[0][2] - 8) + + self.png.verify() + self.png.close() + + if self._exclusive_fp: + self.fp.close() + self.fp = None + + def seek(self, frame): + if not self._seek_check(frame): + return + if frame < self.__frame: + self._seek(0, True) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError as e: + self.seek(last_frame) + raise EOFError("no more images in APNG file") from e + + def _seek(self, frame, rewind=False): + if frame == 0: + if rewind: + self.__fp.seek(self.__rewind) + self.png.rewind() + self.__prepare_idat = self.__rewind_idat + self.im = None + if self.pyaccess: + self.pyaccess = None + self.info = self.png.im_info + self.tile = self.png.im_tile + self.fp = self.__fp + self._prev_im = None + self.dispose = None + self.default_image = self.info.get("default_image", False) + self.dispose_op = self.info.get("disposal") + self.blend_op = self.info.get("blend") + self.dispose_extent = self.info.get("bbox") + self.__frame = 0 + else: + if frame != self.__frame + 1: + raise ValueError(f"cannot seek to frame {frame}") + + # ensure previous frame was loaded + self.load() + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + self._prev_im = self.im.copy() + + self.fp = self.__fp + + # advance to the next frame + if self.__prepare_idat: + ImageFile._safe_read(self.fp, self.__prepare_idat) + self.__prepare_idat = 0 + frame_start = False + while True: + self.fp.read(4) # CRC + + try: + cid, pos, length = self.png.read() + except (struct.error, SyntaxError): + break + + if cid == b"IEND": + raise EOFError("No more images in APNG file") + if cid == b"fcTL": + if frame_start: + # there must be at least one fdAT chunk between fcTL chunks + raise SyntaxError("APNG missing frame data") + frame_start = True + + try: + self.png.call(cid, pos, length) + except UnicodeDecodeError: + break + except EOFError: + if cid == b"fdAT": + length -= 4 + if frame_start: + self.__prepare_idat = length + break + 
ImageFile._safe_read(self.fp, length) + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + ImageFile._safe_read(self.fp, length) + + self.__frame = frame + self.tile = self.png.im_tile + self.dispose_op = self.info.get("disposal") + self.blend_op = self.info.get("blend") + self.dispose_extent = self.info.get("bbox") + + if not self.tile: + raise EOFError + + # setup frame disposal (actual disposal done when needed in the next _seek()) + if self._prev_im is None and self.dispose_op == APNG_DISPOSE_OP_PREVIOUS: + self.dispose_op = APNG_DISPOSE_OP_BACKGROUND + + if self.dispose_op == APNG_DISPOSE_OP_PREVIOUS: + self.dispose = self._prev_im.copy() + self.dispose = self._crop(self.dispose, self.dispose_extent) + elif self.dispose_op == APNG_DISPOSE_OP_BACKGROUND: + self.dispose = Image.core.fill(self.mode, self.size) + self.dispose = self._crop(self.dispose, self.dispose_extent) + else: + self.dispose = None + + def tell(self): + return self.__frame + + def load_prepare(self): + """internal: prepare to read PNG file""" + + if self.info.get("interlace"): + self.decoderconfig = self.decoderconfig + (1,) + + self.__idat = self.__prepare_idat # used by load_read() + ImageFile.ImageFile.load_prepare(self) + + def load_read(self, read_bytes): + """internal: read more image data""" + + while self.__idat == 0: + # end of chunk, skip forward to next one + + self.fp.read(4) # CRC + + cid, pos, length = self.png.read() + + if cid not in [b"IDAT", b"DDAT", b"fdAT"]: + self.png.push(cid, pos, length) + return b"" + + if cid == b"fdAT": + try: + self.png.call(cid, pos, length) + except EOFError: + pass + self.__idat = length - 4 # sequence_num has already been read + else: + self.__idat = length # empty chunks are allowed + + # read more data from this chunk + if read_bytes <= 0: + read_bytes = self.__idat + else: + read_bytes = min(read_bytes, self.__idat) + + self.__idat = self.__idat - read_bytes + + return self.fp.read(read_bytes) + + def load_end(self): + """internal: finished reading image data""" + while True: + self.fp.read(4) # CRC + + try: + cid, pos, length = self.png.read() + except (struct.error, SyntaxError): + break + + if cid == b"IEND": + break + elif cid == b"fcTL" and self.is_animated: + # start of the next frame, stop reading + self.__prepare_idat = 0 + self.png.push(cid, pos, length) + break + + try: + self.png.call(cid, pos, length) + except UnicodeDecodeError: + break + except EOFError: + if cid == b"fdAT": + length -= 4 + ImageFile._safe_read(self.fp, length) + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + if cid[1:2].islower(): + self.private_chunks.append((cid, s, True)) + self._text = self.png.im_text + if not self.is_animated: + self.png.close() + self.png = None + else: + if self._prev_im and self.blend_op == APNG_BLEND_OP_OVER: + updated = self._crop(self.im, self.dispose_extent) + self._prev_im.paste( + updated, self.dispose_extent, updated.convert("RGBA") + ) + self.im = self._prev_im + if self.pyaccess: + self.pyaccess = None + + def _getexif(self): + if "exif" not in self.info: + self.load() + if "exif" not in self.info and "Raw profile type exif" not in self.info: + return None + return dict(self.getexif()) + + def getexif(self): + if "exif" not in self.info: + self.load() + + return super().getexif() + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + +# 
-------------------------------------------------------------------- +# PNG writer + +_OUTMODES = { + # supported PIL modes, and corresponding rawmodes/bits/color combinations + "1": ("1", b"\x01\x00"), + "L;1": ("L;1", b"\x01\x00"), + "L;2": ("L;2", b"\x02\x00"), + "L;4": ("L;4", b"\x04\x00"), + "L": ("L", b"\x08\x00"), + "LA": ("LA", b"\x08\x04"), + "I": ("I;16B", b"\x10\x00"), + "I;16": ("I;16B", b"\x10\x00"), + "P;1": ("P;1", b"\x01\x03"), + "P;2": ("P;2", b"\x02\x03"), + "P;4": ("P;4", b"\x04\x03"), + "P": ("P", b"\x08\x03"), + "RGB": ("RGB", b"\x08\x02"), + "RGBA": ("RGBA", b"\x08\x06"), +} + + +def putchunk(fp, cid, *data): + """Write a PNG chunk (including CRC field)""" + + data = b"".join(data) + + fp.write(o32(len(data)) + cid) + fp.write(data) + crc = _crc32(data, _crc32(cid)) + fp.write(o32(crc)) + + +class _idat: + # wrap output from the encoder in IDAT chunks + + def __init__(self, fp, chunk): + self.fp = fp + self.chunk = chunk + + def write(self, data): + self.chunk(self.fp, b"IDAT", data) + + +class _fdat: + # wrap encoder output in fdAT chunks + + def __init__(self, fp, chunk, seq_num): + self.fp = fp + self.chunk = chunk + self.seq_num = seq_num + + def write(self, data): + self.chunk(self.fp, b"fdAT", o32(self.seq_num), data) + self.seq_num += 1 + + +def _write_multiple_frames(im, fp, chunk, rawmode): + default_image = im.encoderinfo.get("default_image", im.info.get("default_image")) + duration = im.encoderinfo.get("duration", im.info.get("duration", 0)) + loop = im.encoderinfo.get("loop", im.info.get("loop", 0)) + disposal = im.encoderinfo.get("disposal", im.info.get("disposal")) + blend = im.encoderinfo.get("blend", im.info.get("blend")) + + if default_image: + chain = itertools.chain(im.encoderinfo.get("append_images", [])) + else: + chain = itertools.chain([im], im.encoderinfo.get("append_images", [])) + + im_frames = [] + frame_count = 0 + for im_seq in chain: + for im_frame in ImageSequence.Iterator(im_seq): + im_frame = im_frame.copy() + if im_frame.mode != im.mode: + if im.mode == "P": + im_frame = im_frame.convert(im.mode, palette=im.palette) + else: + im_frame = im_frame.convert(im.mode) + encoderinfo = im.encoderinfo.copy() + if isinstance(duration, (list, tuple)): + encoderinfo["duration"] = duration[frame_count] + if isinstance(disposal, (list, tuple)): + encoderinfo["disposal"] = disposal[frame_count] + if isinstance(blend, (list, tuple)): + encoderinfo["blend"] = blend[frame_count] + frame_count += 1 + + if im_frames: + previous = im_frames[-1] + prev_disposal = previous["encoderinfo"].get("disposal") + prev_blend = previous["encoderinfo"].get("blend") + if prev_disposal == APNG_DISPOSE_OP_PREVIOUS and len(im_frames) < 2: + prev_disposal = APNG_DISPOSE_OP_BACKGROUND + + if prev_disposal == APNG_DISPOSE_OP_BACKGROUND: + base_im = previous["im"] + dispose = Image.core.fill("RGBA", im.size, (0, 0, 0, 0)) + bbox = previous["bbox"] + if bbox: + dispose = dispose.crop(bbox) + else: + bbox = (0, 0) + im.size + base_im.paste(dispose, bbox) + elif prev_disposal == APNG_DISPOSE_OP_PREVIOUS: + base_im = im_frames[-2]["im"] + else: + base_im = previous["im"] + delta = ImageChops.subtract_modulo( + im_frame.convert("RGB"), base_im.convert("RGB") + ) + bbox = delta.getbbox() + if ( + not bbox + and prev_disposal == encoderinfo.get("disposal") + and prev_blend == encoderinfo.get("blend") + ): + duration = encoderinfo.get("duration", 0) + if duration: + if "duration" in previous["encoderinfo"]: + previous["encoderinfo"]["duration"] += duration + else: + 
previous["encoderinfo"]["duration"] = duration + continue + else: + bbox = None + im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo}) + + # animation control + chunk( + fp, + b"acTL", + o32(len(im_frames)), # 0: num_frames + o32(loop), # 4: num_plays + ) + + # default image IDAT (if it exists) + if default_image: + ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)]) + + seq_num = 0 + for frame, frame_data in enumerate(im_frames): + im_frame = frame_data["im"] + if not frame_data["bbox"]: + bbox = (0, 0) + im_frame.size + else: + bbox = frame_data["bbox"] + im_frame = im_frame.crop(bbox) + size = im_frame.size + duration = int(round(frame_data["encoderinfo"].get("duration", 0))) + disposal = frame_data["encoderinfo"].get("disposal", APNG_DISPOSE_OP_NONE) + blend = frame_data["encoderinfo"].get("blend", APNG_BLEND_OP_SOURCE) + # frame control + chunk( + fp, + b"fcTL", + o32(seq_num), # sequence_number + o32(size[0]), # width + o32(size[1]), # height + o32(bbox[0]), # x_offset + o32(bbox[1]), # y_offset + o16(duration), # delay_numerator + o16(1000), # delay_denominator + o8(disposal), # dispose_op + o8(blend), # blend_op + ) + seq_num += 1 + # frame data + if frame == 0 and not default_image: + # first frame must be in IDAT chunks for backwards compatibility + ImageFile._save( + im_frame, + _idat(fp, chunk), + [("zip", (0, 0) + im_frame.size, 0, rawmode)], + ) + else: + fdat_chunks = _fdat(fp, chunk, seq_num) + ImageFile._save( + im_frame, + fdat_chunks, + [("zip", (0, 0) + im_frame.size, 0, rawmode)], + ) + seq_num = fdat_chunks.seq_num + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +def _save(im, fp, filename, chunk=putchunk, save_all=False): + # save an image to disk (called by the save method) + + mode = im.mode + + if mode == "P": + + # + # attempt to minimize storage requirements for palette images + if "bits" in im.encoderinfo: + # number of bits specified by user + colors = 1 << im.encoderinfo["bits"] + else: + # check palette contents + if im.palette: + colors = max(min(len(im.palette.getdata()[1]) // 3, 256), 2) + else: + colors = 256 + + if colors <= 2: + bits = 1 + elif colors <= 4: + bits = 2 + elif colors <= 16: + bits = 4 + else: + bits = 8 + if bits != 8: + mode = f"{mode};{bits}" + + # encoder options + im.encoderconfig = ( + im.encoderinfo.get("optimize", False), + im.encoderinfo.get("compress_level", -1), + im.encoderinfo.get("compress_type", -1), + im.encoderinfo.get("dictionary", b""), + ) + + # get the corresponding PNG mode + try: + rawmode, mode = _OUTMODES[mode] + except KeyError as e: + raise OSError(f"cannot write mode {mode} as PNG") from e + + # + # write minimal PNG file + + fp.write(_MAGIC) + + chunk( + fp, + b"IHDR", + o32(im.size[0]), # 0: size + o32(im.size[1]), + mode, # 8: depth/type + b"\0", # 10: compression + b"\0", # 11: filter category + b"\0", # 12: interlace flag + ) + + chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"] + + icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile")) + if icc: + # ICC profile + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + name = b"ICC Profile" + data = name + b"\0\0" + zlib.compress(icc) + chunk(fp, b"iCCP", data) + + # You must either have sRGB or iCCP. + # Disallow sRGB chunks when an iCCP-chunk has been emitted. 
+ chunks.remove(b"sRGB") + + info = im.encoderinfo.get("pnginfo") + if info: + chunks_multiple_allowed = [b"sPLT", b"iTXt", b"tEXt", b"zTXt"] + for info_chunk in info.chunks: + cid, data = info_chunk[:2] + if cid in chunks: + chunks.remove(cid) + chunk(fp, cid, data) + elif cid in chunks_multiple_allowed: + chunk(fp, cid, data) + elif cid[1:2].islower(): + # Private chunk + after_idat = info_chunk[2:3] + if not after_idat: + chunk(fp, cid, data) + + if im.mode == "P": + palette_byte_number = (2 ** bits) * 3 + palette_bytes = im.im.getpalette("RGB")[:palette_byte_number] + while len(palette_bytes) < palette_byte_number: + palette_bytes += b"\0" + chunk(fp, b"PLTE", palette_bytes) + + transparency = im.encoderinfo.get("transparency", im.info.get("transparency", None)) + + if transparency or transparency == 0: + if im.mode == "P": + # limit to actual palette size + alpha_bytes = 2 ** bits + if isinstance(transparency, bytes): + chunk(fp, b"tRNS", transparency[:alpha_bytes]) + else: + transparency = max(0, min(255, transparency)) + alpha = b"\xFF" * transparency + b"\0" + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + elif im.mode in ("1", "L", "I"): + transparency = max(0, min(65535, transparency)) + chunk(fp, b"tRNS", o16(transparency)) + elif im.mode == "RGB": + red, green, blue = transparency + chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue)) + else: + if "transparency" in im.encoderinfo: + # don't bother with transparency if it's an RGBA + # and it's in the info dict. It's probably just stale. + raise OSError("cannot use transparency for this mode") + else: + if im.mode == "P" and im.im.getpalettemode() == "RGBA": + alpha = im.im.getpalette("RGBA", "A") + alpha_bytes = 2 ** bits + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + + dpi = im.encoderinfo.get("dpi") + if dpi: + chunk( + fp, + b"pHYs", + o32(int(dpi[0] / 0.0254 + 0.5)), + o32(int(dpi[1] / 0.0254 + 0.5)), + b"\x01", + ) + + if info: + chunks = [b"bKGD", b"hIST"] + for info_chunk in info.chunks: + cid, data = info_chunk[:2] + if cid in chunks: + chunks.remove(cid) + chunk(fp, cid, data) + + exif = im.encoderinfo.get("exif", im.info.get("exif")) + if exif: + if isinstance(exif, Image.Exif): + exif = exif.tobytes(8) + if exif.startswith(b"Exif\x00\x00"): + exif = exif[6:] + chunk(fp, b"eXIf", exif) + + if save_all: + _write_multiple_frames(im, fp, chunk, rawmode) + else: + ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)]) + + if info: + for info_chunk in info.chunks: + cid, data = info_chunk[:2] + if cid[1:2].islower(): + # Private chunk + after_idat = info_chunk[2:3] + if after_idat: + chunk(fp, cid, data) + + chunk(fp, b"IEND", b"") + + if hasattr(fp, "flush"): + fp.flush() + + +# -------------------------------------------------------------------- +# PNG chunk converter + + +def getchunks(im, **params): + """Return a list of PNG chunks representing this image.""" + + class collector: + data = [] + + def write(self, data): + pass + + def append(self, chunk): + self.data.append(chunk) + + def append(fp, cid, *data): + data = b"".join(data) + crc = o32(_crc32(data, _crc32(cid))) + fp.append((cid, data, crc)) + + fp = collector() + + try: + im.encoderinfo = params + _save(im, fp, None, append) + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(PngImageFile.format, PngImageFile, _accept) +Image.register_save(PngImageFile.format, _save) +Image.register_save_all(PngImageFile.format, _save_all) + 
+Image.register_extensions(PngImageFile.format, [".png", ".apng"]) + +Image.register_mime(PngImageFile.format, "image/png") diff --git a/minor_project/lib/python3.6/site-packages/PIL/PpmImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/PpmImagePlugin.py new file mode 100644 index 0000000..abf4d65 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PpmImagePlugin.py @@ -0,0 +1,164 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PPM support for PIL +# +# History: +# 96-03-24 fl Created +# 98-03-06 fl Write RGBA images (as RGB, that is) +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile + +# +# -------------------------------------------------------------------- + +b_whitespace = b"\x20\x09\x0a\x0b\x0c\x0d" + +MODES = { + # standard + b"P4": "1", + b"P5": "L", + b"P6": "RGB", + # extensions + b"P0CMYK": "CMYK", + # PIL extensions (for test purposes only) + b"PyP": "P", + b"PyRGBA": "RGBA", + b"PyCMYK": "CMYK", +} + + +def _accept(prefix): + return prefix[0:1] == b"P" and prefix[1] in b"0456y" + + +## +# Image plugin for PBM, PGM, and PPM images. + + +class PpmImageFile(ImageFile.ImageFile): + + format = "PPM" + format_description = "Pbmplus image" + + def _token(self, s=b""): + while True: # read until next whitespace + c = self.fp.read(1) + if not c or c in b_whitespace: + break + if c > b"\x79": + raise ValueError("Expected ASCII value, found binary") + s = s + c + if len(s) > 9: + raise ValueError("Expected int, got > 9 digits") + return s + + def _open(self): + + # check magic + s = self.fp.read(1) + if s != b"P": + raise SyntaxError("not a PPM file") + magic_number = self._token(s) + mode = MODES[magic_number] + + self.custom_mimetype = { + b"P4": "image/x-portable-bitmap", + b"P5": "image/x-portable-graymap", + b"P6": "image/x-portable-pixmap", + }.get(magic_number) + + if mode == "1": + self.mode = "1" + rawmode = "1;I" + else: + self.mode = rawmode = mode + + for ix in range(3): + while True: + while True: + s = self.fp.read(1) + if s not in b_whitespace: + break + if s == b"": + raise ValueError("File does not extend beyond magic number") + if s != b"#": + break + s = self.fp.readline() + s = int(self._token(s)) + if ix == 0: + xsize = s + elif ix == 1: + ysize = s + if mode == "1": + break + elif ix == 2: + # maxgrey + if s > 255: + if not mode == "L": + raise ValueError(f"Too many colors for band: {s}") + if s < 2 ** 16: + self.mode = "I" + rawmode = "I;16B" + else: + self.mode = "I" + rawmode = "I;32B" + + self._size = xsize, ysize + self.tile = [("raw", (0, 0, xsize, ysize), self.fp.tell(), (rawmode, 0, 1))] + + +# +# -------------------------------------------------------------------- + + +def _save(im, fp, filename): + if im.mode == "1": + rawmode, head = "1;I", b"P4" + elif im.mode == "L": + rawmode, head = "L", b"P5" + elif im.mode == "I": + if im.getextrema()[1] < 2 ** 16: + rawmode, head = "I;16B", b"P5" + else: + rawmode, head = "I;32B", b"P5" + elif im.mode == "RGB": + rawmode, head = "RGB", b"P6" + elif im.mode == "RGBA": + rawmode, head = "RGB", b"P6" + else: + raise OSError(f"cannot write mode {im.mode} as PPM") + fp.write(head + ("\n%d %d\n" % im.size).encode("ascii")) + if head == b"P6": + fp.write(b"255\n") + if head == b"P5": + if rawmode == "L": + fp.write(b"255\n") + elif rawmode == "I;16B": + fp.write(b"65535\n") + elif rawmode == "I;32B": + fp.write(b"2147483648\n") + ImageFile._save(im, fp, 
[("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]) + + # ALTERNATIVE: save via builtin debug function + # im._dump(filename) + + +# +# -------------------------------------------------------------------- + + +Image.register_open(PpmImageFile.format, PpmImageFile, _accept) +Image.register_save(PpmImageFile.format, _save) + +Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm", ".pnm"]) + +Image.register_mime(PpmImageFile.format, "image/x-portable-anymap") diff --git a/minor_project/lib/python3.6/site-packages/PIL/PsdImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/PsdImagePlugin.py new file mode 100644 index 0000000..d3799ed --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PsdImagePlugin.py @@ -0,0 +1,313 @@ +# +# The Python Imaging Library +# $Id$ +# +# Adobe PSD 2.5/3.0 file handling +# +# History: +# 1995-09-01 fl Created +# 1997-01-03 fl Read most PSD images +# 1997-01-18 fl Fixed P and CMYK support +# 2001-10-21 fl Added seek/tell support (for layers) +# +# Copyright (c) 1997-2001 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import io + +from . import Image, ImageFile, ImagePalette +from ._binary import i8 +from ._binary import i16be as i16 +from ._binary import i32be as i32 + +MODES = { + # (photoshop mode, bits) -> (pil mode, required channels) + (0, 1): ("1", 1), + (0, 8): ("L", 1), + (1, 8): ("L", 1), + (2, 8): ("P", 1), + (3, 8): ("RGB", 3), + (4, 8): ("CMYK", 4), + (7, 8): ("L", 1), # FIXME: multilayer + (8, 8): ("L", 1), # duotone + (9, 8): ("LAB", 3), +} + + +# --------------------------------------------------------------------. +# read PSD images + + +def _accept(prefix): + return prefix[:4] == b"8BPS" + + +## +# Image plugin for Photoshop images. 
+ + +class PsdImageFile(ImageFile.ImageFile): + + format = "PSD" + format_description = "Adobe Photoshop" + _close_exclusive_fp_after_loading = False + + def _open(self): + + read = self.fp.read + + # + # header + + s = read(26) + if not _accept(s) or i16(s, 4) != 1: + raise SyntaxError("not a PSD file") + + psd_bits = i16(s, 22) + psd_channels = i16(s, 12) + psd_mode = i16(s, 24) + + mode, channels = MODES[(psd_mode, psd_bits)] + + if channels > psd_channels: + raise OSError("not enough channels") + + self.mode = mode + self._size = i32(s, 18), i32(s, 14) + + # + # color mode data + + size = i32(read(4)) + if size: + data = read(size) + if mode == "P" and size == 768: + self.palette = ImagePalette.raw("RGB;L", data) + + # + # image resources + + self.resources = [] + + size = i32(read(4)) + if size: + # load resources + end = self.fp.tell() + size + while self.fp.tell() < end: + read(4) # signature + id = i16(read(2)) + name = read(i8(read(1))) + if not (len(name) & 1): + read(1) # padding + data = read(i32(read(4))) + if len(data) & 1: + read(1) # padding + self.resources.append((id, name, data)) + if id == 1039: # ICC profile + self.info["icc_profile"] = data + + # + # layer and mask information + + self.layers = [] + + size = i32(read(4)) + if size: + end = self.fp.tell() + size + size = i32(read(4)) + if size: + self.layers = _layerinfo(self.fp) + self.fp.seek(end) + self.n_frames = len(self.layers) + self.is_animated = self.n_frames > 1 + + # + # image descriptor + + self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels) + + # keep the file open + self.__fp = self.fp + self.frame = 1 + self._min_frame = 1 + + def seek(self, layer): + if not self._seek_check(layer): + return + + # seek to given layer (1..max) + try: + name, mode, bbox, tile = self.layers[layer - 1] + self.mode = mode + self.tile = tile + self.frame = layer + self.fp = self.__fp + return name, bbox + except IndexError as e: + raise EOFError("no such layer") from e + + def tell(self): + # return layer number (0=image, 1..max=layers) + return self.frame + + def load_prepare(self): + # create image memory if necessary + if not self.im or self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.fill(self.mode, self.size, 0) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + +def _layerinfo(file): + # read layerinfo block + layers = [] + read = file.read + for i in range(abs(i16(read(2)))): + + # bounding box + y0 = i32(read(4)) + x0 = i32(read(4)) + y1 = i32(read(4)) + x1 = i32(read(4)) + + # image info + info = [] + mode = [] + types = list(range(i16(read(2)))) + if len(types) > 4: + continue + + for i in types: + type = i16(read(2)) + + if type == 65535: + m = "A" + else: + m = "RGBA"[type] + + mode.append(m) + size = i32(read(4)) + info.append((m, size)) + + # figure out the image mode + mode.sort() + if mode == ["R"]: + mode = "L" + elif mode == ["B", "G", "R"]: + mode = "RGB" + elif mode == ["A", "B", "G", "R"]: + mode = "RGBA" + else: + mode = None # unknown + + # skip over blend flags and extra information + read(12) # filler + name = "" + size = i32(read(4)) # length of the extra data field + combined = 0 + if size: + data_end = file.tell() + size + + length = i32(read(4)) + if length: + file.seek(length - 16, io.SEEK_CUR) + combined += length + 4 + + length = i32(read(4)) + if length: + file.seek(length, 
io.SEEK_CUR) + combined += length + 4 + + length = i8(read(1)) + if length: + # Don't know the proper encoding, + # Latin-1 should be a good guess + name = read(length).decode("latin-1", "replace") + combined += length + 1 + + file.seek(data_end) + layers.append((name, mode, (x0, y0, x1, y1))) + + # get tiles + i = 0 + for name, mode, bbox in layers: + tile = [] + for m in mode: + t = _maketile(file, m, bbox, 1) + if t: + tile.extend(t) + layers[i] = name, mode, bbox, tile + i += 1 + + return layers + + +def _maketile(file, mode, bbox, channels): + + tile = None + read = file.read + + compression = i16(read(2)) + + xsize = bbox[2] - bbox[0] + ysize = bbox[3] - bbox[1] + + offset = file.tell() + + if compression == 0: + # + # raw compression + tile = [] + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("raw", bbox, offset, layer)) + offset = offset + xsize * ysize + + elif compression == 1: + # + # packbits compression + i = 0 + tile = [] + bytecount = read(channels * ysize * 2) + offset = file.tell() + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("packbits", bbox, offset, layer)) + for y in range(ysize): + offset = offset + i16(bytecount, i) + i += 2 + + file.seek(offset) + + if offset & 1: + read(1) # padding + + return tile + + +# -------------------------------------------------------------------- +# registry + + +Image.register_open(PsdImageFile.format, PsdImageFile, _accept) + +Image.register_extension(PsdImageFile.format, ".psd") + +Image.register_mime(PsdImageFile.format, "image/vnd.adobe.photoshop") diff --git a/minor_project/lib/python3.6/site-packages/PIL/PyAccess.py b/minor_project/lib/python3.6/site-packages/PIL/PyAccess.py new file mode 100644 index 0000000..494f5f9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/PyAccess.py @@ -0,0 +1,352 @@ +# +# The Python Imaging Library +# Pillow fork +# +# Python implementation of the PixelAccess Object +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# Copyright (c) 2013 Eric Soroos +# +# See the README file for information on usage and redistribution +# + +# Notes: +# +# * Implements the pixel access object following Access. +# * Does not implement the line functions, as they don't appear to be used +# * Taking only the tuple form, which is used from python. +# * Fill.c uses the integer form, but it's still going to use the old +# Access.c implementation. +# + +import logging +import sys + +try: + from cffi import FFI + + defs = """ + struct Pixel_RGBA { + unsigned char r,g,b,a; + }; + struct Pixel_I16 { + unsigned char l,r; + }; + """ + ffi = FFI() + ffi.cdef(defs) +except ImportError as ex: + # Allow error import for doc purposes, but error out when accessing + # anything in core. + from ._util import deferred_error + + FFI = ffi = deferred_error(ex) + +logger = logging.getLogger(__name__) + + +class PyAccess: + def __init__(self, img, readonly=False): + vals = dict(img.im.unsafe_ptrs) + self.readonly = readonly + self.image8 = ffi.cast("unsigned char **", vals["image8"]) + self.image32 = ffi.cast("int **", vals["image32"]) + self.image = ffi.cast("unsigned char **", vals["image"]) + self.xsize, self.ysize = img.im.size + + # Keep pointer to im object to prevent dereferencing. 
+ self._im = img.im + if self._im.mode == "P": + self._palette = img.palette + + # Debugging is polluting test traces, only useful here + # when hacking on PyAccess + # logger.debug("%s", vals) + self._post_init() + + def _post_init(self): + pass + + def __setitem__(self, xy, color): + """ + Modifies the pixel at x,y. The color is given as a single + numerical value for single band images, and a tuple for + multi-band images + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :param color: The pixel value. + """ + if self.readonly: + raise ValueError("Attempt to putpixel a read only image") + (x, y) = xy + if x < 0: + x = self.xsize + x + if y < 0: + y = self.ysize + y + (x, y) = self.check_xy((x, y)) + + if ( + self._im.mode == "P" + and isinstance(color, (list, tuple)) + and len(color) in [3, 4] + ): + # RGB or RGBA value for a P image + color = self._palette.getcolor(color) + + return self.set_pixel(x, y, color) + + def __getitem__(self, xy): + """ + Returns the pixel at x,y. The pixel is returned as a single + value for single band images or a tuple for multiple band + images + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :returns: a pixel value for single band images, a tuple of + pixel values for multiband images. + """ + (x, y) = xy + if x < 0: + x = self.xsize + x + if y < 0: + y = self.ysize + y + (x, y) = self.check_xy((x, y)) + return self.get_pixel(x, y) + + putpixel = __setitem__ + getpixel = __getitem__ + + def check_xy(self, xy): + (x, y) = xy + if not (0 <= x < self.xsize and 0 <= y < self.ysize): + raise ValueError("pixel location out of range") + return xy + + +class _PyAccess32_2(PyAccess): + """ PA, LA, stored in first and last bytes of a 32 bit word """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.a = min(color[1], 255) + + +class _PyAccess32_3(PyAccess): + """ RGB and friends, stored in the first three bytes of a 32 bit word """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = 255 + + +class _PyAccess32_4(PyAccess): + """ RGBA etc, all 4 bytes of a 32 bit word """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = min(color[3], 255) + + +class _PyAccess8(PyAccess): + """ 1, L, P, 8 bit images stored as uint8 """ + + def _post_init(self, *args, **kwargs): + self.pixels = self.image8 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 255) + except TypeError: + # tuple + self.pixels[y][x] = min(color[0], 255) + + +class _PyAccessI16_N(PyAccess): + """ I;16 access, native bitendian 
without conversion """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("unsigned short **", self.image) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 65535) + except TypeError: + # tuple + self.pixels[y][x] = min(color[0], 65535) + + +class _PyAccessI16_L(PyAccess): + """ I;16L access, with conversion """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_I16 **", self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l + pixel.r * 256 + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except TypeError: + color = min(color[0], 65535) + + pixel.l = color & 0xFF # noqa: E741 + pixel.r = color >> 8 + + +class _PyAccessI16_B(PyAccess): + """ I;16B access, with conversion """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_I16 **", self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l * 256 + pixel.r + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except Exception: + color = min(color[0], 65535) + + pixel.l = color >> 8 # noqa: E741 + pixel.r = color & 0xFF + + +class _PyAccessI32_N(PyAccess): + """ Signed Int32 access, native endian """ + + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + self.pixels[y][x] = color + + +class _PyAccessI32_Swap(PyAccess): + """ I;32L/B access, with byteswapping conversion """ + + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def reverse(self, i): + orig = ffi.new("int *", i) + chars = ffi.cast("unsigned char *", orig) + chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], chars[1], chars[0] + return ffi.cast("int *", chars)[0] + + def get_pixel(self, x, y): + return self.reverse(self.pixels[y][x]) + + def set_pixel(self, x, y, color): + self.pixels[y][x] = self.reverse(color) + + +class _PyAccessF(PyAccess): + """ 32 bit float access """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("float **", self.image32) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # not a tuple + self.pixels[y][x] = color + except TypeError: + # tuple + self.pixels[y][x] = color[0] + + +mode_map = { + "1": _PyAccess8, + "L": _PyAccess8, + "P": _PyAccess8, + "LA": _PyAccess32_2, + "La": _PyAccess32_2, + "PA": _PyAccess32_2, + "RGB": _PyAccess32_3, + "LAB": _PyAccess32_3, + "HSV": _PyAccess32_3, + "YCbCr": _PyAccess32_3, + "RGBA": _PyAccess32_4, + "RGBa": _PyAccess32_4, + "RGBX": _PyAccess32_4, + "CMYK": _PyAccess32_4, + "F": _PyAccessF, + "I": _PyAccessI32_N, +} + +if sys.byteorder == "little": + mode_map["I;16"] = _PyAccessI16_N + mode_map["I;16L"] = _PyAccessI16_N + mode_map["I;16B"] = _PyAccessI16_B + + mode_map["I;32L"] = _PyAccessI32_N + mode_map["I;32B"] = _PyAccessI32_Swap +else: + mode_map["I;16"] = _PyAccessI16_L + mode_map["I;16L"] = _PyAccessI16_L + mode_map["I;16B"] = _PyAccessI16_N + + mode_map["I;32L"] = _PyAccessI32_Swap + mode_map["I;32B"] = _PyAccessI32_N + + +def new(img, readonly=False): + access_type = mode_map.get(img.mode, None) + if not access_type: + logger.debug("PyAccess Not Implemented: %s", img.mode) + return None + return access_type(img, readonly) diff --git 
a/minor_project/lib/python3.6/site-packages/PIL/SgiImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/SgiImagePlugin.py new file mode 100644 index 0000000..d0f7c99 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/SgiImagePlugin.py @@ -0,0 +1,229 @@ +# +# The Python Imaging Library. +# $Id$ +# +# SGI image file handling +# +# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli. +# +# +# +# History: +# 2017-22-07 mb Add RLE decompression +# 2016-16-10 mb Add save method without compression +# 1995-09-10 fl Created +# +# Copyright (c) 2016 by Mickael Bonfill. +# Copyright (c) 2008 by Karsten Hiddemann. +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1995 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +import os +import struct + +from . import Image, ImageFile +from ._binary import i16be as i16 +from ._binary import o8 + + +def _accept(prefix): + return len(prefix) >= 2 and i16(prefix) == 474 + + +MODES = { + (1, 1, 1): "L", + (1, 2, 1): "L", + (2, 1, 1): "L;16B", + (2, 2, 1): "L;16B", + (1, 3, 3): "RGB", + (2, 3, 3): "RGB;16B", + (1, 3, 4): "RGBA", + (2, 3, 4): "RGBA;16B", +} + + +## +# Image plugin for SGI images. +class SgiImageFile(ImageFile.ImageFile): + + format = "SGI" + format_description = "SGI Image File Format" + + def _open(self): + + # HEAD + headlen = 512 + s = self.fp.read(headlen) + + if not _accept(s): + raise ValueError("Not an SGI image file") + + # compression : verbatim or RLE + compression = s[2] + + # bpc : 1 or 2 bytes (8bits or 16bits) + bpc = s[3] + + # dimension : 1, 2 or 3 (depending on xsize, ysize and zsize) + dimension = i16(s, 4) + + # xsize : width + xsize = i16(s, 6) + + # ysize : height + ysize = i16(s, 8) + + # zsize : channels count + zsize = i16(s, 10) + + # layout + layout = bpc, dimension, zsize + + # determine mode from bits/zsize + rawmode = "" + try: + rawmode = MODES[layout] + except KeyError: + pass + + if rawmode == "": + raise ValueError("Unsupported SGI image mode") + + self._size = xsize, ysize + self.mode = rawmode.split(";")[0] + if self.mode == "RGB": + self.custom_mimetype = "image/rgb" + + # orientation -1 : scanlines begins at the bottom-left corner + orientation = -1 + + # decoder info + if compression == 0: + pagesize = xsize * ysize * bpc + if bpc == 2: + self.tile = [ + ("SGI16", (0, 0) + self.size, headlen, (self.mode, 0, orientation)) + ] + else: + self.tile = [] + offset = headlen + for layer in self.mode: + self.tile.append( + ("raw", (0, 0) + self.size, offset, (layer, 0, orientation)) + ) + offset += pagesize + elif compression == 1: + self.tile = [ + ("sgi_rle", (0, 0) + self.size, headlen, (rawmode, orientation, bpc)) + ] + + +def _save(im, fp, filename): + if im.mode != "RGB" and im.mode != "RGBA" and im.mode != "L": + raise ValueError("Unsupported SGI image mode") + + # Get the keyword arguments + info = im.encoderinfo + + # Byte-per-pixel precision, 1 = 8bits per pixel + bpc = info.get("bpc", 1) + + if bpc not in (1, 2): + raise ValueError("Unsupported number of bytes per pixel") + + # Flip the image, since the origin of SGI file is the bottom-left corner + orientation = -1 + # Define the file as SGI File Format + magicNumber = 474 + # Run-Length Encoding Compression - Unsupported at this time + rle = 0 + + # Number of dimensions (x,y,z) + dim = 3 + # X Dimension = width / Y Dimension = height + x, y = im.size + if im.mode == "L" and y == 1: + dim = 1 + elif im.mode == "L": + dim = 2 + # Z Dimension: Number of channels + z 
= len(im.mode) + + if dim == 1 or dim == 2: + z = 1 + + # assert we've got the right number of bands. + if len(im.getbands()) != z: + raise ValueError( + f"incorrect number of bands in SGI write: {z} vs {len(im.getbands())}" + ) + + # Minimum Byte value + pinmin = 0 + # Maximum Byte value (255 = 8bits per pixel) + pinmax = 255 + # Image name (79 characters max, truncated below in write) + imgName = os.path.splitext(os.path.basename(filename))[0] + imgName = imgName.encode("ascii", "ignore") + # Standard representation of pixel in the file + colormap = 0 + fp.write(struct.pack(">h", magicNumber)) + fp.write(o8(rle)) + fp.write(o8(bpc)) + fp.write(struct.pack(">H", dim)) + fp.write(struct.pack(">H", x)) + fp.write(struct.pack(">H", y)) + fp.write(struct.pack(">H", z)) + fp.write(struct.pack(">l", pinmin)) + fp.write(struct.pack(">l", pinmax)) + fp.write(struct.pack("4s", b"")) # dummy + fp.write(struct.pack("79s", imgName)) # truncates to 79 chars + fp.write(struct.pack("s", b"")) # force null byte after imgname + fp.write(struct.pack(">l", colormap)) + fp.write(struct.pack("404s", b"")) # dummy + + rawmode = "L" + if bpc == 2: + rawmode = "L;16B" + + for channel in im.split(): + fp.write(channel.tobytes("raw", rawmode, 0, orientation)) + + fp.close() + + +class SGI16Decoder(ImageFile.PyDecoder): + _pulls_fd = True + + def decode(self, buffer): + rawmode, stride, orientation = self.args + pagesize = self.state.xsize * self.state.ysize + zsize = len(self.mode) + self.fd.seek(512) + + for band in range(zsize): + channel = Image.new("L", (self.state.xsize, self.state.ysize)) + channel.frombytes( + self.fd.read(2 * pagesize), "raw", "L;16B", stride, orientation + ) + self.im.putband(channel.im, band) + + return -1, 0 + + +# +# registry + + +Image.register_decoder("SGI16", SGI16Decoder) +Image.register_open(SgiImageFile.format, SgiImageFile, _accept) +Image.register_save(SgiImageFile.format, _save) +Image.register_mime(SgiImageFile.format, "image/sgi") + +Image.register_extensions(SgiImageFile.format, [".bw", ".rgb", ".rgba", ".sgi"]) + +# End of file diff --git a/minor_project/lib/python3.6/site-packages/PIL/SpiderImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/SpiderImagePlugin.py new file mode 100644 index 0000000..819f2ed --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/SpiderImagePlugin.py @@ -0,0 +1,324 @@ +# +# The Python Imaging Library. +# +# SPIDER image file handling +# +# History: +# 2004-08-02 Created BB +# 2006-03-02 added save method +# 2006-03-13 added support for stack images +# +# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144. +# Copyright (c) 2004 by William Baxter. +# Copyright (c) 2004 by Secret Labs AB. +# Copyright (c) 2004 by Fredrik Lundh. +# + +## +# Image plugin for the Spider image format. This format is is used +# by the SPIDER software, in processing image data from electron +# microscopy and tomography. +## + +# +# SpiderImagePlugin.py +# +# The Spider image format is used by SPIDER software, in processing +# image data from electron microscopy and tomography. 
+# +# Spider home page: +# https://spider.wadsworth.org/spider_doc/spider/docs/spider.html +# +# Details about the Spider image format: +# https://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html +# +import os +import struct +import sys + +from PIL import Image, ImageFile + + +def isInt(f): + try: + i = int(f) + if f - i == 0: + return 1 + else: + return 0 + except (ValueError, OverflowError): + return 0 + + +iforms = [1, 3, -11, -12, -21, -22] + + +# There is no magic number to identify Spider files, so just check a +# series of header locations to see if they have reasonable values. +# Returns no. of bytes in the header, if it is a valid Spider header, +# otherwise returns 0 + + +def isSpiderHeader(t): + h = (99,) + t # add 1 value so can use spider header index start=1 + # header values 1,2,5,12,13,22,23 should be integers + for i in [1, 2, 5, 12, 13, 22, 23]: + if not isInt(h[i]): + return 0 + # check iform + iform = int(h[5]) + if iform not in iforms: + return 0 + # check other header values + labrec = int(h[13]) # no. records in file header + labbyt = int(h[22]) # total no. of bytes in header + lenbyt = int(h[23]) # record length in bytes + if labbyt != (labrec * lenbyt): + return 0 + # looks like a valid header + return labbyt + + +def isSpiderImage(filename): + with open(filename, "rb") as fp: + f = fp.read(92) # read 23 * 4 bytes + t = struct.unpack(">23f", f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + t = struct.unpack("<23f", f) # little-endian + hdrlen = isSpiderHeader(t) + return hdrlen + + +class SpiderImageFile(ImageFile.ImageFile): + + format = "SPIDER" + format_description = "Spider 2D image" + _close_exclusive_fp_after_loading = False + + def _open(self): + # check header + n = 27 * 4 # read 27 float values + f = self.fp.read(n) + + try: + self.bigendian = 1 + t = struct.unpack(">27f", f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + self.bigendian = 0 + t = struct.unpack("<27f", f) # little-endian + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + raise SyntaxError("not a valid Spider file") + except struct.error as e: + raise SyntaxError("not a valid Spider file") from e + + h = (99,) + t # add 1 value : spider header index starts at 1 + iform = int(h[5]) + if iform != 1: + raise SyntaxError("not a Spider 2D image") + + self._size = int(h[12]), int(h[2]) # size in pixels (width, height) + self.istack = int(h[24]) + self.imgnumber = int(h[27]) + + if self.istack == 0 and self.imgnumber == 0: + # stk=0, img=0: a regular 2D image + offset = hdrlen + self._nimages = 1 + elif self.istack > 0 and self.imgnumber == 0: + # stk>0, img=0: Opening the stack for the first time + self.imgbytes = int(h[12]) * int(h[2]) * 4 + self.hdrlen = hdrlen + self._nimages = int(h[26]) + # Point to the first image in the stack + offset = hdrlen * 2 + self.imgnumber = 1 + elif self.istack == 0 and self.imgnumber > 0: + # stk=0, img>0: an image within the stack + offset = hdrlen + self.stkoffset + self.istack = 2 # So Image knows it's still a stack + else: + raise SyntaxError("inconsistent stack header values") + + if self.bigendian: + self.rawmode = "F;32BF" + else: + self.rawmode = "F;32F" + self.mode = "F" + + self.tile = [("raw", (0, 0) + self.size, offset, (self.rawmode, 0, 1))] + self.__fp = self.fp # FIXME: hack + + @property + def n_frames(self): + return self._nimages + + @property + def is_animated(self): + return self._nimages > 1 + + # 1st image index is zero (although SPIDER imgnumber starts at 1) + def tell(self): + if 
self.imgnumber < 1: + return 0 + else: + return self.imgnumber - 1 + + def seek(self, frame): + if self.istack == 0: + raise EOFError("attempt to seek in a non-stack file") + if not self._seek_check(frame): + return + self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes) + self.fp = self.__fp + self.fp.seek(self.stkoffset) + self._open() + + # returns a byte image after rescaling to 0..255 + def convert2byte(self, depth=255): + (minimum, maximum) = self.getextrema() + m = 1 + if maximum != minimum: + m = depth / (maximum - minimum) + b = -m * minimum + return self.point(lambda i, m=m, b=b: i * m + b).convert("L") + + # returns a ImageTk.PhotoImage object, after rescaling to 0..255 + def tkPhotoImage(self): + from PIL import ImageTk + + return ImageTk.PhotoImage(self.convert2byte(), palette=256) + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + +# -------------------------------------------------------------------- +# Image series + +# given a list of filenames, return a list of images +def loadImageSeries(filelist=None): + """create a list of :py:class:`~PIL.Image.Image` objects for use in a montage""" + if filelist is None or len(filelist) < 1: + return + + imglist = [] + for img in filelist: + if not os.path.exists(img): + print(f"unable to find {img}") + continue + try: + with Image.open(img) as im: + im = im.convert2byte() + except Exception: + if not isSpiderImage(img): + print(img + " is not a Spider image file") + continue + im.info["filename"] = img + imglist.append(im) + return imglist + + +# -------------------------------------------------------------------- +# For saving images in Spider format + + +def makeSpiderHeader(im): + nsam, nrow = im.size + lenbyt = nsam * 4 # There are labrec records in the header + labrec = int(1024 / lenbyt) + if 1024 % lenbyt != 0: + labrec += 1 + labbyt = labrec * lenbyt + hdr = [] + nvalues = int(labbyt / 4) + for i in range(nvalues): + hdr.append(0.0) + + if len(hdr) < 23: + return [] + + # NB these are Fortran indices + hdr[1] = 1.0 # nslice (=1 for an image) + hdr[2] = float(nrow) # number of rows per slice + hdr[5] = 1.0 # iform for 2D image + hdr[12] = float(nsam) # number of pixels per line + hdr[13] = float(labrec) # number of records in file header + hdr[22] = float(labbyt) # total number of bytes in header + hdr[23] = float(lenbyt) # record length in bytes + + # adjust for Fortran indexing + hdr = hdr[1:] + hdr.append(0.0) + # pack binary data into a string + hdrstr = [] + for v in hdr: + hdrstr.append(struct.pack("f", v)) + return hdrstr + + +def _save(im, fp, filename): + if im.mode[0] != "F": + im = im.convert("F") + + hdr = makeSpiderHeader(im) + if len(hdr) < 256: + raise OSError("Error creating Spider header") + + # write the SPIDER header + fp.writelines(hdr) + + rawmode = "F;32NF" # 32-bit native floating point + ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]) + + +def _save_spider(im, fp, filename): + # get the filename extension and register it with Image + ext = os.path.splitext(filename)[1] + Image.register_extension(SpiderImageFile.format, ext) + _save(im, fp, filename) + + +# -------------------------------------------------------------------- + + +Image.register_open(SpiderImageFile.format, SpiderImageFile) +Image.register_save(SpiderImageFile.format, _save_spider) + +if __name__ == "__main__": + + if len(sys.argv) < 2: + print("Syntax: python SpiderImagePlugin.py [infile] [outfile]") + 
sys.exit() + + filename = sys.argv[1] + if not isSpiderImage(filename): + print("input image must be in Spider format") + sys.exit() + + with Image.open(filename) as im: + print("image: " + str(im)) + print("format: " + str(im.format)) + print("size: " + str(im.size)) + print("mode: " + str(im.mode)) + print("max, min: ", end=" ") + print(im.getextrema()) + + if len(sys.argv) > 2: + outfile = sys.argv[2] + + # perform some image operation + im = im.transpose(Image.FLIP_LEFT_RIGHT) + print( + f"saving a flipped version of {os.path.basename(filename)} " + f"as {outfile} " + ) + im.save(outfile, SpiderImageFile.format) diff --git a/minor_project/lib/python3.6/site-packages/PIL/SunImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/SunImagePlugin.py new file mode 100644 index 0000000..c03759a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/SunImagePlugin.py @@ -0,0 +1,136 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Sun image file handling +# +# History: +# 1995-09-10 fl Created +# 1996-05-28 fl Fixed 32-bit alignment +# 1998-12-29 fl Import ImagePalette module +# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault) +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995-1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile, ImagePalette +from ._binary import i32be as i32 + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == 0x59A66A95 + + +## +# Image plugin for Sun raster files. + + +class SunImageFile(ImageFile.ImageFile): + + format = "SUN" + format_description = "Sun Raster File" + + def _open(self): + + # The Sun Raster file header is 32 bytes in length + # and has the following format: + + # typedef struct _SunRaster + # { + # DWORD MagicNumber; /* Magic (identification) number */ + # DWORD Width; /* Width of image in pixels */ + # DWORD Height; /* Height of image in pixels */ + # DWORD Depth; /* Number of bits per pixel */ + # DWORD Length; /* Size of image data in bytes */ + # DWORD Type; /* Type of raster file */ + # DWORD ColorMapType; /* Type of color map */ + # DWORD ColorMapLength; /* Size of the color map in bytes */ + # } SUNRASTER; + + # HEAD + s = self.fp.read(32) + if not _accept(s): + raise SyntaxError("not an SUN raster file") + + offset = 32 + + self._size = i32(s, 4), i32(s, 8) + + depth = i32(s, 12) + # data_length = i32(s, 16) # unreliable, ignore. 
+ file_type = i32(s, 20) + palette_type = i32(s, 24) # 0: None, 1: RGB, 2: Raw/arbitrary + palette_length = i32(s, 28) + + if depth == 1: + self.mode, rawmode = "1", "1;I" + elif depth == 4: + self.mode, rawmode = "L", "L;4" + elif depth == 8: + self.mode = rawmode = "L" + elif depth == 24: + if file_type == 3: + self.mode, rawmode = "RGB", "RGB" + else: + self.mode, rawmode = "RGB", "BGR" + elif depth == 32: + if file_type == 3: + self.mode, rawmode = "RGB", "RGBX" + else: + self.mode, rawmode = "RGB", "BGRX" + else: + raise SyntaxError("Unsupported Mode/Bit Depth") + + if palette_length: + if palette_length > 1024: + raise SyntaxError("Unsupported Color Palette Length") + + if palette_type != 1: + raise SyntaxError("Unsupported Palette Type") + + offset = offset + palette_length + self.palette = ImagePalette.raw("RGB;L", self.fp.read(palette_length)) + if self.mode == "L": + self.mode = "P" + rawmode = rawmode.replace("L", "P") + + # 16 bit boundaries on stride + stride = ((self.size[0] * depth + 15) // 16) * 2 + + # file type: Type is the version (or flavor) of the bitmap + # file. The following values are typically found in the Type + # field: + # 0000h Old + # 0001h Standard + # 0002h Byte-encoded + # 0003h RGB format + # 0004h TIFF format + # 0005h IFF format + # FFFFh Experimental + + # Old and standard are the same, except for the length tag. + # byte-encoded is run-length-encoded + # RGB looks similar to standard, but RGB byte order + # TIFF and IFF mean that they were converted from T/IFF + # Experimental means that it's something else. + # (https://www.fileformat.info/format/sunraster/egff.htm) + + if file_type in (0, 1, 3, 4, 5): + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride))] + elif file_type == 2: + self.tile = [("sun_rle", (0, 0) + self.size, offset, rawmode)] + else: + raise SyntaxError("Unsupported Sun Raster file type") + + +# +# registry + + +Image.register_open(SunImageFile.format, SunImageFile, _accept) + +Image.register_extension(SunImageFile.format, ".ras") diff --git a/minor_project/lib/python3.6/site-packages/PIL/TarIO.py b/minor_project/lib/python3.6/site-packages/PIL/TarIO.py new file mode 100644 index 0000000..d108362 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/TarIO.py @@ -0,0 +1,65 @@ +# +# The Python Imaging Library. +# $Id$ +# +# read files from within a tar file +# +# History: +# 95-06-18 fl Created +# 96-05-28 fl Open files in binary mode +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-96. +# +# See the README file for information on usage and redistribution. +# + +import io + +from . import ContainerIO + + +class TarIO(ContainerIO.ContainerIO): + """A file object that provides read access to a given member of a TAR file.""" + + def __init__(self, tarfile, file): + """ + Create file object. + + :param tarfile: Name of TAR file. + :param file: Name of member file. 
+ """ + self.fh = open(tarfile, "rb") + + while True: + + s = self.fh.read(512) + if len(s) != 512: + raise OSError("unexpected end of tar file") + + name = s[:100].decode("utf-8") + i = name.find("\0") + if i == 0: + raise OSError("cannot find subfile") + if i > 0: + name = name[:i] + + size = int(s[124:135], 8) + + if file == name: + break + + self.fh.seek((size + 511) & (~511), io.SEEK_CUR) + + # Open region + super().__init__(self.fh, self.fh.tell(), size) + + # Context manager support + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + self.fh.close() diff --git a/minor_project/lib/python3.6/site-packages/PIL/TgaImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/TgaImagePlugin.py new file mode 100644 index 0000000..2b936d6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/TgaImagePlugin.py @@ -0,0 +1,248 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TGA file handling +# +# History: +# 95-09-01 fl created (reads 24-bit files only) +# 97-01-04 fl support more TGA versions, including compressed images +# 98-07-04 fl fixed orientation and alpha layer bugs +# 98-09-11 fl fixed orientation for runlength decoder +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +import warnings + +from . import Image, ImageFile, ImagePalette +from ._binary import i16le as i16 +from ._binary import o8 +from ._binary import o16le as o16 + +# +# -------------------------------------------------------------------- +# Read RGA file + + +MODES = { + # map imagetype/depth to rawmode + (1, 8): "P", + (3, 1): "1", + (3, 8): "L", + (3, 16): "LA", + (2, 16): "BGR;5", + (2, 24): "BGR", + (2, 32): "BGRA", +} + + +## +# Image plugin for Targa files. + + +class TgaImageFile(ImageFile.ImageFile): + + format = "TGA" + format_description = "Targa" + + def _open(self): + + # process header + s = self.fp.read(18) + + id_len = s[0] + + colormaptype = s[1] + imagetype = s[2] + + depth = s[16] + + flags = s[17] + + self._size = i16(s, 12), i16(s, 14) + + # validate header fields + if ( + colormaptype not in (0, 1) + or self.size[0] <= 0 + or self.size[1] <= 0 + or depth not in (1, 8, 16, 24, 32) + ): + raise SyntaxError("not a TGA file") + + # image mode + if imagetype in (3, 11): + self.mode = "L" + if depth == 1: + self.mode = "1" # ??? 
+ elif depth == 16: + self.mode = "LA" + elif imagetype in (1, 9): + self.mode = "P" + elif imagetype in (2, 10): + self.mode = "RGB" + if depth == 32: + self.mode = "RGBA" + else: + raise SyntaxError("unknown TGA mode") + + # orientation + orientation = flags & 0x30 + if orientation == 0x20: + orientation = 1 + elif not orientation: + orientation = -1 + else: + raise SyntaxError("unknown TGA orientation") + + self.info["orientation"] = orientation + + if imagetype & 8: + self.info["compression"] = "tga_rle" + + if id_len: + self.info["id_section"] = self.fp.read(id_len) + + if colormaptype: + # read palette + start, size, mapdepth = i16(s, 3), i16(s, 5), i16(s, 7) + if mapdepth == 16: + self.palette = ImagePalette.raw( + "BGR;16", b"\0" * 2 * start + self.fp.read(2 * size) + ) + elif mapdepth == 24: + self.palette = ImagePalette.raw( + "BGR", b"\0" * 3 * start + self.fp.read(3 * size) + ) + elif mapdepth == 32: + self.palette = ImagePalette.raw( + "BGRA", b"\0" * 4 * start + self.fp.read(4 * size) + ) + + # setup tile descriptor + try: + rawmode = MODES[(imagetype & 7, depth)] + if imagetype & 8: + # compressed + self.tile = [ + ( + "tga_rle", + (0, 0) + self.size, + self.fp.tell(), + (rawmode, orientation, depth), + ) + ] + else: + self.tile = [ + ( + "raw", + (0, 0) + self.size, + self.fp.tell(), + (rawmode, 0, orientation), + ) + ] + except KeyError: + pass # cannot decode + + +# +# -------------------------------------------------------------------- +# Write TGA file + + +SAVE = { + "1": ("1", 1, 0, 3), + "L": ("L", 8, 0, 3), + "LA": ("LA", 16, 0, 3), + "P": ("P", 8, 1, 1), + "RGB": ("BGR", 24, 0, 2), + "RGBA": ("BGRA", 32, 0, 2), +} + + +def _save(im, fp, filename): + + try: + rawmode, bits, colormaptype, imagetype = SAVE[im.mode] + except KeyError as e: + raise OSError(f"cannot write mode {im.mode} as TGA") from e + + if "rle" in im.encoderinfo: + rle = im.encoderinfo["rle"] + else: + compression = im.encoderinfo.get("compression", im.info.get("compression")) + rle = compression == "tga_rle" + if rle: + imagetype += 8 + + id_section = im.encoderinfo.get("id_section", im.info.get("id_section", "")) + id_len = len(id_section) + if id_len > 255: + id_len = 255 + id_section = id_section[:255] + warnings.warn("id_section has been trimmed to 255 characters") + + if colormaptype: + colormapfirst, colormaplength, colormapentry = 0, 256, 24 + else: + colormapfirst, colormaplength, colormapentry = 0, 0, 0 + + if im.mode in ("LA", "RGBA"): + flags = 8 + else: + flags = 0 + + orientation = im.encoderinfo.get("orientation", im.info.get("orientation", -1)) + if orientation > 0: + flags = flags | 0x20 + + fp.write( + o8(id_len) + + o8(colormaptype) + + o8(imagetype) + + o16(colormapfirst) + + o16(colormaplength) + + o8(colormapentry) + + o16(0) + + o16(0) + + o16(im.size[0]) + + o16(im.size[1]) + + o8(bits) + + o8(flags) + ) + + if id_section: + fp.write(id_section) + + if colormaptype: + fp.write(im.im.getpalette("RGB", "BGR")) + + if rle: + ImageFile._save( + im, fp, [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))] + ) + else: + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))] + ) + + # write targa version 2 footer + fp.write(b"\000" * 8 + b"TRUEVISION-XFILE." 
+ b"\000") + + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(TgaImageFile.format, TgaImageFile) +Image.register_save(TgaImageFile.format, _save) + +Image.register_extensions(TgaImageFile.format, [".tga", ".icb", ".vda", ".vst"]) + +Image.register_mime(TgaImageFile.format, "image/x-tga") diff --git a/minor_project/lib/python3.6/site-packages/PIL/TiffImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/TiffImagePlugin.py new file mode 100644 index 0000000..0b70ce3 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/TiffImagePlugin.py @@ -0,0 +1,1924 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF file handling +# +# TIFF is a flexible, if somewhat aged, image file format originally +# defined by Aldus. Although TIFF supports a wide variety of pixel +# layouts and compression methods, the name doesn't really stand for +# "thousands of incompatible file formats," it just feels that way. +# +# To read TIFF data from a stream, the stream must be seekable. For +# progressive decoding, make sure to use TIFF files where the tag +# directory is placed first in the file. +# +# History: +# 1995-09-01 fl Created +# 1996-05-04 fl Handle JPEGTABLES tag +# 1996-05-18 fl Fixed COLORMAP support +# 1997-01-05 fl Fixed PREDICTOR support +# 1997-08-27 fl Added support for rational tags (from Perry Stoll) +# 1998-01-10 fl Fixed seek/tell (from Jan Blom) +# 1998-07-15 fl Use private names for internal variables +# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) +# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) +# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) +# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) +# 2001-12-18 fl Added workaround for broken Matrox library +# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart) +# 2003-05-19 fl Check FILLORDER tag +# 2003-09-26 fl Added RGBa support +# 2004-02-24 fl Added DPI support; fixed rational write support +# 2005-02-07 fl Added workaround for broken Corel Draw 10 files +# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) +# +# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# +import io +import itertools +import logging +import os +import struct +import warnings +from collections.abc import MutableMapping +from fractions import Fraction +from numbers import Number, Rational + +from . import Image, ImageFile, ImagePalette, TiffTags +from ._binary import o8 +from .TiffTags import TYPES + +logger = logging.getLogger(__name__) + +# Set these to true to force use of libtiff for reading or writing. 
+READ_LIBTIFF = False +WRITE_LIBTIFF = False +IFD_LEGACY_API = True + +II = b"II" # little-endian (Intel style) +MM = b"MM" # big-endian (Motorola style) + +# +# -------------------------------------------------------------------- +# Read TIFF files + +# a few tag names, just to make the code below a bit more readable +IMAGEWIDTH = 256 +IMAGELENGTH = 257 +BITSPERSAMPLE = 258 +COMPRESSION = 259 +PHOTOMETRIC_INTERPRETATION = 262 +FILLORDER = 266 +IMAGEDESCRIPTION = 270 +STRIPOFFSETS = 273 +SAMPLESPERPIXEL = 277 +ROWSPERSTRIP = 278 +STRIPBYTECOUNTS = 279 +X_RESOLUTION = 282 +Y_RESOLUTION = 283 +PLANAR_CONFIGURATION = 284 +RESOLUTION_UNIT = 296 +TRANSFERFUNCTION = 301 +SOFTWARE = 305 +DATE_TIME = 306 +ARTIST = 315 +PREDICTOR = 317 +COLORMAP = 320 +TILEOFFSETS = 324 +SUBIFD = 330 +EXTRASAMPLES = 338 +SAMPLEFORMAT = 339 +JPEGTABLES = 347 +REFERENCEBLACKWHITE = 532 +COPYRIGHT = 33432 +IPTC_NAA_CHUNK = 33723 # newsphoto properties +PHOTOSHOP_CHUNK = 34377 # photoshop properties +ICCPROFILE = 34675 +EXIFIFD = 34665 +XMP = 700 +JPEGQUALITY = 65537 # pseudo-tag by libtiff + +# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java +IMAGEJ_META_DATA_BYTE_COUNTS = 50838 +IMAGEJ_META_DATA = 50839 + +COMPRESSION_INFO = { + # Compression => pil compression name + 1: "raw", + 2: "tiff_ccitt", + 3: "group3", + 4: "group4", + 5: "tiff_lzw", + 6: "tiff_jpeg", # obsolete + 7: "jpeg", + 8: "tiff_adobe_deflate", + 32771: "tiff_raw_16", # 16-bit padding + 32773: "packbits", + 32809: "tiff_thunderscan", + 32946: "tiff_deflate", + 34676: "tiff_sgilog", + 34677: "tiff_sgilog24", + 34925: "lzma", + 50000: "zstd", + 50001: "webp", +} + +COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()} + +OPEN_INFO = { + # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, + # ExtraSamples) => mode, rawmode + (II, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (II, 1, (1,), 1, (1,), ()): ("1", "1"), + (MM, 1, (1,), 1, (1,), ()): ("1", "1"), + (II, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (II, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (II, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (II, 1, (1,), 1, (8,), ()): ("L", "L"), + (MM, 1, (1,), 1, (8,), ()): ("L", "L"), + (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), + (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), + (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), + (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"), + (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"), + (II, 0, (3,), 1, (32,), ()): 
("F", "F;32F"), + (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"), + (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"), + (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"), + (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"), + (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"), + (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (II, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (II, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (II, 3, (1,), 1, (8,), ()): ("P", "P"), + (MM, 3, (1,), 1, (8,), ()): ("P", "P"), + (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (II, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (II, 5, (1,), 1, (8, 8, 8, 8), ()): 
("CMYK", "CMYK"), + (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), + (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), + (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), + (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), + (II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"), + # JPEG compressed images handled by LibTiff and auto-converted to RGBX + # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel + (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"), + (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"), + (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), + (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), +} + +PREFIXES = [ + b"MM\x00\x2A", # Valid TIFF header with big-endian byte order + b"II\x2A\x00", # Valid TIFF header with little-endian byte order + b"MM\x2A\x00", # Invalid TIFF header, assume big-endian + b"II\x00\x2A", # Invalid TIFF header, assume little-endian +] + + +def _accept(prefix): + return prefix[:4] in PREFIXES + + +def _limit_rational(val, max_val): + inv = abs(val) > 1 + n_d = IFDRational(1 / val if inv else val).limit_rational(max_val) + return n_d[::-1] if inv else n_d + + +def _limit_signed_rational(val, max_val, min_val): + frac = Fraction(val) + n_d = frac.numerator, frac.denominator + + if min(n_d) < min_val: + n_d = _limit_rational(val, abs(min_val)) + + if max(n_d) > max_val: + val = Fraction(*n_d) + n_d = _limit_rational(val, max_val) + + return n_d + + +## +# Wrapper for TIFF IFDs. + +_load_dispatch = {} +_write_dispatch = {} + + +class IFDRational(Rational): + """Implements a rational class where 0/0 is a legal value to match + the in the wild use of exif rationals. + + e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used + """ + + """ If the denominator is 0, store this as a float('nan'), otherwise store + as a fractions.Fraction(). 
Delegate as appropriate + + """ + + __slots__ = ("_numerator", "_denominator", "_val") + + def __init__(self, value, denominator=1): + """ + :param value: either an integer numerator, a + float/rational/other number, or an IFDRational + :param denominator: Optional integer denominator + """ + if isinstance(value, IFDRational): + self._numerator = value.numerator + self._denominator = value.denominator + self._val = value._val + return + + if isinstance(value, Fraction): + self._numerator = value.numerator + self._denominator = value.denominator + else: + self._numerator = value + self._denominator = denominator + + if denominator == 0: + self._val = float("nan") + elif denominator == 1: + self._val = Fraction(value) + else: + self._val = Fraction(value, denominator) + + @property + def numerator(a): + return a._numerator + + @property + def denominator(a): + return a._denominator + + def limit_rational(self, max_denominator): + """ + + :param max_denominator: Integer, the maximum denominator value + :returns: Tuple of (numerator, denominator) + """ + + if self.denominator == 0: + return (self.numerator, self.denominator) + + f = self._val.limit_denominator(max_denominator) + return (f.numerator, f.denominator) + + def __repr__(self): + return str(float(self._val)) + + def __hash__(self): + return self._val.__hash__() + + def __eq__(self, other): + if isinstance(other, IFDRational): + other = other._val + return self._val == other + + def _delegate(op): + def delegate(self, *args): + return getattr(self._val, op)(*args) + + return delegate + + """ a = ['add','radd', 'sub', 'rsub', 'mul', 'rmul', + 'truediv', 'rtruediv', 'floordiv', 'rfloordiv', + 'mod','rmod', 'pow','rpow', 'pos', 'neg', + 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool', + 'ceil', 'floor', 'round'] + print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a)) + """ + + __add__ = _delegate("__add__") + __radd__ = _delegate("__radd__") + __sub__ = _delegate("__sub__") + __rsub__ = _delegate("__rsub__") + __mul__ = _delegate("__mul__") + __rmul__ = _delegate("__rmul__") + __truediv__ = _delegate("__truediv__") + __rtruediv__ = _delegate("__rtruediv__") + __floordiv__ = _delegate("__floordiv__") + __rfloordiv__ = _delegate("__rfloordiv__") + __mod__ = _delegate("__mod__") + __rmod__ = _delegate("__rmod__") + __pow__ = _delegate("__pow__") + __rpow__ = _delegate("__rpow__") + __pos__ = _delegate("__pos__") + __neg__ = _delegate("__neg__") + __abs__ = _delegate("__abs__") + __trunc__ = _delegate("__trunc__") + __lt__ = _delegate("__lt__") + __gt__ = _delegate("__gt__") + __le__ = _delegate("__le__") + __ge__ = _delegate("__ge__") + __bool__ = _delegate("__bool__") + __ceil__ = _delegate("__ceil__") + __floor__ = _delegate("__floor__") + __round__ = _delegate("__round__") + + +class ImageFileDirectory_v2(MutableMapping): + """This class represents a TIFF tag directory. To speed things up, we + don't decode tags unless they're asked for. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v2() + ifd[key] = 'Some Data' + ifd.tagtype[key] = TiffTags.ASCII + print(ifd[key]) + 'Some Data' + + Individual values are returned as the strings or numbers, sequences are + returned as tuples of the values. + + The tiff metadata type of each item is stored in a dictionary of + tag types in + :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types + are read from a tiff file, guessed from the type added, or added + manually. 
+ + Data Structures: + + * self.tagtype = {} + + * Key: numerical tiff tag number + * Value: integer corresponding to the data type from + ~PIL.TiffTags.TYPES` + + .. versionadded:: 3.0.0 + """ + + """ + Documentation: + + 'internal' data structures: + * self._tags_v2 = {} Key: numerical tiff tag number + Value: decoded data, as tuple for multiple values + * self._tagdata = {} Key: numerical tiff tag number + Value: undecoded byte string from file + * self._tags_v1 = {} Key: numerical tiff tag number + Value: decoded data in the v1 format + + Tags will be found in the private attributes self._tagdata, and in + self._tags_v2 once decoded. + + Self.legacy_api is a value for internal use, and shouldn't be + changed from outside code. In cooperation with the + ImageFileDirectory_v1 class, if legacy_api is true, then decoded + tags will be populated into both _tags_v1 and _tags_v2. _Tags_v2 + will be used if this IFD is used in the TIFF save routine. Tags + should be read from tags_v1 if legacy_api == true. + + """ + + def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None): + """Initialize an ImageFileDirectory. + + To construct an ImageFileDirectory from a real file, pass the 8-byte + magic header to the constructor. To only set the endianness, pass it + as the 'prefix' keyword argument. + + :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets + endianness. + :param prefix: Override the endianness of the file. + """ + if ifh[:4] not in PREFIXES: + raise SyntaxError(f"not a TIFF file (header {repr(ifh)} not valid)") + self._prefix = prefix if prefix is not None else ifh[:2] + if self._prefix == MM: + self._endian = ">" + elif self._prefix == II: + self._endian = "<" + else: + raise SyntaxError("not a TIFF IFD") + self.tagtype = {} + """ Dictionary of tag types """ + self.reset() + (self.next,) = self._unpack("L", ifh[4:]) + self._legacy_api = False + + prefix = property(lambda self: self._prefix) + offset = property(lambda self: self._offset) + legacy_api = property(lambda self: self._legacy_api) + + @legacy_api.setter + def legacy_api(self, value): + raise Exception("Not allowing setting of legacy api") + + def reset(self): + self._tags_v1 = {} # will remain empty if legacy_api is false + self._tags_v2 = {} # main tag storage + self._tagdata = {} + self.tagtype = {} # added 2008-06-05 by Florian Hoech + self._next = None + self._offset = None + + def __str__(self): + return str(dict(self)) + + def named(self): + """ + :returns: dict of name|key: value + + Returns the complete tag dictionary, with named tags where possible. 
+ """ + return {TiffTags.lookup(code).name: value for code, value in self.items()} + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v2)) + + def __getitem__(self, tag): + if tag not in self._tags_v2: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + self[tag] = handler(self, data, self.legacy_api) # check type + val = self._tags_v2[tag] + if self.legacy_api and not isinstance(val, (tuple, bytes)): + val = (val,) + return val + + def __contains__(self, tag): + return tag in self._tags_v2 or tag in self._tagdata + + def __setitem__(self, tag, value): + self._setitem(tag, value, self.legacy_api) + + def _setitem(self, tag, value, legacy_api): + basetypes = (Number, bytes, str) + + info = TiffTags.lookup(tag) + values = [value] if isinstance(value, basetypes) else value + + if tag not in self.tagtype: + if info.type: + self.tagtype[tag] = info.type + else: + self.tagtype[tag] = TiffTags.UNDEFINED + if all(isinstance(v, IFDRational) for v in values): + self.tagtype[tag] = ( + TiffTags.RATIONAL + if all(v >= 0 for v in values) + else TiffTags.SIGNED_RATIONAL + ) + elif all(isinstance(v, int) for v in values): + if all(0 <= v < 2 ** 16 for v in values): + self.tagtype[tag] = TiffTags.SHORT + elif all(-(2 ** 15) < v < 2 ** 15 for v in values): + self.tagtype[tag] = TiffTags.SIGNED_SHORT + else: + self.tagtype[tag] = ( + TiffTags.LONG + if all(v >= 0 for v in values) + else TiffTags.SIGNED_LONG + ) + elif all(isinstance(v, float) for v in values): + self.tagtype[tag] = TiffTags.DOUBLE + elif all(isinstance(v, str) for v in values): + self.tagtype[tag] = TiffTags.ASCII + elif all(isinstance(v, bytes) for v in values): + self.tagtype[tag] = TiffTags.BYTE + + if self.tagtype[tag] == TiffTags.UNDEFINED: + values = [ + value.encode("ascii", "replace") if isinstance(value, str) else value + ] + elif self.tagtype[tag] == TiffTags.RATIONAL: + values = [float(v) if isinstance(v, int) else v for v in values] + + is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict) + if not is_ifd: + values = tuple(info.cvt_enum(value) for value in values) + + dest = self._tags_v1 if legacy_api else self._tags_v2 + + # Three branches: + # Spec'd length == 1, Actual length 1, store as element + # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed. + # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple. + # Don't mess with the legacy api, since it's frozen. + if not is_ifd and ( + (info.length == 1) + or self.tagtype[tag] == TiffTags.BYTE + or (info.length is None and len(values) == 1 and not legacy_api) + ): + # Don't mess with the legacy api, since it's frozen. 
+ if legacy_api and self.tagtype[tag] in [ + TiffTags.RATIONAL, + TiffTags.SIGNED_RATIONAL, + ]: # rationals + values = (values,) + try: + (dest[tag],) = values + except ValueError: + # We've got a builtin tag with 1 expected entry + warnings.warn( + f"Metadata Warning, tag {tag} had too many entries: " + f"{len(values)}, expected 1" + ) + dest[tag] = values[0] + + else: + # Spec'd length > 1 or undefined + # Unspec'd, and length > 1 + dest[tag] = values + + def __delitem__(self, tag): + self._tags_v2.pop(tag, None) + self._tags_v1.pop(tag, None) + self._tagdata.pop(tag, None) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v2)) + + def _unpack(self, fmt, data): + return struct.unpack(self._endian + fmt, data) + + def _pack(self, fmt, *values): + return struct.pack(self._endian + fmt, *values) + + def _register_loader(idx, size): + def decorator(func): + from .TiffTags import TYPES + + if func.__name__.startswith("load_"): + TYPES[idx] = func.__name__[5:].replace("_", " ") + _load_dispatch[idx] = size, func # noqa: F821 + return func + + return decorator + + def _register_writer(idx): + def decorator(func): + _write_dispatch[idx] = func # noqa: F821 + return func + + return decorator + + def _register_basic(idx_fmt_name): + from .TiffTags import TYPES + + idx, fmt, name = idx_fmt_name + TYPES[idx] = name + size = struct.calcsize("=" + fmt) + _load_dispatch[idx] = ( # noqa: F821 + size, + lambda self, data, legacy_api=True: ( + self._unpack("{}{}".format(len(data) // size, fmt), data) + ), + ) + _write_dispatch[idx] = lambda self, *values: ( # noqa: F821 + b"".join(self._pack(fmt, value) for value in values) + ) + + list( + map( + _register_basic, + [ + (TiffTags.SHORT, "H", "short"), + (TiffTags.LONG, "L", "long"), + (TiffTags.SIGNED_BYTE, "b", "signed byte"), + (TiffTags.SIGNED_SHORT, "h", "signed short"), + (TiffTags.SIGNED_LONG, "l", "signed long"), + (TiffTags.FLOAT, "f", "float"), + (TiffTags.DOUBLE, "d", "double"), + (TiffTags.IFD, "L", "long"), + ], + ) + ) + + @_register_loader(1, 1) # Basic type, except for the legacy API. + def load_byte(self, data, legacy_api=True): + return data + + @_register_writer(1) # Basic type, except for the legacy API. 
+ def write_byte(self, data): + return data + + @_register_loader(2, 1) + def load_string(self, data, legacy_api=True): + if data.endswith(b"\0"): + data = data[:-1] + return data.decode("latin-1", "replace") + + @_register_writer(2) + def write_string(self, value): + # remerge of https://github.com/python-pillow/Pillow/pull/1416 + return b"" + value.encode("ascii", "replace") + b"\0" + + @_register_loader(5, 8) + def load_rational(self, data, legacy_api=True): + vals = self._unpack("{}L".format(len(data) // 4), data) + + def combine(a, b): + return (a, b) if legacy_api else IFDRational(a, b) + + return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(5) + def write_rational(self, *values): + return b"".join( + self._pack("2L", *_limit_rational(frac, 2 ** 32 - 1)) for frac in values + ) + + @_register_loader(7, 1) + def load_undefined(self, data, legacy_api=True): + return data + + @_register_writer(7) + def write_undefined(self, value): + return value + + @_register_loader(10, 8) + def load_signed_rational(self, data, legacy_api=True): + vals = self._unpack("{}l".format(len(data) // 4), data) + + def combine(a, b): + return (a, b) if legacy_api else IFDRational(a, b) + + return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(10) + def write_signed_rational(self, *values): + return b"".join( + self._pack("2l", *_limit_signed_rational(frac, 2 ** 31 - 1, -(2 ** 31))) + for frac in values + ) + + def _ensure_read(self, fp, size): + ret = fp.read(size) + if len(ret) != size: + raise OSError( + "Corrupt EXIF data. " + f"Expecting to read {size} bytes but only got {len(ret)}. " + ) + return ret + + def load(self, fp): + + self.reset() + self._offset = fp.tell() + + try: + for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]): + tag, typ, count, data = self._unpack("HHL4s", self._ensure_read(fp, 12)) + + tagname = TiffTags.lookup(tag).name + typname = TYPES.get(typ, "unknown") + msg = f"tag: {tagname} ({tag}) - type: {typname} ({typ})" + + try: + unit_size, handler = self._load_dispatch[typ] + except KeyError: + logger.debug(msg + f" - unsupported type {typ}") + continue # ignore unsupported type + size = count * unit_size + if size > 4: + here = fp.tell() + (offset,) = self._unpack("L", data) + msg += f" Tag Location: {here} - Data Location: {offset}" + fp.seek(offset) + data = ImageFile._safe_read(fp, size) + fp.seek(here) + else: + data = data[:size] + + if len(data) != size: + warnings.warn( + "Possibly corrupt EXIF data. " + f"Expecting to read {size} bytes but only got {len(data)}." + f" Skipping tag {tag}" + ) + logger.debug(msg) + continue + + if not data: + logger.debug(msg) + continue + + self._tagdata[tag] = data + self.tagtype[tag] = typ + + msg += " - value: " + ( + "" % size if size > 32 else repr(data) + ) + logger.debug(msg) + + (self.next,) = self._unpack("L", self._ensure_read(fp, 4)) + except OSError as msg: + warnings.warn(str(msg)) + return + + def tobytes(self, offset=0): + # FIXME What about tagdata? 
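+ # Serialize this IFD in three passes: convert each tag to its binary form, emit the 12-byte entry table, then append any values too large to fit inline, padded to word boundaries.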
+ result = self._pack("H", len(self._tags_v2)) + + entries = [] + offset = offset + len(result) + len(self._tags_v2) * 12 + 4 + stripoffsets = None + + # pass 1: convert tags to binary format + # always write tags in ascending order + for tag, value in sorted(self._tags_v2.items()): + if tag == STRIPOFFSETS: + stripoffsets = len(entries) + typ = self.tagtype.get(tag) + logger.debug(f"Tag {tag}, Type: {typ}, Value: {repr(value)}") + is_ifd = typ == TiffTags.LONG and isinstance(value, dict) + if is_ifd: + if self._endian == "<": + ifh = b"II\x2A\x00\x08\x00\x00\x00" + else: + ifh = b"MM\x00\x2A\x00\x00\x00\x08" + ifd = ImageFileDirectory_v2(ifh) + for ifd_tag, ifd_value in self._tags_v2[tag].items(): + ifd[ifd_tag] = ifd_value + data = ifd.tobytes(offset) + else: + values = value if isinstance(value, tuple) else (value,) + data = self._write_dispatch[typ](self, *values) + + tagname = TiffTags.lookup(tag).name + typname = "ifd" if is_ifd else TYPES.get(typ, "unknown") + msg = f"save: {tagname} ({tag}) - type: {typname} ({typ})" + msg += " - value: " + ( + "" % len(data) if len(data) >= 16 else str(values) + ) + logger.debug(msg) + + # count is sum of lengths for string and arbitrary data + if is_ifd: + count = 1 + elif typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]: + count = len(data) + else: + count = len(values) + # figure out if data fits into the entry + if len(data) <= 4: + entries.append((tag, typ, count, data.ljust(4, b"\0"), b"")) + else: + entries.append((tag, typ, count, self._pack("L", offset), data)) + offset += (len(data) + 1) // 2 * 2 # pad to word + + # update strip offset data to point beyond auxiliary data + if stripoffsets is not None: + tag, typ, count, value, data = entries[stripoffsets] + if data: + raise NotImplementedError("multistrip support not yet implemented") + value = self._pack("L", self._unpack("L", value)[0] + offset) + entries[stripoffsets] = tag, typ, count, value, data + + # pass 2: write entries to file + for tag, typ, count, value, data in entries: + logger.debug(f"{tag} {typ} {count} {repr(value)} {repr(data)}") + result += self._pack("HHL4s", tag, typ, count, value) + + # -- overwrite here for multi-page -- + result += b"\0\0\0\0" # end of entries + + # pass 3: write auxiliary data to file + for tag, typ, count, value, data in entries: + result += data + if len(data) & 1: + result += b"\0" + + return result + + def save(self, fp): + + if fp.tell() == 0: # skip TIFF header on subsequent pages + # tiff header -- PIL always starts the first IFD at offset 8 + fp.write(self._prefix + self._pack("HL", 42, 8)) + + offset = fp.tell() + result = self.tobytes(offset) + fp.write(result) + return offset + len(result) + + +ImageFileDirectory_v2._load_dispatch = _load_dispatch +ImageFileDirectory_v2._write_dispatch = _write_dispatch +for idx, name in TYPES.items(): + name = name.replace(" ", "_") + setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1]) + setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx]) +del _load_dispatch, _write_dispatch, idx, name + + +# Legacy ImageFileDirectory support. +class ImageFileDirectory_v1(ImageFileDirectory_v2): + """This class represents the **legacy** interface to a TIFF tag directory. 
+ + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v1() + ifd[key] = 'Some Data' + ifd.tagtype[key] = TiffTags.ASCII + print(ifd[key]) + ('Some Data',) + + Also contains a dictionary of tag types as read from the tiff image file, + :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`. + + Values are returned as a tuple. + + .. deprecated:: 3.0.0 + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._legacy_api = True + + tags = property(lambda self: self._tags_v1) + tagdata = property(lambda self: self._tagdata) + + # defined in ImageFileDirectory_v2 + tagtype: dict + """Dictionary of tag types""" + + @classmethod + def from_v2(cls, original): + """Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + + """ + + ifd = cls(prefix=original.prefix) + ifd._tagdata = original._tagdata + ifd.tagtype = original.tagtype + ifd.next = original.next # an indicator for multipage tiffs + return ifd + + def to_v2(self): + """Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + + """ + + ifd = ImageFileDirectory_v2(prefix=self.prefix) + ifd._tagdata = dict(self._tagdata) + ifd.tagtype = dict(self.tagtype) + ifd._tags_v2 = dict(self._tags_v2) + return ifd + + def __contains__(self, tag): + return tag in self._tags_v1 or tag in self._tagdata + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v1)) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v1)) + + def __setitem__(self, tag, value): + for legacy_api in (False, True): + self._setitem(tag, value, legacy_api) + + def __getitem__(self, tag): + if tag not in self._tags_v1: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + for legacy in (False, True): + self._setitem(tag, handler(self, data, legacy), legacy) + val = self._tags_v1[tag] + if not isinstance(val, (tuple, bytes)): + val = (val,) + return val + + +# undone -- switch this pointer when IFD_LEGACY_API == False +ImageFileDirectory = ImageFileDirectory_v1 + + +## +# Image plugin for TIFF files. 
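+# Handles single- and multi-frame files (via seek()/tell()); any compression other than "raw" is decoded through libtiff in _load_libtiff().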
+ + +class TiffImageFile(ImageFile.ImageFile): + + format = "TIFF" + format_description = "Adobe TIFF" + _close_exclusive_fp_after_loading = False + + def __init__(self, fp=None, filename=None): + self.tag_v2 = None + """ Image file directory (tag dictionary) """ + + self.tag = None + """ Legacy tag entries """ + + super().__init__(fp, filename) + + def _open(self): + """Open the first image in a TIFF file""" + + # Header + ifh = self.fp.read(8) + + self.tag_v2 = ImageFileDirectory_v2(ifh) + + # legacy IFD entries will be filled in later + self.ifd = None + + # setup frame pointers + self.__first = self.__next = self.tag_v2.next + self.__frame = -1 + self.__fp = self.fp + self._frame_pos = [] + self._n_frames = None + + logger.debug("*** TiffImageFile._open ***") + logger.debug(f"- __first: {self.__first}") + logger.debug(f"- ifh: {repr(ifh)}") # Use repr to avoid str(bytes) + + # and load the first frame + self._seek(0) + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + self._seek(len(self._frame_pos)) + while self._n_frames is None: + self._seek(self.tell() + 1) + self.seek(current) + return self._n_frames + + def seek(self, frame): + """Select a given frame as current image""" + if not self._seek_check(frame): + return + self._seek(frame) + # Create a new core image object on second and + # subsequent frames in the image. Image may be + # different size/mode. + Image._decompression_bomb_check(self.size) + self.im = Image.core.new(self.mode, self.size) + + def _seek(self, frame): + self.fp = self.__fp + while len(self._frame_pos) <= frame: + if not self.__next: + raise EOFError("no more images in TIFF file") + logger.debug( + f"Seeking to frame {frame}, on frame {self.__frame}, " + f"__next {self.__next}, location: {self.fp.tell()}" + ) + # reset buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + self.fp.seek(self.__next) + self._frame_pos.append(self.__next) + logger.debug("Loading tags, location: %s" % self.fp.tell()) + self.tag_v2.load(self.fp) + self.__next = self.tag_v2.next + if self.__next == 0: + self._n_frames = frame + 1 + if len(self._frame_pos) == 1: + self.is_animated = self.__next != 0 + self.__frame += 1 + self.fp.seek(self._frame_pos[frame]) + self.tag_v2.load(self.fp) + # fill the legacy tag/ifd entries + self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) + self.__frame = frame + self._setup() + + def tell(self): + """Return the current frame number""" + return self.__frame + + def load(self): + if self.tile and self.use_load_libtiff: + return self._load_libtiff() + return super().load() + + def load_end(self): + if self._tile_orientation: + method = { + 2: Image.FLIP_LEFT_RIGHT, + 3: Image.ROTATE_180, + 4: Image.FLIP_TOP_BOTTOM, + 5: Image.TRANSPOSE, + 6: Image.ROTATE_270, + 7: Image.TRANSVERSE, + 8: Image.ROTATE_90, + }.get(self._tile_orientation) + if method is not None: + self.im = self.im.transpose(method) + self._size = self.im.size + + # allow closing if we're on the first frame, there's no next + # This is the ImageFile.load path only, libtiff specific below. 
+ if not self.is_animated: + self._close_exclusive_fp_after_loading = True + + def _load_libtiff(self): + """Overload method triggered when we detect a compressed tiff + Calls out to libtiff""" + + Image.Image.load(self) + + self.load_prepare() + + if not len(self.tile) == 1: + raise OSError("Not exactly one tile") + + # (self._compression, (extents tuple), + # 0, (rawmode, self._compression, fp)) + extents = self.tile[0][1] + args = list(self.tile[0][3]) + + # To be nice on memory footprint, if there's a + # file descriptor, use that instead of reading + # into a string in python. + # libtiff closes the file descriptor, so pass in a dup. + try: + fp = hasattr(self.fp, "fileno") and os.dup(self.fp.fileno()) + # flush the file descriptor, prevents error on pypy 2.4+ + # should also eliminate the need for fp.tell + # in _seek + if hasattr(self.fp, "flush"): + self.fp.flush() + except OSError: + # io.BytesIO have a fileno, but returns an OSError if + # it doesn't use a file descriptor. + fp = False + + if fp: + args[2] = fp + + decoder = Image._getdecoder( + self.mode, "libtiff", tuple(args), self.decoderconfig + ) + try: + decoder.setimage(self.im, extents) + except ValueError as e: + raise OSError("Couldn't set the image") from e + + close_self_fp = self._exclusive_fp and not self.is_animated + if hasattr(self.fp, "getvalue"): + # We've got a stringio like thing passed in. Yay for all in memory. + # The decoder needs the entire file in one shot, so there's not + # a lot we can do here other than give it the entire file. + # unless we could do something like get the address of the + # underlying string for stringio. + # + # Rearranging for supporting byteio items, since they have a fileno + # that returns an OSError if there's no underlying fp. Easier to + # deal with here by reordering. + logger.debug("have getvalue. just sending in a string from getvalue") + n, err = decoder.decode(self.fp.getvalue()) + elif fp: + # we've got a actual file on disk, pass in the fp. + logger.debug("have fileno, calling fileno version of the decoder.") + if not close_self_fp: + self.fp.seek(0) + # 4 bytes, otherwise the trace might error out + n, err = decoder.decode(b"fpfp") + else: + # we have something else. + logger.debug("don't have fileno or getvalue. just reading") + self.fp.seek(0) + # UNDONE -- so much for that buffer size thing. 
+ n, err = decoder.decode(self.fp.read()) + + self.tile = [] + self.readonly = 0 + + self.load_end() + + # libtiff closed the fp in a, we need to close self.fp, if possible + if close_self_fp: + self.fp.close() + self.fp = None # might be shared + + if err < 0: + raise OSError(err) + + return Image.Image.load(self) + + def _setup(self): + """Setup this image object based on current tags""" + + if 0xBC01 in self.tag_v2: + raise OSError("Windows Media Photo files not yet supported") + + # extract relevant tags + self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] + self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1) + + # photometric is a required tag, but not everyone is reading + # the specification + photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) + + # old style jpeg compression images most certainly are YCbCr + if self._compression == "tiff_jpeg": + photo = 6 + + fillorder = self.tag_v2.get(FILLORDER, 1) + + logger.debug("*** Summary ***") + logger.debug(f"- compression: {self._compression}") + logger.debug(f"- photometric_interpretation: {photo}") + logger.debug(f"- planar_configuration: {self._planar_configuration}") + logger.debug(f"- fill_order: {fillorder}") + logger.debug(f"- YCbCr subsampling: {self.tag.get(530)}") + + # size + xsize = int(self.tag_v2.get(IMAGEWIDTH)) + ysize = int(self.tag_v2.get(IMAGELENGTH)) + self._size = xsize, ysize + + logger.debug(f"- size: {self.size}") + + sampleFormat = self.tag_v2.get(SAMPLEFORMAT, (1,)) + if len(sampleFormat) > 1 and max(sampleFormat) == min(sampleFormat) == 1: + # SAMPLEFORMAT is properly per band, so an RGB image will + # be (1,1,1). But, we don't support per band pixel types, + # and anything more than one band is a uint8. So, just + # take the first element. Revisit this if adding support + # for more exotic images. + sampleFormat = (1,) + + bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,)) + extra_tuple = self.tag_v2.get(EXTRASAMPLES, ()) + if photo in (2, 6, 8): # RGB, YCbCr, LAB + bps_count = 3 + elif photo == 5: # CMYK + bps_count = 4 + else: + bps_count = 1 + bps_count += len(extra_tuple) + # Some files have only one value in bps_tuple, + # while should have more. Fix it + if bps_count > len(bps_tuple) and len(bps_tuple) == 1: + bps_tuple = bps_tuple * bps_count + + # mode: check photometric interpretation and bits per pixel + key = ( + self.tag_v2.prefix, + photo, + sampleFormat, + fillorder, + bps_tuple, + extra_tuple, + ) + logger.debug(f"format key: {key}") + try: + self.mode, rawmode = OPEN_INFO[key] + except KeyError as e: + logger.debug("- unsupported format") + raise SyntaxError("unknown pixel mode") from e + + logger.debug(f"- raw mode: {rawmode}") + logger.debug(f"- pil mode: {self.mode}") + + self.info["compression"] = self._compression + + xres = self.tag_v2.get(X_RESOLUTION, 1) + yres = self.tag_v2.get(Y_RESOLUTION, 1) + + if xres and yres: + resunit = self.tag_v2.get(RESOLUTION_UNIT) + if resunit == 2: # dots per inch + self.info["dpi"] = int(xres + 0.5), int(yres + 0.5) + elif resunit == 3: # dots per centimeter. 
convert to dpi + self.info["dpi"] = int(xres * 2.54 + 0.5), int(yres * 2.54 + 0.5) + elif resunit is None: # used to default to 1, but now 2) + self.info["dpi"] = int(xres + 0.5), int(yres + 0.5) + # For backward compatibility, + # we also preserve the old behavior + self.info["resolution"] = xres, yres + else: # No absolute unit of measurement + self.info["resolution"] = xres, yres + + # build tile descriptors + x = y = layer = 0 + self.tile = [] + self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw" + if self.use_load_libtiff: + # Decoder expects entire file as one tile. + # There's a buffer size limit in load (64k) + # so large g4 images will fail if we use that + # function. + # + # Setup the one tile for the whole image, then + # use the _load_libtiff function. + + # libtiff handles the fillmode for us, so 1;IR should + # actually be 1;I. Including the R double reverses the + # bits, so stripes of the image are reversed. See + # https://github.com/python-pillow/Pillow/issues/279 + if fillorder == 2: + # Replace fillorder with fillorder=1 + key = key[:3] + (1,) + key[4:] + logger.debug(f"format key: {key}") + # this should always work, since all the + # fillorder==2 modes have a corresponding + # fillorder=1 mode + self.mode, rawmode = OPEN_INFO[key] + # libtiff always returns the bytes in native order. + # we're expecting image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if rawmode == "I;16": + rawmode = "I;16N" + if ";16B" in rawmode: + rawmode = rawmode.replace(";16B", ";16N") + if ";16L" in rawmode: + rawmode = rawmode.replace(";16L", ";16N") + + # Offset in the tile tuple is 0, we go from 0,0 to + # w,h, and we only do this once -- eds + a = (rawmode, self._compression, False, self.tag_v2.offset) + self.tile.append(("libtiff", (0, 0, xsize, ysize), 0, a)) + + elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2: + # striped image + if STRIPOFFSETS in self.tag_v2: + offsets = self.tag_v2[STRIPOFFSETS] + h = self.tag_v2.get(ROWSPERSTRIP, ysize) + w = self.size[0] + else: + # tiled image + offsets = self.tag_v2[TILEOFFSETS] + w = self.tag_v2.get(322) + h = self.tag_v2.get(323) + + for offset in offsets: + if x + w > xsize: + stride = w * sum(bps_tuple) / 8 # bytes per line + else: + stride = 0 + + tile_rawmode = rawmode + if self._planar_configuration == 2: + # each band on it's own layer + tile_rawmode = rawmode[layer] + # adjust stride width accordingly + stride /= bps_count + + a = (tile_rawmode, int(stride), 1) + self.tile.append( + ( + self._compression, + (x, y, min(x + w, xsize), min(y + h, ysize)), + offset, + a, + ) + ) + x = x + w + if x >= self.size[0]: + x, y = 0, y + h + if y >= self.size[1]: + x = y = 0 + layer += 1 + else: + logger.debug("- unsupported data organization") + raise SyntaxError("unknown data organization") + + # Fix up info. 
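+ # Finally, copy the ICC profile into info, build the palette for P/PA images, and record the Orientation tag (0x0112) so load_end() can transpose the image accordingly.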
+ if ICCPROFILE in self.tag_v2: + self.info["icc_profile"] = self.tag_v2[ICCPROFILE] + + # fixup palette descriptor + + if self.mode in ["P", "PA"]: + palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]] + self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) + + self._tile_orientation = self.tag_v2.get(0x0112) + + def _close__fp(self): + try: + if self.__fp != self.fp: + self.__fp.close() + except AttributeError: + pass + finally: + self.__fp = None + + +# +# -------------------------------------------------------------------- +# Write TIFF files + +# little endian is default except for image modes with +# explicit big endian byte-order + +SAVE_INFO = { + # mode => rawmode, byteorder, photometrics, + # sampleformat, bitspersample, extra + "1": ("1", II, 1, 1, (1,), None), + "L": ("L", II, 1, 1, (8,), None), + "LA": ("LA", II, 1, 1, (8, 8), 2), + "P": ("P", II, 3, 1, (8,), None), + "PA": ("PA", II, 3, 1, (8, 8), 2), + "I": ("I;32S", II, 1, 2, (32,), None), + "I;16": ("I;16", II, 1, 1, (16,), None), + "I;16S": ("I;16S", II, 1, 2, (16,), None), + "F": ("F;32F", II, 1, 3, (32,), None), + "RGB": ("RGB", II, 2, 1, (8, 8, 8), None), + "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0), + "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2), + "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None), + "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None), + "LAB": ("LAB", II, 8, 1, (8, 8, 8), None), + "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), + "I;16B": ("I;16B", MM, 1, 1, (16,), None), + "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), + "F;32BF": ("F;32BF", MM, 1, 3, (32,), None), +} + + +def _save(im, fp, filename): + + try: + rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] + except KeyError as e: + raise OSError(f"cannot write mode {im.mode} as TIFF") from e + + ifd = ImageFileDirectory_v2(prefix=prefix) + + compression = im.encoderinfo.get("compression", im.info.get("compression")) + if compression is None: + compression = "raw" + elif compression == "tiff_jpeg": + # OJPEG is obsolete, so use new-style JPEG compression instead + compression = "jpeg" + + libtiff = WRITE_LIBTIFF or compression != "raw" + + # required for color libtiff images + ifd[PLANAR_CONFIGURATION] = getattr(im, "_planar_configuration", 1) + + ifd[IMAGEWIDTH] = im.size[0] + ifd[IMAGELENGTH] = im.size[1] + + # write any arbitrary tags passed in as an ImageFileDirectory + info = im.encoderinfo.get("tiffinfo", {}) + logger.debug("Tiffinfo Keys: %s" % list(info)) + if isinstance(info, ImageFileDirectory_v1): + info = info.to_v2() + for key in info: + ifd[key] = info.get(key) + try: + ifd.tagtype[key] = info.tagtype[key] + except Exception: + pass # might not be an IFD. 
Might not have populated type + + # additions written by Greg Couch, gregc@cgl.ucsf.edu + # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com + if hasattr(im, "tag_v2"): + # preserve tags from original TIFF image file + for key in ( + RESOLUTION_UNIT, + X_RESOLUTION, + Y_RESOLUTION, + IPTC_NAA_CHUNK, + PHOTOSHOP_CHUNK, + XMP, + ): + if key in im.tag_v2: + ifd[key] = im.tag_v2[key] + ifd.tagtype[key] = im.tag_v2.tagtype[key] + + # preserve ICC profile (should also work when saving other formats + # which support profiles as TIFF) -- 2008-06-06 Florian Hoech + if "icc_profile" in im.info: + ifd[ICCPROFILE] = im.info["icc_profile"] + + for key, name in [ + (IMAGEDESCRIPTION, "description"), + (X_RESOLUTION, "resolution"), + (Y_RESOLUTION, "resolution"), + (X_RESOLUTION, "x_resolution"), + (Y_RESOLUTION, "y_resolution"), + (RESOLUTION_UNIT, "resolution_unit"), + (SOFTWARE, "software"), + (DATE_TIME, "date_time"), + (ARTIST, "artist"), + (COPYRIGHT, "copyright"), + ]: + if name in im.encoderinfo: + ifd[key] = im.encoderinfo[name] + + dpi = im.encoderinfo.get("dpi") + if dpi: + ifd[RESOLUTION_UNIT] = 2 + ifd[X_RESOLUTION] = int(dpi[0] + 0.5) + ifd[Y_RESOLUTION] = int(dpi[1] + 0.5) + + if bits != (1,): + ifd[BITSPERSAMPLE] = bits + if len(bits) != 1: + ifd[SAMPLESPERPIXEL] = len(bits) + if extra is not None: + ifd[EXTRASAMPLES] = extra + if format != 1: + ifd[SAMPLEFORMAT] = format + + ifd[PHOTOMETRIC_INTERPRETATION] = photo + + if im.mode in ["P", "PA"]: + lut = im.im.getpalette("RGB", "RGB;L") + ifd[COLORMAP] = tuple(v * 256 for v in lut) + # data orientation + stride = len(bits) * ((im.size[0] * bits[0] + 7) // 8) + ifd[ROWSPERSTRIP] = im.size[1] + strip_byte_counts = stride * im.size[1] + if strip_byte_counts >= 2 ** 16: + ifd.tagtype[STRIPBYTECOUNTS] = TiffTags.LONG + ifd[STRIPBYTECOUNTS] = strip_byte_counts + ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer + # no compression by default: + ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) + + if libtiff: + if "quality" in im.encoderinfo: + quality = im.encoderinfo["quality"] + if not isinstance(quality, int) or quality < 0 or quality > 100: + raise ValueError("Invalid quality setting") + if compression != "jpeg": + raise ValueError( + "quality setting only supported for 'jpeg' compression" + ) + ifd[JPEGQUALITY] = quality + + logger.debug("Saving using libtiff encoder") + logger.debug("Items: %s" % sorted(ifd.items())) + _fp = 0 + if hasattr(fp, "fileno"): + try: + fp.seek(0) + _fp = os.dup(fp.fileno()) + except io.UnsupportedOperation: + pass + + # optional types for non core tags + types = {} + # SAMPLEFORMAT is determined by the image format and should not be copied + # from legacy_ifd. + # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library + # based on the data in the strip. + # The other tags expect arrays with a certain length (fixed or depending on + # BITSPERSAMPLE, etc), passing arrays with a different length will result in + # segfaults. Block these tags until we add extra validation. + # SUBIFD may also cause a segfault. + blocklist = [ + REFERENCEBLACKWHITE, + SAMPLEFORMAT, + STRIPBYTECOUNTS, + STRIPOFFSETS, + TRANSFERFUNCTION, + SUBIFD, + ] + + atts = {} + # bits per sample is a single short in the tiff directory, not a list. + atts[BITSPERSAMPLE] = bits[0] + # Merge the ones that we have with (optional) more bits from + # the original file, e.g x,y resolution so that we can + # save(load('')) == original file. 
+ legacy_ifd = {} + if hasattr(im, "tag"): + legacy_ifd = im.tag.to_v2() + for tag, value in itertools.chain( + ifd.items(), getattr(im, "tag_v2", {}).items(), legacy_ifd.items() + ): + # Libtiff can only process certain core items without adding + # them to the custom dictionary. + # Custom items are supported for int, float, unicode, string and byte + # values. Other types and tuples require a tagtype. + if tag not in TiffTags.LIBTIFF_CORE: + if not Image.core.libtiff_support_custom_tags: + continue + + if tag in ifd.tagtype: + types[tag] = ifd.tagtype[tag] + elif not (isinstance(value, (int, float, str, bytes))): + continue + else: + type = TiffTags.lookup(tag).type + if type: + types[tag] = type + if tag not in atts and tag not in blocklist: + if isinstance(value, str): + atts[tag] = value.encode("ascii", "replace") + b"\0" + elif isinstance(value, IFDRational): + atts[tag] = float(value) + else: + atts[tag] = value + + logger.debug("Converted items: %s" % sorted(atts.items())) + + # libtiff always expects the bytes in native order. + # we're storing image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if im.mode in ("I;16B", "I;16"): + rawmode = "I;16N" + + # Pass tags as sorted list so that the tags are set in a fixed order. + # This is required by libtiff for some tags. For example, the JPEGQUALITY + # pseudo tag requires that the COMPRESS tag was already set. + tags = list(atts.items()) + tags.sort() + a = (rawmode, compression, _fp, filename, tags, types) + e = Image._getencoder(im.mode, "libtiff", a, im.encoderconfig) + e.setimage(im.im, (0, 0) + im.size) + while True: + # undone, change to self.decodermaxblock: + l, s, d = e.encode(16 * 1024) + if not _fp: + fp.write(d) + if s: + break + if s < 0: + raise OSError(f"encoder error {s} when writing image file") + + else: + offset = ifd.save(fp) + + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))] + ) + + # -- helper for multi-page save -- + if "_debug_multipage" in im.encoderinfo: + # just to access o32 and o16 (using correct byte order) + im._debug_multipage = ifd + + +class AppendingTiffWriter: + fieldSizes = [ + 0, # None + 1, # byte + 1, # ascii + 2, # short + 4, # long + 8, # rational + 1, # sbyte + 1, # undefined + 2, # sshort + 4, # slong + 8, # srational + 4, # float + 8, # double + ] + + # StripOffsets = 273 + # FreeOffsets = 288 + # TileOffsets = 324 + # JPEGQTables = 519 + # JPEGDCTables = 520 + # JPEGACTables = 521 + Tags = {273, 288, 324, 519, 520, 521} + + def __init__(self, fn, new=False): + if hasattr(fn, "read"): + self.f = fn + self.close_fp = False + else: + self.name = fn + self.close_fp = True + try: + self.f = open(fn, "w+b" if new else "r+b") + except OSError: + self.f = open(fn, "w+b") + self.beginning = self.f.tell() + self.setup() + + def setup(self): + # Reset everything. 
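+ # Read the 4-byte magic (if the file already has data) to pick the byte order, then skip the existing IFD chain and seek to the end so the next frame is appended there.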
+ self.f.seek(self.beginning, os.SEEK_SET) + + self.whereToWriteNewIFDOffset = None + self.offsetOfNewPage = 0 + + self.IIMM = IIMM = self.f.read(4) + if not IIMM: + # empty file - first page + self.isFirst = True + return + + self.isFirst = False + if IIMM == b"II\x2a\x00": + self.setEndian("<") + elif IIMM == b"MM\x00\x2a": + self.setEndian(">") + else: + raise RuntimeError("Invalid TIFF file header") + + self.skipIFDs() + self.goToEnd() + + def finalize(self): + if self.isFirst: + return + + # fix offsets + self.f.seek(self.offsetOfNewPage) + + IIMM = self.f.read(4) + if not IIMM: + # raise RuntimeError("nothing written into new page") + # Make it easy to finish a frame without committing to a new one. + return + + if IIMM != self.IIMM: + raise RuntimeError("IIMM of new page doesn't match IIMM of first page") + + IFDoffset = self.readLong() + IFDoffset += self.offsetOfNewPage + self.f.seek(self.whereToWriteNewIFDOffset) + self.writeLong(IFDoffset) + self.f.seek(IFDoffset) + self.fixIFD() + + def newFrame(self): + # Call this to finish a frame. + self.finalize() + self.setup() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + if self.close_fp: + self.close() + return False + + def tell(self): + return self.f.tell() - self.offsetOfNewPage + + def seek(self, offset, whence=io.SEEK_SET): + if whence == os.SEEK_SET: + offset += self.offsetOfNewPage + + self.f.seek(offset, whence) + return self.tell() + + def goToEnd(self): + self.f.seek(0, os.SEEK_END) + pos = self.f.tell() + + # pad to 16 byte boundary + padBytes = 16 - pos % 16 + if 0 < padBytes < 16: + self.f.write(bytes(padBytes)) + self.offsetOfNewPage = self.f.tell() + + def setEndian(self, endian): + self.endian = endian + self.longFmt = self.endian + "L" + self.shortFmt = self.endian + "H" + self.tagFormat = self.endian + "HHL" + + def skipIFDs(self): + while True: + IFDoffset = self.readLong() + if IFDoffset == 0: + self.whereToWriteNewIFDOffset = self.f.tell() - 4 + break + + self.f.seek(IFDoffset) + numTags = self.readShort() + self.f.seek(numTags * 12, os.SEEK_CUR) + + def write(self, data): + return self.f.write(data) + + def readShort(self): + (value,) = struct.unpack(self.shortFmt, self.f.read(2)) + return value + + def readLong(self): + (value,) = struct.unpack(self.longFmt, self.f.read(4)) + return value + + def rewriteLastShortToLong(self, value): + self.f.seek(-2, os.SEEK_CUR) + bytesWritten = self.f.write(struct.pack(self.longFmt, value)) + if bytesWritten is not None and bytesWritten != 4: + raise RuntimeError(f"wrote only {bytesWritten} bytes but wanted 4") + + def rewriteLastShort(self, value): + self.f.seek(-2, os.SEEK_CUR) + bytesWritten = self.f.write(struct.pack(self.shortFmt, value)) + if bytesWritten is not None and bytesWritten != 2: + raise RuntimeError(f"wrote only {bytesWritten} bytes but wanted 2") + + def rewriteLastLong(self, value): + self.f.seek(-4, os.SEEK_CUR) + bytesWritten = self.f.write(struct.pack(self.longFmt, value)) + if bytesWritten is not None and bytesWritten != 4: + raise RuntimeError(f"wrote only {bytesWritten} bytes but wanted 4") + + def writeShort(self, value): + bytesWritten = self.f.write(struct.pack(self.shortFmt, value)) + if bytesWritten is not None and bytesWritten != 2: + raise RuntimeError(f"wrote only {bytesWritten} bytes but wanted 2") + + def writeLong(self, value): + bytesWritten = self.f.write(struct.pack(self.longFmt, value)) + if bytesWritten is not None and bytesWritten != 4: + raise RuntimeError(f"wrote only {bytesWritten} 
bytes but wanted 4") + + def close(self): + self.finalize() + self.f.close() + + def fixIFD(self): + numTags = self.readShort() + + for i in range(numTags): + tag, fieldType, count = struct.unpack(self.tagFormat, self.f.read(8)) + + fieldSize = self.fieldSizes[fieldType] + totalSize = fieldSize * count + isLocal = totalSize <= 4 + if not isLocal: + offset = self.readLong() + offset += self.offsetOfNewPage + self.rewriteLastLong(offset) + + if tag in self.Tags: + curPos = self.f.tell() + + if isLocal: + self.fixOffsets( + count, isShort=(fieldSize == 2), isLong=(fieldSize == 4) + ) + self.f.seek(curPos + 4) + else: + self.f.seek(offset) + self.fixOffsets( + count, isShort=(fieldSize == 2), isLong=(fieldSize == 4) + ) + self.f.seek(curPos) + + offset = curPos = None + + elif isLocal: + # skip the locally stored value that is not an offset + self.f.seek(4, os.SEEK_CUR) + + def fixOffsets(self, count, isShort=False, isLong=False): + if not isShort and not isLong: + raise RuntimeError("offset is neither short nor long") + + for i in range(count): + offset = self.readShort() if isShort else self.readLong() + offset += self.offsetOfNewPage + if isShort and offset >= 65536: + # offset is now too large - we must convert shorts to longs + if count != 1: + raise RuntimeError("not implemented") # XXX TODO + + # simple case - the offset is just one and therefore it is + # local (not referenced with another offset) + self.rewriteLastShortToLong(offset) + self.f.seek(-10, os.SEEK_CUR) + self.writeShort(TiffTags.LONG) # rewrite the type to LONG + self.f.seek(8, os.SEEK_CUR) + elif isShort: + self.rewriteLastShort(offset) + else: + self.rewriteLastLong(offset) + + +def _save_all(im, fp, filename): + encoderinfo = im.encoderinfo.copy() + encoderconfig = im.encoderconfig + append_images = list(encoderinfo.get("append_images", [])) + if not hasattr(im, "n_frames") and not append_images: + return _save(im, fp, filename) + + cur_idx = im.tell() + try: + with AppendingTiffWriter(fp) as tf: + for ims in [im] + append_images: + ims.encoderinfo = encoderinfo + ims.encoderconfig = encoderconfig + if not hasattr(ims, "n_frames"): + nfr = 1 + else: + nfr = ims.n_frames + + for idx in range(nfr): + ims.seek(idx) + ims.load() + _save(ims, tf, filename) + tf.newFrame() + finally: + im.seek(cur_idx) + + +# +# -------------------------------------------------------------------- +# Register + +Image.register_open(TiffImageFile.format, TiffImageFile, _accept) +Image.register_save(TiffImageFile.format, _save) +Image.register_save_all(TiffImageFile.format, _save_all) + +Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"]) + +Image.register_mime(TiffImageFile.format, "image/tiff") diff --git a/minor_project/lib/python3.6/site-packages/PIL/TiffTags.py b/minor_project/lib/python3.6/site-packages/PIL/TiffTags.py new file mode 100644 index 0000000..796ff34 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/TiffTags.py @@ -0,0 +1,499 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF tags +# +# This module provides clear-text names for various well-known +# TIFF tags. the TIFF codec works just fine without it. +# +# Copyright (c) Secret Labs AB 1999. +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known TIFF tags. 
+## + +from collections import namedtuple + + +class TagInfo(namedtuple("_TagInfo", "value name type length enum")): + __slots__ = [] + + def __new__(cls, value=None, name="unknown", type=None, length=None, enum=None): + return super().__new__(cls, value, name, type, length, enum or {}) + + def cvt_enum(self, value): + # Using get will call hash(value), which can be expensive + # for some types (e.g. Fraction). Since self.enum is rarely + # used, it's usually better to test it first. + return self.enum.get(value, value) if self.enum else value + + +def lookup(tag): + """ + :param tag: Integer tag number + :returns: Taginfo namedtuple, From the TAGS_V2 info if possible, + otherwise just populating the value and name from TAGS. + If the tag is not recognized, "unknown" is returned for the name + + """ + + return TAGS_V2.get(tag, TagInfo(tag, TAGS.get(tag, "unknown"))) + + +## +# Map tag numbers to tag info. +# +# id: (Name, Type, Length, enum_values) +# +# The length here differs from the length in the tiff spec. For +# numbers, the tiff spec is for the number of fields returned. We +# agree here. For string-like types, the tiff spec uses the length of +# field in bytes. In Pillow, we are using the number of expected +# fields, in general 1 for string-like types. + + +BYTE = 1 +ASCII = 2 +SHORT = 3 +LONG = 4 +RATIONAL = 5 +SIGNED_BYTE = 6 +UNDEFINED = 7 +SIGNED_SHORT = 8 +SIGNED_LONG = 9 +SIGNED_RATIONAL = 10 +FLOAT = 11 +DOUBLE = 12 +IFD = 13 + +TAGS_V2 = { + 254: ("NewSubfileType", LONG, 1), + 255: ("SubfileType", SHORT, 1), + 256: ("ImageWidth", LONG, 1), + 257: ("ImageLength", LONG, 1), + 258: ("BitsPerSample", SHORT, 0), + 259: ( + "Compression", + SHORT, + 1, + { + "Uncompressed": 1, + "CCITT 1d": 2, + "Group 3 Fax": 3, + "Group 4 Fax": 4, + "LZW": 5, + "JPEG": 6, + "PackBits": 32773, + }, + ), + 262: ( + "PhotometricInterpretation", + SHORT, + 1, + { + "WhiteIsZero": 0, + "BlackIsZero": 1, + "RGB": 2, + "RGB Palette": 3, + "Transparency Mask": 4, + "CMYK": 5, + "YCbCr": 6, + "CieLAB": 8, + "CFA": 32803, # TIFF/EP, Adobe DNG + "LinearRaw": 32892, # Adobe DNG + }, + ), + 263: ("Threshholding", SHORT, 1), + 264: ("CellWidth", SHORT, 1), + 265: ("CellLength", SHORT, 1), + 266: ("FillOrder", SHORT, 1), + 269: ("DocumentName", ASCII, 1), + 270: ("ImageDescription", ASCII, 1), + 271: ("Make", ASCII, 1), + 272: ("Model", ASCII, 1), + 273: ("StripOffsets", LONG, 0), + 274: ("Orientation", SHORT, 1), + 277: ("SamplesPerPixel", SHORT, 1), + 278: ("RowsPerStrip", LONG, 1), + 279: ("StripByteCounts", LONG, 0), + 280: ("MinSampleValue", SHORT, 0), + 281: ("MaxSampleValue", SHORT, 0), + 282: ("XResolution", RATIONAL, 1), + 283: ("YResolution", RATIONAL, 1), + 284: ("PlanarConfiguration", SHORT, 1, {"Contiguous": 1, "Separate": 2}), + 285: ("PageName", ASCII, 1), + 286: ("XPosition", RATIONAL, 1), + 287: ("YPosition", RATIONAL, 1), + 288: ("FreeOffsets", LONG, 1), + 289: ("FreeByteCounts", LONG, 1), + 290: ("GrayResponseUnit", SHORT, 1), + 291: ("GrayResponseCurve", SHORT, 0), + 292: ("T4Options", LONG, 1), + 293: ("T6Options", LONG, 1), + 296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}), + 297: ("PageNumber", SHORT, 2), + 301: ("TransferFunction", SHORT, 0), + 305: ("Software", ASCII, 1), + 306: ("DateTime", ASCII, 1), + 315: ("Artist", ASCII, 1), + 316: ("HostComputer", ASCII, 1), + 317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}), + 318: ("WhitePoint", RATIONAL, 2), + 319: ("PrimaryChromaticities", RATIONAL, 6), + 320: ("ColorMap", SHORT, 0), + 321: 
("HalftoneHints", SHORT, 2), + 322: ("TileWidth", LONG, 1), + 323: ("TileLength", LONG, 1), + 324: ("TileOffsets", LONG, 0), + 325: ("TileByteCounts", LONG, 0), + 332: ("InkSet", SHORT, 1), + 333: ("InkNames", ASCII, 1), + 334: ("NumberOfInks", SHORT, 1), + 336: ("DotRange", SHORT, 0), + 337: ("TargetPrinter", ASCII, 1), + 338: ("ExtraSamples", SHORT, 0), + 339: ("SampleFormat", SHORT, 0), + 340: ("SMinSampleValue", DOUBLE, 0), + 341: ("SMaxSampleValue", DOUBLE, 0), + 342: ("TransferRange", SHORT, 6), + 347: ("JPEGTables", UNDEFINED, 1), + # obsolete JPEG tags + 512: ("JPEGProc", SHORT, 1), + 513: ("JPEGInterchangeFormat", LONG, 1), + 514: ("JPEGInterchangeFormatLength", LONG, 1), + 515: ("JPEGRestartInterval", SHORT, 1), + 517: ("JPEGLosslessPredictors", SHORT, 0), + 518: ("JPEGPointTransforms", SHORT, 0), + 519: ("JPEGQTables", LONG, 0), + 520: ("JPEGDCTables", LONG, 0), + 521: ("JPEGACTables", LONG, 0), + 529: ("YCbCrCoefficients", RATIONAL, 3), + 530: ("YCbCrSubSampling", SHORT, 2), + 531: ("YCbCrPositioning", SHORT, 1), + 532: ("ReferenceBlackWhite", RATIONAL, 6), + 700: ("XMP", BYTE, 0), + 33432: ("Copyright", ASCII, 1), + 33723: ("IptcNaaInfo", UNDEFINED, 0), + 34377: ("PhotoshopInfo", BYTE, 0), + # FIXME add more tags here + 34665: ("ExifIFD", LONG, 1), + 34675: ("ICCProfile", UNDEFINED, 1), + 34853: ("GPSInfoIFD", LONG, 1), + # MPInfo + 45056: ("MPFVersion", UNDEFINED, 1), + 45057: ("NumberOfImages", LONG, 1), + 45058: ("MPEntry", UNDEFINED, 1), + 45059: ("ImageUIDList", UNDEFINED, 0), # UNDONE, check + 45060: ("TotalFrames", LONG, 1), + 45313: ("MPIndividualNum", LONG, 1), + 45569: ("PanOrientation", LONG, 1), + 45570: ("PanOverlap_H", RATIONAL, 1), + 45571: ("PanOverlap_V", RATIONAL, 1), + 45572: ("BaseViewpointNum", LONG, 1), + 45573: ("ConvergenceAngle", SIGNED_RATIONAL, 1), + 45574: ("BaselineLength", RATIONAL, 1), + 45575: ("VerticalDivergence", SIGNED_RATIONAL, 1), + 45576: ("AxisDistance_X", SIGNED_RATIONAL, 1), + 45577: ("AxisDistance_Y", SIGNED_RATIONAL, 1), + 45578: ("AxisDistance_Z", SIGNED_RATIONAL, 1), + 45579: ("YawAngle", SIGNED_RATIONAL, 1), + 45580: ("PitchAngle", SIGNED_RATIONAL, 1), + 45581: ("RollAngle", SIGNED_RATIONAL, 1), + 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}), + 50780: ("BestQualityScale", RATIONAL, 1), + 50838: ("ImageJMetaDataByteCounts", LONG, 0), # Can be more than one + 50839: ("ImageJMetaData", UNDEFINED, 1), # see Issue #2006 +} + +# Legacy Tags structure +# these tags aren't included above, but were in the previous versions +TAGS = { + 347: "JPEGTables", + 700: "XMP", + # Additional Exif Info + 32932: "Wang Annotation", + 33434: "ExposureTime", + 33437: "FNumber", + 33445: "MD FileTag", + 33446: "MD ScalePixel", + 33447: "MD ColorTable", + 33448: "MD LabName", + 33449: "MD SampleInfo", + 33450: "MD PrepDate", + 33451: "MD PrepTime", + 33452: "MD FileUnits", + 33550: "ModelPixelScaleTag", + 33723: "IptcNaaInfo", + 33918: "INGR Packet Data Tag", + 33919: "INGR Flag Registers", + 33920: "IrasB Transformation Matrix", + 33922: "ModelTiepointTag", + 34264: "ModelTransformationTag", + 34377: "PhotoshopInfo", + 34735: "GeoKeyDirectoryTag", + 34736: "GeoDoubleParamsTag", + 34737: "GeoAsciiParamsTag", + 34850: "ExposureProgram", + 34852: "SpectralSensitivity", + 34855: "ISOSpeedRatings", + 34856: "OECF", + 34864: "SensitivityType", + 34865: "StandardOutputSensitivity", + 34866: "RecommendedExposureIndex", + 34867: "ISOSpeed", + 34868: "ISOSpeedLatitudeyyy", + 34869: "ISOSpeedLatitudezzz", + 34908: "HylaFAX FaxRecvParams", + 34909: 
"HylaFAX FaxSubAddress", + 34910: "HylaFAX FaxRecvTime", + 36864: "ExifVersion", + 36867: "DateTimeOriginal", + 36868: "DateTImeDigitized", + 37121: "ComponentsConfiguration", + 37122: "CompressedBitsPerPixel", + 37724: "ImageSourceData", + 37377: "ShutterSpeedValue", + 37378: "ApertureValue", + 37379: "BrightnessValue", + 37380: "ExposureBiasValue", + 37381: "MaxApertureValue", + 37382: "SubjectDistance", + 37383: "MeteringMode", + 37384: "LightSource", + 37385: "Flash", + 37386: "FocalLength", + 37396: "SubjectArea", + 37500: "MakerNote", + 37510: "UserComment", + 37520: "SubSec", + 37521: "SubSecTimeOriginal", + 37522: "SubsecTimeDigitized", + 40960: "FlashPixVersion", + 40961: "ColorSpace", + 40962: "PixelXDimension", + 40963: "PixelYDimension", + 40964: "RelatedSoundFile", + 40965: "InteroperabilityIFD", + 41483: "FlashEnergy", + 41484: "SpatialFrequencyResponse", + 41486: "FocalPlaneXResolution", + 41487: "FocalPlaneYResolution", + 41488: "FocalPlaneResolutionUnit", + 41492: "SubjectLocation", + 41493: "ExposureIndex", + 41495: "SensingMethod", + 41728: "FileSource", + 41729: "SceneType", + 41730: "CFAPattern", + 41985: "CustomRendered", + 41986: "ExposureMode", + 41987: "WhiteBalance", + 41988: "DigitalZoomRatio", + 41989: "FocalLengthIn35mmFilm", + 41990: "SceneCaptureType", + 41991: "GainControl", + 41992: "Contrast", + 41993: "Saturation", + 41994: "Sharpness", + 41995: "DeviceSettingDescription", + 41996: "SubjectDistanceRange", + 42016: "ImageUniqueID", + 42032: "CameraOwnerName", + 42033: "BodySerialNumber", + 42034: "LensSpecification", + 42035: "LensMake", + 42036: "LensModel", + 42037: "LensSerialNumber", + 42112: "GDAL_METADATA", + 42113: "GDAL_NODATA", + 42240: "Gamma", + 50215: "Oce Scanjob Description", + 50216: "Oce Application Selector", + 50217: "Oce Identification Number", + 50218: "Oce ImageLogic Characteristics", + # Adobe DNG + 50706: "DNGVersion", + 50707: "DNGBackwardVersion", + 50708: "UniqueCameraModel", + 50709: "LocalizedCameraModel", + 50710: "CFAPlaneColor", + 50711: "CFALayout", + 50712: "LinearizationTable", + 50713: "BlackLevelRepeatDim", + 50714: "BlackLevel", + 50715: "BlackLevelDeltaH", + 50716: "BlackLevelDeltaV", + 50717: "WhiteLevel", + 50718: "DefaultScale", + 50719: "DefaultCropOrigin", + 50720: "DefaultCropSize", + 50721: "ColorMatrix1", + 50722: "ColorMatrix2", + 50723: "CameraCalibration1", + 50724: "CameraCalibration2", + 50725: "ReductionMatrix1", + 50726: "ReductionMatrix2", + 50727: "AnalogBalance", + 50728: "AsShotNeutral", + 50729: "AsShotWhiteXY", + 50730: "BaselineExposure", + 50731: "BaselineNoise", + 50732: "BaselineSharpness", + 50733: "BayerGreenSplit", + 50734: "LinearResponseLimit", + 50735: "CameraSerialNumber", + 50736: "LensInfo", + 50737: "ChromaBlurRadius", + 50738: "AntiAliasStrength", + 50740: "DNGPrivateData", + 50778: "CalibrationIlluminant1", + 50779: "CalibrationIlluminant2", + 50784: "Alias Layer Metadata", +} + + +def _populate(): + for k, v in TAGS_V2.items(): + # Populate legacy structure. + TAGS[k] = v[0] + if len(v) == 4: + for sk, sv in v[3].items(): + TAGS[(k, sv)] = sk + + TAGS_V2[k] = TagInfo(k, *v) + + +_populate() +## +# Map type numbers to type names -- defined in ImageFileDirectory. 
+ +TYPES = {} + +# was: +# TYPES = { +# 1: "byte", +# 2: "ascii", +# 3: "short", +# 4: "long", +# 5: "rational", +# 6: "signed byte", +# 7: "undefined", +# 8: "signed short", +# 9: "signed long", +# 10: "signed rational", +# 11: "float", +# 12: "double", +# } + +# +# These tags are handled by default in libtiff, without +# adding to the custom dictionary. From tif_dir.c, searching for +# case TIFFTAG in the _TIFFVSetField function: +# Line: item. +# 148: case TIFFTAG_SUBFILETYPE: +# 151: case TIFFTAG_IMAGEWIDTH: +# 154: case TIFFTAG_IMAGELENGTH: +# 157: case TIFFTAG_BITSPERSAMPLE: +# 181: case TIFFTAG_COMPRESSION: +# 202: case TIFFTAG_PHOTOMETRIC: +# 205: case TIFFTAG_THRESHHOLDING: +# 208: case TIFFTAG_FILLORDER: +# 214: case TIFFTAG_ORIENTATION: +# 221: case TIFFTAG_SAMPLESPERPIXEL: +# 228: case TIFFTAG_ROWSPERSTRIP: +# 238: case TIFFTAG_MINSAMPLEVALUE: +# 241: case TIFFTAG_MAXSAMPLEVALUE: +# 244: case TIFFTAG_SMINSAMPLEVALUE: +# 247: case TIFFTAG_SMAXSAMPLEVALUE: +# 250: case TIFFTAG_XRESOLUTION: +# 256: case TIFFTAG_YRESOLUTION: +# 262: case TIFFTAG_PLANARCONFIG: +# 268: case TIFFTAG_XPOSITION: +# 271: case TIFFTAG_YPOSITION: +# 274: case TIFFTAG_RESOLUTIONUNIT: +# 280: case TIFFTAG_PAGENUMBER: +# 284: case TIFFTAG_HALFTONEHINTS: +# 288: case TIFFTAG_COLORMAP: +# 294: case TIFFTAG_EXTRASAMPLES: +# 298: case TIFFTAG_MATTEING: +# 305: case TIFFTAG_TILEWIDTH: +# 316: case TIFFTAG_TILELENGTH: +# 327: case TIFFTAG_TILEDEPTH: +# 333: case TIFFTAG_DATATYPE: +# 344: case TIFFTAG_SAMPLEFORMAT: +# 361: case TIFFTAG_IMAGEDEPTH: +# 364: case TIFFTAG_SUBIFD: +# 376: case TIFFTAG_YCBCRPOSITIONING: +# 379: case TIFFTAG_YCBCRSUBSAMPLING: +# 383: case TIFFTAG_TRANSFERFUNCTION: +# 389: case TIFFTAG_REFERENCEBLACKWHITE: +# 393: case TIFFTAG_INKNAMES: + +# Following pseudo-tags are also handled by default in libtiff: +# TIFFTAG_JPEGQUALITY 65537 + +# some of these are not in our TAGS_V2 dict and were included from tiff.h + +# This list also exists in encode.c +LIBTIFF_CORE = { + 255, + 256, + 257, + 258, + 259, + 262, + 263, + 266, + 274, + 277, + 278, + 280, + 281, + 340, + 341, + 282, + 283, + 284, + 286, + 287, + 296, + 297, + 321, + 320, + 338, + 32995, + 322, + 323, + 32998, + 32996, + 339, + 32997, + 330, + 531, + 530, + 301, + 532, + 333, + # as above + 269, # this has been in our tests forever, and works + 65537, +} + +LIBTIFF_CORE.remove(301) # Array of short, crashes +LIBTIFF_CORE.remove(532) # Array of long, crashes + +LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes +LIBTIFF_CORE.remove(322) # We don't have support for writing tiled images with libtiff +LIBTIFF_CORE.remove(323) # Tiled images +LIBTIFF_CORE.remove(333) # Ink Names either + +# Note to advanced users: There may be combinations of these +# parameters and values that when added properly, will work and +# produce valid tiff images that may work in your application. +# It is safe to add and remove tags from this set from Pillow's point +# of view so long as you test against libtiff. diff --git a/minor_project/lib/python3.6/site-packages/PIL/WalImageFile.py b/minor_project/lib/python3.6/site-packages/PIL/WalImageFile.py new file mode 100644 index 0000000..b578d69 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/WalImageFile.py @@ -0,0 +1,126 @@ +# +# The Python Imaging Library. +# $Id$ +# +# WAL file handling +# +# History: +# 2003-04-23 fl created +# +# Copyright (c) 2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. 
+# + +""" +This reader is based on the specification available from: +https://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml +and has been tested with a few sample files found using google. + +.. note:: + This format cannot be automatically recognized, so the reader + is not registered for use with :py:func:`PIL.Image.open()`. + To open a WAL file, use the :py:func:`PIL.WalImageFile.open()` function instead. +""" + +import builtins + +from . import Image +from ._binary import i32le as i32 + + +def open(filename): + """ + Load texture from a Quake2 WAL texture file. + + By default, a Quake2 standard palette is attached to the texture. + To override the palette, use the :py:func:`PIL.Image.Image.putpalette()` method. + + :param filename: WAL file name, or an opened file handle. + :returns: An image instance. + """ + # FIXME: modify to return a WalImageFile instance instead of + # plain Image object ? + + def imopen(fp): + # read header fields + header = fp.read(32 + 24 + 32 + 12) + size = i32(header, 32), i32(header, 36) + offset = i32(header, 40) + + # load pixel data + fp.seek(offset) + + Image._decompression_bomb_check(size) + im = Image.frombytes("P", size, fp.read(size[0] * size[1])) + im.putpalette(quake2palette) + + im.format = "WAL" + im.format_description = "Quake2 Texture" + + # strings are null-terminated + im.info["name"] = header[:32].split(b"\0", 1)[0] + next_name = header[56 : 56 + 32].split(b"\0", 1)[0] + if next_name: + im.info["next_name"] = next_name + + return im + + if hasattr(filename, "read"): + return imopen(filename) + else: + with builtins.open(filename, "rb") as fp: + return imopen(fp) + + +quake2palette = ( + # default palette taken from piffo 0.93 by Hans Häggström + b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e" + b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f" + b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c" + b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b" + b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10" + b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07" + b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f" + b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16" + b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d" + b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31" + b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28" + b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07" + b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27" + b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b" + b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01" + b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21" + b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14" + b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07" + b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14" + b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f" + b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34" + b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d" + b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14" + b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01" + b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24" + b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10" + 
b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01" + b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27" + b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c" + b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a" + b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26" + b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d" + b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01" + b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20" + b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17" + b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07" + b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25" + b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c" + b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01" + b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23" + b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f" + b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b" + b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37" + b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b" + b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01" + b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10" + b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b" + b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20" +) diff --git a/minor_project/lib/python3.6/site-packages/PIL/WebPImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/WebPImagePlugin.py new file mode 100644 index 0000000..2e9746f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/WebPImagePlugin.py @@ -0,0 +1,351 @@ +from io import BytesIO + +from . import Image, ImageFile + +try: + from . import _webp + + SUPPORTED = True +except ImportError: + SUPPORTED = False + + +_VALID_WEBP_MODES = {"RGBX": True, "RGBA": True, "RGB": True} + +_VALID_WEBP_LEGACY_MODES = {"RGB": True, "RGBA": True} + +_VP8_MODES_BY_IDENTIFIER = { + b"VP8 ": "RGB", + b"VP8X": "RGBA", + b"VP8L": "RGBA", # lossless +} + + +def _accept(prefix): + is_riff_file_format = prefix[:4] == b"RIFF" + is_webp_file = prefix[8:12] == b"WEBP" + is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER + + if is_riff_file_format and is_webp_file and is_valid_vp8_mode: + if not SUPPORTED: + return ( + "image file could not be identified because WEBP support not installed" + ) + return True + + +class WebPImageFile(ImageFile.ImageFile): + + format = "WEBP" + format_description = "WebP image" + __loaded = 0 + __logical_frame = 0 + + def _open(self): + if not _webp.HAVE_WEBPANIM: + # Legacy mode + data, width, height, self.mode, icc_profile, exif = _webp.WebPDecode( + self.fp.read() + ) + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + self._size = width, height + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.mode)] + self.n_frames = 1 + self.is_animated = False + return + + # Use the newer AnimDecoder API to parse the (possibly) animated file, + # and access muxed chunks like ICC/EXIF/XMP. 
+ self._decoder = _webp.WebPAnimDecoder(self.fp.read()) + + # Get info from decoder + width, height, loop_count, bgcolor, frame_count, mode = self._decoder.get_info() + self._size = width, height + self.info["loop"] = loop_count + bg_a, bg_r, bg_g, bg_b = ( + (bgcolor >> 24) & 0xFF, + (bgcolor >> 16) & 0xFF, + (bgcolor >> 8) & 0xFF, + bgcolor & 0xFF, + ) + self.info["background"] = (bg_r, bg_g, bg_b, bg_a) + self.n_frames = frame_count + self.is_animated = self.n_frames > 1 + self.mode = "RGB" if mode == "RGBX" else mode + self.rawmode = mode + self.tile = [] + + # Attempt to read ICC / EXIF / XMP chunks from file + icc_profile = self._decoder.get_chunk("ICCP") + exif = self._decoder.get_chunk("EXIF") + xmp = self._decoder.get_chunk("XMP ") + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + if xmp: + self.info["xmp"] = xmp + + # Initialize seek state + self._reset(reset=False) + + def _getexif(self): + if "exif" not in self.info: + return None + return dict(self.getexif()) + + def seek(self, frame): + if not self._seek_check(frame): + return + + # Set logical frame to requested position + self.__logical_frame = frame + + def _reset(self, reset=True): + if reset: + self._decoder.reset() + self.__physical_frame = 0 + self.__loaded = -1 + self.__timestamp = 0 + + def _get_next(self): + # Get next frame + ret = self._decoder.get_next() + self.__physical_frame += 1 + + # Check if an error occurred + if ret is None: + self._reset() # Reset just to be safe + self.seek(0) + raise EOFError("failed to decode next frame in WebP file") + + # Compute duration + data, timestamp = ret + duration = timestamp - self.__timestamp + self.__timestamp = timestamp + + # libwebp gives frame end, adjust to start of frame + timestamp -= duration + return data, timestamp, duration + + def _seek(self, frame): + if self.__physical_frame == frame: + return # Nothing to do + if frame < self.__physical_frame: + self._reset() # Rewind to beginning + while self.__physical_frame < frame: + self._get_next() # Advance to the requested frame + + def load(self): + if _webp.HAVE_WEBPANIM: + if self.__loaded != self.__logical_frame: + self._seek(self.__logical_frame) + + # We need to load the image data for this frame + data, timestamp, duration = self._get_next() + self.info["timestamp"] = timestamp + self.info["duration"] = duration + self.__loaded = self.__logical_frame + + # Set tile + if self.fp and self._exclusive_fp: + self.fp.close() + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)] + + return super().load() + + def tell(self): + if not _webp.HAVE_WEBPANIM: + return super().tell() + + return self.__logical_frame + + +def _save_all(im, fp, filename): + encoderinfo = im.encoderinfo.copy() + append_images = list(encoderinfo.get("append_images", [])) + + # If total frame count is 1, then save using the legacy API, which + # will preserve non-alpha modes + total = 0 + for ims in [im] + append_images: + total += getattr(ims, "n_frames", 1) + if total == 1: + _save(im, fp, filename) + return + + background = (0, 0, 0, 0) + if "background" in encoderinfo: + background = encoderinfo["background"] + elif "background" in im.info: + background = im.info["background"] + if isinstance(background, int): + # GifImagePlugin stores a global color table index in + # info["background"]. 
So it must be converted to an RGBA value + palette = im.getpalette() + if palette: + r, g, b = palette[background * 3 : (background + 1) * 3] + background = (r, g, b, 0) + + duration = im.encoderinfo.get("duration", 0) + loop = im.encoderinfo.get("loop", 0) + minimize_size = im.encoderinfo.get("minimize_size", False) + kmin = im.encoderinfo.get("kmin", None) + kmax = im.encoderinfo.get("kmax", None) + allow_mixed = im.encoderinfo.get("allow_mixed", False) + verbose = False + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + method = im.encoderinfo.get("method", 0) + icc_profile = im.encoderinfo.get("icc_profile", "") + exif = im.encoderinfo.get("exif", "") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() + xmp = im.encoderinfo.get("xmp", "") + if allow_mixed: + lossless = False + + # Sensible keyframe defaults are from gif2webp.c script + if kmin is None: + kmin = 9 if lossless else 3 + if kmax is None: + kmax = 17 if lossless else 5 + + # Validate background color + if ( + not isinstance(background, (list, tuple)) + or len(background) != 4 + or not all(v >= 0 and v < 256 for v in background) + ): + raise OSError( + "Background color is not an RGBA tuple clamped to (0-255): %s" + % str(background) + ) + + # Convert to packed uint + bg_r, bg_g, bg_b, bg_a = background + background = (bg_a << 24) | (bg_r << 16) | (bg_g << 8) | (bg_b << 0) + + # Setup the WebP animation encoder + enc = _webp.WebPAnimEncoder( + im.size[0], + im.size[1], + background, + loop, + minimize_size, + kmin, + kmax, + allow_mixed, + verbose, + ) + + # Add each frame + frame_idx = 0 + timestamp = 0 + cur_idx = im.tell() + try: + for ims in [im] + append_images: + # Get # of frames in this image + nfr = getattr(ims, "n_frames", 1) + + for idx in range(nfr): + ims.seek(idx) + ims.load() + + # Make sure image mode is supported + frame = ims + rawmode = ims.mode + if ims.mode not in _VALID_WEBP_MODES: + alpha = ( + "A" in ims.mode + or "a" in ims.mode + or (ims.mode == "P" and "A" in ims.im.getpalettemode()) + ) + rawmode = "RGBA" if alpha else "RGB" + frame = ims.convert(rawmode) + + if rawmode == "RGB": + # For faster conversion, use RGBX + rawmode = "RGBX" + + # Append the frame to the animation encoder + enc.add( + frame.tobytes("raw", rawmode), + timestamp, + frame.size[0], + frame.size[1], + rawmode, + lossless, + quality, + method, + ) + + # Update timestamp and frame index + if isinstance(duration, (list, tuple)): + timestamp += duration[frame_idx] + else: + timestamp += duration + frame_idx += 1 + + finally: + im.seek(cur_idx) + + # Force encoder to flush frames + enc.add(None, timestamp, 0, 0, "", lossless, quality, 0) + + # Get the final output from the encoder + data = enc.assemble(icc_profile, exif, xmp) + if data is None: + raise OSError("cannot write file as WebP (encoder returned None)") + + fp.write(data) + + +def _save(im, fp, filename): + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + icc_profile = im.encoderinfo.get("icc_profile", "") + exif = im.encoderinfo.get("exif", "") + if isinstance(exif, Image.Exif): + exif = exif.tobytes() + xmp = im.encoderinfo.get("xmp", "") + method = im.encoderinfo.get("method", 0) + + if im.mode not in _VALID_WEBP_LEGACY_MODES: + alpha = ( + "A" in im.mode + or "a" in im.mode + or (im.mode == "P" and "A" in im.im.getpalettemode()) + ) + im = im.convert("RGBA" if alpha else "RGB") + + data = _webp.WebPEncode( + im.tobytes(), + im.size[0], + im.size[1], + lossless, + 
float(quality), + im.mode, + icc_profile, + method, + exif, + xmp, + ) + if data is None: + raise OSError("cannot write file as WebP (encoder returned None)") + + fp.write(data) + + +Image.register_open(WebPImageFile.format, WebPImageFile, _accept) +if SUPPORTED: + Image.register_save(WebPImageFile.format, _save) + if _webp.HAVE_WEBPANIM: + Image.register_save_all(WebPImageFile.format, _save_all) + Image.register_extension(WebPImageFile.format, ".webp") + Image.register_mime(WebPImageFile.format, "image/webp") diff --git a/minor_project/lib/python3.6/site-packages/PIL/WmfImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/WmfImagePlugin.py new file mode 100644 index 0000000..87847a1 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/WmfImagePlugin.py @@ -0,0 +1,178 @@ +# +# The Python Imaging Library +# $Id$ +# +# WMF stub codec +# +# history: +# 1996-12-14 fl Created +# 2004-02-22 fl Turned into a stub driver +# 2004-02-23 fl Added EMF support +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# +# WMF/EMF reference documentation: +# https://winprotocoldoc.blob.core.windows.net/productionwindowsarchives/MS-WMF/[MS-WMF].pdf +# http://wvware.sourceforge.net/caolan/index.html +# http://wvware.sourceforge.net/caolan/ora-wmf.html + +from . import Image, ImageFile +from ._binary import i16le as word +from ._binary import i32le as dword +from ._binary import si16le as short +from ._binary import si32le as _long + +_handler = None + + +def register_handler(handler): + """ + Install application-specific WMF image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + + +if hasattr(Image.core, "drawwmf"): + # install default handler (windows only) + + class WmfHandler: + def open(self, im): + im.mode = "RGB" + self.bbox = im.info["wmf_bbox"] + + def load(self, im): + im.fp.seek(0) # rewind + return Image.frombytes( + "RGB", + im.size, + Image.core.drawwmf(im.fp.read(), im.size, self.bbox), + "raw", + "BGR", + (im.size[0] * 3 + 3) & -4, + -1, + ) + + register_handler(WmfHandler()) + +# +# -------------------------------------------------------------------- +# Read WMF file + + +def _accept(prefix): + return ( + prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or prefix[:4] == b"\x01\x00\x00\x00" + ) + + +## +# Image plugin for Windows metafiles. 
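Because WMF/EMF is a stub codec, rasterizing the vector data needs an application-supplied handler; the built-in Image.core.drawwmf handler is only installed on Windows. A minimal sketch of the handler protocol expected by register_handler() above (the file name and the flat white rendering are assumptions for illustration):

    from PIL import Image, WmfImagePlugin

    class FlatWmfHandler:
        """Toy handler: a real handler would rasterize the metafile records."""

        def open(self, im):
            im.mode = "RGB"                       # size was already derived from the header

        def load(self, im):
            return Image.new("RGB", im.size, "white")

    WmfImagePlugin.register_handler(FlatWmfHandler())
    im = Image.open("drawing.wmf")                # hypothetical placeable WMF file
    im.load(dpi=144)                              # placeable files rescale from the 72 dpi default (see load() below)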
+ + +class WmfStubImageFile(ImageFile.StubImageFile): + + format = "WMF" + format_description = "Windows Metafile" + + def _open(self): + self._inch = None + + # check placable header + s = self.fp.read(80) + + if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00": + + # placeable windows metafile + + # get units per inch + self._inch = word(s, 14) + + # get bounding box + x0 = short(s, 6) + y0 = short(s, 8) + x1 = short(s, 10) + y1 = short(s, 12) + + # normalize size to 72 dots per inch + self.info["dpi"] = 72 + size = ( + (x1 - x0) * self.info["dpi"] // self._inch, + (y1 - y0) * self.info["dpi"] // self._inch, + ) + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + # sanity check (standard metafile header) + if s[22:26] != b"\x01\x00\t\x00": + raise SyntaxError("Unsupported WMF file format") + + elif dword(s) == 1 and s[40:44] == b" EMF": + # enhanced metafile + + # get bounding box + x0 = _long(s, 8) + y0 = _long(s, 12) + x1 = _long(s, 16) + y1 = _long(s, 20) + + # get frame (in 0.01 millimeter units) + frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36) + + size = x1 - x0, y1 - y0 + + # calculate dots per inch from bbox and frame + xdpi = int(2540.0 * (x1 - y0) / (frame[2] - frame[0]) + 0.5) + ydpi = int(2540.0 * (y1 - y0) / (frame[3] - frame[1]) + 0.5) + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + if xdpi == ydpi: + self.info["dpi"] = xdpi + else: + self.info["dpi"] = xdpi, ydpi + + else: + raise SyntaxError("Unsupported file format") + + self.mode = "RGB" + self._size = size + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + def load(self, dpi=None): + if dpi is not None and self._inch is not None: + self.info["dpi"] = int(dpi + 0.5) + x0, y0, x1, y1 = self.info["wmf_bbox"] + self._size = ( + (x1 - x0) * self.info["dpi"] // self._inch, + (y1 - y0) * self.info["dpi"] // self._inch, + ) + super().load() + + +def _save(im, fp, filename): + if _handler is None or not hasattr(_handler, "save"): + raise OSError("WMF save handler not installed") + _handler.save(im, fp, filename) + + +# +# -------------------------------------------------------------------- +# Registry stuff + + +Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept) +Image.register_save(WmfStubImageFile.format, _save) + +Image.register_extensions(WmfStubImageFile.format, [".wmf", ".emf"]) diff --git a/minor_project/lib/python3.6/site-packages/PIL/XVThumbImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/XVThumbImagePlugin.py new file mode 100644 index 0000000..4efedb7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/XVThumbImagePlugin.py @@ -0,0 +1,78 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XV Thumbnail file handler by Charles E. "Gene" Cash +# (gcash@magicnet.net) +# +# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV, +# available from ftp://ftp.cis.upenn.edu/pub/xv/ +# +# history: +# 98-08-15 cec created (b/w only) +# 98-12-09 cec added color palette +# 98-12-28 fl added to PIL (with only a few very minor modifications) +# +# To do: +# FIXME: make save work (this requires quantization support) +# + +from . 
import Image, ImageFile, ImagePalette +from ._binary import o8 + +_MAGIC = b"P7 332" + +# standard color palette for thumbnails (RGB332) +PALETTE = b"" +for r in range(8): + for g in range(8): + for b in range(4): + PALETTE = PALETTE + ( + o8((r * 255) // 7) + o8((g * 255) // 7) + o8((b * 255) // 3) + ) + + +def _accept(prefix): + return prefix[:6] == _MAGIC + + +## +# Image plugin for XV thumbnail images. + + +class XVThumbImageFile(ImageFile.ImageFile): + + format = "XVThumb" + format_description = "XV thumbnail image" + + def _open(self): + + # check magic + if not _accept(self.fp.read(6)): + raise SyntaxError("not an XV thumbnail file") + + # Skip to beginning of next line + self.fp.readline() + + # skip info comments + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("Unexpected EOF reading XV thumbnail file") + if s[0] != 35: # ie. when not a comment: '#' + break + + # parse header line (already read) + s = s.strip().split() + + self.mode = "P" + self._size = int(s[0]), int(s[1]) + + self.palette = ImagePalette.raw("RGB", PALETTE) + + self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1))] + + +# -------------------------------------------------------------------- + +Image.register_open(XVThumbImageFile.format, XVThumbImageFile, _accept) diff --git a/minor_project/lib/python3.6/site-packages/PIL/XbmImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/XbmImagePlugin.py new file mode 100644 index 0000000..644cfb3 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/XbmImagePlugin.py @@ -0,0 +1,94 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XBM File handling +# +# History: +# 1995-09-08 fl Created +# 1996-11-01 fl Added save support +# 1997-07-07 fl Made header parser more tolerant +# 1997-07-22 fl Fixed yet another parser bug +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) +# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog) +# 2004-02-24 fl Allow some whitespace before first #define +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import re + +from . import Image, ImageFile + +# XBM header +xbm_head = re.compile( + br"\s*#define[ \t]+.*_width[ \t]+(?P[0-9]+)[\r\n]+" + b"#define[ \t]+.*_height[ \t]+(?P[0-9]+)[\r\n]+" + b"(?P" + b"#define[ \t]+[^_]*_x_hot[ \t]+(?P[0-9]+)[\r\n]+" + b"#define[ \t]+[^_]*_y_hot[ \t]+(?P[0-9]+)[\r\n]+" + b")?" + b"[\\000-\\377]*_bits\\[\\]" +) + + +def _accept(prefix): + return prefix.lstrip()[:7] == b"#define" + + +## +# Image plugin for X11 bitmaps. 
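The xbm_head pattern above also captures the optional hotspot defines, and the _save() writer defined just below accepts only mode "1" images and emits the classic C-array form; the hotspot keyword maps to the *_x_hot/*_y_hot defines, which the reader turns back into info["hotspot"]. A small round-trip sketch (file name chosen for illustration):

    from PIL import Image

    im = Image.new("1", (16, 16), 1)
    im.save("cursor.xbm", hotspot=(8, 8))            # optional; emits im_x_hot / im_y_hot

    reread = Image.open("cursor.xbm")
    print(reread.size, reread.info.get("hotspot"))   # (16, 16) (8, 8)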
+ + +class XbmImageFile(ImageFile.ImageFile): + + format = "XBM" + format_description = "X11 Bitmap" + + def _open(self): + + m = xbm_head.match(self.fp.read(512)) + + if m: + + xsize = int(m.group("width")) + ysize = int(m.group("height")) + + if m.group("hotspot"): + self.info["hotspot"] = (int(m.group("xhot")), int(m.group("yhot"))) + + self.mode = "1" + self._size = xsize, ysize + + self.tile = [("xbm", (0, 0) + self.size, m.end(), None)] + + +def _save(im, fp, filename): + + if im.mode != "1": + raise OSError(f"cannot write mode {im.mode} as XBM") + + fp.write(f"#define im_width {im.size[0]}\n".encode("ascii")) + fp.write(f"#define im_height {im.size[1]}\n".encode("ascii")) + + hotspot = im.encoderinfo.get("hotspot") + if hotspot: + fp.write(f"#define im_x_hot {hotspot[0]}\n".encode("ascii")) + fp.write(f"#define im_y_hot {hotspot[1]}\n".encode("ascii")) + + fp.write(b"static char im_bits[] = {\n") + + ImageFile._save(im, fp, [("xbm", (0, 0) + im.size, 0, None)]) + + fp.write(b"};\n") + + +Image.register_open(XbmImageFile.format, XbmImageFile, _accept) +Image.register_save(XbmImageFile.format, _save) + +Image.register_extension(XbmImageFile.format, ".xbm") + +Image.register_mime(XbmImageFile.format, "image/xbm") diff --git a/minor_project/lib/python3.6/site-packages/PIL/XpmImagePlugin.py b/minor_project/lib/python3.6/site-packages/PIL/XpmImagePlugin.py new file mode 100644 index 0000000..ebd65ba --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/XpmImagePlugin.py @@ -0,0 +1,130 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XPM File handling +# +# History: +# 1996-12-29 fl Created +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re + +from . import Image, ImageFile, ImagePalette +from ._binary import o8 + +# XPM header +xpm_head = re.compile(b'"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)') + + +def _accept(prefix): + return prefix[:9] == b"/* XPM */" + + +## +# Image plugin for X11 pixel maps. 
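The xpm_head pattern above captures only the values line of an XPM header: width, height, number of palette entries and characters per pixel (the plugin rejects files with more than 256 colours or multi-character pixels). A tiny self-contained sketch of what it matches, mirroring the pattern defined above:

    import re

    xpm_head = re.compile(b'"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)')
    m = xpm_head.match(b'"16 16 4 1",')
    print(m.groups())   # (b'16', b'16', b'4', b'1') -> width, height, palette size, chars per pixel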
+ + +class XpmImageFile(ImageFile.ImageFile): + + format = "XPM" + format_description = "X11 Pixel Map" + + def _open(self): + + if not _accept(self.fp.read(9)): + raise SyntaxError("not an XPM file") + + # skip forward to next string + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("broken XPM file") + m = xpm_head.match(s) + if m: + break + + self._size = int(m.group(1)), int(m.group(2)) + + pal = int(m.group(3)) + bpp = int(m.group(4)) + + if pal > 256 or bpp != 1: + raise ValueError("cannot read this XPM file") + + # + # load palette description + + palette = [b"\0\0\0"] * 256 + + for i in range(pal): + + s = self.fp.readline() + if s[-2:] == b"\r\n": + s = s[:-2] + elif s[-1:] in b"\r\n": + s = s[:-1] + + c = s[1] + s = s[2:-2].split() + + for i in range(0, len(s), 2): + + if s[i] == b"c": + + # process colour key + rgb = s[i + 1] + if rgb == b"None": + self.info["transparency"] = c + elif rgb[0:1] == b"#": + # FIXME: handle colour names (see ImagePalette.py) + rgb = int(rgb[1:], 16) + palette[c] = ( + o8((rgb >> 16) & 255) + o8((rgb >> 8) & 255) + o8(rgb & 255) + ) + else: + # unknown colour + raise ValueError("cannot read this XPM file") + break + + else: + + # missing colour key + raise ValueError("cannot read this XPM file") + + self.mode = "P" + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), ("P", 0, 1))] + + def load_read(self, bytes): + + # + # load all image data in one chunk + + xsize, ysize = self.size + + s = [None] * ysize + + for i in range(ysize): + s[i] = self.fp.readline()[1 : xsize + 1].ljust(xsize) + + return b"".join(s) + + +# +# Registry + + +Image.register_open(XpmImageFile.format, XpmImageFile, _accept) + +Image.register_extension(XpmImageFile.format, ".xpm") + +Image.register_mime(XpmImageFile.format, "image/xpm") diff --git a/minor_project/lib/python3.6/site-packages/PIL/__init__.py b/minor_project/lib/python3.6/site-packages/PIL/__init__.py new file mode 100644 index 0000000..890ae44 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/__init__.py @@ -0,0 +1,139 @@ +"""Pillow (Fork of the Python Imaging Library) + +Pillow is the friendly PIL fork by Alex Clark and Contributors. + https://github.com/python-pillow/Pillow/ + +Pillow is forked from PIL 1.1.7. + +PIL is the Python Imaging Library by Fredrik Lundh and Contributors. +Copyright (c) 1999 by Secret Labs AB. + +Use PIL.__version__ for this Pillow version. + +;-) +""" + +import sys +import warnings + +from . import _version + +# VERSION was removed in Pillow 6.0.0. +__version__ = _version.__version__ + + +# PILLOW_VERSION is deprecated and will be removed in a future release. +# Use __version__ instead. +def _raise_version_warning(): + warnings.warn( + "PILLOW_VERSION is deprecated and will be removed in Pillow 9 (2022-01-02). 
" + "Use __version__ instead.", + DeprecationWarning, + stacklevel=3, + ) + + +if sys.version_info >= (3, 7): + + def __getattr__(name): + if name == "PILLOW_VERSION": + _raise_version_warning() + return __version__ + raise AttributeError(f"module '{__name__}' has no attribute '{name}'") + + +else: + + class _Deprecated_Version(str): + def __str__(self): + _raise_version_warning() + return super().__str__() + + def __getitem__(self, key): + _raise_version_warning() + return super().__getitem__(key) + + def __eq__(self, other): + _raise_version_warning() + return super().__eq__(other) + + def __ne__(self, other): + _raise_version_warning() + return super().__ne__(other) + + def __gt__(self, other): + _raise_version_warning() + return super().__gt__(other) + + def __lt__(self, other): + _raise_version_warning() + return super().__lt__(other) + + def __ge__(self, other): + _raise_version_warning() + return super().__gt__(other) + + def __le__(self, other): + _raise_version_warning() + return super().__lt__(other) + + PILLOW_VERSION = _Deprecated_Version(__version__) + +del _version + + +_plugins = [ + "BlpImagePlugin", + "BmpImagePlugin", + "BufrStubImagePlugin", + "CurImagePlugin", + "DcxImagePlugin", + "DdsImagePlugin", + "EpsImagePlugin", + "FitsStubImagePlugin", + "FliImagePlugin", + "FpxImagePlugin", + "FtexImagePlugin", + "GbrImagePlugin", + "GifImagePlugin", + "GribStubImagePlugin", + "Hdf5StubImagePlugin", + "IcnsImagePlugin", + "IcoImagePlugin", + "ImImagePlugin", + "ImtImagePlugin", + "IptcImagePlugin", + "JpegImagePlugin", + "Jpeg2KImagePlugin", + "McIdasImagePlugin", + "MicImagePlugin", + "MpegImagePlugin", + "MpoImagePlugin", + "MspImagePlugin", + "PalmImagePlugin", + "PcdImagePlugin", + "PcxImagePlugin", + "PdfImagePlugin", + "PixarImagePlugin", + "PngImagePlugin", + "PpmImagePlugin", + "PsdImagePlugin", + "SgiImagePlugin", + "SpiderImagePlugin", + "SunImagePlugin", + "TgaImagePlugin", + "TiffImagePlugin", + "WebPImagePlugin", + "WmfImagePlugin", + "XbmImagePlugin", + "XpmImagePlugin", + "XVThumbImagePlugin", +] + + +class UnidentifiedImageError(OSError): + """ + Raised in :py:meth:`PIL.Image.open` if an image cannot be opened and identified. 
+ """ + + pass diff --git a/minor_project/lib/python3.6/site-packages/PIL/__main__.py b/minor_project/lib/python3.6/site-packages/PIL/__main__.py new file mode 100644 index 0000000..a05323f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/__main__.py @@ -0,0 +1,3 @@ +from .features import pilinfo + +pilinfo() diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BdfFontFile.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BdfFontFile.cpython-36.pyc new file mode 100644 index 0000000..a7e45de Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BdfFontFile.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..be03b5b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..444af14 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..376baff Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ContainerIO.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ContainerIO.cpython-36.pyc new file mode 100644 index 0000000..db66dd3 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ContainerIO.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/CurImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/CurImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..36c6035 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/CurImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..eecf83c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..eccc387 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..731f6fd Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-36.pyc 
differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ExifTags.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ExifTags.cpython-36.pyc new file mode 100644 index 0000000..3acdd33 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ExifTags.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FitsStubImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FitsStubImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..b4fa2f5 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FitsStubImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FliImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FliImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..944c3c7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FliImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FontFile.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FontFile.cpython-36.pyc new file mode 100644 index 0000000..eea5351 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FontFile.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FpxImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FpxImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..f036215 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FpxImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FtexImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FtexImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..d2f00ae Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/FtexImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GbrImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GbrImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..b4c6fd3 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GbrImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GdImageFile.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GdImageFile.cpython-36.pyc new file mode 100644 index 0000000..befa6cc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GdImageFile.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GifImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GifImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..228bac2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GifImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GimpGradientFile.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GimpGradientFile.cpython-36.pyc new file mode 100644 index 0000000..22fbc8c Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GimpGradientFile.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-36.pyc new file mode 100644 index 0000000..f97fb87 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GribStubImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GribStubImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..81c928a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/GribStubImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/Hdf5StubImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/Hdf5StubImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..1b3006c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/Hdf5StubImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/IcnsImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/IcnsImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..f5c2f02 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/IcnsImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/IcoImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/IcoImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..6c86036 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/IcoImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..2d914f4 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/Image.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/Image.cpython-36.pyc new file mode 100644 index 0000000..11317fc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/Image.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageChops.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageChops.cpython-36.pyc new file mode 100644 index 0000000..e1a97c2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageChops.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageCms.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageCms.cpython-36.pyc new file mode 100644 index 0000000..587f26a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageCms.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageColor.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageColor.cpython-36.pyc new file mode 100644 index 0000000..0d9f65f Binary files 
/dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageColor.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageDraw.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageDraw.cpython-36.pyc new file mode 100644 index 0000000..50b73be Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageDraw.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageDraw2.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageDraw2.cpython-36.pyc new file mode 100644 index 0000000..1f8c19c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageDraw2.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageEnhance.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageEnhance.cpython-36.pyc new file mode 100644 index 0000000..e443995 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageEnhance.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageFile.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageFile.cpython-36.pyc new file mode 100644 index 0000000..7654b29 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageFile.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageFilter.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageFilter.cpython-36.pyc new file mode 100644 index 0000000..7699c77 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageFilter.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageFont.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageFont.cpython-36.pyc new file mode 100644 index 0000000..5ea27a0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageFont.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageGrab.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageGrab.cpython-36.pyc new file mode 100644 index 0000000..4a5bd41 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageGrab.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageMath.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageMath.cpython-36.pyc new file mode 100644 index 0000000..dce3885 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageMath.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageMode.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageMode.cpython-36.pyc new file mode 100644 index 0000000..41e0f8d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageMode.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageMorph.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageMorph.cpython-36.pyc new file mode 100644 index 0000000..bfe79e1 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageMorph.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageOps.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageOps.cpython-36.pyc new file mode 100644 index 0000000..3c3ca0e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageOps.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImagePalette.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImagePalette.cpython-36.pyc new file mode 100644 index 0000000..d8f9c33 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImagePalette.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImagePath.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImagePath.cpython-36.pyc new file mode 100644 index 0000000..1a6148d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImagePath.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageQt.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageQt.cpython-36.pyc new file mode 100644 index 0000000..d09d809 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageQt.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageSequence.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageSequence.cpython-36.pyc new file mode 100644 index 0000000..6978122 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageSequence.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageShow.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageShow.cpython-36.pyc new file mode 100644 index 0000000..044b7d2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageShow.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageStat.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageStat.cpython-36.pyc new file mode 100644 index 0000000..4d802eb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageStat.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageTk.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageTk.cpython-36.pyc new file mode 100644 index 0000000..c24d5e1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageTk.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageTransform.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageTransform.cpython-36.pyc new file mode 100644 index 0000000..cddc5c6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageTransform.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageWin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageWin.cpython-36.pyc new file mode 100644 index 0000000..7ffda00 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImageWin.cpython-36.pyc differ 
diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImtImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImtImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..8ceba99 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/ImtImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/IptcImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/IptcImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..df3a439 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/IptcImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/Jpeg2KImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/Jpeg2KImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..c0ada25 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/Jpeg2KImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/JpegImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/JpegImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..5f2776b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/JpegImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/JpegPresets.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/JpegPresets.cpython-36.pyc new file mode 100644 index 0000000..8b4eddb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/JpegPresets.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/McIdasImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/McIdasImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..86387ff Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/McIdasImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MicImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MicImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..3ef269a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MicImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MpegImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MpegImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..f0f8f5f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MpegImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MpoImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MpoImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..15a5325 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MpoImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MspImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MspImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..d8d2a4e Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/MspImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PSDraw.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PSDraw.cpython-36.pyc new file mode 100644 index 0000000..3df7c9d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PSDraw.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PaletteFile.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PaletteFile.cpython-36.pyc new file mode 100644 index 0000000..42be410 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PaletteFile.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PalmImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PalmImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..bbf2c62 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PalmImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PcdImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PcdImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..4183f92 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PcdImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PcfFontFile.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PcfFontFile.cpython-36.pyc new file mode 100644 index 0000000..d808529 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PcfFontFile.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PcxImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PcxImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..a4802d5 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PcxImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PdfImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PdfImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..3cef10d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PdfImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PdfParser.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PdfParser.cpython-36.pyc new file mode 100644 index 0000000..a7879e2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PdfParser.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PixarImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PixarImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..184a7bc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PixarImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PngImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PngImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..70d6387 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PngImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PpmImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PpmImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..b82a087 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PpmImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PsdImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PsdImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..8596606 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PsdImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PyAccess.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PyAccess.cpython-36.pyc new file mode 100644 index 0000000..65d30ec Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/PyAccess.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/SgiImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/SgiImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..739ec7b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/SgiImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/SpiderImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/SpiderImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..994b8da Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/SpiderImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/SunImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/SunImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..ef37d2b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/SunImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TarIO.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TarIO.cpython-36.pyc new file mode 100644 index 0000000..04a3902 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TarIO.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TgaImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TgaImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..b6eb8e7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TgaImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TiffImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TiffImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..da4a1d8 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TiffImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TiffTags.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TiffTags.cpython-36.pyc new file mode 100644 index 0000000..457e09e Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/TiffTags.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/WalImageFile.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/WalImageFile.cpython-36.pyc new file mode 100644 index 0000000..ecd7b16 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/WalImageFile.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/WebPImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/WebPImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..25ff0c0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/WebPImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/WmfImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/WmfImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..809157e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/WmfImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/XVThumbImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/XVThumbImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..4b0c3cf Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/XVThumbImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/XbmImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/XbmImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..9abd6a6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/XbmImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/XpmImagePlugin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/XpmImagePlugin.cpython-36.pyc new file mode 100644 index 0000000..038d964 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/XpmImagePlugin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..3b07c09 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/__main__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/__main__.cpython-36.pyc new file mode 100644 index 0000000..e105452 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/__main__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_binary.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_binary.cpython-36.pyc new file mode 100644 index 0000000..86c7a57 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_binary.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_tkinter_finder.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_tkinter_finder.cpython-36.pyc new file mode 100644 index 0000000..c6e0ec6 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_tkinter_finder.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_util.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_util.cpython-36.pyc new file mode 100644 index 0000000..66088fe Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_util.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_version.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_version.cpython-36.pyc new file mode 100644 index 0000000..b51f374 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/_version.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/__pycache__/features.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/features.cpython-36.pyc new file mode 100644 index 0000000..8824529 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/__pycache__/features.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/_binary.py b/minor_project/lib/python3.6/site-packages/PIL/_binary.py new file mode 100644 index 0000000..5564f45 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/_binary.py @@ -0,0 +1,92 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Binary input/output support routines. +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# Copyright (c) 2012 by Brian Crowell +# +# See the README file for information on usage and redistribution. +# + + +"""Binary input/output support routines.""" + + +from struct import pack, unpack_from + + +def i8(c): + return c if c.__class__ is int else c[0] + + +def o8(i): + return bytes((i & 255,)) + + +# Input, le = little endian, be = big endian +def i16le(c, o=0): + """ + Converts a 2-bytes (16 bits) string to an unsigned integer. 
+ + :param c: string containing bytes to convert + :param o: offset of bytes to convert in string + """ + return unpack_from("<H", c, o)[0] + + +def i32be(c, o=0): + return unpack_from(">I", c, o)[0] + + +# Output, le = little endian, be = big endian +def o16le(i): + return pack("<H", i) + + +def o32be(i): + return pack(">I", i) diff --git a/minor_project/lib/python3.6/site-packages/PIL/_imaging.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/PIL/_imaging.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..29cb5ea Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/_imaging.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/_imagingcms.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/PIL/_imagingcms.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..736c143 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/_imagingcms.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/_imagingft.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/PIL/_imagingft.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..1fdcfcb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/_imagingft.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/_imagingmath.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/PIL/_imagingmath.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..e1dfc6e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/_imagingmath.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/_imagingmorph.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/PIL/_imagingmorph.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..3ce109a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/_imagingmorph.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/_imagingtk.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/PIL/_imagingtk.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..3b073e2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/_imagingtk.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/_tkinter_finder.py b/minor_project/lib/python3.6/site-packages/PIL/_tkinter_finder.py new file mode 100644 index 0000000..7018a1b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/_tkinter_finder.py @@ -0,0 +1,9 @@ +""" Find compiled module linking to Tcl / Tk libraries +""" +import sys +from tkinter import _tkinter as tk + +if hasattr(sys, "pypy_find_executable"): + TKINTER_LIB = tk.tklib_cffi.__file__ +else: + TKINTER_LIB = tk.__file__ diff --git a/minor_project/lib/python3.6/site-packages/PIL/_util.py b/minor_project/lib/python3.6/site-packages/PIL/_util.py new file mode 100644 index 0000000..0c5d389 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/_util.py @@ -0,0 +1,19 @@ +import os +from pathlib import Path + + +def isPath(f): + return isinstance(f, (bytes, str, Path)) + + +# Checks if an object is a string, and that it points to a directory.
+def isDirectory(f): + return isPath(f) and os.path.isdir(f) + + +class deferred_error: + def __init__(self, ex): + self.ex = ex + + def __getattr__(self, elt): + raise self.ex diff --git a/minor_project/lib/python3.6/site-packages/PIL/_version.py b/minor_project/lib/python3.6/site-packages/PIL/_version.py new file mode 100644 index 0000000..8877e09 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/_version.py @@ -0,0 +1,2 @@ +# Master version for Pillow +__version__ = "8.1.0" diff --git a/minor_project/lib/python3.6/site-packages/PIL/_webp.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/PIL/_webp.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..dc3dc90 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/PIL/_webp.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/PIL/features.py b/minor_project/lib/python3.6/site-packages/PIL/features.py new file mode 100644 index 0000000..da0ca55 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/PIL/features.py @@ -0,0 +1,313 @@ +import collections +import os +import sys +import warnings + +import PIL + +from . import Image + +modules = { + "pil": ("PIL._imaging", "PILLOW_VERSION"), + "tkinter": ("PIL._tkinter_finder", None), + "freetype2": ("PIL._imagingft", "freetype2_version"), + "littlecms2": ("PIL._imagingcms", "littlecms_version"), + "webp": ("PIL._webp", "webpdecoder_version"), +} + + +def check_module(feature): + """ + Checks if a module is available. + + :param feature: The module to check for. + :returns: ``True`` if available, ``False`` otherwise. + :raises ValueError: If the module is not defined in this version of Pillow. + """ + if not (feature in modules): + raise ValueError(f"Unknown module {feature}") + + module, ver = modules[feature] + + try: + __import__(module) + return True + except ImportError: + return False + + +def version_module(feature): + """ + :param feature: The module to check for. + :returns: + The loaded version number as a string, or ``None`` if unknown or not available. + :raises ValueError: If the module is not defined in this version of Pillow. + """ + if not check_module(feature): + return None + + module, ver = modules[feature] + + if ver is None: + return None + + return getattr(__import__(module, fromlist=[ver]), ver) + + +def get_supported_modules(): + """ + :returns: A list of all supported modules. + """ + return [f for f in modules if check_module(f)] + + +codecs = { + "jpg": ("jpeg", "jpeglib"), + "jpg_2000": ("jpeg2k", "jp2klib"), + "zlib": ("zip", "zlib"), + "libtiff": ("libtiff", "libtiff"), +} + + +def check_codec(feature): + """ + Checks if a codec is available. + + :param feature: The codec to check for. + :returns: ``True`` if available, ``False`` otherwise. + :raises ValueError: If the codec is not defined in this version of Pillow. + """ + if feature not in codecs: + raise ValueError(f"Unknown codec {feature}") + + codec, lib = codecs[feature] + + return codec + "_encoder" in dir(Image.core) + + +def version_codec(feature): + """ + :param feature: The codec to check for. + :returns: + The version number as a string, or ``None`` if not available. + Checked at compile time for ``jpg``, run-time otherwise. + :raises ValueError: If the codec is not defined in this version of Pillow. 
+ """ + if not check_codec(feature): + return None + + codec, lib = codecs[feature] + + version = getattr(Image.core, lib + "_version") + + if feature == "libtiff": + return version.split("\n")[0].split("Version ")[1] + + return version + + +def get_supported_codecs(): + """ + :returns: A list of all supported codecs. + """ + return [f for f in codecs if check_codec(f)] + + +features = { + "webp_anim": ("PIL._webp", "HAVE_WEBPANIM", None), + "webp_mux": ("PIL._webp", "HAVE_WEBPMUX", None), + "transp_webp": ("PIL._webp", "HAVE_TRANSPARENCY", None), + "raqm": ("PIL._imagingft", "HAVE_RAQM", "raqm_version"), + "libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO", "libjpeg_turbo_version"), + "libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT", "imagequant_version"), + "xcb": ("PIL._imaging", "HAVE_XCB", None), +} + + +def check_feature(feature): + """ + Checks if a feature is available. + + :param feature: The feature to check for. + :returns: ``True`` if available, ``False`` if unavailable, ``None`` if unknown. + :raises ValueError: If the feature is not defined in this version of Pillow. + """ + if feature not in features: + raise ValueError(f"Unknown feature {feature}") + + module, flag, ver = features[feature] + + try: + imported_module = __import__(module, fromlist=["PIL"]) + return getattr(imported_module, flag) + except ImportError: + return None + + +def version_feature(feature): + """ + :param feature: The feature to check for. + :returns: The version number as a string, or ``None`` if not available. + :raises ValueError: If the feature is not defined in this version of Pillow. + """ + if not check_feature(feature): + return None + + module, flag, ver = features[feature] + + if ver is None: + return None + + return getattr(__import__(module, fromlist=[ver]), ver) + + +def get_supported_features(): + """ + :returns: A list of all supported features. + """ + return [f for f in features if check_feature(f)] + + +def check(feature): + """ + :param feature: A module, codec, or feature name. + :returns: + ``True`` if the module, codec, or feature is available, + ``False`` or ``None`` otherwise. + """ + + if feature in modules: + return check_module(feature) + if feature in codecs: + return check_codec(feature) + if feature in features: + return check_feature(feature) + warnings.warn(f"Unknown feature '{feature}'.", stacklevel=2) + return False + + +def version(feature): + """ + :param feature: + The module, codec, or feature to check for. + :returns: + The version number as a string, or ``None`` if unknown or not available. + """ + if feature in modules: + return version_module(feature) + if feature in codecs: + return version_codec(feature) + if feature in features: + return version_feature(feature) + return None + + +def get_supported(): + """ + :returns: A list of all supported modules, features, and codecs. + """ + + ret = get_supported_modules() + ret.extend(get_supported_features()) + ret.extend(get_supported_codecs()) + return ret + + +def pilinfo(out=None, supported_formats=True): + """ + Prints information about this installation of Pillow. + This function can be called with ``python -m PIL``. + + :param out: + The output stream to print to. Defaults to ``sys.stdout`` if ``None``. + :param supported_formats: + If ``True``, a list of all supported image file formats will be printed. 
+ """ + + if out is None: + out = sys.stdout + + Image.init() + + print("-" * 68, file=out) + print(f"Pillow {PIL.__version__}", file=out) + py_version = sys.version.splitlines() + print(f"Python {py_version[0].strip()}", file=out) + for py_version in py_version[1:]: + print(f" {py_version.strip()}", file=out) + print("-" * 68, file=out) + print( + f"Python modules loaded from {os.path.dirname(Image.__file__)}", + file=out, + ) + print( + f"Binary modules loaded from {os.path.dirname(Image.core.__file__)}", + file=out, + ) + print("-" * 68, file=out) + + for name, feature in [ + ("pil", "PIL CORE"), + ("tkinter", "TKINTER"), + ("freetype2", "FREETYPE2"), + ("littlecms2", "LITTLECMS2"), + ("webp", "WEBP"), + ("transp_webp", "WEBP Transparency"), + ("webp_mux", "WEBPMUX"), + ("webp_anim", "WEBP Animation"), + ("jpg", "JPEG"), + ("jpg_2000", "OPENJPEG (JPEG2000)"), + ("zlib", "ZLIB (PNG/ZIP)"), + ("libtiff", "LIBTIFF"), + ("raqm", "RAQM (Bidirectional Text)"), + ("libimagequant", "LIBIMAGEQUANT (Quantization method)"), + ("xcb", "XCB (X protocol)"), + ]: + if check(name): + if name == "jpg" and check_feature("libjpeg_turbo"): + v = "libjpeg-turbo " + version_feature("libjpeg_turbo") + else: + v = version(name) + if v is not None: + version_static = name in ("pil", "jpg") + if name == "littlecms2": + # this check is also in src/_imagingcms.c:setup_module() + version_static = tuple(int(x) for x in v.split(".")) < (2, 7) + t = "compiled for" if version_static else "loaded" + print("---", feature, "support ok,", t, v, file=out) + else: + print("---", feature, "support ok", file=out) + else: + print("***", feature, "support not installed", file=out) + print("-" * 68, file=out) + + if supported_formats: + extensions = collections.defaultdict(list) + for ext, i in Image.EXTENSION.items(): + extensions[i].append(ext) + + for i in sorted(Image.ID): + line = f"{i}" + if i in Image.MIME: + line = f"{line} {Image.MIME[i]}" + print(line, file=out) + + if i in extensions: + print( + "Extensions: {}".format(", ".join(sorted(extensions[i]))), file=out + ) + + features = [] + if i in Image.OPEN: + features.append("open") + if i in Image.SAVE: + features.append("save") + if i in Image.SAVE_ALL: + features.append("save_all") + if i in Image.DECODERS: + features.append("decode") + if i in Image.ENCODERS: + features.append("encode") + + print("Features: {}".format(", ".join(features)), file=out) + print("-" * 68, file=out) diff --git a/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/INSTALLER b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/LICENSE b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/LICENSE new file mode 100644 index 0000000..b73e6a2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/LICENSE @@ -0,0 +1,564 @@ +The Python Imaging Library (PIL) is + + Copyright © 1997-2011 by Secret Labs AB + Copyright © 1995-2011 by Fredrik Lundh + +Pillow is the friendly PIL fork. 
It is + + Copyright © 2010-2021 by Alex Clark and contributors + +Like PIL, Pillow is licensed under the open source HPND License: + +By obtaining, using, and/or copying this software and/or its associated +documentation, you agree that you have read, understood, and will comply +with the following terms and conditions: + +Permission to use, copy, modify, and distribute this software and its +associated documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appears in all copies, and that +both that copyright notice and this permission notice appear in supporting +documentation, and that the name of Secret Labs AB or the author not be +used in advertising or publicity pertaining to distribution of the software +without specific, written prior permission. + +SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. +IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, +INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + +---- + +FREETYPE2 + +The FreeType 2 font engine is copyrighted work and cannot be used +legally without a software license. In order to make this project +usable to a vast majority of developers, we distribute it under two +mutually exclusive open-source licenses. + +This means that *you* must choose *one* of the two licenses described +below, then obey all its terms and conditions when using FreeType 2 in +any of your projects or products. + + - The FreeType License, found in the file `FTL.TXT', which is similar + to the original BSD license *with* an advertising clause that forces + you to explicitly cite the FreeType project in your product's + documentation. All details are in the license file. This license + is suited to products which don't use the GNU General Public + License. + + Note that this license is compatible to the GNU General Public + License version 3, but not version 2. + + - The GNU General Public License version 2, found in `GPLv2.TXT' (any + later version can be used also), for programs which already use the + GPL. Note that the FTL is incompatible with GPLv2 due to its + advertisement clause. + +The contributed BDF and PCF drivers come with a license similar to that +of the X Window System. It is compatible to the above two licenses (see +file src/bdf/README and src/pcf/README). The same holds for the files +`fthash.c' and `fthash.h'; their code was part of the BDF driver in +earlier FreeType versions. + +The gzip module uses the zlib license (see src/gzip/zlib.h) which too is +compatible to the above two licenses. + +The MD5 checksum support (only used for debugging in development builds) +is in the public domain. 
+ +---- + +LCMS2 + +Little CMS +Copyright (c) 1998-2020 Marti Maria Saguer + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +---- + +LIBJPEG + +1. We don't promise that this software works. (But if you find any bugs, + please let us know!) +2. You can use this software for whatever you want. You don't have to pay us. +3. You may not pretend that you wrote this software. If you use it in a + program, you must acknowledge somewhere in your documentation that + you've used the IJG code. + +In legalese: + +The authors make NO WARRANTY or representation, either express or implied, +with respect to this software, its quality, accuracy, merchantability, or +fitness for a particular purpose. This software is provided "AS IS", and you, +its user, assume the entire risk as to its quality and accuracy. + +This software is copyright (C) 1991-2020, Thomas G. Lane, Guido Vollbeding. +All Rights Reserved except as specified below. + +Permission is hereby granted to use, copy, modify, and distribute this +software (or portions thereof) for any purpose, without fee, subject to these +conditions: +(1) If any part of the source code for this software is distributed, then this +README file must be included, with this copyright and no-warranty notice +unaltered; and any additions, deletions, or changes to the original files +must be clearly indicated in accompanying documentation. +(2) If only executable code is distributed, then the accompanying +documentation must state that "this software is based in part on the work of +the Independent JPEG Group". +(3) Permission for use of this software is granted only if the user accepts +full responsibility for any undesirable consequences; the authors accept +NO LIABILITY for damages of any kind. + +These conditions apply to any software derived from or based on the IJG code, +not just to the unmodified library. If you use our work, you ought to +acknowledge us. + +Permission is NOT granted for the use of any IJG author's name or company name +in advertising or publicity relating to this software or products derived from +it. This software may be referred to only as "the Independent JPEG Group's +software". + +We specifically permit and encourage the use of this software as the basis of +commercial products, provided that all warranty or liability claims are +assumed by the product vendor. + +---- + +LIBLZMA + +XZ Utils Licensing +================== + + Different licenses apply to different files in this package. 
Here + is a rough summary of which licenses apply to which parts of this + package (but check the individual files to be sure!): + + - liblzma is in the public domain. + + - xz, xzdec, and lzmadec command line tools are in the public + domain unless GNU getopt_long had to be compiled and linked + in from the lib directory. The getopt_long code is under + GNU LGPLv2.1+. + + - The scripts to grep, diff, and view compressed files have been + adapted from gzip. These scripts and their documentation are + under GNU GPLv2+. + + - All the documentation in the doc directory and most of the + XZ Utils specific documentation files in other directories + are in the public domain. + + - Translated messages are in the public domain. + + - The build system contains public domain files, and files that + are under GNU GPLv2+ or GNU GPLv3+. None of these files end up + in the binaries being built. + + - Test files and test code in the tests directory, and debugging + utilities in the debug directory are in the public domain. + + - The extra directory may contain public domain files, and files + that are under various free software licenses. + + You can do whatever you want with the files that have been put into + the public domain. If you find public domain legally problematic, + take the previous sentence as a license grant. If you still find + the lack of copyright legally problematic, you have too many + lawyers. + + As usual, this software is provided "as is", without any warranty. + + If you copy significant amounts of public domain code from XZ Utils + into your project, acknowledging this somewhere in your software is + polite (especially if it is proprietary, non-free software), but + naturally it is not legally required. Here is an example of a good + notice to put into "about box" or into documentation: + + This software includes code from XZ Utils . + + The following license texts are included in the following files: + - COPYING.LGPLv2.1: GNU Lesser General Public License version 2.1 + - COPYING.GPLv2: GNU General Public License version 2 + - COPYING.GPLv3: GNU General Public License version 3 + + Note that the toolchain (compiler, linker etc.) may add some code + pieces that are copyrighted. Thus, it is possible that e.g. liblzma + binary wouldn't actually be in the public domain in its entirety + even though it contains no copyrighted code from the XZ Utils source + package. + + If you have questions, don't hesitate to ask the author(s) for more + information. + +---- + +LIBTIFF + +Copyright (c) 1988-1997 Sam Leffler +Copyright (c) 1991-1997 Silicon Graphics, Inc. + +Permission to use, copy, modify, distribute, and sell this software and +its documentation for any purpose is hereby granted without fee, provided +that (i) the above copyright notices and this permission notice appear in +all copies of the software and related documentation, and (ii) the names of +Sam Leffler and Silicon Graphics may not be used in any advertising or +publicity relating to the software without the specific, prior written +permission of Sam Leffler and Silicon Graphics. + +THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, +EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY +WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. 
+ +IN NO EVENT SHALL SAM LEFFLER OR SILICON GRAPHICS BE LIABLE FOR +ANY SPECIAL, INCIDENTAL, INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER OR NOT ADVISED OF THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF +LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +OF THIS SOFTWARE. + +---- + +LIBWEBP + +Copyright (c) 2010, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + * Neither the name of Google nor the names of its contributors may + be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---- + +OPENJPEG + +* + * The copyright in this software is being made available under the 2-clauses + * BSD License, included below. This software may be subject to other third + * party and contributor rights, including patent rights, and no such rights + * are granted under this license. + * + * Copyright (c) 2002-2014, Universite catholique de Louvain (UCL), Belgium + * Copyright (c) 2002-2014, Professor Benoit Macq + * Copyright (c) 2003-2014, Antonin Descampe + * Copyright (c) 2003-2009, Francois-Olivier Devaux + * Copyright (c) 2005, Herve Drolon, FreeImage Team + * Copyright (c) 2002-2003, Yannick Verschueren + * Copyright (c) 2001-2003, David Janssens + * Copyright (c) 2011-2012, Centre National d'Etudes Spatiales (CNES), France + * Copyright (c) 2012, CS Systemes d'Information, France + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS `AS IS' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +--- + +COPYRIGHT NOTICE, DISCLAIMER, and LICENSE +========================================= + +PNG Reference Library License version 2 +--------------------------------------- + + * Copyright (c) 1995-2019 The PNG Reference Library Authors. + * Copyright (c) 2018-2019 Cosmin Truta. + * Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson. + * Copyright (c) 1996-1997 Andreas Dilger. + * Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc. + +The software is supplied "as is", without warranty of any kind, +express or implied, including, without limitation, the warranties +of merchantability, fitness for a particular purpose, title, and +non-infringement. In no event shall the Copyright owners, or +anyone distributing the software, be liable for any damages or +other liability, whether in contract, tort or otherwise, arising +from, out of, or in connection with the software, or the use or +other dealings in the software, even if advised of the possibility +of such damage. + +Permission is hereby granted to use, copy, modify, and distribute +this software, or portions hereof, for any purpose, without fee, +subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you + must not claim that you wrote the original software. If you + use this software in a product, an acknowledgment in the product + documentation would be appreciated, but is not required. + + 2. Altered source versions must be plainly marked as such, and must + not be misrepresented as being the original software. + + 3. This Copyright notice may not be removed or altered from any + source or altered source distribution. + + +PNG Reference Library License version 1 (for libpng 0.5 through 1.6.35) +----------------------------------------------------------------------- + +libpng versions 1.0.7, July 1, 2000, through 1.6.35, July 15, 2018 are +Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson, are +derived from libpng-1.0.6, and are distributed according to the same +disclaimer and license as libpng-1.0.6 with the following individuals +added to the list of Contributing Authors: + + Simon-Pierre Cadieux + Eric S. Raymond + Mans Rullgard + Cosmin Truta + Gilles Vollant + James Yu + Mandar Sahastrabuddhe + Google Inc. + Vadim Barkov + +and with the following additions to the disclaimer: + + There is no warranty against interference with your enjoyment of + the library or against infringement. There is no warranty that our + efforts or the library will fulfill any of your particular purposes + or needs. This library is provided with all faults, and the entire + risk of satisfactory quality, performance, accuracy, and effort is + with the user. + +Some files in the "contrib" directory and some configure-generated +files that are distributed with libpng have other copyright owners, and +are released under other open source licenses. 
+ +libpng versions 0.97, January 1998, through 1.0.6, March 20, 2000, are +Copyright (c) 1998-2000 Glenn Randers-Pehrson, are derived from +libpng-0.96, and are distributed according to the same disclaimer and +license as libpng-0.96, with the following individuals added to the +list of Contributing Authors: + + Tom Lane + Glenn Randers-Pehrson + Willem van Schaik + +libpng versions 0.89, June 1996, through 0.96, May 1997, are +Copyright (c) 1996-1997 Andreas Dilger, are derived from libpng-0.88, +and are distributed according to the same disclaimer and license as +libpng-0.88, with the following individuals added to the list of +Contributing Authors: + + John Bowler + Kevin Bracey + Sam Bushell + Magnus Holmgren + Greg Roelofs + Tom Tanner + +Some files in the "scripts" directory have other copyright owners, +but are released under this license. + +libpng versions 0.5, May 1995, through 0.88, January 1996, are +Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc. + +For the purposes of this copyright and license, "Contributing Authors" +is defined as the following set of individuals: + + Andreas Dilger + Dave Martindale + Guy Eric Schalnat + Paul Schmidt + Tim Wegner + +The PNG Reference Library is supplied "AS IS". The Contributing +Authors and Group 42, Inc. disclaim all warranties, expressed or +implied, including, without limitation, the warranties of +merchantability and of fitness for any purpose. The Contributing +Authors and Group 42, Inc. assume no liability for direct, indirect, +incidental, special, exemplary, or consequential damages, which may +result from the use of the PNG Reference Library, even if advised of +the possibility of such damage. + +Permission is hereby granted to use, copy, modify, and distribute this +source code, or portions hereof, for any purpose, without fee, subject +to the following restrictions: + + 1. The origin of this source code must not be misrepresented. + + 2. Altered versions must be plainly marked as such and must not + be misrepresented as being the original source. + + 3. This Copyright notice may not be removed or altered from any + source or altered source distribution. + +The Contributing Authors and Group 42, Inc. specifically permit, +without fee, and encourage the use of this source code as a component +to supporting the PNG file format in commercial products. If you use +this source code in a product, acknowledgment is not required but would +be appreciated. + +---- + +XAU + +Copyright 1988, 1993, 1994, 1998 The Open Group + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation. + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN +AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +Except as contained in this notice, the name of The Open Group shall not be +used in advertising or otherwise to promote the sale, use or other dealings +in this Software without prior written authorization from The Open Group. + +---- + +XCB + +Copyright (C) 2001-2006 Bart Massey, Jamey Sharp, and Josh Triplett. +All Rights Reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, +sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall +be included in all copies or substantial portions of the +Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the names of the authors +or their institutions shall not be used in advertising or +otherwise to promote the sale, use or other dealings in this +Software without prior written authorization from the +authors. + +---- + +XDMCP + +Copyright 1989, 1998 The Open Group + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation. + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN +AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of The Open Group shall not be +used in advertising or otherwise to promote the sale, use or other dealings +in this Software without prior written authorization from The Open Group. + +Author: Keith Packard, MIT X Consortium + +---- + +ZLIB + + (C) 1995-2017 Jean-loup Gailly and Mark Adler + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. 
Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + +If you use the zlib library in a product, we would appreciate *not* receiving +lengthy legal documents to sign. The sources are provided for free but without +warranty of any kind. The library has been entirely written by Jean-loup +Gailly and Mark Adler; it does not include third-party code. + +If you redistribute modified sources, we would appreciate that you include in +the file ChangeLog history information documenting your changes. Please read +the FAQ for more information on the distribution of modified source versions. \ No newline at end of file diff --git a/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/METADATA b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/METADATA new file mode 100644 index 0000000..7e86d06 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/METADATA @@ -0,0 +1,137 @@ +Metadata-Version: 2.1 +Name: Pillow +Version: 8.1.0 +Summary: Python Imaging Library (Fork) +Home-page: https://python-pillow.org +Author: Alex Clark (PIL Fork Author) +Author-email: aclark@python-pillow.org +License: HPND +Project-URL: Documentation, https://pillow.readthedocs.io +Project-URL: Source, https://github.com/python-pillow/Pillow +Project-URL: Funding, https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=pypi +Project-URL: Release notes, https://pillow.readthedocs.io/en/stable/releasenotes/index.html +Project-URL: Changelog, https://github.com/python-pillow/Pillow/blob/master/CHANGES.rst +Keywords: Imaging +Platform: UNKNOWN +Classifier: Development Status :: 6 - Mature +Classifier: License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND) +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Multimedia :: Graphics +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Digital Camera +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Screen Capture +Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion +Classifier: Topic :: Multimedia :: Graphics :: Viewers +Requires-Python: >=3.6 +Description-Content-Type: text/markdown + +
+ Pillow logo
+ +# Pillow + +## Python Imaging Library (Fork) + +Pillow is the friendly PIL fork by [Alex Clark and +Contributors](https://github.com/python-pillow/Pillow/graphs/contributors). +PIL is the Python Imaging Library by Fredrik Lundh and Contributors. +As of 2019, Pillow development is +[supported by Tidelift](https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=readme&utm_campaign=enterprise). +[Badge table (HTML badge markup stripped) — rows: docs (Documentation Status); tests (GitHub Actions Lint / Test Linux and macOS / Test Windows / Test Docker, AppVeyor CI Windows, Travis CI macOS, code coverage); package (Zenodo, Tidelift, newest PyPI version, number of PyPI downloads); social (Gitter chat https://gitter.im/python-pillow/Pillow, Twitter https://twitter.com/PythonPillow)]
+ +## Overview + +The Python Imaging Library adds image processing capabilities to your Python interpreter. + +This library provides extensive file format support, an efficient internal representation, and fairly powerful image processing capabilities. + +The core image library is designed for fast access to data stored in a few basic pixel formats. It should provide a solid foundation for a general image processing tool. + +## More Information + +- [Documentation](https://pillow.readthedocs.io/) + - [Installation](https://pillow.readthedocs.io/en/latest/installation.html) + - [Handbook](https://pillow.readthedocs.io/en/latest/handbook/index.html) +- [Contribute](https://github.com/python-pillow/Pillow/blob/master/.github/CONTRIBUTING.md) + - [Issues](https://github.com/python-pillow/Pillow/issues) + - [Pull requests](https://github.com/python-pillow/Pillow/pulls) +- [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html) +- [Changelog](https://github.com/python-pillow/Pillow/blob/master/CHANGES.rst) + - [Pre-fork](https://github.com/python-pillow/Pillow/blob/master/CHANGES.rst#pre-fork) + +## Report a Vulnerability + +To report a security vulnerability, please follow the procedure described in the [Tidelift security policy](https://tidelift.com/docs/security). + + diff --git a/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/RECORD b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/RECORD new file mode 100644 index 0000000..1fd4190 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/RECORD @@ -0,0 +1,212 @@ +PIL/BdfFontFile.py,sha256=hRnSgFZOIiTgWfJIaRHRQpU4TKVok2E31KJY6sbZPwc,2817 +PIL/BlpImagePlugin.py,sha256=B3Hu6Uc8UMddeiKIKQ49h_pikTyBXHRChjk47XMXHfI,14338 +PIL/BmpImagePlugin.py,sha256=JNcxnXmnv4s_TuFKOqqrjsVHC6cyVSN2BiA2fTiY7Rg,14164 +PIL/BufrStubImagePlugin.py,sha256=Zq60GwcqQJTmZJrA9EQq94QvYpNqwYvQzHojh4U7SDw,1520 +PIL/ContainerIO.py,sha256=1U15zUXjWO8uWK-MyCp66Eh7djQEU-oUeCDoBqewNkA,2883 +PIL/CurImagePlugin.py,sha256=er_bI3V1Ezly0QfFJq0fZMlGwrD5izDutwF1FrOwiMA,1679 +PIL/DcxImagePlugin.py,sha256=bfESLTji9GerqI4oYsy5oTFyRMlr2mjSsXzpY9IuLsk,2145 +PIL/DdsImagePlugin.py,sha256=DxDAfsdOvcjyLvMwbcZWLbYBxNv2sxNUOZ5dlOPOPhQ,6009 +PIL/EpsImagePlugin.py,sha256=12udC0cavBCfJ0hfv3nDrn8HHxNIAzQ9nVbGQmRsCaM,12096 +PIL/ExifTags.py,sha256=fx7S0CnztT9ptHT2HGuMYteI99CMVrD73IHeRI5OFjU,9009 +PIL/FitsStubImagePlugin.py,sha256=8Zq2D9ReJE-stBppxB_ELX3wxcS0_BDGg6Xce7sWpaU,1624 +PIL/FliImagePlugin.py,sha256=pGeC1JI6d5xdYWRhsKz0_3yeFzGII_jYbQhJYNo6n7Y,4260 +PIL/FontFile.py,sha256=LkQcbwUu1C4fokMnbg-ao9ksp2RX-saaPRie-z2rpH4,2765 +PIL/FpxImagePlugin.py,sha256=nKGioxa5C0q9X9qva3t_htRV_3jXQcFkclVxTEaSusk,6658 +PIL/FtexImagePlugin.py,sha256=d5xy9hZ6vzmzfTS7eUM_LaB8NHgV6izj1VKTBHOfFEQ,3309 +PIL/GbrImagePlugin.py,sha256=u9kOIdBxYMRrXfXfIwGcz0uyvvxNRCwO3U1xcfa51T4,2794 +PIL/GdImageFile.py,sha256=JFWSUssG1z1r884GQtBbZ3T7uhPF4cDXSuW3ctgf3TU,2465 +PIL/GifImagePlugin.py,sha256=c0OnvrO4R74seBRjSDgWyfomkGf29RrhAO9Ljd7A-7o,28917 +PIL/GimpGradientFile.py,sha256=G0ClRmjRHIJoU0nmG-P-tgehLHZip5i0rY4-5pjJ7bc,3353 +PIL/GimpPaletteFile.py,sha256=_wWvNmB40AfQ1M5sTxoYYXOMApWQji7rrubqZhfd1dU,1274 +PIL/GribStubImagePlugin.py,sha256=sSBrTisTcunuC0WcSQ4_55nV6uFvLCQ0JLSd62dgURw,1515 +PIL/Hdf5StubImagePlugin.py,sha256=zjtFPZIcVkWXvYRPnHow6XA9kElEi772w7PFSuEqmq4,1517 +PIL/IcnsImagePlugin.py,sha256=S_TEcoc96xvum6IgN0C5J2bIuk7kzPyBbwjkdAclEew,11684 +PIL/IcoImagePlugin.py,sha256=FW0hQQknL2mQz8kpKKGuFRQGXa13oco6VxRQ0mV1RDc,10287 
+PIL/ImImagePlugin.py,sha256=RFFyRlFJTVuti-TZ9yWsqP7vJJydgX1MC6mjYwwdw-0,10729 +PIL/Image.py,sha256=WyxivptE6qQtik2pLO1201eQ3CGJojxFFoVNZU00KYE,116624 +PIL/ImageChops.py,sha256=HOGSnuU4EcCbdeUzEGPm54zewppHWWe12XLyOLLPgCw,7297 +PIL/ImageCms.py,sha256=NZs-joebSCHg2J0fKXASLNgiXl7FRfWmUn-IW7AkonQ,37087 +PIL/ImageColor.py,sha256=zUDJ9l_gRzB2Xb5JX5lfDhOdsL8l7hmLte7YEmVHI3s,8638 +PIL/ImageDraw.py,sha256=lVcv9yoadOXjcV9PaaYrMpBiP9_j2Y-GBayaPd-YZJQ,30660 +PIL/ImageDraw2.py,sha256=oBhpBTZhx3bd4D0s8E2kDjBzgThRkDU_TE_987l501k,5019 +PIL/ImageEnhance.py,sha256=CJnCouiBmxN2fE0xW7m_uMdBqcm-Fp0S3ruHhkygal4,3190 +PIL/ImageFile.py,sha256=Oe8P0zXRFcACneJqwkNSLrnZnnyIBzpE1nxIjmMcJvo,21246 +PIL/ImageFilter.py,sha256=3zaG3rvTfdox6zKcXq3sbHHwEfl7E1jJd5_KTirH2hw,15827 +PIL/ImageFont.py,sha256=zYN6axJ_u7Jt62bH0o3SaEO6Cxwk79PMUn5FlWY0S7s,45202 +PIL/ImageGrab.py,sha256=2o1aA0_vP-KeRJsJtIxYhi61yCK4k_Khh6NHQD7HO2Q,3625 +PIL/ImageMath.py,sha256=iQPtbXgdhcCchGTXbDop7AiI_Fe-fNmq8m1YHsHMBgc,7048 +PIL/ImageMode.py,sha256=gI88wDgAc4y-m46vTA4zPmipG12wpYLNXPRHyPZBZaY,1638 +PIL/ImageMorph.py,sha256=TM0-barsZdbHEyQ_wB2SrdZvJBkRnWnUbDzGVzDECL4,7854 +PIL/ImageOps.py,sha256=8n-F_HEVRt2l7SENEoiSHpWKn6EehLT0tYfZKKCGB7I,18462 +PIL/ImagePalette.py,sha256=ZjkQry8gfuET-QmG8P18UcZlttKUQjZUgQ3EKh3E6Js,6350 +PIL/ImagePath.py,sha256=lVmH1-lCd0SyrFoqyhlstAFW2iJuC14fPcW8iewvxCQ,336 +PIL/ImageQt.py,sha256=xAXlhkwENjox8CcQYKEq9CJjNk8bzpIH8C_XsLBSDzM,5994 +PIL/ImageSequence.py,sha256=3djA7vDH6wafTGbt4e_lPlVhy2TaKfdSrA1XQ4n-Uoc,1850 +PIL/ImageShow.py,sha256=Df6u8oiv8O4VBmOyfqj53AJAN3kiWsOb97xJaevj8XM,6295 +PIL/ImageStat.py,sha256=PieQi44mRHE6jod7NqujwGr6WCntuZuNGmC2z9PaoDY,3901 +PIL/ImageTk.py,sha256=rLPqAnLH61y2XRHgRPUdesYLQqnDQ__LeRK66KL_fPQ,9324 +PIL/ImageTransform.py,sha256=V2l6tsjmymMIF7HQBMI21UPn4mlicarrm4NF3Kazvio,2843 +PIL/ImageWin.py,sha256=1MQBJS7tVrQzI9jN0nmeNeFpIaq8fXra9kQocHkiFxM,7191 +PIL/ImtImagePlugin.py,sha256=cn60lqUVnK2oh_sPqPBORr_rZ4zuF_6FU0V96IAh8Ww,2203 +PIL/IptcImagePlugin.py,sha256=-RZBUUodHcF5wLKanW1MxJj7cbLOpx5LvXqm0vDM22U,5714 +PIL/Jpeg2KImagePlugin.py,sha256=3NAbqBmvSU_fHUIGspXFsVQV7uYMydN2Rj8jP2bGdiA,8722 +PIL/JpegImagePlugin.py,sha256=cpE4tQ3IqdtP3__RjZDLOEygwoFnska6MgbL8o6NXuI,27740 +PIL/JpegPresets.py,sha256=6gYstS6ZLaE0ENw7qcz1vlNtF2HMGSKx5Sm-KfKKCJ0,12709 +PIL/McIdasImagePlugin.py,sha256=LrP5nA7l8IQG3WhlMI0Xs8fGXY_uf6IDmzNCERl3tGw,1754 +PIL/MicImagePlugin.py,sha256=t8iqakHjOilWVEOrjTISN2-ctxkTYSZgzmtxf4ufrfg,2606 +PIL/MpegImagePlugin.py,sha256=n16Zgdy8Hcfke16lQwZWs53PZq4BA_OxPCMPDkW62nw,1803 +PIL/MpoImagePlugin.py,sha256=lbBbUp-o6xVnfaX3sQYpd7RN4-5-KHcbwi0Km2vN0eg,4244 +PIL/MspImagePlugin.py,sha256=Rjs-Vw2v1RtdP0V3RS6cf_1edF9FIpn9fYApatwLXXM,5524 +PIL/PSDraw.py,sha256=caJ_uayWqTlk0EhPSKTUOevVooEfTV1ny3jw2PtteoI,6670 +PIL/PaletteFile.py,sha256=s3KtsDuY5S04MKDyiXK3iIbiOGzV9PvCDUpOQHI7yqc,1106 +PIL/PalmImagePlugin.py,sha256=lTVwwSPFrQ-IPFGU8_gRCMZ1Lb73cuVhQ-nkx1Q0oqc,9108 +PIL/PcdImagePlugin.py,sha256=cnBm_xKcpLGT6hZ8QKai9Up0gZERMxZwhDXl1hQtBm0,1476 +PIL/PcfFontFile.py,sha256=njhgblsjSVcITVz1DpWdEligmJgPMh5nTk_zDDWWTik,6348 +PIL/PcxImagePlugin.py,sha256=KVYbCKGGp2DcsYwZSv3YYo3GrcxNTQAniZKlzKvHowg,5682 +PIL/PdfImagePlugin.py,sha256=H2zXDGd_he0MO411T_yREpn9IoA2nifXzaRn3WBGYqk,7665 +PIL/PdfParser.py,sha256=QIWh2AqFbdTM52XNOcf-nHFjLlk8mDatb5_qdpO2v2U,34385 +PIL/PixarImagePlugin.py,sha256=5MMcrrShVr511QKevK1ziKyJn0WllokWQxBhs8NWttY,1631 +PIL/PngImagePlugin.py,sha256=S7cPWcRuk98HDquNoBFIKEBJQ_5jmgaSgRIdWL-LIQs,43885 +PIL/PpmImagePlugin.py,sha256=UNwCp3h7psEK8i0p3P93VVXUBz9_8tUVzUWsITux6HQ,4447 
+PIL/PsdImagePlugin.py,sha256=OSfBzyxW2_DclFj6k7ElqERo9oFIBrJ5KH6vRhcDjw4,7725 +PIL/PyAccess.py,sha256=U_N4WB6yg_qpWKo1X7avE98p6Ve3bqqnWOGX6DeyE4U,9592 +PIL/SgiImagePlugin.py,sha256=fdY5GOfjLgGVV5nvZ9gGomYboQ0-uPqyosDAU5M9eeU,6064 +PIL/SpiderImagePlugin.py,sha256=1m1xCZ2S7i2w4f-Tz2FSNkqUzqqZzYaZetKXevmsx6Y,9534 +PIL/SunImagePlugin.py,sha256=bnjnVFRjvApCH1QC1F9HeynoCe5AZk3wa1tOhPvHzKU,4282 +PIL/TarIO.py,sha256=E_pjAxk9wHezXUuR_99liySBXfJoL2wjzdNDf0g1hTo,1440 +PIL/TgaImagePlugin.py,sha256=UmGHaYcHHz3V1T87ZfFNR5TvP1QnQ1QG_EfuJPLmDpw,6277 +PIL/TiffImagePlugin.py,sha256=2RhRdr-ZeaZibTSW5jg-bWTgznQDhoQRwPtvpx4LWIg,68529 +PIL/TiffTags.py,sha256=ZAu3cTEiWLV5QkVFSzpY8LDdXv_RPhQoSa4XS8PYVmE,14560 +PIL/WalImageFile.py,sha256=Mfwtpwi-CgRKGORZbdc35uVG0XdelIEIafmtzh0aTKw,5531 +PIL/WebPImagePlugin.py,sha256=RKHtxgrWjuxN1fQF8AX0ckl3cEJkltOiKJrHYh--gk4,10795 +PIL/WmfImagePlugin.py,sha256=Ht5JppC4GZiYz8GNaww4IXEXTJkSQK7h-A2tt4AEvSI,4672 +PIL/XVThumbImagePlugin.py,sha256=zmZ8Z4B8Kr6NOdUqSipW9_X5mKiLBLs-wxvPRRg1l0M,1940 +PIL/XbmImagePlugin.py,sha256=oIEt_uqwKKU6lLS_IVFwEjotwE1FI4_IHUnx_6Ul_gk,2430 +PIL/XpmImagePlugin.py,sha256=1EBt-g678p0A0NXOkxq7sGM8dymneDMHHQmwJzAbrlw,3062 +PIL/__init__.py,sha256=NnlpBykSA7dIeA6k7aHKD2ikvrCKhpieYVv7UieVoyk,3260 +PIL/__main__.py,sha256=axR7PO-HtXp-o0rBhKIxs0wark0rBfaDIhAIWqtWUo4,41 +PIL/__pycache__/BdfFontFile.cpython-36.pyc,, +PIL/__pycache__/BlpImagePlugin.cpython-36.pyc,, +PIL/__pycache__/BmpImagePlugin.cpython-36.pyc,, +PIL/__pycache__/BufrStubImagePlugin.cpython-36.pyc,, +PIL/__pycache__/ContainerIO.cpython-36.pyc,, +PIL/__pycache__/CurImagePlugin.cpython-36.pyc,, +PIL/__pycache__/DcxImagePlugin.cpython-36.pyc,, +PIL/__pycache__/DdsImagePlugin.cpython-36.pyc,, +PIL/__pycache__/EpsImagePlugin.cpython-36.pyc,, +PIL/__pycache__/ExifTags.cpython-36.pyc,, +PIL/__pycache__/FitsStubImagePlugin.cpython-36.pyc,, +PIL/__pycache__/FliImagePlugin.cpython-36.pyc,, +PIL/__pycache__/FontFile.cpython-36.pyc,, +PIL/__pycache__/FpxImagePlugin.cpython-36.pyc,, +PIL/__pycache__/FtexImagePlugin.cpython-36.pyc,, +PIL/__pycache__/GbrImagePlugin.cpython-36.pyc,, +PIL/__pycache__/GdImageFile.cpython-36.pyc,, +PIL/__pycache__/GifImagePlugin.cpython-36.pyc,, +PIL/__pycache__/GimpGradientFile.cpython-36.pyc,, +PIL/__pycache__/GimpPaletteFile.cpython-36.pyc,, +PIL/__pycache__/GribStubImagePlugin.cpython-36.pyc,, +PIL/__pycache__/Hdf5StubImagePlugin.cpython-36.pyc,, +PIL/__pycache__/IcnsImagePlugin.cpython-36.pyc,, +PIL/__pycache__/IcoImagePlugin.cpython-36.pyc,, +PIL/__pycache__/ImImagePlugin.cpython-36.pyc,, +PIL/__pycache__/Image.cpython-36.pyc,, +PIL/__pycache__/ImageChops.cpython-36.pyc,, +PIL/__pycache__/ImageCms.cpython-36.pyc,, +PIL/__pycache__/ImageColor.cpython-36.pyc,, +PIL/__pycache__/ImageDraw.cpython-36.pyc,, +PIL/__pycache__/ImageDraw2.cpython-36.pyc,, +PIL/__pycache__/ImageEnhance.cpython-36.pyc,, +PIL/__pycache__/ImageFile.cpython-36.pyc,, +PIL/__pycache__/ImageFilter.cpython-36.pyc,, +PIL/__pycache__/ImageFont.cpython-36.pyc,, +PIL/__pycache__/ImageGrab.cpython-36.pyc,, +PIL/__pycache__/ImageMath.cpython-36.pyc,, +PIL/__pycache__/ImageMode.cpython-36.pyc,, +PIL/__pycache__/ImageMorph.cpython-36.pyc,, +PIL/__pycache__/ImageOps.cpython-36.pyc,, +PIL/__pycache__/ImagePalette.cpython-36.pyc,, +PIL/__pycache__/ImagePath.cpython-36.pyc,, +PIL/__pycache__/ImageQt.cpython-36.pyc,, +PIL/__pycache__/ImageSequence.cpython-36.pyc,, +PIL/__pycache__/ImageShow.cpython-36.pyc,, +PIL/__pycache__/ImageStat.cpython-36.pyc,, +PIL/__pycache__/ImageTk.cpython-36.pyc,, 
+PIL/__pycache__/ImageTransform.cpython-36.pyc,, +PIL/__pycache__/ImageWin.cpython-36.pyc,, +PIL/__pycache__/ImtImagePlugin.cpython-36.pyc,, +PIL/__pycache__/IptcImagePlugin.cpython-36.pyc,, +PIL/__pycache__/Jpeg2KImagePlugin.cpython-36.pyc,, +PIL/__pycache__/JpegImagePlugin.cpython-36.pyc,, +PIL/__pycache__/JpegPresets.cpython-36.pyc,, +PIL/__pycache__/McIdasImagePlugin.cpython-36.pyc,, +PIL/__pycache__/MicImagePlugin.cpython-36.pyc,, +PIL/__pycache__/MpegImagePlugin.cpython-36.pyc,, +PIL/__pycache__/MpoImagePlugin.cpython-36.pyc,, +PIL/__pycache__/MspImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PSDraw.cpython-36.pyc,, +PIL/__pycache__/PaletteFile.cpython-36.pyc,, +PIL/__pycache__/PalmImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PcdImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PcfFontFile.cpython-36.pyc,, +PIL/__pycache__/PcxImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PdfImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PdfParser.cpython-36.pyc,, +PIL/__pycache__/PixarImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PngImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PpmImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PsdImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PyAccess.cpython-36.pyc,, +PIL/__pycache__/SgiImagePlugin.cpython-36.pyc,, +PIL/__pycache__/SpiderImagePlugin.cpython-36.pyc,, +PIL/__pycache__/SunImagePlugin.cpython-36.pyc,, +PIL/__pycache__/TarIO.cpython-36.pyc,, +PIL/__pycache__/TgaImagePlugin.cpython-36.pyc,, +PIL/__pycache__/TiffImagePlugin.cpython-36.pyc,, +PIL/__pycache__/TiffTags.cpython-36.pyc,, +PIL/__pycache__/WalImageFile.cpython-36.pyc,, +PIL/__pycache__/WebPImagePlugin.cpython-36.pyc,, +PIL/__pycache__/WmfImagePlugin.cpython-36.pyc,, +PIL/__pycache__/XVThumbImagePlugin.cpython-36.pyc,, +PIL/__pycache__/XbmImagePlugin.cpython-36.pyc,, +PIL/__pycache__/XpmImagePlugin.cpython-36.pyc,, +PIL/__pycache__/__init__.cpython-36.pyc,, +PIL/__pycache__/__main__.cpython-36.pyc,, +PIL/__pycache__/_binary.cpython-36.pyc,, +PIL/__pycache__/_tkinter_finder.cpython-36.pyc,, +PIL/__pycache__/_util.cpython-36.pyc,, +PIL/__pycache__/_version.cpython-36.pyc,, +PIL/__pycache__/features.cpython-36.pyc,, +PIL/_binary.py,sha256=M_yObPVR_1rxnS5craSJsSbFJMykMYqJ0vNHeUpAmj4,1793 +PIL/_imaging.cpython-36m-x86_64-linux-gnu.so,sha256=A4mnC8iJJ88UuzR4nV6uWzsgZpWAsyPEao1UbURUVuk,665760 +PIL/_imagingcms.cpython-36m-x86_64-linux-gnu.so,sha256=yWUPn-IDifMUhAjTiXGk7RWnmwJfiooLlO8-elW-QeU,38968 +PIL/_imagingft.cpython-36m-x86_64-linux-gnu.so,sha256=QGTezI7PcPcdlBbPIIF7PqkA2db_B3jEdYhjpaIoTB8,42992 +PIL/_imagingmath.cpython-36m-x86_64-linux-gnu.so,sha256=fOmzz-Fi1CvLI2EE4DiFTUcMBHZkfaHS38GPOCdV6Ag,25016 +PIL/_imagingmorph.cpython-36m-x86_64-linux-gnu.so,sha256=Izb-P0sfXL7w79aeMs_rmFrtXSH3MHppiX5ueItw2oI,8312 +PIL/_imagingtk.cpython-36m-x86_64-linux-gnu.so,sha256=1QJfW4DzP9u_oe93mw2tQY3qk2C9fb2Gqjntx3p62ZQ,9592 +PIL/_tkinter_finder.py,sha256=pWHz4HAUfRlF12YrGMXIZM1evllpRHq2kRAxD43hFmk,224 +PIL/_util.py,sha256=pbjX5KY1W2oZyYVC4TE9ai2PfrJZrAsO5hAnz_JMees,359 +PIL/_version.py,sha256=tpf8Y4_80o_PwzIvbXt9LqzFIeSJMY8z5JU3xcKjcts,50 +PIL/_webp.cpython-36m-x86_64-linux-gnu.so,sha256=gOgL36oImTFwYUN34OUVILJghJxwwqzRTaHqq_CutYk,41800 +PIL/features.py,sha256=sM5E4oDEBqAZ3kHxVRTG7drsuJth_GPRxIUbCDqGHAk,9008 +Pillow-8.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Pillow-8.1.0.dist-info/LICENSE,sha256=KaHC87Q1Qt5XsxBpy0TNb5wrUnV82TvczN-Dy54mytU,24628 +Pillow-8.1.0.dist-info/METADATA,sha256=hTCG0-G76l3R-nEqmsmiCbT2SSmx7q2juo4SZBCWtWQ,7064 +Pillow-8.1.0.dist-info/RECORD,, 
+Pillow-8.1.0.dist-info/WHEEL,sha256=IkZiJFiZiRavDHfaLV1hdVPFYe1HJNLQ8dnz1DVQgHk,109 +Pillow-8.1.0.dist-info/top_level.txt,sha256=riZqrk-hyZqh5f1Z0Zwii3dKfxEsByhu9cU9IODF-NY,4 +Pillow-8.1.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +Pillow.libs/libXau-312dbc56.so.6.0.0,sha256=pkazO4sSozRX0yzzcJGgLNmc1orwSlFB4wuDxG8Mtwo,12848 +Pillow.libs/libXdmcp-e15573e7.so.6.0.0,sha256=s9RVRC2L1UOO2Y67NE2LshHgoJ6B13dNTHYog-opS_I,21912 +Pillow.libs/libfreetype-6ad068c6.so.6.17.4,sha256=4-bW6TaGH0TJm6dR9jX9LQWcyKpozyg9FX6nTmAk2ns,1307624 +Pillow.libs/libjpeg-ba7bf5af.so.9.4.0,sha256=Z7iNudg2aBRlcXpMoV9SipKYKMolUlsMcUrYjHpl-bQ,250504 +Pillow.libs/liblcms2-db671c5b.so.2.0.10,sha256=3ahmiETg4f-e-vd9iL5ZSpDA0JiVvloO9JnYz4BzrHs,457280 +Pillow.libs/liblzma-99449165.so.5.2.5,sha256=7iMfemDnr3asL0JQkjfbOaPwbo0aQZywH4C3xf2u_k4,216664 +Pillow.libs/libopenjp2-f0612b30.so.2.4.0,sha256=lfYvfA4tEjTYCEN2fWwvB3A4F9VPT9fRReVGDREzyJM,532880 +Pillow.libs/libpng16-bedcb7ea.so.16.37.0,sha256=LSA-3MI8mYtx3zrJpk5ZizW4Hyg-Hvd8ykCIDfajVcA,283920 +Pillow.libs/libtiff-d147fec3.so.5.6.0,sha256=B_UTvwPMz2ver1LhJk3Oc_aYAQDyHlEBXm4GUWw0pb4,690040 +Pillow.libs/libwebp-122bd20b.so.7.1.0,sha256=PMbv3RH81P9_1hkOJiUNfjkjAWe8kBYqAlcWFmaeMgs,596712 +Pillow.libs/libwebpdemux-2db559e5.so.2.0.6,sha256=eQogBgl17h71V_m4fquF10WRl1nHuI596tS1wo1Vjus,29528 +Pillow.libs/libwebpmux-ec1d5c76.so.3.0.5,sha256=VaYWzcr8EgLLuI2zM2y91a-5L5qpR59rsRQF2q0-8Hw,58616 +Pillow.libs/libxcb-2dfad6c3.so.1.1.0,sha256=Pyy57cZxqkJU1x6vtPmMY97py0X6n1QjBotZPTchs1M,263776 +Pillow.libs/libz-a147dcb0.so.1.2.3,sha256=_HZ_XiOuVypJhy7VEy7Pv-QM6RxKmNtmQG2swxTNcvE,87848 diff --git a/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/WHEEL b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/WHEEL new file mode 100644 index 0000000..2b4ff2b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: false +Tag: cp36-cp36m-manylinux1_x86_64 + diff --git a/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/top_level.txt b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/top_level.txt new file mode 100644 index 0000000..b338169 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +PIL diff --git a/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/zip-safe b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/Pillow-8.1.0.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libXau-312dbc56.so.6.0.0 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libXau-312dbc56.so.6.0.0 new file mode 100755 index 0000000..64340c0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libXau-312dbc56.so.6.0.0 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libXdmcp-e15573e7.so.6.0.0 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libXdmcp-e15573e7.so.6.0.0 new file mode 100755 index 0000000..7e94bf1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libXdmcp-e15573e7.so.6.0.0 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libfreetype-6ad068c6.so.6.17.4 
b/minor_project/lib/python3.6/site-packages/Pillow.libs/libfreetype-6ad068c6.so.6.17.4 new file mode 100755 index 0000000..10edcb1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libfreetype-6ad068c6.so.6.17.4 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libjpeg-ba7bf5af.so.9.4.0 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libjpeg-ba7bf5af.so.9.4.0 new file mode 100755 index 0000000..ad26c11 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libjpeg-ba7bf5af.so.9.4.0 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/liblcms2-db671c5b.so.2.0.10 b/minor_project/lib/python3.6/site-packages/Pillow.libs/liblcms2-db671c5b.so.2.0.10 new file mode 100755 index 0000000..7b057bb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/liblcms2-db671c5b.so.2.0.10 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/liblzma-99449165.so.5.2.5 b/minor_project/lib/python3.6/site-packages/Pillow.libs/liblzma-99449165.so.5.2.5 new file mode 100755 index 0000000..92c8666 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/liblzma-99449165.so.5.2.5 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libopenjp2-f0612b30.so.2.4.0 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libopenjp2-f0612b30.so.2.4.0 new file mode 100755 index 0000000..e9402af Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libopenjp2-f0612b30.so.2.4.0 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libpng16-bedcb7ea.so.16.37.0 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libpng16-bedcb7ea.so.16.37.0 new file mode 100755 index 0000000..0cc2caa Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libpng16-bedcb7ea.so.16.37.0 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libtiff-d147fec3.so.5.6.0 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libtiff-d147fec3.so.5.6.0 new file mode 100755 index 0000000..149d5ec Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libtiff-d147fec3.so.5.6.0 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libwebp-122bd20b.so.7.1.0 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libwebp-122bd20b.so.7.1.0 new file mode 100755 index 0000000..df8a73c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libwebp-122bd20b.so.7.1.0 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libwebpdemux-2db559e5.so.2.0.6 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libwebpdemux-2db559e5.so.2.0.6 new file mode 100755 index 0000000..4fa4628 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libwebpdemux-2db559e5.so.2.0.6 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libwebpmux-ec1d5c76.so.3.0.5 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libwebpmux-ec1d5c76.so.3.0.5 new file mode 100755 index 0000000..f67c3cc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libwebpmux-ec1d5c76.so.3.0.5 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libxcb-2dfad6c3.so.1.1.0 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libxcb-2dfad6c3.so.1.1.0 new file mode 100755 index 0000000..3385d9d Binary files /dev/null 
and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libxcb-2dfad6c3.so.1.1.0 differ diff --git a/minor_project/lib/python3.6/site-packages/Pillow.libs/libz-a147dcb0.so.1.2.3 b/minor_project/lib/python3.6/site-packages/Pillow.libs/libz-a147dcb0.so.1.2.3 new file mode 100755 index 0000000..0d6b630 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/Pillow.libs/libz-a147dcb0.so.1.2.3 differ diff --git a/minor_project/lib/python3.6/site-packages/__pycache__/_virtualenv.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/__pycache__/_virtualenv.cpython-36.pyc new file mode 100644 index 0000000..90b000a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/__pycache__/_virtualenv.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/__pycache__/cycler.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/__pycache__/cycler.cpython-36.pyc new file mode 100644 index 0000000..fd52e62 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/__pycache__/cycler.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/__pycache__/pylab.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/__pycache__/pylab.cpython-36.pyc new file mode 100644 index 0000000..0ec63b4 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/__pycache__/pylab.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/__pycache__/pyparsing.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/__pycache__/pyparsing.cpython-36.pyc new file mode 100644 index 0000000..6a80067 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/__pycache__/pyparsing.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/__pycache__/six.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/__pycache__/six.cpython-36.pyc new file mode 100644 index 0000000..d3b6f8c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/__pycache__/six.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/_virtualenv.pth b/minor_project/lib/python3.6/site-packages/_virtualenv.pth new file mode 100644 index 0000000..1c3ff99 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/_virtualenv.pth @@ -0,0 +1 @@ +import _virtualenv \ No newline at end of file diff --git a/minor_project/lib/python3.6/site-packages/_virtualenv.py b/minor_project/lib/python3.6/site-packages/_virtualenv.py new file mode 100644 index 0000000..b399da4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/_virtualenv.py @@ -0,0 +1,115 @@ +"""Patches that are applied at runtime to the virtual environment""" +# -*- coding: utf-8 -*- + +import os +import sys + +VIRTUALENV_PATCH_FILE = os.path.join(__file__) + + +def patch_dist(dist): + """ + Distutils allows user to configure some arguments via a configuration file: + https://docs.python.org/3/install/index.html#distutils-configuration-files + + Some of this arguments though don't make sense in context of the virtual environment files, let's fix them up. 
+ """ + # we cannot allow some install config as that would get packages installed outside of the virtual environment + old_parse_config_files = dist.Distribution.parse_config_files + + def parse_config_files(self, *args, **kwargs): + result = old_parse_config_files(self, *args, **kwargs) + install = self.get_option_dict("install") + + if "prefix" in install: # the prefix governs where to install the libraries + install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix) + for base in ("purelib", "platlib", "headers", "scripts", "data"): + key = "install_{}".format(base) + if key in install: # do not allow global configs to hijack venv paths + install.pop(key, None) + return result + + dist.Distribution.parse_config_files = parse_config_files + + +# Import hook that patches some modules to ignore configuration values that break package installation in case +# of virtual environments. +_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist" +if sys.version_info > (3, 4): + # https://docs.python.org/3/library/importlib.html#setting-up-an-importer + from importlib.abc import MetaPathFinder + from importlib.util import find_spec + from threading import Lock + from functools import partial + + class _Finder(MetaPathFinder): + """A meta path finder that allows patching the imported distutils modules""" + + fullname = None + lock = Lock() + + def find_spec(self, fullname, path, target=None): + if fullname in _DISTUTILS_PATCH and self.fullname is None: + with self.lock: + self.fullname = fullname + try: + spec = find_spec(fullname, path) + if spec is not None: + # https://www.python.org/dev/peps/pep-0451/#how-loading-will-work + is_new_api = hasattr(spec.loader, "exec_module") + func_name = "exec_module" if is_new_api else "load_module" + old = getattr(spec.loader, func_name) + func = self.exec_module if is_new_api else self.load_module + if old is not func: + try: + setattr(spec.loader, func_name, partial(func, old)) + except AttributeError: + pass # C-Extension loaders are r/o such as zipimporter with >> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed at the end of 2018. + +.. _`Certifi`: https://certifiio.readthedocs.io/en/latest/ +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
+ + diff --git a/minor_project/lib/python3.6/site-packages/certifi-2020.12.5.dist-info/RECORD b/minor_project/lib/python3.6/site-packages/certifi-2020.12.5.dist-info/RECORD new file mode 100644 index 0000000..43fd9eb --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/certifi-2020.12.5.dist-info/RECORD @@ -0,0 +1,13 @@ +certifi-2020.12.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2020.12.5.dist-info/LICENSE,sha256=anCkv2sBABbVmmS4rkrY3H9e8W8ftFPMLs13HFo0ETE,1048 +certifi-2020.12.5.dist-info/METADATA,sha256=SEw5GGHIeBwGwDJsIUaVfEQAc5Jqs_XofOfTX-_kCE0,2994 +certifi-2020.12.5.dist-info/RECORD,, +certifi-2020.12.5.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110 +certifi-2020.12.5.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=SsmdmFHjHCY4VLtqwpp9P_jsOcAuHj-5c5WqoEz-oFg,62 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-36.pyc,, +certifi/__pycache__/__main__.cpython-36.pyc,, +certifi/__pycache__/core.cpython-36.pyc,, +certifi/cacert.pem,sha256=u3fxPT--yemLvyislQRrRBlsfY9Vq3cgBh6ZmRqCkZc,263774 +certifi/core.py,sha256=V0uyxKOYdz6ulDSusclrLmjbPgOXsD0BnEf0SQ7OnoE,2303 diff --git a/minor_project/lib/python3.6/site-packages/certifi-2020.12.5.dist-info/WHEEL b/minor_project/lib/python3.6/site-packages/certifi-2020.12.5.dist-info/WHEEL new file mode 100644 index 0000000..6d38aa0 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/certifi-2020.12.5.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/minor_project/lib/python3.6/site-packages/certifi-2020.12.5.dist-info/top_level.txt b/minor_project/lib/python3.6/site-packages/certifi-2020.12.5.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/certifi-2020.12.5.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/minor_project/lib/python3.6/site-packages/certifi/__init__.py b/minor_project/lib/python3.6/site-packages/certifi/__init__.py new file mode 100644 index 0000000..17aaf90 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/certifi/__init__.py @@ -0,0 +1,3 @@ +from .core import contents, where + +__version__ = "2020.12.05" diff --git a/minor_project/lib/python3.6/site-packages/certifi/__main__.py b/minor_project/lib/python3.6/site-packages/certifi/__main__.py new file mode 100644 index 0000000..8945b5d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/minor_project/lib/python3.6/site-packages/certifi/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/certifi/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..582ed1c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/certifi/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/certifi/__pycache__/__main__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/certifi/__pycache__/__main__.cpython-36.pyc new file mode 100644 index 0000000..8635077 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/certifi/__pycache__/__main__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/certifi/__pycache__/core.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/certifi/__pycache__/core.cpython-36.pyc new file mode 100644 index 0000000..f0b2655 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/certifi/__pycache__/core.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/certifi/cacert.pem b/minor_project/lib/python3.6/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..c9459dc --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/certifi/cacert.pem @@ -0,0 +1,4325 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq 
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- 
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# 
Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Label: "QuoVadis Root CA" +# Serial: 985026699 +# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 +# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 +# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 +-----BEGIN CERTIFICATE----- +MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz +MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw +IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR +dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp +li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D +rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ +WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug +F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU +xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC +Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv +dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw 
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl +IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh +c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy +ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh +Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI +KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T +KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq +y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p +dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD +VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk +fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 +7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R +cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y +mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW +xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK +SnQ2+Q== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza 
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t 
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=Sonera Class2 CA O=Sonera +# Subject: CN=Sonera Class2 CA O=Sonera +# Label: "Sonera Class 2 Root CA" +# Serial: 29 +# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb +# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 +# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 +-----BEGIN CERTIFICATE----- +MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP +MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx +MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV +BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o +Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt +5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s +3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej +vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu +8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw +DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG +MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil +zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ +3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD +FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 +Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 +ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx 
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Label: "DST Root CA X3" +# Serial: 91299735575339953335919266965803778155 +# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 +# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 +# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y 
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ 
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L 
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. 
+# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 
Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE +ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Label: "GeoTrust Primary Certification Authority - G2" +# Serial: 80682863203381065782177908751794619243 +# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a +# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 +# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL +MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj +KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 +MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw +NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV +BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL +So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal +tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG +CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT +qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz +rD6ogRLQy7rQkgu2npaqBA+K +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. 
- For authorized use only +# Label: "VeriSign Universal Root Certification Authority" +# Serial: 85209574734084581917763752644031726877 +# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 +# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 +# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB +vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W +ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 +IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y +IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh +bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF +9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH +H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H +LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN +/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT +rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw +WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs +exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 +sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ +seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz +4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ +BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR +lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 +7M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: 
CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Chambers of Commerce Root - 2008" +# Serial: 11806822484801597146 +# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 +# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c +# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 +-----BEGIN CERTIFICATE----- +MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz +IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz +MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj +dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw +EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp +MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 +28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq +VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q +DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR +5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL +ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a +Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl +UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s ++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 +Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj +ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx +hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV +HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 ++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN +YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t +L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy +ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt +IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV +HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w +DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW +PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF +5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 +glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH +FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 +pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD +xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG +tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq +jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De +fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg +OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ +d0jQ +-----END CERTIFICATE----- + +# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Global Chambersign Root - 2008" +# Serial: 14541511773111788494 +# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 +# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c +# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca +-----BEGIN CERTIFICATE----- +MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD +aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx +MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy +cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG +A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl +BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI +hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed +KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 +G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 +zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 +ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG +HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 +Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V +yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e +beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r +6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh +wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog +zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW +BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr +ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp +ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk +cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt +YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC +CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow +KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI +hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ +UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz +X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x +fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz +a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd +Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd +SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O +AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso +M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge +v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z +09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Label: "EC-ACC" +# Serial: -23701579247955709139626555126524820479 +# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 +# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 +# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB +8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy +dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 +YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 +dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh +IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD +LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG +EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g +KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD +ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu +bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg +ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R +85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm +4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV +HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd +QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t 
+lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB +o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 +opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo +dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW +ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN +AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y +/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k +SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy +Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS +Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl +nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV +HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE 
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: O=Trustis Limited OU=Trustis FPS Root CA +# Subject: O=Trustis Limited OU=Trustis FPS Root CA +# Label: "Trustis FPS Root CA" +# Serial: 36053640375399034304724988975563710553 +# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d +# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 +# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d +-----BEGIN CERTIFICATE----- +MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL +ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx +MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc +MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ +AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH +iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj +vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA +0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB +OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ +BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E +FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 +GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW +zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 +1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE 
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F +jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN +ZetX2fNXlrtIzYE= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E 
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# 
Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp 
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb 
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH 
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn 
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK +vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 
91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# 
SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh 
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 
O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: 
cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg 
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq 
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + 
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX +kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD 
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G3" +# Serial: 10003001 +# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 +# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc +# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX +DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP +cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW +IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX +xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy +KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR +9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az +5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 +6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 +Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP +bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt +BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt +XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd +INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD +U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp +LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 +Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp +gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh +/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw +0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A +fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq +4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR +1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ +QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM +94B7IWcnMFk= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- 
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N 
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. 
OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ +DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK +DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# 
Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 146587175971765017618439757810265552097 +# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 +# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 +# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM +f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX +mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 +zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P +fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc +vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 +Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp +zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO +Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW +k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ +DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF +lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW +Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 +d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z +XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR +gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 +d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv +J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg +DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM ++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy +F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 +SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws +E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 146587176055767053814479386953112547951 +# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b +# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d +# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv +CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg +GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu +XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd +re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu 
+PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 +mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K +8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj +x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR +nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 +kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok +twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp +8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT +vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT +z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA +pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb +pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB +R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R +RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk +0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC +5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF +izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn +yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 146587176140553309517047991083707763997 +# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 +# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 +# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 +-----BEGIN CERTIFICATE----- +MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout +736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A +DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk +fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA +njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 146587176229350439916519468929765261721 +# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 +# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb +# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd +-----BEGIN CERTIFICATE----- +MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu +hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l +xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud 
+DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 +CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx +sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy +YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS 
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO +ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L 
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: 
"Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV 
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + 
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/certifi/core.py b/minor_project/lib/python3.6/site-packages/certifi/core.py new file mode 100644 index 0000000..5d2b8cd --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/certifi/core.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- + +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import os + +try: + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where(): + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. 
+ global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + +except ImportError: + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text(_module, _path, encoding="ascii"): + with open(where(), "r", encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. + def where(): + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + +def contents(): + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/INSTALLER b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/LICENSE b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/LICENSE new file mode 100644 index 0000000..8add30a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/LICENSE @@ -0,0 +1,504 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. 
Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. 
To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. 
+ + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. 
You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. 
Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. 
Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. 
In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. 
+ + + Copyright (C) <year>  <name of author> + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + <signature of Ty Coon>, 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! + + diff --git a/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/METADATA b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/METADATA new file mode 100644 index 0000000..590bcc3 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/METADATA @@ -0,0 +1,101 @@ +Metadata-Version: 2.1 +Name: chardet +Version: 4.0.0 +Summary: Universal encoding detector for Python 2 and 3 +Home-page: https://github.com/chardet/chardet +Author: Mark Pilgrim +Author-email: mark@diveintomark.org +Maintainer: Daniel Blanchard +Maintainer-email: dan.blanchard@gmail.com +License: LGPL +Keywords: encoding,i18n,xml +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Linguistic +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* + +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +..
image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.7 or 3.5+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. + +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.7+ and 3.4+. + +:maintainer: Dan Blanchard + + diff --git a/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/RECORD b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/RECORD new file mode 100644 index 0000000..be480a4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/RECORD @@ -0,0 +1,94 @@ +../../../bin/chardetect,sha256=xBCrM5sqUN7EkMthiYaUobDQM8XYCmsj4NNxjzSoy9w,352 +chardet-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +chardet-4.0.0.dist-info/LICENSE,sha256=YJXp_6d33SKDn3gBqoRbMcntB_PWv4om3F0t7IzMDvM,26432 +chardet-4.0.0.dist-info/METADATA,sha256=ySYQAE7NPm3LwxgMqFi1zdLQ48mmwMbrJwqAWCtcbH8,3526 +chardet-4.0.0.dist-info/RECORD,, +chardet-4.0.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110 +chardet-4.0.0.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60 +chardet-4.0.0.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8 +chardet/__init__.py,sha256=mWZaWmvZkhwfBEAT9O1Y6nRTfKzhT7FHhQTTAujbqUA,3271 +chardet/__pycache__/__init__.cpython-36.pyc,, +chardet/__pycache__/big5freq.cpython-36.pyc,, +chardet/__pycache__/big5prober.cpython-36.pyc,, +chardet/__pycache__/chardistribution.cpython-36.pyc,, +chardet/__pycache__/charsetgroupprober.cpython-36.pyc,, +chardet/__pycache__/charsetprober.cpython-36.pyc,, +chardet/__pycache__/codingstatemachine.cpython-36.pyc,, +chardet/__pycache__/compat.cpython-36.pyc,, +chardet/__pycache__/cp949prober.cpython-36.pyc,, +chardet/__pycache__/enums.cpython-36.pyc,, +chardet/__pycache__/escprober.cpython-36.pyc,, +chardet/__pycache__/escsm.cpython-36.pyc,, +chardet/__pycache__/eucjpprober.cpython-36.pyc,, +chardet/__pycache__/euckrfreq.cpython-36.pyc,, +chardet/__pycache__/euckrprober.cpython-36.pyc,, +chardet/__pycache__/euctwfreq.cpython-36.pyc,, +chardet/__pycache__/euctwprober.cpython-36.pyc,, +chardet/__pycache__/gb2312freq.cpython-36.pyc,, 
+chardet/__pycache__/gb2312prober.cpython-36.pyc,, +chardet/__pycache__/hebrewprober.cpython-36.pyc,, +chardet/__pycache__/jisfreq.cpython-36.pyc,, +chardet/__pycache__/jpcntx.cpython-36.pyc,, +chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,, +chardet/__pycache__/langgreekmodel.cpython-36.pyc,, +chardet/__pycache__/langhebrewmodel.cpython-36.pyc,, +chardet/__pycache__/langhungarianmodel.cpython-36.pyc,, +chardet/__pycache__/langrussianmodel.cpython-36.pyc,, +chardet/__pycache__/langthaimodel.cpython-36.pyc,, +chardet/__pycache__/langturkishmodel.cpython-36.pyc,, +chardet/__pycache__/latin1prober.cpython-36.pyc,, +chardet/__pycache__/mbcharsetprober.cpython-36.pyc,, +chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,, +chardet/__pycache__/mbcssm.cpython-36.pyc,, +chardet/__pycache__/sbcharsetprober.cpython-36.pyc,, +chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,, +chardet/__pycache__/sjisprober.cpython-36.pyc,, +chardet/__pycache__/universaldetector.cpython-36.pyc,, +chardet/__pycache__/utf8prober.cpython-36.pyc,, +chardet/__pycache__/version.cpython-36.pyc,, +chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +chardet/charsetgroupprober.py,sha256=GZLReHP6FRRn43hvSOoGCxYamErKzyp6RgOQxVeC3kg,3839 +chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +chardet/cli/__pycache__/__init__.cpython-36.pyc,, +chardet/cli/__pycache__/chardetect.cpython-36.pyc,, +chardet/cli/chardetect.py,sha256=kUPeQCi-olObXpOq5MtlKuBn1EU19rkeenAMwxl7URY,2711 +chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +chardet/compat.py,sha256=40zr6wICZwknxyuLGGcIOPyve8DTebBCbbvttvnmp5Q,1200 +chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +chardet/langbulgarianmodel.py,sha256=r6tvOtO8FqhnbWBB5V4czcl1fWM4pB9lGiWQU-8gvsw,105685 +chardet/langgreekmodel.py,sha256=1cMu2wUgPB8bQ2RbVjR4LNwCCETgQ-Dwo0Eg2_uB11s,99559 +chardet/langhebrewmodel.py,sha256=urMmJHHIXtCwaWAqy1zEY_4SmwwNzt730bDOtjXzRjs,98764 +chardet/langhungarianmodel.py,sha256=ODAisvqCfes8B4FeyM_Pg9HY3ZDnEyaCiT4Bxyzoc6w,102486 +chardet/langrussianmodel.py,sha256=sPqkrBbX0QVwwy6oqRl-x7ERv2J4-zaMoCvLpkSsSJI,131168 +chardet/langthaimodel.py,sha256=ppoKOGL9OPdj9A4CUyG8R48zbnXt9MN1WXeCYepa6sc,103300 
+chardet/langturkishmodel.py,sha256=H3ldicI_rhlv0r3VFpVWtUL6X30Wy596v7_YHz2sEdg,95934 +chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +chardet/metadata/__pycache__/__init__.cpython-36.pyc,, +chardet/metadata/__pycache__/languages.cpython-36.pyc,, +chardet/metadata/languages.py,sha256=41tLq3eLSrBEbEVVQpVGFq9K7o1ln9b1HpY1l0hCUQo,19474 +chardet/sbcharsetprober.py,sha256=nmyMyuxzG87DN6K3Rk2MUzJLMLR69MrWpdnHzOwVUwQ,6136 +chardet/sbcsgroupprober.py,sha256=hqefQuXmiFyDBArOjujH6hd6WFXlOD1kWCsxDhjx5Vc,4309 +chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 +chardet/universaldetector.py,sha256=DpZTXCX0nUHXxkQ9sr4GZxGB_hveZ6hWt3uM94cgWKs,12503 +chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +chardet/version.py,sha256=A4CILFAd8MRVG1HoXPp45iK9RLlWyV73a1EtwE8Tvn8,242 diff --git a/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/WHEEL b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/WHEEL new file mode 100644 index 0000000..6d38aa0 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/entry_points.txt b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/entry_points.txt new file mode 100644 index 0000000..a884269 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +chardetect = chardet.cli.chardetect:main + diff --git a/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/top_level.txt b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..79236f2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet-4.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +chardet diff --git a/minor_project/lib/python3.6/site-packages/chardet/__init__.py b/minor_project/lib/python3.6/site-packages/chardet/__init__.py new file mode 100644 index 0000000..80ad254 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/__init__.py @@ -0,0 +1,83 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +from .universaldetector import UniversalDetector +from .enums import InputState +from .version import __version__, VERSION + + +__all__ = ['UniversalDetector', 'detect', 'detect_all', '__version__', 'VERSION'] + + +def detect(byte_str): + """ + Detect the encoding of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + detector = UniversalDetector() + detector.feed(byte_str) + return detector.close() + + +def detect_all(byte_str): + """ + Detect all the possible encodings of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + + detector = UniversalDetector() + detector.feed(byte_str) + detector.close() + + if detector._input_state == InputState.HIGH_BYTE: + results = [] + for prober in detector._charset_probers: + if prober.get_confidence() > detector.MINIMUM_THRESHOLD: + charset_name = prober.charset_name + lower_charset_name = prober.charset_name.lower() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if detector._has_win_bytes: + charset_name = detector.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + results.append({ + 'encoding': charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language, + }) + if len(results) > 0: + return sorted(results, key=lambda result: -result['confidence']) + + return [detector.result] diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..a88e86d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/big5freq.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/big5freq.cpython-36.pyc new file mode 100644 index 0000000..26236ef Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/big5freq.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/big5prober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/big5prober.cpython-36.pyc new file mode 100644 index 0000000..8873c27 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/big5prober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/chardistribution.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/chardistribution.cpython-36.pyc new file mode 100644 index 
0000000..0a6b0d6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/chardistribution.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/charsetgroupprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/charsetgroupprober.cpython-36.pyc new file mode 100644 index 0000000..b4e735d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/charsetgroupprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/charsetprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/charsetprober.cpython-36.pyc new file mode 100644 index 0000000..847db0e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/charsetprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/codingstatemachine.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/codingstatemachine.cpython-36.pyc new file mode 100644 index 0000000..d7fb91d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/codingstatemachine.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/compat.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/compat.cpython-36.pyc new file mode 100644 index 0000000..ffa334f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/compat.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/cp949prober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/cp949prober.cpython-36.pyc new file mode 100644 index 0000000..38c7ff8 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/cp949prober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/enums.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/enums.cpython-36.pyc new file mode 100644 index 0000000..2f5d510 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/enums.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/escprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/escprober.cpython-36.pyc new file mode 100644 index 0000000..f445e0d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/escprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/escsm.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/escsm.cpython-36.pyc new file mode 100644 index 0000000..44a6ab6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/escsm.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/eucjpprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/eucjpprober.cpython-36.pyc new file mode 100644 index 0000000..8ea3b7b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/eucjpprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euckrfreq.cpython-36.pyc 
b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euckrfreq.cpython-36.pyc new file mode 100644 index 0000000..ca44f17 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euckrfreq.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euckrprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euckrprober.cpython-36.pyc new file mode 100644 index 0000000..4115772 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euckrprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euctwfreq.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euctwfreq.cpython-36.pyc new file mode 100644 index 0000000..4995414 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euctwfreq.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euctwprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euctwprober.cpython-36.pyc new file mode 100644 index 0000000..2ab73da Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/euctwprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/gb2312freq.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/gb2312freq.cpython-36.pyc new file mode 100644 index 0000000..d7e6e4f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/gb2312freq.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/gb2312prober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/gb2312prober.cpython-36.pyc new file mode 100644 index 0000000..5454b57 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/gb2312prober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/hebrewprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/hebrewprober.cpython-36.pyc new file mode 100644 index 0000000..0e2e406 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/hebrewprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/jisfreq.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/jisfreq.cpython-36.pyc new file mode 100644 index 0000000..ce346b0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/jisfreq.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/jpcntx.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/jpcntx.cpython-36.pyc new file mode 100644 index 0000000..9301585 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/jpcntx.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc new file mode 100644 index 0000000..778413a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langbulgarianmodel.cpython-36.pyc differ diff --git 
a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langgreekmodel.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langgreekmodel.cpython-36.pyc new file mode 100644 index 0000000..9f7d469 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langgreekmodel.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langhebrewmodel.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langhebrewmodel.cpython-36.pyc new file mode 100644 index 0000000..d1117e8 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langhebrewmodel.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langhungarianmodel.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langhungarianmodel.cpython-36.pyc new file mode 100644 index 0000000..3c4e55a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langhungarianmodel.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langrussianmodel.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langrussianmodel.cpython-36.pyc new file mode 100644 index 0000000..baea2a1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langrussianmodel.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langthaimodel.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langthaimodel.cpython-36.pyc new file mode 100644 index 0000000..4dd4f85 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langthaimodel.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langturkishmodel.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langturkishmodel.cpython-36.pyc new file mode 100644 index 0000000..7097bcb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/langturkishmodel.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/latin1prober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/latin1prober.cpython-36.pyc new file mode 100644 index 0000000..5e5db87 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/latin1prober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/mbcharsetprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/mbcharsetprober.cpython-36.pyc new file mode 100644 index 0000000..39195d7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/mbcharsetprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc new file mode 100644 index 0000000..3a5594d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/mbcsgroupprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/mbcssm.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/mbcssm.cpython-36.pyc new file mode 
100644 index 0000000..2140839 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/mbcssm.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/sbcharsetprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/sbcharsetprober.cpython-36.pyc new file mode 100644 index 0000000..c2db8de Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/sbcharsetprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc new file mode 100644 index 0000000..d80a1ac Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/sbcsgroupprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/sjisprober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/sjisprober.cpython-36.pyc new file mode 100644 index 0000000..a728646 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/sjisprober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/universaldetector.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/universaldetector.cpython-36.pyc new file mode 100644 index 0000000..7a1af45 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/universaldetector.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/utf8prober.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/utf8prober.cpython-36.pyc new file mode 100644 index 0000000..4998216 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/utf8prober.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/__pycache__/version.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/version.cpython-36.pyc new file mode 100644 index 0000000..fed0839 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/__pycache__/version.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/big5freq.py b/minor_project/lib/python3.6/site-packages/chardet/big5freq.py new file mode 100644 index 0000000..38f3251 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/big5freq.py @@ -0,0 +1,386 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 287,1577,2116, 
768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 +1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 
882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 +5203,5204,1958,1767,2956,3365,3712,1174, 
452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 391, 
498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 
4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 
427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) + diff --git a/minor_project/lib/python3.6/site-packages/chardet/big5prober.py b/minor_project/lib/python3.6/site-packages/chardet/big5prober.py new file mode 100644 index 0000000..98f9970 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + super(Big5Prober, self).__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "Big5" + + @property + def language(self): + return "Chinese" diff --git a/minor_project/lib/python3.6/site-packages/chardet/chardistribution.py b/minor_project/lib/python3.6/site-packages/chardet/chardistribution.py new file mode 100644 index 0000000..c0395f4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/chardistribution.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) + + +class CharDistributionAnalysis(object): + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order = None + self._table_size = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. 
+ self.typical_distribution_ratio = None + self._done = None + self._total_chars = None + self._freq_chars = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char, char_len): + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = (self._freq_chars / ((self._total_chars - self._freq_chars) + * self.typical_distribution_ratio)) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, byte_str): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. + return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCTWDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER + self._table_size = EUCTW_TABLE_SIZE + self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + else: + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCKRDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER + self._table_size = EUCKR_TABLE_SIZE + self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. 
State machine has done that + first_char = byte_str[0] + if first_char >= 0xB0: + return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 + else: + return -1 + + +class GB2312DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(GB2312DistributionAnalysis, self).__init__() + self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER + self._table_size = GB2312_TABLE_SIZE + self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for GB2312 encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0xB0) and (second_char >= 0xA1): + return 94 * (first_char - 0xB0) + second_char - 0xA1 + else: + return -1 + + +class Big5DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(Big5DistributionAnalysis, self).__init__() + self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER + self._table_size = BIG5_TABLE_SIZE + self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for big5 encoding, we are interested + # first byte range: 0xa4 -- 0xfe + # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if first_char >= 0xA4: + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 + else: + return -1 + + +class SJISDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(SJISDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for sjis encoding, we are interested + # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe + # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0x81) and (first_char <= 0x9F): + order = 188 * (first_char - 0x81) + elif (first_char >= 0xE0) and (first_char <= 0xEF): + order = 188 * (first_char - 0xE0 + 31) + else: + return -1 + order = order + second_char - 0x40 + if second_char > 0x7F: + order = -1 + return order + + +class EUCJPDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCJPDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-JP encoding, we are interested + # first byte range: 0xa0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + char = byte_str[0] + if char >= 0xA0: + return 94 * (char - 0xA1) + byte_str[1] - 0xa1 + else: + return -1 diff --git a/minor_project/lib/python3.6/site-packages/chardet/charsetgroupprober.py b/minor_project/lib/python3.6/site-packages/chardet/charsetgroupprober.py new file mode 100644 index 0000000..5812cef --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/charsetgroupprober.py @@ -0,0 +1,107 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
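A minimal usage sketch of the distribution analysers defined in chardistribution.py above, assuming a short hand-picked Big5 byte sample (the sample bytes and the direct import are illustrative assumptions, not part of the recorded patch):

from chardet.chardistribution import Big5DistributionAnalysis

# Feed two-byte characters one at a time, exactly as the probers do via feed(char, char_len).
analyser = Big5DistributionAnalysis()
sample = b'\xa4\x40\xa4\x41\xa5\x48'   # assumed Big5 two-byte sequences, illustration only
for i in range(0, len(sample), 2):
    analyser.feed(sample[i:i + 2], 2)  # char_len == 2 is the only case the analyser counts

# With so few characters, _freq_chars stays at or below MINIMUM_DATA_THRESHOLD,
# so the confidence remains at SURE_NO until more data is fed.
print(analyser.got_enough_data())      # False
print(analyser.get_confidence())       # 0.01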
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter=None): + super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers = [] + self._best_guess_prober = None + + def reset(self): + super(CharSetGroupProber, self).reset() + self._active_num = 0 + for prober in self.probers: + if prober: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str): + for prober in self.probers: + if not prober: + continue + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + self._state = ProbingState.FOUND_IT + return self.state + elif state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self): + state = self.state + if state == ProbingState.FOUND_IT: + return 0.99 + elif state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober: + continue + if not prober.active: + self.logger.debug('%s not active', prober.charset_name) + continue + conf = prober.get_confidence() + self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/minor_project/lib/python3.6/site-packages/chardet/charsetprober.py b/minor_project/lib/python3.6/site-packages/chardet/charsetprober.py new file mode 100644 index 0000000..eac4e59 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/charsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re + +from .enums import ProbingState + + +class CharSetProber(object): + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter=None): + self._state = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + return None + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7F])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. + words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. 
+ """ + filtered = bytearray() + in_tag = False + prev = 0 + + for curr in range(len(buf)): + # Slice here to get bytes instead of an int with Python 3 + buf_char = buf[curr:curr + 1] + # Check if we're coming out of or entering an HTML tag + if buf_char == b'>': + in_tag = False + elif buf_char == b'<': + in_tag = True + + # If current character is not extended-ASCII and not alphabetic... + if buf_char < b'\x80' and not buf_char.isalpha(): + # ...and we're not in a tag + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b' ') + prev = curr + 1 + + # If we're not in a tag... + if not in_tag: + # Keep everything after last non-extended-ASCII, non-alphabetic + # character + filtered.extend(buf[prev:]) + + return filtered diff --git a/minor_project/lib/python3.6/site-packages/chardet/cli/__init__.py b/minor_project/lib/python3.6/site-packages/chardet/cli/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/cli/__init__.py @@ -0,0 +1 @@ + diff --git a/minor_project/lib/python3.6/site-packages/chardet/cli/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/cli/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..2bff474 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/cli/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/cli/__pycache__/chardetect.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/cli/__pycache__/chardetect.cpython-36.pyc new file mode 100644 index 0000000..d97fe3e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/cli/__pycache__/chardetect.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/cli/chardetect.py b/minor_project/lib/python3.6/site-packages/chardet/cli/chardetect.py new file mode 100644 index 0000000..e1d8cd6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/cli/chardetect.py @@ -0,0 +1,84 @@ +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. + +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import sys + +from chardet import __version__ +from chardet.compat import PY2 +from chardet.universaldetector import UniversalDetector + + +def description_of(lines, name='stdin'): + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + """ + u = UniversalDetector() + for line in lines: + line = bytearray(line) + u.feed(line) + # shortcut out of the loop to save reading further - particularly useful if we read a BOM. 
+ if u.done: + break + u.close() + result = u.result + if PY2: + name = name.decode(sys.getfilesystemencoding(), 'ignore') + if result['encoding']: + return '{}: {} with confidence {}'.format(name, result['encoding'], + result['confidence']) + else: + return '{}: no result'.format(name) + + +def main(argv=None): + """ + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. + :type argv: list of str + """ + # Get command line arguments + parser = argparse.ArgumentParser( + description="Takes one or more file paths and reports their detected \ + encodings") + parser.add_argument('input', + help='File whose encoding we would like to determine. \ + (default: stdin)', + type=argparse.FileType('rb'), nargs='*', + default=[sys.stdin if PY2 else sys.stdin.buffer]) + parser.add_argument('--version', action='version', + version='%(prog)s {}'.format(__version__)) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print("You are running chardetect interactively. Press " + + "CTRL-D twice at the start of a blank line to signal the " + + "end of your input. If you want help, run chardetect " + + "--help\n", file=sys.stderr) + print(description_of(f, f.name)) + + +if __name__ == '__main__': + main() diff --git a/minor_project/lib/python3.6/site-packages/chardet/codingstatemachine.py b/minor_project/lib/python3.6/site-packages/chardet/codingstatemachine.py new file mode 100644 index 0000000..68fba44 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/codingstatemachine.py @@ -0,0 +1,88 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging + +from .enums import MachineState + + +class CodingStateMachine(object): + """ + A state machine to verify a byte sequence for a particular encoding. For + each byte the detector receives, it will feed that byte to every active + state machine available, one byte at a time. The state machine changes its + state based on its previous state and the byte it receives. There are 3 + states in a state machine that are of interest to an auto-detector: + + START state: This is the state to start with, or a legal byte sequence + (i.e. a valid code point) for character has been identified. 
+ + ME state: This indicates that the state machine identified a byte sequence + that is specific to the charset it is designed for and that + there is no other possible encoding which can contain this byte + sequence. This will to lead to an immediate positive answer for + the detector. + + ERROR state: This indicates the state machine identified an illegal byte + sequence for that encoding. This will lead to an immediate + negative answer for this encoding. Detector will exclude this + encoding from consideration from here on. + """ + def __init__(self, sm): + self._model = sm + self._curr_byte_pos = 0 + self._curr_char_len = 0 + self._curr_state = None + self.logger = logging.getLogger(__name__) + self.reset() + + def reset(self): + self._curr_state = MachineState.START + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + byte_class = self._model['class_table'][c] + if self._curr_state == MachineState.START: + self._curr_byte_pos = 0 + self._curr_char_len = self._model['char_len_table'][byte_class] + # from byte's class and state_table, we get its next state + curr_state = (self._curr_state * self._model['class_factor'] + + byte_class) + self._curr_state = self._model['state_table'][curr_state] + self._curr_byte_pos += 1 + return self._curr_state + + def get_current_charlen(self): + return self._curr_char_len + + def get_coding_state_machine(self): + return self._model['name'] + + @property + def language(self): + return self._model['language'] diff --git a/minor_project/lib/python3.6/site-packages/chardet/compat.py b/minor_project/lib/python3.6/site-packages/chardet/compat.py new file mode 100644 index 0000000..8941572 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/compat.py @@ -0,0 +1,36 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + string_types = (str, unicode) + text_type = unicode + iteritems = dict.iteritems +else: + PY2 = False + PY3 = True + string_types = (bytes, str) + text_type = str + iteritems = dict.items diff --git a/minor_project/lib/python3.6/site-packages/chardet/cp949prober.py b/minor_project/lib/python3.6/site-packages/chardet/cp949prober.py new file mode 100644 index 0000000..efd793a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + super(CP949Prober, self).__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "CP949" + + @property + def language(self): + return "Korean" diff --git a/minor_project/lib/python3.6/site-packages/chardet/enums.py b/minor_project/lib/python3.6/site-packages/chardet/enums.py new file mode 100644 index 0000000..0451207 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/enums.py @@ -0,0 +1,76 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + + +class InputState(object): + """ + This enum represents the different states a universal detector can be in. + """ + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(object): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(object): + """ + This enum represents the different states a prober can be in. + """ + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState(object): + """ + This enum represents the different states a state machine can be in. + """ + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood(object): + """ + This enum represents the likelihood of a character following the previous one. + """ + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls): + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory(object): + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. 
+ """ + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/minor_project/lib/python3.6/site-packages/chardet/escprober.py b/minor_project/lib/python3.6/site-packages/chardet/escprober.py new file mode 100644 index 0000000..c70493f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/escprober.py @@ -0,0 +1,101 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, ProbingState, MachineState +from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. 
+ """ + + def __init__(self, lang_filter=None): + super(EscCharSetProber, self).__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = None + self._detected_charset = None + self._detected_language = None + self._state = None + self.reset() + + def reset(self): + super(EscCharSetProber, self).reset() + for coding_sm in self.coding_sm: + if not coding_sm: + continue + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self): + return self._detected_charset + + @property + def language(self): + return self._detected_language + + def get_confidence(self): + if self._detected_charset: + return 0.99 + else: + return 0.00 + + def feed(self, byte_str): + for c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm or not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/minor_project/lib/python3.6/site-packages/chardet/escsm.py b/minor_project/lib/python3.6/site-packages/chardet/escsm.py new file mode 100644 index 0000000..0069523 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/escsm.py @@ -0,0 +1,246 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +HZ_CLS = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_ST = ( +MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 + 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f + 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 + 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f +) + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL = {'class_table': HZ_CLS, + 'class_factor': 6, + 'state_table': HZ_ST, + 'char_len_table': HZ_CHAR_LEN_TABLE, + 'name': "HZ-GB-2312", + 'language': 'Chinese'} + +ISO2022CN_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f 
+MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 + 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f +) + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, + 'class_factor': 9, + 'state_table': ISO2022CN_ST, + 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, + 'name': "ISO-2022-CN", + 'language': 'Chinese'} + +ISO2022JP_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 +) + 
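+# How CodingStateMachine (from .codingstatemachine) consumes one of these
+# models, sketched for orientation: for each input byte ``c`` it computes
+#     byte_class = class_table[c]
+#     curr_state = state_table[curr_state * class_factor + byte_class]
+# so feeding the ISO-2022-JP escape ESC $ B (0x1b 0x24 0x42) walks
+# START -> 3 -> 4 -> MachineState.ITS_ME:
+#
+#     sm = CodingStateMachine(ISO2022JP_SM_MODEL)
+#     [sm.next_state(b) for b in b"\x1b$B"]   # ends in MachineState.ITS_ME
+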
+ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, + 'class_factor': 10, + 'state_table': ISO2022JP_ST, + 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, + 'name': "ISO-2022-JP", + 'language': 'Japanese'} + +ISO2022KR_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 +) + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, + 'class_factor': 6, + 'state_table': ISO2022KR_ST, + 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, + 'name': "ISO-2022-KR", + 'language': 'Korean'} + + diff --git a/minor_project/lib/python3.6/site-packages/chardet/eucjpprober.py b/minor_project/lib/python3.6/site-packages/chardet/eucjpprober.py new file mode 100644 index 0000000..20ce8f7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/eucjpprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState, MachineState +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCJPDistributionAnalysis +from .jpcntx import EUCJPContextAnalysis +from .mbcssm import EUCJP_SM_MODEL + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self): + super(EUCJPProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) + self.distribution_analyzer = EUCJPDistributionAnalysis() + self.context_analyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self): + super(EUCJPProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return "EUC-JP" + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char, char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/minor_project/lib/python3.6/site-packages/chardet/euckrfreq.py b/minor_project/lib/python3.6/site-packages/chardet/euckrfreq.py new file mode 100644 index 0000000..b68078c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/euckrfreq.py @@ -0,0 +1,195 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology + +# 128 --> 0.79 +# 256 --> 0.92 +# 512 --> 0.986 +# 1024 --> 0.99944 +# 2048 --> 0.99999 +# +# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 +# Random Distribution Ration = 512 / (2350-512) = 0.279. +# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +EUCKR_CHAR_TO_FREQ_ORDER = ( + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 
662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, +2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, +2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 
348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, +1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 
364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) + diff --git a/minor_project/lib/python3.6/site-packages/chardet/euckrprober.py b/minor_project/lib/python3.6/site-packages/chardet/euckrprober.py new file mode 100644 index 0000000..345a060 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import EUCKR_SM_MODEL + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self): + super(EUCKRProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-KR" + + @property + def language(self): + return "Korean" diff --git a/minor_project/lib/python3.6/site-packages/chardet/euctwfreq.py b/minor_project/lib/python3.6/site-packages/chardet/euctwfreq.py new file mode 100644 index 0000000..ed7a995 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/euctwfreq.py @@ -0,0 +1,387 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# EUCTW frequency table +# Converted from big5 work +# by Taiwan's Mandarin Promotion Council +# + +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table , +EUCTW_TABLE_SIZE = 5376 + +EUCTW_CHAR_TO_FREQ_ORDER = ( + 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 +3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 +1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 + 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 +3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 +4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 +7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 + 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 + 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 + 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 +2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 +1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 +3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 + 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 +3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 +2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 + 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 +3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 +1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 +7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 + 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 +7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 +1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 + 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 + 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 +3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 +3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 + 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 +2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 +2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 + 314,2615,2775,4308,2330,2331, 
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 +7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 +7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 +1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 +2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 +3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 +2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 +3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 +7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 +3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 +1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 +1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 +7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +) + diff --git a/minor_project/lib/python3.6/site-packages/chardet/euctwprober.py b/minor_project/lib/python3.6/site-packages/chardet/euctwprober.py new file mode 100644 index 0000000..35669cc --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/euctwprober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/minor_project/lib/python3.6/site-packages/chardet/gb2312freq.py b/minor_project/lib/python3.6/site-packages/chardet/gb2312freq.py new file mode 100644 index 0000000..697837b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/gb2312freq.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +GB2312_CHAR_TO_FREQ_ORDER = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, +1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) + diff --git a/minor_project/lib/python3.6/site-packages/chardet/gb2312prober.py b/minor_project/lib/python3.6/site-packages/chardet/gb2312prober.py new file mode 100644 index 0000000..8446d2d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/gb2312prober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/minor_project/lib/python3.6/site-packages/chardet/hebrewprober.py b/minor_project/lib/python3.6/site-packages/chardet/hebrewprober.py new file mode 100644 index 0000000..b0e1bf4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/hebrewprober.py @@ -0,0 +1,292 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. Meaning that for the +# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters +# (including final letters). 
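(Editorial aside — an illustrative sketch, not part of the vendored chardet file being added above and below.) The "identical code points" claim in the preceding comment is easy to verify with Python's built-in codecs: the 27 Hebrew letters, final forms included, occupy 0xE0-0xFA in both ISO-8859-8 and windows-1255, so any byte in that range decodes to the same character under either encoding.

    # Quick check of the shared Hebrew code points (stdlib codecs only; editorial sketch).
    for byte in range(0xE0, 0xFB):  # aleph .. tav, including the five final forms
        iso = bytes([byte]).decode("ISO-8859-8")
        win = bytes([byte]).decode("windows-1255")
        assert iso == win, hex(byte)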
+# +# The dominant difference between these charsets is their directionality. +# "Visual" directionality means that the text is ordered as if the renderer is +# not aware of a BIDI rendering algorithm. The renderer sees the text and +# draws it from left to right. The text itself when ordered naturally is read +# backwards. A buffer of Visual Hebrew generally looks like so: +# "[last word of first line spelled backwards] [whole line ordered backwards +# and spelled backwards] [first word of first line spelled backwards] +# [end of line] [last word of second line] ... etc' " +# adding punctuation marks, numbers and English text to visual text is +# naturally also "visual" and from left to right. +# +# "Logical" directionality means the text is ordered "naturally" according to +# the order it is read. It is the responsibility of the renderer to display +# the text from right to left. A BIDI algorithm is used to place general +# punctuation marks, numbers and English text in the text. +# +# Texts in x-mac-hebrew are almost impossible to find on the Internet. From +# what little evidence I could find, it seems that its general directionality +# is Logical. +# +# To sum up all of the above, the Hebrew probing mechanism knows about two +# charsets: +# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are +# backwards while line order is natural. For charset recognition purposes +# the line order is unimportant (In fact, for this implementation, even +# word order is unimportant). +# Logical Hebrew - "windows-1255" - normal, naturally ordered text. +# +# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be +# specifically identified. +# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew +# that contain special punctuation marks or diacritics is displayed with +# some unconverted characters showing as question marks. This problem might +# be corrected using another model prober for x-mac-hebrew. Due to the fact +# that x-mac-hebrew texts are so rare, writing another model prober isn't +# worth the effort and performance hit. +# +#### The Prober #### +# +# The prober is divided between two SBCharSetProbers and a HebrewProber, +# all of which are managed, created, fed data, inquired and deleted by the +# SBCSGroupProber. The two SBCharSetProbers identify that the text is in +# fact some kind of Hebrew, Logical or Visual. The final decision about which +# one is it is made by the HebrewProber by combining final-letter scores +# with the scores of the two SBCharSetProbers to produce a final answer. +# +# The SBCSGroupProber is responsible for stripping the original text of HTML +# tags, English characters, numbers, low-ASCII punctuation characters, spaces +# and new lines. It reduces any sequence of such characters to a single space. +# The buffer fed to each prober in the SBCS group prober is pure text in +# high-ASCII. +# The two SBCharSetProbers (model probers) share the same language model: +# Win1255Model. +# The first SBCharSetProber uses the model normally as any other +# SBCharSetProber does, to recognize windows-1255, upon which this model was +# built. The second SBCharSetProber is told to make the pair-of-letter +# lookup in the language model backwards. This in practice exactly simulates +# a visual Hebrew model using the windows-1255 logical Hebrew model. +# +# The HebrewProber is not using any language model. All it does is look for +# final-letter evidence suggesting the text is either logical Hebrew or visual +# Hebrew. 
Disjointed from the model probers, the results of the HebrewProber +# alone are meaningless. HebrewProber always returns 0.00 as confidence +# since it never identifies a charset by itself. Instead, the pointer to the +# HebrewProber is passed to the model probers as a helper "Name Prober". +# When the Group prober receives a positive identification from any prober, +# it asks for the name of the charset identified. If the prober queried is a +# Hebrew model prober, the model prober forwards the call to the +# HebrewProber to make the final decision. In the HebrewProber, the +# decision is made according to the final-letters scores maintained and Both +# model probers scores. The answer is returned in the form of the name of the +# charset identified, either "windows-1255" or "ISO-8859-8". + +class HebrewProber(CharSetProber): + # windows-1255 / ISO-8859-8 code points of interest + FINAL_KAF = 0xea + NORMAL_KAF = 0xeb + FINAL_MEM = 0xed + NORMAL_MEM = 0xee + FINAL_NUN = 0xef + NORMAL_NUN = 0xf0 + FINAL_PE = 0xf3 + NORMAL_PE = 0xf4 + FINAL_TSADI = 0xf5 + NORMAL_TSADI = 0xf6 + + # Minimum Visual vs Logical final letter score difference. + # If the difference is below this, don't rely solely on the final letter score + # distance. + MIN_FINAL_CHAR_DISTANCE = 5 + + # Minimum Visual vs Logical model score difference. + # If the difference is below this, don't rely at all on the model score + # distance. + MIN_MODEL_DISTANCE = 0.01 + + VISUAL_HEBREW_NAME = "ISO-8859-8" + LOGICAL_HEBREW_NAME = "windows-1255" + + def __init__(self): + super(HebrewProber, self).__init__() + self._final_char_logical_score = None + self._final_char_visual_score = None + self._prev = None + self._before_prev = None + self._logical_prober = None + self._visual_prober = None + self.reset() + + def reset(self): + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + # The two last characters seen in the previous buffer, + # mPrev and mBeforePrev are initialized to space in order to simulate + # a word delimiter at the beginning of the data + self._prev = ' ' + self._before_prev = ' ' + # These probers are owned by the group prober. + + def set_model_probers(self, logicalProber, visualProber): + self._logical_prober = logicalProber + self._visual_prober = visualProber + + def is_final(self, c): + return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, + self.FINAL_PE, self.FINAL_TSADI] + + def is_non_final(self, c): + # The normal Tsadi is not a good Non-Final letter due to words like + # 'lechotet' (to chat) containing an apostrophe after the tsadi. This + # apostrophe is converted to a space in FilterWithoutEnglishLetters + # causing the Non-Final tsadi to appear at an end of a word even + # though this is not the case in the original text. + # The letters Pe and Kaf rarely display a related behavior of not being + # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' + # for example legally end with a Non-Final Pe or Kaf. However, the + # benefit of these letters as Non-Final letters outweighs the damage + # since these words are quite rare. + return c in [self.NORMAL_KAF, self.NORMAL_MEM, + self.NORMAL_NUN, self.NORMAL_PE] + + def feed(self, byte_str): + # Final letter analysis for logical-visual decision. + # Look for evidence that the received buffer is either logical Hebrew + # or visual Hebrew. + # The following cases are checked: + # 1) A word longer than 1 letter, ending with a final letter. 
This is + # an indication that the text is laid out "naturally" since the + # final letter really appears at the end. +1 for logical score. + # 2) A word longer than 1 letter, ending with a Non-Final letter. In + # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, + # should not end with the Non-Final form of that letter. Exceptions + # to this rule are mentioned above in isNonFinal(). This is an + # indication that the text is laid out backwards. +1 for visual + # score + # 3) A word longer than 1 letter, starting with a final letter. Final + # letters should not appear at the beginning of a word. This is an + # indication that the text is laid out backwards. +1 for visual + # score. + # + # The visual score and logical score are accumulated throughout the + # text and are finally checked against each other in GetCharSetName(). + # No checking for final letters in the middle of words is done since + # that case is not an indication for either Logical or Visual text. + # + # We automatically filter out all 7-bit characters (replace them with + # spaces) so the word boundary detection works properly. [MAP] + + if self.state == ProbingState.NOT_ME: + # Both model probers say it's not them. No reason to continue. + return ProbingState.NOT_ME + + byte_str = self.filter_high_byte_only(byte_str) + + for cur in byte_str: + if cur == ' ': + # We stand on a space - a word just ended + if self._before_prev != ' ': + # next-to-last char was not a space so self._prev is not a + # 1 letter word + if self.is_final(self._prev): + # case (1) [-2:not space][-1:final letter][cur:space] + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + # case (2) [-2:not space][-1:Non-Final letter][ + # cur:space] + self._final_char_visual_score += 1 + else: + # Not standing on a space + if ((self._before_prev == ' ') and + (self.is_final(self._prev)) and (cur != ' ')): + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + # Forever detecting, till the end or until both model probers return + # ProbingState.NOT_ME (handled above) + return ProbingState.DETECTING + + @property + def charset_name(self): + # Make the decision: is it Logical or Visual? + # If the final letter score distance is dominant enough, rely on it. + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # It's not dominant enough, try to rely on the model scores instead. + modelsub = (self._logical_prober.get_confidence() + - self._visual_prober.get_confidence()) + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # Still no good, back to final letter distance, maybe it'll save the + # day. + if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + + # (finalsub > 0 - Logical) or (don't know what to do) default to + # Logical. + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return 'Hebrew' + + @property + def state(self): + # Remain active as long as any of the model probers are active. 
+ if (self._logical_prober.state == ProbingState.NOT_ME) and \ + (self._visual_prober.state == ProbingState.NOT_ME): + return ProbingState.NOT_ME + return ProbingState.DETECTING diff --git a/minor_project/lib/python3.6/site-packages/chardet/jisfreq.py b/minor_project/lib/python3.6/site-packages/chardet/jisfreq.py new file mode 100644 index 0000000..83fc082 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/jisfreq.py @@ -0,0 +1,325 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology +# +# Japanese frequency table, applied to both S-JIS and EUC-JP +# They are sorted in order. 
+ +# 128 --> 0.77094 +# 256 --> 0.85710 +# 512 --> 0.92635 +# 1024 --> 0.97130 +# 2048 --> 0.99431 +# +# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 +# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 +# +# Typical Distribution Ratio, 25% of IDR + +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 + +# Char to FreqOrder table , +JIS_TABLE_SIZE = 4368 + +JIS_CHAR_TO_FREQ_ORDER = ( + 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 +3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 +1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 +2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 +2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 +5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 +1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 +5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 +5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 +5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 +5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 +5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 +5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 +1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 +1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 +1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 +2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 +3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 +3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 + 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 + 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 +1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 + 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 +5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 + 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 + 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 + 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 + 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 + 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 +5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 +5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 +5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 +5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 +5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) + + diff --git a/minor_project/lib/python3.6/site-packages/chardet/jpcntx.py b/minor_project/lib/python3.6/site-packages/chardet/jpcntx.py new file mode 100644 index 0000000..20044e4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/jpcntx.py @@ -0,0 +1,233 @@ +######################## BEGIN 
LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), +(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), +(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), +(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), +(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), +(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), +(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), +(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), +(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), +(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), +(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), +(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), +(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), +) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 # total sequence received + # category counters, each integer counts sequence in its category + self._rel_sample = [0] * self.NUM_OF_CATEGORY + # if last byte in current buffer is not the last byte of a character, + # we need to know how many bytes to skip in next buffer + self._need_to_skip_char_num = 0 + self._last_char_order = -1 # The order of previous char + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + + # The buffer we got is byte oriented, and a character may span in more than one + # buffers. In case the last one or two byte in last buffer is not + # complete, we record how many byte needed to complete that character + # and skip these bytes here. We can choose to record those bytes as + # well and analyse the character once it is complete, but since a + # character will not make much difference, by simply skipping + # this character will simply our logic and improve performance. 
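# --- Illustrative note (editor's sketch, not part of the original file) -----
# Worked example of the skip logic described in the comment above, assuming a
# call feed(byte_str, num_bytes=10) where the character starting at offset 9
# is two bytes long: get_order() reports char_len == 2, so i becomes 11 > 10,
# _need_to_skip_char_num is set to 1, and _last_char_order is reset to -1;
# the next feed() call then begins scanning at offset 1, just past the byte
# that completes the split character.
# For each fully contained character, get_order() (as overridden by the
# SJIS/EUC-JP subclasses further down) returns its hiragana index or -1.
# Whenever two indexed characters occur in a row, the pair's frequency
# category jp2CharContext[previous][current] is tallied in _rel_sample, and
# get_confidence() later reports the fraction of observed pairs that fall
# outside category 0, once more than MINIMUM_DATA_THRESHOLD pairs are seen.
# ----------------------------------------------------------------------------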
+ i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i:i + 2]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + else: + if (order != -1) and (self._last_char_order != -1): + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + # This is just one way to calculate confidence. It works well for me. + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + else: + return self.DONT_KNOW + + def get_order(self, byte_str): + return -1, 1 + +class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): + char_len = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self._charset_name = "CP932" + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 202) and (0x9F <= second_char <= 0xF1): + return second_char - 0x9F, char_len + + return -1, char_len + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): + char_len = 2 + elif first_char == 0x8F: + char_len = 3 + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): + return second_char - 0xA1, char_len + + return -1, char_len + + diff --git a/minor_project/lib/python3.6/site-packages/chardet/langbulgarianmodel.py b/minor_project/lib/python3.6/site-packages/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..561bfd9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/langbulgarianmodel.py @@ -0,0 +1,4650 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +BULGARIAN_LANG_MODEL = { + 63: { # 'e' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 1, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 0, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, 
# 'ш' + 24: 1, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 45: { # '\xad' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 1, # 'М' + 36: 0, # 'Ð' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 31: { # 'Ð' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 2, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 1, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Ð' + 41: 1, # 'О' + 30: 2, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 2, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 0, # 'и' + 26: 2, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 1, # 'у' + 29: 2, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 32: { # 'Б' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 2, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 1, # 'Щ' + 61: 2, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 35: { # 'Ð’' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 0, # 'Ð¥' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 2, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 
0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 2, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 43: { # 'Г' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 1, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 37: { # 'Д' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 2, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 2, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 44: { # 'Е' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 2, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Ð¥' + 53: 2, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 0, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 1, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 55: { # 'Ж' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, 
# 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 47: { # 'З' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Ð' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 40: { # 'И' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 2, # 'М' + 36: 2, # 'Ð' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 2, # 'Я' + 1: 1, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 3, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 0, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 59: { # 'Й' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 
'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 33: { # 'К' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 2, # 'Ð' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 46: { # 'Л' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 2, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 38: { # 'М' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 2, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 36: { # 'Ð' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 2, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 
'б' + 9: 0, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 2, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 41: { # 'О' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 2, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 2, # 'М' + 36: 2, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 0, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 1, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 1, # 'ц' + 21: 2, # 'ч' + 27: 0, # 'ш' + 24: 2, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 30: { # 'П' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 2, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 2, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 39: { # 'Р' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 2, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 3, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 28: { # 'С' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 3, # 'Ð' + 32: 2, # 'Б' + 35: 2, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' 
+ 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 2, # 'у' + 29: 2, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 34: { # 'Т' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 2, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 51: { # 'У' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 2, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 48: { # 'Ф' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 
21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 49: { # 'Ð¥' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 53: { # 'Ц' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 50: { # 'Ч' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 54: { # 'Ш' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 
18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 57: { # 'Щ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 61: { # 'Ъ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 60: { # 'Ю' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 0, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 1, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 0, # 'е' + 23: 2, # 'ж' + 15: 1, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 56: { # 'Я' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 
0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 1, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 1, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 1: { # 'а' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 3, # 'у' + 29: 3, # 'Ñ„' + 25: 3, # 'Ñ…' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 18: { # 'б' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 3, # 'у' + 29: 0, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 3, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 9: { # 'в' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'Ð' + 32: 1, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 0, # 'в' + 20: 2, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 2, # 
'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 20: { # 'г' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 3, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 11: { # 'д' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 3, # 'у' + 29: 1, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 3: { # 'е' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 2, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 2, # 'у' + 29: 3, # 'Ñ„' + 25: 3, # 'Ñ…' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 23: { # 'ж' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 
'а' + 18: 3, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 15: { # 'з' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 3, # 'у' + 29: 1, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 2: { # 'и' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 1, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 2, # 'у' + 29: 3, # 'Ñ„' + 25: 3, # 'Ñ…' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 2, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 26: { # 'й' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 2, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 1, # 'у' + 29: 2, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 12: { # 'к' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 
40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 3, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 3, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 10: { # 'л' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 1, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 2, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 3, # 'у' + 29: 2, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ÑŠ' + 52: 2, # 'ÑŒ' + 42: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 14: { # 'м' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 3, # 'у' + 29: 2, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 6: { # 'н' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 2, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 2, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 3, # 'у' + 29: 3, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 
3, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ÑŠ' + 52: 2, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 4: { # 'о' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 2, # 'у' + 29: 3, # 'Ñ„' + 25: 3, # 'Ñ…' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 13: { # 'п' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 3, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 2, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 7: { # 'Ñ€' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 1, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 3, # 'у' + 29: 2, # 'Ñ„' + 25: 3, # 'Ñ…' + 22: 3, # 'ц' + 21: 2, # 'ч' + 27: 3, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 8: { # 'Ñ' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, 
# 'а' + 18: 2, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 3, # 'у' + 29: 2, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 2, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ÑŠ' + 52: 2, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 5: { # 'Ñ‚' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 3, # 'у' + 29: 1, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 2, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 3, # 'ÑŠ' + 52: 2, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 19: { # 'у' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 2, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 1, # 'у' + 29: 2, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 2, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 29: { # 'Ñ„' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 2, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 25: { # 'Ñ…' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 
'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 3, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 22: { # 'ц' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 2, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 2, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 21: { # 'ч' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 3, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 27: { # 'ш' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 2, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 2, # 'у' + 29: 1, # 'Ñ„' + 25: 0, # 'Ñ…' 
+ 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 24: { # 'щ' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 3, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 1, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 2, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 17: { # 'ÑŠ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 1, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 1, # 'у' + 29: 1, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 2, # 'ш' + 24: 3, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 52: { # 'ÑŒ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 1, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 42: { # 'ÑŽ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 
'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 1, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 1, # 'е' + 23: 2, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 1, # 'о' + 13: 1, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 1, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 2, # 'ц' + 21: 3, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 16: { # 'Ñ' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 3, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 1, # 'и' + 26: 2, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 1, # 'о' + 13: 2, # 'п' + 7: 2, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 1, # 'у' + 29: 1, # 'Ñ„' + 25: 3, # 'Ñ…' + 22: 2, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 2, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 58: { # 'Ñ”' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 62: { # 'â„–' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word 
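The tables in this module feed chardet's single-byte detection: each incoming byte is first translated to a frequency "order" through a char-to-order map (the ISO-8859-5 and windows-1251 maps defined below), and consecutive orders are then looked up in BULGARIAN_LANG_MODEL, whose values use the 0-3 scale (0 negative, 1 unlikely, 2 likely, 3 positive). The sketch below is only a minimal illustration of that idea, not chardet's actual SingleByteCharSetProber; score_sequence and SAMPLE_SIZE = 64 are assumptions introduced here for the example.

# Minimal sketch (assumed behaviour, not chardet's real prober): count how many
# consecutive frequent-letter pairs in `data` the bigram table rates as
# "likely" (2) or "positive" (3).

SAMPLE_SIZE = 64  # assumption: only the most frequent letters carry bigram statistics

def score_sequence(data, char_to_order_map, language_model):
    """Return (favourable_pairs, total_pairs) for consecutive frequent letters."""
    favourable = 0
    total = 0
    prev_order = None
    for byte in data:                              # ints 0-255 when `data` is a bytes object
        order = char_to_order_map.get(byte, 255)   # 255 = character not seen in training
        if order < SAMPLE_SIZE:                    # a frequent letter
            if prev_order is not None:
                total += 1
                if language_model.get(prev_order, {}).get(order, 0) >= 2:
                    favourable += 1
            prev_order = order
        else:                                      # digit / punctuation / control: break the pair
            prev_order = None
    return favourable, total

In this file such a function would be called with one of the char-to-order maps defined further down together with BULGARIAN_LANG_MODEL, and the resulting favourable/total ratio compared against the model's typical_positive_ratio (0.969392 here) as one of the detection signals.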
+# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +ISO_8859_5_BULGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 77, # 'A' + 66: 90, # 'B' + 67: 99, # 'C' + 68: 100, # 'D' + 69: 72, # 'E' + 70: 109, # 'F' + 71: 107, # 'G' + 72: 101, # 'H' + 73: 79, # 'I' + 74: 185, # 'J' + 75: 81, # 'K' + 76: 102, # 'L' + 77: 76, # 'M' + 78: 94, # 'N' + 79: 82, # 'O' + 80: 110, # 'P' + 81: 186, # 'Q' + 82: 108, # 'R' + 83: 91, # 'S' + 84: 74, # 'T' + 85: 119, # 'U' + 86: 84, # 'V' + 87: 96, # 'W' + 88: 111, # 'X' + 89: 187, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 65, # 'a' + 98: 69, # 'b' + 99: 70, # 'c' + 100: 66, # 'd' + 101: 63, # 'e' + 102: 68, # 'f' + 103: 112, # 'g' + 104: 103, # 'h' + 105: 92, # 'i' + 106: 194, # 'j' + 107: 104, # 'k' + 108: 95, # 'l' + 109: 86, # 'm' + 110: 87, # 'n' + 111: 71, # 'o' + 112: 116, # 'p' + 113: 195, # 'q' + 114: 85, # 'r' + 115: 93, # 's' + 116: 97, # 't' + 117: 113, # 'u' + 118: 196, # 'v' + 119: 197, # 'w' + 120: 198, # 'x' + 121: 199, # 'y' + 122: 200, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 194, # '\x80' + 129: 195, # '\x81' + 130: 196, # '\x82' + 131: 197, # '\x83' + 132: 198, # '\x84' + 133: 199, # '\x85' + 134: 200, # '\x86' + 135: 201, # '\x87' + 136: 202, # '\x88' + 137: 203, # '\x89' + 138: 204, # '\x8a' + 139: 205, # '\x8b' + 140: 206, # '\x8c' + 141: 207, # '\x8d' + 142: 208, # '\x8e' + 143: 209, # '\x8f' + 144: 210, # '\x90' + 145: 211, # '\x91' + 146: 212, # '\x92' + 147: 213, # '\x93' + 148: 214, # '\x94' + 149: 215, # '\x95' + 150: 216, # '\x96' + 151: 217, # '\x97' + 152: 218, # '\x98' + 153: 219, # '\x99' + 154: 220, # '\x9a' + 155: 221, # '\x9b' + 156: 222, # '\x9c' + 157: 223, # '\x9d' + 158: 224, # '\x9e' + 159: 225, # '\x9f' + 160: 81, # '\xa0' + 161: 226, # 'Ð' + 162: 227, # 'Ђ' + 163: 228, # 'Ѓ' + 164: 229, # 'Є' + 165: 230, # 'Ð…' + 166: 105, # 'І' + 167: 231, # 'Ї' + 168: 232, # 'Ј' + 169: 233, # 'Љ' + 170: 234, # 'Њ' + 171: 235, # 'Ћ' + 172: 236, # 'ÐŒ' + 173: 45, # '\xad' + 174: 237, # 'ÐŽ' + 175: 238, # 'Ð' + 176: 31, # 'Ð' + 177: 32, # 'Б' + 178: 35, # 'Ð’' + 179: 43, # 'Г' + 180: 37, # 'Д' + 181: 44, # 'Е' + 182: 55, # 'Ж' + 183: 47, # 'З' + 184: 40, # 'И' + 185: 59, # 'Й' + 186: 33, # 'К' + 187: 46, # 'Л' + 188: 
38, # 'М' + 189: 36, # 'Ð' + 190: 41, # 'О' + 191: 30, # 'П' + 192: 39, # 'Р' + 193: 28, # 'С' + 194: 34, # 'Т' + 195: 51, # 'У' + 196: 48, # 'Ф' + 197: 49, # 'Ð¥' + 198: 53, # 'Ц' + 199: 50, # 'Ч' + 200: 54, # 'Ш' + 201: 57, # 'Щ' + 202: 61, # 'Ъ' + 203: 239, # 'Ы' + 204: 67, # 'Ь' + 205: 240, # 'Э' + 206: 60, # 'Ю' + 207: 56, # 'Я' + 208: 1, # 'а' + 209: 18, # 'б' + 210: 9, # 'в' + 211: 20, # 'г' + 212: 11, # 'д' + 213: 3, # 'е' + 214: 23, # 'ж' + 215: 15, # 'з' + 216: 2, # 'и' + 217: 26, # 'й' + 218: 12, # 'к' + 219: 10, # 'л' + 220: 14, # 'м' + 221: 6, # 'н' + 222: 4, # 'о' + 223: 13, # 'п' + 224: 7, # 'Ñ€' + 225: 8, # 'Ñ' + 226: 5, # 'Ñ‚' + 227: 19, # 'у' + 228: 29, # 'Ñ„' + 229: 25, # 'Ñ…' + 230: 22, # 'ц' + 231: 21, # 'ч' + 232: 27, # 'ш' + 233: 24, # 'щ' + 234: 17, # 'ÑŠ' + 235: 75, # 'Ñ‹' + 236: 52, # 'ÑŒ' + 237: 241, # 'Ñ' + 238: 42, # 'ÑŽ' + 239: 16, # 'Ñ' + 240: 62, # 'â„–' + 241: 242, # 'Ñ‘' + 242: 243, # 'Ñ’' + 243: 244, # 'Ñ“' + 244: 58, # 'Ñ”' + 245: 245, # 'Ñ•' + 246: 98, # 'Ñ–' + 247: 246, # 'Ñ—' + 248: 247, # 'ј' + 249: 248, # 'Ñ™' + 250: 249, # 'Ñš' + 251: 250, # 'Ñ›' + 252: 251, # 'Ñœ' + 253: 91, # '§' + 254: 252, # 'Ñž' + 255: 253, # 'ÑŸ' +} + +ISO_8859_5_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5', + language='Bulgarian', + char_to_order_map=ISO_8859_5_BULGARIAN_CHAR_TO_ORDER, + language_model=BULGARIAN_LANG_MODEL, + typical_positive_ratio=0.969392, + keep_ascii_letters=False, + alphabet='ÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрÑтуфхцчшщъьюÑ') + +WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 77, # 'A' + 66: 90, # 'B' + 67: 99, # 'C' + 68: 100, # 'D' + 69: 72, # 'E' + 70: 109, # 'F' + 71: 107, # 'G' + 72: 101, # 'H' + 73: 79, # 'I' + 74: 185, # 'J' + 75: 81, # 'K' + 76: 102, # 'L' + 77: 76, # 'M' + 78: 94, # 'N' + 79: 82, # 'O' + 80: 110, # 'P' + 81: 186, # 'Q' + 82: 108, # 'R' + 83: 91, # 'S' + 84: 74, # 'T' + 85: 119, # 'U' + 86: 84, # 'V' + 87: 96, # 'W' + 88: 111, # 'X' + 89: 187, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 65, # 'a' + 98: 69, # 'b' + 99: 70, # 'c' + 100: 66, # 'd' + 101: 63, # 'e' + 102: 68, # 'f' + 103: 112, # 'g' + 104: 103, # 'h' + 105: 92, # 'i' + 106: 194, # 'j' + 107: 104, # 'k' + 108: 95, # 'l' + 109: 86, # 'm' + 110: 87, # 'n' + 111: 71, # 'o' + 112: 116, # 'p' + 113: 195, # 'q' + 114: 85, # 'r' + 115: 93, # 's' + 116: 97, # 't' + 117: 113, # 'u' + 118: 196, # 'v' + 119: 197, # 'w' + 120: 198, # 'x' + 121: 199, # 'y' + 122: 200, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 206, # 'Ђ' + 129: 207, # 'Ѓ' + 130: 208, # '‚' + 131: 209, # 'Ñ“' + 132: 210, # '„' + 133: 211, # '…' + 134: 212, # '†' + 135: 213, # '‡' + 136: 120, # '€' + 137: 214, # '‰' + 138: 215, # 'Љ' + 139: 216, # '‹' + 140: 217, # 'Њ' + 141: 218, # 'ÐŒ' + 142: 219, # 'Ћ' + 143: 220, # 'Ð' + 144: 221, # 'Ñ’' + 145: 78, # '‘' + 146: 64, # '’' + 147: 83, # '“' + 148: 121, # 'â€' + 149: 98, # '•' + 150: 117, # '–' + 151: 105, # '—' + 152: 222, # None + 153: 223, # 'â„¢' + 154: 224, # 'Ñ™' + 155: 225, # '›' + 156: 226, # 'Ñš' + 157: 227, # 'Ñœ' + 158: 228, # 'Ñ›' + 159: 229, # 'ÑŸ' + 160: 88, # '\xa0' + 161: 230, # 'ÐŽ' + 162: 231, # 'Ñž' + 163: 232, # 'Ј' + 164: 233, # '¤' + 165: 122, # 'Ò' + 166: 89, # '¦' + 167: 106, # '§' + 168: 234, # 'Ð' + 169: 235, # '©' + 170: 236, # 'Є' + 171: 237, # '«' + 172: 238, # '¬' + 173: 45, # '\xad' + 174: 239, # '®' + 175: 240, # 'Ї' + 176: 73, # '°' + 177: 80, # '±' + 178: 118, # 'І' + 179: 114, # 'Ñ–' + 180: 241, # 'Ò‘' + 181: 242, # 'µ' + 182: 243, # '¶' + 183: 244, # '·' + 184: 245, # 'Ñ‘' + 185: 62, # 'â„–' + 186: 58, # 'Ñ”' + 187: 246, # '»' + 188: 247, # 'ј' + 189: 248, # 'Ð…' + 190: 249, # 'Ñ•' + 191: 250, # 'Ñ—' + 192: 31, # 'Ð' + 193: 32, # 'Б' + 194: 35, # 'Ð’' + 195: 43, # 'Г' + 196: 37, # 'Д' + 197: 44, # 'Е' + 198: 55, # 'Ж' + 199: 47, # 'З' + 200: 40, # 'И' + 201: 59, # 'Й' + 202: 33, # 'К' + 203: 46, # 'Л' + 204: 38, # 'М' + 205: 36, # 'Ð' + 206: 41, # 'О' + 207: 30, # 'П' + 208: 39, # 'Р' + 209: 28, # 'С' + 210: 34, # 'Т' + 211: 51, # 'У' + 212: 48, # 'Ф' + 213: 49, # 'Ð¥' + 214: 53, # 'Ц' + 215: 50, # 'Ч' + 216: 54, # 'Ш' + 217: 57, # 'Щ' + 218: 61, # 'Ъ' + 219: 251, # 'Ы' + 220: 67, # 'Ь' + 221: 252, # 'Э' + 222: 60, # 'Ю' + 223: 56, # 'Я' + 224: 1, # 'а' + 225: 18, # 'б' + 226: 9, # 'в' + 227: 20, # 'г' + 228: 11, # 'д' + 229: 3, # 'е' + 230: 23, # 'ж' + 231: 15, # 'з' + 232: 2, # 'и' + 233: 26, # 'й' + 234: 12, # 'к' + 235: 10, # 'л' + 236: 14, # 'м' + 237: 6, # 'н' + 238: 4, # 'о' + 239: 13, # 'п' + 240: 7, # 'Ñ€' + 241: 8, # 'Ñ' + 242: 5, # 'Ñ‚' + 243: 19, # 'у' + 244: 29, # 'Ñ„' + 245: 25, # 'Ñ…' + 246: 22, # 'ц' + 247: 21, # 'ч' + 248: 27, # 'ш' + 249: 24, # 'щ' + 250: 17, # 'ÑŠ' + 251: 75, # 'Ñ‹' + 252: 52, # 'ÑŒ' + 253: 253, # 'Ñ' + 254: 42, # 'ÑŽ' + 255: 16, # 'Ñ' +} + +WINDOWS_1251_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251', + language='Bulgarian', + char_to_order_map=WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER, + 
language_model=BULGARIAN_LANG_MODEL, + typical_positive_ratio=0.969392, + keep_ascii_letters=False, + alphabet='ÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрÑтуфхцчшщъьюÑ') + diff --git a/minor_project/lib/python3.6/site-packages/chardet/langgreekmodel.py b/minor_project/lib/python3.6/site-packages/chardet/langgreekmodel.py new file mode 100644 index 0000000..02b94de --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/langgreekmodel.py @@ -0,0 +1,4398 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +GREEK_LANG_MODEL = { + 60: { # 'e' + 60: 2, # 'e' + 55: 1, # 'o' + 58: 2, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 55: { # 'o' + 60: 0, # 'e' + 55: 2, # 'o' + 58: 2, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 1, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 58: { # 't' + 60: 2, # 'e' + 55: 1, # 'o' + 58: 1, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 1, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 36: { # '·' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 
61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 61: { # 'Ά' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 1, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 1, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 46: { # 'Έ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 2, # 'β' + 20: 2, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 1, # 'σ' + 2: 2, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 54: { # 'ÎŒ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 2, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 
0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 31: { # 'Α' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 2, # 'Î’' + 43: 2, # 'Γ' + 41: 1, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 2, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Î' + 59: 1, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Î¥' + 56: 2, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 2, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 1, # 'θ' + 5: 0, # 'ι' + 11: 2, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 2, # 'Ï‚' + 7: 2, # 'σ' + 2: 0, # 'Ï„' + 12: 3, # 'Ï…' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 51: { # 'Î’' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 1, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 43: { # 'Γ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 1, # 'Α' + 51: 0, # 'Î’' + 43: 2, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 1, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 2, # 'Î¥' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 41: { # 'Δ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 
11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 1, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 34: { # 'Ε' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 0, # 'Î’' + 43: 2, # 'Γ' + 41: 2, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Î' + 59: 1, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Î¥' + 56: 0, # 'Φ' + 50: 2, # 'Χ' + 57: 2, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 1, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 1, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 2, # 'σ' + 2: 2, # 'Ï„' + 12: 2, # 'Ï…' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 1, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 40: { # 'Η' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 1, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 2, # 'Θ' + 47: 0, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 1, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 52: { # 'Θ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 1, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 1, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 47: { # 'Ι' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 1, # 'Î’' + 43: 1, # 'Γ' + 41: 2, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Î¥' + 56: 2, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 
'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 2, # 'σ' + 2: 1, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 1, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 44: { # 'Κ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 1, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 2, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 1, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 53: { # 'Λ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 2, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 2, # 'Σ' + 33: 0, # 'Τ' + 45: 2, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 1, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 38: { # 'Μ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 2, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 2, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 49: { # 'Î' + 60: 2, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 
'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 1, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 1, # 'ω' + 19: 2, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 59: { # 'Ξ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 39: { # 'Ο' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 1, # 'Î’' + 43: 2, # 'Γ' + 41: 2, # 'Δ' + 34: 2, # 'Ε' + 40: 1, # 'Η' + 52: 2, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 2, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Î¥' + 56: 2, # 'Φ' + 50: 2, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 2, # 'Ï„' + 12: 2, # 'Ï…' + 28: 1, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 35: { # 'Π' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 2, # 'Λ' + 38: 1, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 1, # 'έ' + 22: 1, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 48: { # 'Ρ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 
'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 0, # 'Î’' + 43: 1, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 2, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 1, # 'Î¥' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 1, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 1, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 37: { # 'Σ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 1, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 2, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 2, # 'Î¥' + 56: 0, # 'Φ' + 50: 2, # 'Χ' + 57: 2, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 2, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 2, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 2, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 33: { # 'Τ' + 60: 0, # 'e' + 55: 1, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 2, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 2, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 1, # 'Τ' + 45: 1, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 2, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 2, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 2, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 45: { # 'Î¥' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 2, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 2, # 'Η' + 52: 2, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 2, # 'Μ' + 49: 2, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 2, # 'Π' + 48: 1, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' 
+ 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 56: { # 'Φ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 1, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 1, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 2, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 2, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 1, # 'Ï' + 27: 1, # 'ÏŽ' + }, + 50: { # 'Χ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 1, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 2, # 'Ε' + 40: 2, # 'Η' + 52: 0, # 'Θ' + 47: 2, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 1, # 'Î' + 59: 0, # 'Ξ' + 39: 1, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 1, # 'Χ' + 57: 1, # 'Ω' + 17: 2, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 2, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 2, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 57: { # 'Ω' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 1, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 1, # 'Λ' + 38: 0, # 'Μ' + 49: 2, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 2, # 'Ρ' + 37: 2, # 'Σ' + 33: 2, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'Ï€' + 8: 2, # 'Ï' + 14: 2, # 'Ï‚' + 7: 2, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 17: { # 'ά' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 3, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 
'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 3, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 18: { # 'έ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 3, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 22: { # 'ή' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 15: { # 'ί' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 3, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 1, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 1: { # 'α' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 
0, # 'ή' + 15: 3, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 2, # 'ε' + 32: 3, # 'ζ' + 13: 1, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 29: { # 'β' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 2, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 3, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 20: { # 'γ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 21: { # 'δ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 3: { # 'ε' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 
0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 2, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 2, # 'ε' + 32: 2, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 32: { # 'ζ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 1, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 13: { # 'η' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 25: { # 'θ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 1, # 'λ' + 10: 3, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 5: { # 'ι' + 60: 0, # 'e' + 55: 1, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 
0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 11: { # 'κ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 2, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 16: { # 'λ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 1, # 'β' + 20: 2, # 'γ' + 21: 1, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 10: { # 'μ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 3, # 'φ' + 23: 0, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 
'ω' + 19: 3, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 6: { # 'ν' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 1, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 30: { # 'ξ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 1, # 'ÏŽ' + }, + 4: { # 'ο' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 2, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 1, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 9: { # 'Ï€' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 3, # 'λ' + 10: 0, # 'μ' + 6: 2, # 
'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 2, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 8: { # 'Ï' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 1, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 3, # 'ο' + 9: 2, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 2, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 14: { # 'Ï‚' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 7: { # 'σ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 2: { # 'Ï„' + 60: 0, # 'e' + 55: 2, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 
3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 12: { # 'Ï…' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 2, # 'ε' + 32: 2, # 'ζ' + 13: 2, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 28: { # 'φ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 1, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 23: { # 'χ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 42: { # 'ψ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 
35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 1, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 2, # 'Ï„' + 12: 1, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 24: { # 'ω' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 1, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 19: { # 'ÏŒ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 1, # 'ε' + 32: 2, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 1, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 26: { # 'Ï' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 2, # 'β' + 20: 2, # 'γ' + 21: 1, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 27: { # 'ÏŽ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 
'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 1, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 1, # 'η' + 25: 2, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 1, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 1, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1253_GREEK_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 82, # 'A' + 66: 100, # 'B' + 67: 104, # 'C' + 68: 94, # 'D' + 69: 98, # 'E' + 70: 101, # 'F' + 71: 116, # 'G' + 72: 102, # 'H' + 73: 111, # 'I' + 74: 187, # 'J' + 75: 117, # 'K' + 76: 92, # 'L' + 77: 88, # 'M' + 78: 113, # 'N' + 79: 85, # 'O' + 80: 79, # 'P' + 81: 118, # 'Q' + 82: 105, # 'R' + 83: 83, # 'S' + 84: 67, # 'T' + 85: 114, # 'U' + 86: 119, # 'V' + 87: 95, # 'W' + 88: 99, # 'X' + 89: 109, # 'Y' + 90: 188, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 72, # 'a' + 98: 70, # 'b' + 99: 80, # 'c' + 100: 81, # 'd' + 101: 60, # 'e' + 102: 96, # 'f' + 103: 93, # 'g' + 104: 89, # 'h' + 105: 68, # 'i' + 106: 120, # 'j' + 107: 97, # 'k' + 108: 77, # 'l' + 109: 86, # 'm' + 110: 69, # 'n' + 111: 55, # 'o' + 112: 78, # 'p' + 113: 115, # 'q' + 114: 65, # 'r' + 115: 66, # 's' + 116: 58, # 't' + 117: 76, # 'u' + 118: 106, # 'v' + 119: 103, # 'w' + 120: 87, # 'x' + 121: 107, # 'y' + 122: 112, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 255, # '€' + 129: 255, # None + 130: 255, # '‚' + 131: 255, # 'Æ’' + 132: 255, # '„' + 133: 255, # '…' + 134: 255, # '†' + 135: 255, # '‡' + 136: 255, # None + 137: 255, # '‰' + 138: 255, # None + 139: 255, # '‹' + 140: 255, # None + 141: 255, # None + 142: 255, # None + 143: 255, # None + 144: 255, # None + 145: 255, # '‘' + 146: 255, # '’' + 147: 255, # '“' + 148: 255, # 'â€' + 149: 255, # '•' + 150: 255, # '–' + 151: 255, # '—' + 152: 255, # None + 153: 255, # 'â„¢' + 154: 255, # None + 155: 255, # '›' + 156: 255, # None + 157: 255, # None + 158: 255, # None + 159: 255, # None + 160: 253, # '\xa0' + 161: 233, # 'Î…' + 162: 61, # 'Ά' + 163: 253, # '£' + 164: 253, # '¤' + 165: 253, # 'Â¥' + 166: 253, # '¦' + 167: 253, # '§' + 168: 253, # '¨' + 169: 253, # '©' + 170: 253, # None + 171: 253, # '«' + 172: 253, # '¬' + 173: 74, # '\xad' + 174: 253, # '®' + 175: 253, # '―' + 176: 253, # '°' + 177: 253, # '±' + 178: 253, # '²' + 179: 253, # '³' + 180: 247, # '΄' + 181: 253, # 'µ' + 182: 253, # '¶' + 183: 36, # '·' + 184: 46, # 'Έ' + 185: 71, # 'Ή' + 186: 73, # 'Ί' + 187: 253, # '»' + 188: 54, # 'ÎŒ' + 189: 253, # '½' + 190: 108, # 'ÎŽ' + 191: 123, # 'Î' + 192: 110, # 'Î' + 193: 31, # 'Α' + 194: 51, # 'Î’' + 195: 43, # 'Γ' + 196: 41, # 'Δ' + 197: 34, # 'Ε' + 198: 91, # 'Ζ' + 199: 40, # 'Η' + 200: 52, # 'Θ' + 201: 47, # 'Ι' + 202: 44, # 'Κ' + 203: 53, # 'Λ' + 204: 38, # 'Μ' + 205: 49, # 'Î' + 206: 59, # 'Ξ' + 207: 39, # 'Ο' + 208: 35, # 'Π' + 209: 48, # 'Ρ' + 210: 250, # None + 211: 37, # 'Σ' + 212: 33, # 'Τ' + 213: 45, # 'Î¥' + 214: 56, # 'Φ' + 215: 50, # 'Χ' + 216: 84, # 'Ψ' + 217: 57, # 'Ω' + 218: 120, # 'Ϊ' + 219: 121, # 'Ϋ' + 220: 17, # 'ά' + 221: 18, # 'έ' + 222: 22, # 'ή' + 223: 15, # 'ί' + 224: 124, # 'ΰ' + 225: 1, # 'α' + 226: 29, # 'β' + 227: 20, # 'γ' + 228: 21, # 'δ' + 229: 3, # 'ε' + 230: 32, # 'ζ' + 231: 13, # 'η' + 232: 25, # 'θ' + 233: 5, # 'ι' + 234: 11, # 'κ' + 235: 16, # 'λ' + 236: 10, # 'μ' + 237: 6, # 'ν' + 238: 30, # 'ξ' + 239: 4, # 'ο' + 240: 9, # 'Ï€' + 241: 8, # 'Ï' + 242: 14, # 'Ï‚' + 243: 7, # 'σ' + 244: 2, # 'Ï„' + 245: 12, # 'Ï…' + 246: 28, # 'φ' + 247: 23, # 'χ' + 248: 42, # 'ψ' + 249: 24, # 'ω' + 250: 64, # 'ÏŠ' + 251: 75, # 'Ï‹' + 252: 19, # 'ÏŒ' + 253: 26, # 'Ï' + 254: 27, # 'ÏŽ' + 255: 253, # None +} + +WINDOWS_1253_GREEK_MODEL = SingleByteCharSetModel(charset_name='windows-1253', + language='Greek', + char_to_order_map=WINDOWS_1253_GREEK_CHAR_TO_ORDER, + language_model=GREEK_LANG_MODEL, 
+ typical_positive_ratio=0.982851, + keep_ascii_letters=False, + alphabet='ΆΈΉΊΌΎÎΑΒΓΔΕΖΗΘΙΚΛΜÎΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπÏςστυφχψωόÏÏŽ') + +ISO_8859_7_GREEK_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 82, # 'A' + 66: 100, # 'B' + 67: 104, # 'C' + 68: 94, # 'D' + 69: 98, # 'E' + 70: 101, # 'F' + 71: 116, # 'G' + 72: 102, # 'H' + 73: 111, # 'I' + 74: 187, # 'J' + 75: 117, # 'K' + 76: 92, # 'L' + 77: 88, # 'M' + 78: 113, # 'N' + 79: 85, # 'O' + 80: 79, # 'P' + 81: 118, # 'Q' + 82: 105, # 'R' + 83: 83, # 'S' + 84: 67, # 'T' + 85: 114, # 'U' + 86: 119, # 'V' + 87: 95, # 'W' + 88: 99, # 'X' + 89: 109, # 'Y' + 90: 188, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 72, # 'a' + 98: 70, # 'b' + 99: 80, # 'c' + 100: 81, # 'd' + 101: 60, # 'e' + 102: 96, # 'f' + 103: 93, # 'g' + 104: 89, # 'h' + 105: 68, # 'i' + 106: 120, # 'j' + 107: 97, # 'k' + 108: 77, # 'l' + 109: 86, # 'm' + 110: 69, # 'n' + 111: 55, # 'o' + 112: 78, # 'p' + 113: 115, # 'q' + 114: 65, # 'r' + 115: 66, # 's' + 116: 58, # 't' + 117: 76, # 'u' + 118: 106, # 'v' + 119: 103, # 'w' + 120: 87, # 'x' + 121: 107, # 'y' + 122: 112, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 255, # '\x80' + 129: 255, # '\x81' + 130: 255, # '\x82' + 131: 255, # '\x83' + 132: 255, # '\x84' + 133: 255, # '\x85' + 134: 255, # '\x86' + 135: 255, # '\x87' + 136: 255, # '\x88' + 137: 255, # '\x89' + 138: 255, # '\x8a' + 139: 255, # '\x8b' + 140: 255, # '\x8c' + 141: 255, # '\x8d' + 142: 255, # '\x8e' + 143: 255, # '\x8f' + 144: 255, # '\x90' + 145: 255, # '\x91' + 146: 255, # '\x92' + 147: 255, # '\x93' + 148: 255, # '\x94' + 149: 255, # '\x95' + 150: 255, # '\x96' + 151: 255, # '\x97' + 152: 255, # '\x98' + 153: 255, # '\x99' + 154: 255, # '\x9a' + 155: 255, # '\x9b' + 156: 255, # '\x9c' + 157: 255, # '\x9d' + 158: 255, # '\x9e' + 159: 255, # '\x9f' + 160: 253, # '\xa0' + 161: 233, # '‘' + 162: 90, # '’' + 163: 253, # '£' + 164: 253, # '€' + 165: 253, # '₯' + 166: 253, # '¦' + 167: 253, # '§' + 168: 253, # '¨' + 169: 253, # '©' + 170: 253, # 'ͺ' + 171: 253, # '«' + 172: 253, # '¬' + 173: 74, # '\xad' + 174: 253, # None + 175: 253, # '―' + 176: 253, # '°' + 177: 253, # '±' + 178: 253, # '²' + 179: 253, # '³' + 180: 247, # '΄' + 181: 248, # 'Î…' + 182: 61, # 'Ά' + 183: 36, # '·' + 184: 
46, # 'Έ' + 185: 71, # 'Ή' + 186: 73, # 'Ί' + 187: 253, # '»' + 188: 54, # 'ÎŒ' + 189: 253, # '½' + 190: 108, # 'ÎŽ' + 191: 123, # 'Î' + 192: 110, # 'Î' + 193: 31, # 'Α' + 194: 51, # 'Î’' + 195: 43, # 'Γ' + 196: 41, # 'Δ' + 197: 34, # 'Ε' + 198: 91, # 'Ζ' + 199: 40, # 'Η' + 200: 52, # 'Θ' + 201: 47, # 'Ι' + 202: 44, # 'Κ' + 203: 53, # 'Λ' + 204: 38, # 'Μ' + 205: 49, # 'Î' + 206: 59, # 'Ξ' + 207: 39, # 'Ο' + 208: 35, # 'Π' + 209: 48, # 'Ρ' + 210: 250, # None + 211: 37, # 'Σ' + 212: 33, # 'Τ' + 213: 45, # 'Î¥' + 214: 56, # 'Φ' + 215: 50, # 'Χ' + 216: 84, # 'Ψ' + 217: 57, # 'Ω' + 218: 120, # 'Ϊ' + 219: 121, # 'Ϋ' + 220: 17, # 'ά' + 221: 18, # 'έ' + 222: 22, # 'ή' + 223: 15, # 'ί' + 224: 124, # 'ΰ' + 225: 1, # 'α' + 226: 29, # 'β' + 227: 20, # 'γ' + 228: 21, # 'δ' + 229: 3, # 'ε' + 230: 32, # 'ζ' + 231: 13, # 'η' + 232: 25, # 'θ' + 233: 5, # 'ι' + 234: 11, # 'κ' + 235: 16, # 'λ' + 236: 10, # 'μ' + 237: 6, # 'ν' + 238: 30, # 'ξ' + 239: 4, # 'ο' + 240: 9, # 'Ï€' + 241: 8, # 'Ï' + 242: 14, # 'Ï‚' + 243: 7, # 'σ' + 244: 2, # 'Ï„' + 245: 12, # 'Ï…' + 246: 28, # 'φ' + 247: 23, # 'χ' + 248: 42, # 'ψ' + 249: 24, # 'ω' + 250: 64, # 'ÏŠ' + 251: 75, # 'Ï‹' + 252: 19, # 'ÏŒ' + 253: 26, # 'Ï' + 254: 27, # 'ÏŽ' + 255: 253, # None +} + +ISO_8859_7_GREEK_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-7', + language='Greek', + char_to_order_map=ISO_8859_7_GREEK_CHAR_TO_ORDER, + language_model=GREEK_LANG_MODEL, + typical_positive_ratio=0.982851, + keep_ascii_letters=False, + alphabet='ΆΈΉΊΌΎÎΑΒΓΔΕΖΗΘΙΚΛΜÎΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπÏςστυφχψωόÏÏŽ') + diff --git a/minor_project/lib/python3.6/site-packages/chardet/langhebrewmodel.py b/minor_project/lib/python3.6/site-packages/chardet/langhebrewmodel.py new file mode 100644 index 0000000..40fd674 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/langhebrewmodel.py @@ -0,0 +1,4383 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +HEBREW_LANG_MODEL = { + 50: { # 'a' + 50: 0, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 2, # 'l' + 54: 2, # 'n' + 49: 0, # 'o' + 51: 2, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 1, # '×§' + 7: 0, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 60: { # 'c' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 
20: 0, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 61: { # 'd' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 0, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 42: { # 'e' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 2, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 2, # 'l' + 54: 2, # 'n' + 49: 1, # 'o' + 51: 2, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 1, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 53: { # 'i' + 50: 1, # 'a' + 60: 2, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 0, # 'i' + 56: 1, # 'l' + 54: 2, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 56: { # 'l' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 2, # 'e' + 53: 2, # 'i' + 56: 2, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 
51: 0, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 54: { # 'n' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 49: { # 'o' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 2, # 'n' + 49: 1, # 'o' + 51: 2, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 51: { # 'r' + 50: 2, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 2, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 2, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' 
+ 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 43: { # 's' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 2, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 2, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 44: { # 't' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 0, # 'd' + 42: 2, # 'e' + 53: 2, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 2, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 63: { # 'u' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 34: { # '\xa0' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 1, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 2, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 
0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 1, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 2, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 1, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 55: { # '´' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 1, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 2, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 1, # 'ן' + 12: 1, # '× ' + 19: 1, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 48: { # '¼' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 1, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 39: { # '½' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 
57: { # '¾' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 30: { # 'Ö°' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 2, # '×’' + 16: 2, # 'ד' + 3: 2, # '×”' + 2: 2, # 'ו' + 24: 2, # '×–' + 14: 2, # '×—' + 22: 2, # 'ט' + 1: 2, # '×™' + 25: 2, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 1, # '×' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 2, # '× ' + 19: 2, # 'ס' + 13: 2, # '×¢' + 26: 0, # '×£' + 18: 2, # 'פ' + 27: 0, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 59: { # 'Ö±' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 1, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 2, # 'ל' + 11: 0, # '×' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 41: { # 'Ö²' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 2, # 'ב' + 20: 1, # '×’' + 16: 2, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 1, # 
'ט' + 1: 1, # '×™' + 25: 1, # 'ך' + 15: 1, # '×›' + 4: 2, # 'ל' + 11: 0, # '×' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 2, # '× ' + 19: 1, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 2, # 'צ' + 17: 1, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 33: { # 'Ö´' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 1, # 'Ö´' + 37: 0, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 1, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 2, # 'ב' + 20: 2, # '×’' + 16: 2, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 2, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 2, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 2, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 37: { # 'Öµ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 1, # '×’' + 16: 2, # 'ד' + 3: 2, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 2, # '×—' + 22: 1, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 1, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 1, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 1, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 1, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 36: { # 'Ö¶' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 1, # '×’' + 16: 2, # 'ד' + 3: 2, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 2, # '×—' + 22: 1, # 'ט' + 1: 2, # '×™' + 25: 2, # 'ך' + 15: 1, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 2, # 'ס' + 13: 1, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 2, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 31: { # 'Ö·' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, 
# '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 2, # '×’' + 16: 2, # 'ד' + 3: 2, # '×”' + 2: 1, # 'ו' + 24: 2, # '×–' + 14: 2, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 2, # 'ס' + 13: 2, # '×¢' + 26: 2, # '×£' + 18: 2, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 29: { # 'Ö¸' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 1, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 2, # '×’' + 16: 2, # 'ד' + 3: 3, # '×”' + 2: 2, # 'ו' + 24: 2, # '×–' + 14: 2, # '×—' + 22: 1, # 'ט' + 1: 2, # '×™' + 25: 2, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 1, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 2, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 35: { # 'Ö¹' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 1, # '×’' + 16: 2, # 'ד' + 3: 2, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 1, # '×™' + 25: 1, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 2, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 2, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 62: { # 'Ö»' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 1, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 2, # 'ל' + 11: 1, # '×' + 6: 1, # 'מ' + 23: 1, # 'ן' + 12: 1, # '× ' + 19: 1, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 
7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 28: { # 'Ö¼' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 3, # 'Ö°' + 59: 0, # 'Ö±' + 41: 1, # 'Ö²' + 33: 3, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 3, # 'Ö·' + 29: 3, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 2, # '×' + 45: 1, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 1, # '×’' + 16: 2, # 'ד' + 3: 1, # '×”' + 2: 2, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 2, # '×™' + 25: 2, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 1, # '×' + 6: 2, # 'מ' + 23: 1, # 'ן' + 12: 2, # '× ' + 19: 1, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 1, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 38: { # '×' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 2, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 45: { # 'ׂ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 1, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 1, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 2, # 'ו' + 24: 0, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 1, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 0, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 9: { # '×' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 2, # 'Ö±' + 41: 2, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 
'ׂ' + 9: 2, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 2, # '×¢' + 26: 3, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 8: { # 'ב' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 3, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 1, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 20: { # '×’' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 2, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 1, # 'Ö´' + 37: 1, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 3, # 'ב' + 20: 2, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 2, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 1, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 2, # 'פ' + 27: 1, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 16: { # 'ד' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 1, # '×–' + 14: 2, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 2, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 0, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 3: { # '×”' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' 
+ 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 1, # 'Ö±' + 41: 2, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 3, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 0, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 2: { # 'ו' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 1, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 3, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 3, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 3, # '×£' + 18: 3, # 'פ' + 27: 3, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 24: { # '×–' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 1, # 'Ö²' + 33: 1, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 2, # 'ב' + 20: 2, # '×’' + 16: 2, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 2, # '×—' + 22: 1, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 1, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 1, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 14: { # '×—' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 1, # 'Ö±' + 41: 2, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 3, # 'ב' + 20: 2, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 2, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 2, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 2, # 
'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 1, # '×¢' + 26: 2, # '×£' + 18: 2, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 22: { # 'ט' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 1, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 1, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 1, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 3, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 2, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 3, # 'ר' + 10: 2, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 1: { # '×™' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 3, # '×£' + 18: 3, # 'פ' + 27: 3, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 25: { # 'ך' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 1, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 15: { # '×›' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 
2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 3, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 2, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 2, # '×¢' + 26: 3, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 4: { # 'ל' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 3, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 11: { # '×' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 1, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 0, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 1, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 6: { # 'מ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 0, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, 
# '†' + 40: 1, # '…' + }, + 23: { # 'ן' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 1, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 0, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 1, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 1, # 'ס' + 13: 1, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 1, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 12: { # '× ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 19: { # 'ס' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 1, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 2, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 1, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 3, # '×£' + 18: 3, # 'פ' + 27: 0, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 1, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 13: { # '×¢' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 1, # 'Ö±' + 41: 2, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 
14: 1, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 2, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 2, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 26: { # '×£' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 1, # 'ו' + 24: 0, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 1, # 'ס' + 13: 0, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 1, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 18: { # 'פ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 1, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 2, # 'ב' + 20: 3, # '×’' + 16: 2, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 2, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 27: { # '×¥' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 1, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 21: { # 'צ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' 
+ 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 2, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 1, # '×–' + 14: 3, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 1, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 1, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 0, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 17: { # '×§' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 2, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 1, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 2, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 7: { # 'ר' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 2, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 1, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 3, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 10: { # 'ש' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 1, # 'Ö´' + 37: 1, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 3, # '×' + 45: 2, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 2, # 
'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 5: { # 'ת' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 1, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 2, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 3, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 32: { # '–' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 1, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 0, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 1, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 1, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 52: { # '’' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 1, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 47: { # '“' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 
0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 1, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 1, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 46: { # 'â€' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 1, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 58: { # '†' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 2, # '†' + 40: 0, # '…' + }, + 40: { # '…' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: 
Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1255_HEBREW_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 69, # 'A' + 66: 91, # 'B' + 67: 79, # 'C' + 68: 80, # 'D' + 69: 92, # 'E' + 70: 89, # 'F' + 71: 97, # 'G' + 72: 90, # 'H' + 73: 68, # 'I' + 74: 111, # 'J' + 75: 112, # 'K' + 76: 82, # 'L' + 77: 73, # 'M' + 78: 95, # 'N' + 79: 85, # 'O' + 80: 78, # 'P' + 81: 121, # 'Q' + 82: 86, # 'R' + 83: 71, # 'S' + 84: 67, # 'T' + 85: 102, # 'U' + 86: 107, # 'V' + 87: 84, # 'W' + 88: 114, # 'X' + 89: 103, # 'Y' + 90: 115, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 50, # 'a' + 98: 74, # 'b' + 99: 60, # 'c' + 100: 61, # 'd' + 101: 42, # 'e' + 102: 76, # 'f' + 103: 70, # 'g' + 104: 64, # 'h' + 105: 53, # 'i' + 106: 105, # 'j' + 107: 93, # 'k' + 108: 56, # 'l' + 109: 65, # 'm' + 110: 54, # 'n' + 111: 49, # 'o' + 112: 66, # 'p' + 113: 110, # 'q' + 114: 51, # 'r' + 115: 43, # 's' + 116: 44, # 't' + 117: 63, # 'u' + 118: 81, # 'v' + 119: 77, # 'w' + 120: 98, # 'x' + 121: 75, # 'y' + 122: 108, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 124, # '€' + 129: 202, # None + 130: 203, # '‚' + 131: 204, # 'Æ’' + 132: 205, # '„' + 133: 40, # '…' + 134: 58, # '†' + 135: 206, # '‡' + 136: 207, # 'ˆ' + 137: 208, # '‰' + 138: 209, # None + 139: 210, # '‹' + 140: 211, # None + 141: 212, # None + 142: 213, # None + 143: 214, # None + 144: 215, # None + 145: 83, # '‘' + 146: 52, # '’' + 147: 47, # '“' + 148: 46, # 'â€' + 149: 72, # '•' + 150: 32, # '–' + 151: 94, # '—' + 152: 216, # 'Ëœ' + 153: 113, # 'â„¢' + 154: 217, # None + 155: 109, # '›' + 156: 218, # None + 157: 219, # None + 158: 220, # None + 159: 221, # None + 160: 34, # '\xa0' + 161: 116, # '¡' + 162: 222, # '¢' + 163: 118, # '£' + 164: 100, # '₪' + 165: 223, # 'Â¥' + 166: 224, # '¦' + 167: 117, # '§' + 168: 119, # '¨' + 169: 104, # '©' + 170: 125, # '×' + 171: 225, # '«' + 172: 226, # '¬' + 173: 87, # '\xad' + 174: 99, # '®' + 175: 227, # '¯' + 176: 106, # '°' + 177: 122, # '±' + 178: 123, # '²' + 179: 228, # '³' + 180: 55, # '´' + 181: 229, # 'µ' + 182: 230, # '¶' + 183: 101, # '·' + 184: 231, # '¸' + 185: 232, # '¹' + 186: 120, # '÷' + 187: 233, # '»' + 188: 48, # '¼' + 189: 39, # 
'½' + 190: 57, # '¾' + 191: 234, # '¿' + 192: 30, # 'Ö°' + 193: 59, # 'Ö±' + 194: 41, # 'Ö²' + 195: 88, # 'Ö³' + 196: 33, # 'Ö´' + 197: 37, # 'Öµ' + 198: 36, # 'Ö¶' + 199: 31, # 'Ö·' + 200: 29, # 'Ö¸' + 201: 35, # 'Ö¹' + 202: 235, # None + 203: 62, # 'Ö»' + 204: 28, # 'Ö¼' + 205: 236, # 'Ö½' + 206: 126, # 'Ö¾' + 207: 237, # 'Ö¿' + 208: 238, # '×€' + 209: 38, # '×' + 210: 45, # 'ׂ' + 211: 239, # '׃' + 212: 240, # '×°' + 213: 241, # '×±' + 214: 242, # 'ײ' + 215: 243, # '׳' + 216: 127, # '×´' + 217: 244, # None + 218: 245, # None + 219: 246, # None + 220: 247, # None + 221: 248, # None + 222: 249, # None + 223: 250, # None + 224: 9, # '×' + 225: 8, # 'ב' + 226: 20, # '×’' + 227: 16, # 'ד' + 228: 3, # '×”' + 229: 2, # 'ו' + 230: 24, # '×–' + 231: 14, # '×—' + 232: 22, # 'ט' + 233: 1, # '×™' + 234: 25, # 'ך' + 235: 15, # '×›' + 236: 4, # 'ל' + 237: 11, # '×' + 238: 6, # 'מ' + 239: 23, # 'ן' + 240: 12, # '× ' + 241: 19, # 'ס' + 242: 13, # '×¢' + 243: 26, # '×£' + 244: 18, # 'פ' + 245: 27, # '×¥' + 246: 21, # 'צ' + 247: 17, # '×§' + 248: 7, # 'ר' + 249: 10, # 'ש' + 250: 5, # 'ת' + 251: 251, # None + 252: 252, # None + 253: 128, # '\u200e' + 254: 96, # '\u200f' + 255: 253, # None +} + +WINDOWS_1255_HEBREW_MODEL = SingleByteCharSetModel(charset_name='windows-1255', + language='Hebrew', + char_to_order_map=WINDOWS_1255_HEBREW_CHAR_TO_ORDER, + language_model=HEBREW_LANG_MODEL, + typical_positive_ratio=0.984004, + keep_ascii_letters=False, + alphabet='×בגדהוזחטיךכל×מןנסעףפץצקרשתװױײ') + diff --git a/minor_project/lib/python3.6/site-packages/chardet/langhungarianmodel.py b/minor_project/lib/python3.6/site-packages/chardet/langhungarianmodel.py new file mode 100644 index 0000000..24a097f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/langhungarianmodel.py @@ -0,0 +1,4650 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +HUNGARIAN_LANG_MODEL = { + 28: { # 'A' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 2, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 2, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 2, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 1, # 'Ã' + 44: 0, # 'É' + 61: 1, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 40: { # 'B' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 1, 
# 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 3, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 54: { # 'C' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 3, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 45: { # 'D' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 0, # 'ű' + }, + 32: { # 'E' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 2, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 0, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 50: { # 'F' + 28: 1, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 0, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 
48: 0, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 0, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 49: { # 'G' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 2, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 0, # 'ű' + }, + 38: { # 'H' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 0, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 1, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 2, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 2, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 39: { # 'I' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 2, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 0, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 53: { # 'J' + 28: 2, # 'A' + 40: 0, # 'B' + 
54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 0, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 0, # 'ü' + 42: 1, # 'Å‘' + 56: 0, # 'ű' + }, + 36: { # 'K' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 2, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 0, # 'ű' + }, + 41: { # 'L' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 1, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 34: { # 'M' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 3, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 
1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 1, # 'ű' + }, + 35: { # 'N' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 2, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 2, # 'Y' + 52: 1, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 1, # 'Å‘' + 56: 0, # 'ű' + }, + 47: { # 'O' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 2, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 0, # 'Ã' + 58: 1, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 46: { # 'P' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 3, # 'á' + 15: 2, # 'é' + 30: 0, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 0, # 'ű' + }, + 43: { # 'R' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' 
+ 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 2, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 2, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 33: { # 'S' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 3, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 1, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 2, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 37: { # 'T' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 2, # 'Ã' + 44: 2, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 57: { # 'U' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 1, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 48: { # 'V' + 28: 2, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 0, # 
'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 2, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 55: { # 'Y' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 2, # 'Z' + 2: 1, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 52: { # 'Z' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 1, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 2: { # 'a' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 
24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 18: { # 'b' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 26: { # 'c' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 1, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 2, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 2, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 17: { # 'd' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 2, # 'k' + 6: 1, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 1: { # 'e' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 
5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 2, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 27: { # 'f' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 3, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 3, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 12: { # 'g' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 20: { # 'h' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 9: { # 'i' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 
'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 3, # 'ó' + 24: 1, # 'ö' + 31: 2, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 1, # 'ű' + }, + 22: { # 'j' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 1, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 1, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 7: { # 'k' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 1, # 'ú' + 29: 3, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 6: { # 'l' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 3, # 'Å‘' + 56: 1, # 'ű' + }, + 13: { # 'm' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 
'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 1, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 3, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 2, # 'ű' + }, + 4: { # 'n' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 1, # 'x' + 16: 3, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 8: { # 'o' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 23: { # 'p' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 
58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 10: { # 'r' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'Å‘' + 56: 2, # 'ű' + }, + 5: { # 's' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 3: { # 't' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 1, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 3, # 'Å‘' + 56: 2, # 'ű' + }, + 21: { # 'u' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 2, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 2, # 
'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 1, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 1, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 19: { # 'v' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 2, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 62: { # 'x' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 16: { # 'y' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 2, # 'ű' + }, + 11: { # 'z' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, 
# 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 51: { # 'Ã' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 1, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 44: { # 'É' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Ã' + 44: 1, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 61: { # 'Ã' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 0, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' 
+ 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 58: { # 'Ó' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 2, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 1, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 59: { # 'Ö' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 60: { # 'Ú' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 2, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 63: { # 'Ü' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' 
+ 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 14: { # 'á' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 15: { # 'é' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 30: { # 'í' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 25: { # 'ó' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 
2, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 1, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 24: { # 'ö' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 0, # 'a' + 18: 3, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 31: { # 'ú' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 3, # 'j' + 7: 1, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 29: { # 'ü' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 0, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 42: { # 'Å‘' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 
50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 56: { # 'ű' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 28, # 'A' + 66: 40, # 'B' + 67: 54, # 'C' + 68: 45, # 'D' + 69: 32, # 'E' + 70: 50, # 'F' + 71: 49, # 'G' + 72: 38, # 'H' + 73: 39, # 'I' + 74: 53, # 'J' + 75: 36, # 'K' + 76: 41, # 'L' + 77: 34, # 'M' + 78: 35, # 'N' + 79: 47, # 'O' + 80: 46, # 'P' + 81: 72, # 'Q' + 82: 43, # 'R' + 83: 33, # 'S' + 84: 37, # 'T' + 85: 57, # 'U' + 86: 48, # 'V' + 87: 64, # 'W' + 88: 68, # 'X' + 89: 55, # 'Y' + 90: 52, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 2, # 'a' + 98: 18, # 'b' + 99: 26, # 'c' + 100: 17, # 'd' + 101: 1, # 'e' + 102: 27, # 'f' + 103: 12, # 'g' + 104: 20, # 'h' + 105: 9, # 'i' + 106: 22, # 'j' + 107: 7, # 'k' + 108: 6, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 8, # 'o' + 112: 23, # 'p' + 113: 67, # 'q' + 114: 10, # 'r' + 115: 5, # 's' + 116: 3, # 't' + 117: 21, # 'u' + 118: 19, # 'v' + 119: 65, # 'w' + 120: 62, # 'x' + 121: 16, # 'y' + 122: 11, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 161, # '€' + 129: 162, # None + 130: 163, # '‚' + 131: 164, # None + 132: 165, # '„' + 133: 166, # '…' + 134: 167, # '†' + 135: 168, # '‡' + 136: 169, # None + 137: 170, # '‰' + 138: 171, # 'Å ' + 139: 172, # '‹' + 140: 173, # 'Åš' + 141: 174, # 'Ť' + 142: 175, # 'Ž' + 143: 176, # 'Ź' + 144: 177, # None + 145: 178, # '‘' + 146: 179, # '’' + 147: 180, # '“' + 148: 78, # 'â€' + 149: 181, # '•' + 150: 69, # '–' + 151: 182, # '—' + 152: 183, # None + 153: 184, # 'â„¢' + 154: 185, # 'Å¡' + 155: 186, # '›' + 156: 187, # 'Å›' + 157: 188, # 'Å¥' + 158: 189, # 'ž' + 159: 190, # 'ź' + 160: 191, # '\xa0' + 161: 192, # 'ˇ' + 162: 193, # '˘' + 163: 194, # 'Å' + 164: 195, # '¤' + 165: 196, # 'Ä„' + 166: 197, # '¦' + 167: 76, # '§' + 168: 198, # '¨' + 169: 199, # '©' + 170: 200, # 'Åž' + 171: 201, # '«' + 172: 202, # '¬' + 173: 203, # '\xad' + 174: 204, # '®' + 175: 205, # 'Å»' + 176: 81, # '°' + 177: 206, # '±' + 178: 207, # 'Ë›' + 179: 208, # 'Å‚' + 180: 209, # '´' + 181: 210, # 'µ' + 182: 211, # '¶' + 183: 212, # '·' + 184: 213, # '¸' + 185: 214, # 'Ä…' + 186: 215, # 'ÅŸ' + 187: 216, # '»' + 188: 217, # 'Ľ' + 189: 218, # 'Ë' + 190: 219, # 'ľ' + 191: 220, # 'ż' + 192: 221, # 'Å”' + 193: 51, # 'Ã' + 194: 83, # 'Â' + 195: 222, # 'Ä‚' + 196: 80, # 'Ä' + 197: 223, # 'Ĺ' + 198: 224, # 'Ć' + 199: 225, # 'Ç' + 200: 226, # 'ÄŒ' + 201: 44, # 'É' + 202: 227, # 'Ę' + 203: 228, # 'Ë' + 204: 229, # 'Äš' + 205: 61, # 'Ã' + 206: 230, # 'ÃŽ' + 207: 231, # 'ÄŽ' + 208: 232, # 'Ä' + 209: 233, # 'Ń' + 210: 234, # 'Ň' + 211: 58, # 'Ó' + 212: 235, # 'Ô' + 213: 66, # 'Å' + 214: 59, # 'Ö' + 215: 236, # '×' + 216: 237, # 'Ř' + 217: 238, # 'Å®' + 218: 60, # 'Ú' + 219: 70, # 'Ű' + 220: 63, # 'Ü' + 221: 239, # 'Ã' + 222: 240, # 'Å¢' + 223: 241, # 'ß' + 224: 84, # 'Å•' + 225: 14, # 'á' + 226: 75, # 'â' + 227: 242, # 'ă' + 228: 71, # 'ä' + 229: 82, # 'ĺ' + 230: 243, # 'ć' + 231: 73, # 'ç' + 232: 244, # 'Ä' + 233: 15, # 'é' + 234: 85, # 'Ä™' + 235: 79, # 'ë' + 236: 86, # 'Ä›' + 237: 30, # 'í' + 238: 77, # 'î' + 239: 87, # 'Ä' + 240: 245, # 'Ä‘' + 241: 246, # 'Å„' + 242: 247, # 'ň' + 243: 25, # 'ó' + 244: 74, # 'ô' + 245: 42, # 'Å‘' + 246: 24, # 'ö' + 247: 248, # '÷' + 248: 249, # 'Å™' + 249: 250, # 'ů' + 250: 31, # 'ú' + 251: 56, # 'ű' + 252: 29, # 'ü' + 253: 251, # 'ý' + 254: 252, # 'Å£' + 255: 253, # 'Ë™' +} + +WINDOWS_1250_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1250', + language='Hungarian', + char_to_order_map=WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER, + 
language_model=HUNGARIAN_LANG_MODEL, + typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÃÉÃÓÖÚÜáéíóöúüÅőŰű') + +ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 28, # 'A' + 66: 40, # 'B' + 67: 54, # 'C' + 68: 45, # 'D' + 69: 32, # 'E' + 70: 50, # 'F' + 71: 49, # 'G' + 72: 38, # 'H' + 73: 39, # 'I' + 74: 53, # 'J' + 75: 36, # 'K' + 76: 41, # 'L' + 77: 34, # 'M' + 78: 35, # 'N' + 79: 47, # 'O' + 80: 46, # 'P' + 81: 71, # 'Q' + 82: 43, # 'R' + 83: 33, # 'S' + 84: 37, # 'T' + 85: 57, # 'U' + 86: 48, # 'V' + 87: 64, # 'W' + 88: 68, # 'X' + 89: 55, # 'Y' + 90: 52, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 2, # 'a' + 98: 18, # 'b' + 99: 26, # 'c' + 100: 17, # 'd' + 101: 1, # 'e' + 102: 27, # 'f' + 103: 12, # 'g' + 104: 20, # 'h' + 105: 9, # 'i' + 106: 22, # 'j' + 107: 7, # 'k' + 108: 6, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 8, # 'o' + 112: 23, # 'p' + 113: 67, # 'q' + 114: 10, # 'r' + 115: 5, # 's' + 116: 3, # 't' + 117: 21, # 'u' + 118: 19, # 'v' + 119: 65, # 'w' + 120: 62, # 'x' + 121: 16, # 'y' + 122: 11, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 159, # '\x80' + 129: 160, # '\x81' + 130: 161, # '\x82' + 131: 162, # '\x83' + 132: 163, # '\x84' + 133: 164, # '\x85' + 134: 165, # '\x86' + 135: 166, # '\x87' + 136: 167, # '\x88' + 137: 168, # '\x89' + 138: 169, # '\x8a' + 139: 170, # '\x8b' + 140: 171, # '\x8c' + 141: 172, # '\x8d' + 142: 173, # '\x8e' + 143: 174, # '\x8f' + 144: 175, # '\x90' + 145: 176, # '\x91' + 146: 177, # '\x92' + 147: 178, # '\x93' + 148: 179, # '\x94' + 149: 180, # '\x95' + 150: 181, # '\x96' + 151: 182, # '\x97' + 152: 183, # '\x98' + 153: 184, # '\x99' + 154: 185, # '\x9a' + 155: 186, # '\x9b' + 156: 187, # '\x9c' + 157: 188, # '\x9d' + 158: 189, # '\x9e' + 159: 190, # '\x9f' + 160: 191, # '\xa0' + 161: 192, # 'Ä„' + 162: 193, # '˘' + 163: 194, # 'Å' + 164: 195, # '¤' + 165: 196, # 'Ľ' + 166: 197, # 'Åš' + 167: 75, # '§' + 168: 198, # '¨' + 169: 199, # 'Å ' + 170: 200, # 'Åž' + 171: 201, # 'Ť' + 172: 202, # 'Ź' + 173: 203, # '\xad' + 174: 204, # 'Ž' + 175: 205, # 'Å»' + 176: 79, # '°' + 177: 206, # 'Ä…' + 178: 207, # 'Ë›' + 179: 208, # 'Å‚' + 180: 209, # '´' + 181: 210, # 'ľ' + 182: 211, # 'Å›' + 183: 
212, # 'ˇ' + 184: 213, # '¸' + 185: 214, # 'Å¡' + 186: 215, # 'ÅŸ' + 187: 216, # 'Å¥' + 188: 217, # 'ź' + 189: 218, # 'Ë' + 190: 219, # 'ž' + 191: 220, # 'ż' + 192: 221, # 'Å”' + 193: 51, # 'Ã' + 194: 81, # 'Â' + 195: 222, # 'Ä‚' + 196: 78, # 'Ä' + 197: 223, # 'Ĺ' + 198: 224, # 'Ć' + 199: 225, # 'Ç' + 200: 226, # 'ÄŒ' + 201: 44, # 'É' + 202: 227, # 'Ę' + 203: 228, # 'Ë' + 204: 229, # 'Äš' + 205: 61, # 'Ã' + 206: 230, # 'ÃŽ' + 207: 231, # 'ÄŽ' + 208: 232, # 'Ä' + 209: 233, # 'Ń' + 210: 234, # 'Ň' + 211: 58, # 'Ó' + 212: 235, # 'Ô' + 213: 66, # 'Å' + 214: 59, # 'Ö' + 215: 236, # '×' + 216: 237, # 'Ř' + 217: 238, # 'Å®' + 218: 60, # 'Ú' + 219: 69, # 'Ű' + 220: 63, # 'Ü' + 221: 239, # 'Ã' + 222: 240, # 'Å¢' + 223: 241, # 'ß' + 224: 82, # 'Å•' + 225: 14, # 'á' + 226: 74, # 'â' + 227: 242, # 'ă' + 228: 70, # 'ä' + 229: 80, # 'ĺ' + 230: 243, # 'ć' + 231: 72, # 'ç' + 232: 244, # 'Ä' + 233: 15, # 'é' + 234: 83, # 'Ä™' + 235: 77, # 'ë' + 236: 84, # 'Ä›' + 237: 30, # 'í' + 238: 76, # 'î' + 239: 85, # 'Ä' + 240: 245, # 'Ä‘' + 241: 246, # 'Å„' + 242: 247, # 'ň' + 243: 25, # 'ó' + 244: 73, # 'ô' + 245: 42, # 'Å‘' + 246: 24, # 'ö' + 247: 248, # '÷' + 248: 249, # 'Å™' + 249: 250, # 'ů' + 250: 31, # 'ú' + 251: 56, # 'ű' + 252: 29, # 'ü' + 253: 251, # 'ý' + 254: 252, # 'Å£' + 255: 253, # 'Ë™' +} + +ISO_8859_2_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-2', + language='Hungarian', + char_to_order_map=ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER, + language_model=HUNGARIAN_LANG_MODEL, + typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÃÉÃÓÖÚÜáéíóöúüÅőŰű') + diff --git a/minor_project/lib/python3.6/site-packages/chardet/langrussianmodel.py b/minor_project/lib/python3.6/site-packages/chardet/langrussianmodel.py new file mode 100644 index 0000000..569689d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/langrussianmodel.py @@ -0,0 +1,5718 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +RUSSIAN_LANG_MODEL = { + 37: { # 'Ð' + 37: 0, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 2, # 'у' + 39: 2, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 44: { # 'Б' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 
0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 2, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 33: { # 'Ð’' + 37: 2, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 46: { # 'Г' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 41: { # 'Д' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 3, # 'ж' + 20: 1, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 48: { # 'Е' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' 
+ 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, # 'Р' + 32: 2, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 1, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 56: { # 'Ж' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 1, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 51: { # 'З' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 1, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 42: { # 'И' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 2, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 1, # 'у' + 39: 1, # 'Ñ„' + 
26: 2, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 60: { # 'Й' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 36: { # 'К' + 37: 2, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 2, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 49: { # 'Л' + 37: 2, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 0, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 38: { # 'М' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 1, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 
3, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 31: { # 'Ð' + 37: 2, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 2, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 3, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 34: { # 'О' + 37: 0, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 2, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 2, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 1, # 'у' + 39: 1, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 35: { # 'П' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 1, # 'Ñ‚' + 14: 2, # 'у' + 39: 1, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 45: { # 'Р' + 37: 2, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 2, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' 
+ 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 2, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 2, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 32: { # 'С' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 2, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 2, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 40: { # 'Т' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 2, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 52: { # 'У' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 1, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 1, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 0, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 1, # 'ц' + 22: 2, # 'ч' + 
25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 53: { # 'Ф' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 1, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 55: { # 'Ð¥' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 1, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 58: { # 'Ц' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 1, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 50: { # 'Ч' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 
0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 57: { # 'Ш' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 63: { # 'Щ' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 1, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 62: { # 'Ы' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 0, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 61: { # 'Ь' + 37: 0, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' 
+ 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 0, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 47: { # 'Э' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 59: { # 'Ю' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 43: { # 'Я' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 0, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 1, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 
18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 3: { # 'а' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 21: { # 'б' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 0, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 10: { # 'в' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 19: { # 'г' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 0, # 
'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 13: { # 'д' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 3, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 2: { # 'е' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 2, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 24: { # 'ж' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 1, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 20: { # 'з' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 
0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 4: { # 'и' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 2, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 23: { # 'й' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 2, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 11: { # 'к' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 3, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 2, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 
1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 8: { # 'л' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 3, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 1, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 1, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 12: { # 'м' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 5: { # 'н' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 2, # 'щ' + 54: 1, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 1: { # 'о' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 
11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 2, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 15: { # 'п' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 9: { # 'Ñ€' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 7: { # 'Ñ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 6: { # 'Ñ‚' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 
'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 2, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 14: { # 'у' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 2, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 39: { # 'Ñ„' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 2, # 'у' + 39: 2, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 2, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 26: { # 'Ñ…' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 3, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 3, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 2, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 28: { # 'ц' + 
37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 1, # 'Ñ‚' + 14: 3, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 1, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 22: { # 'ч' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 25: { # 'ш' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 29: { # 'щ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 1, # 'м' + 
5: 2, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 54: { # 'ÑŠ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 18: { # 'Ñ‹' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 1, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 0, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 17: { # 'ÑŒ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 0, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 1, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 0, # 'у' + 39: 2, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 30: { # 'Ñ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 
50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 2, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 27: { # 'ÑŽ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 1, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 0, # 'у' + 39: 1, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 16: { # 'Ñ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 2, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 1, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 2, # 'Ñ' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +IBM866_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' 
+ 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 37, # 'Ð' + 129: 44, # 'Б' + 130: 33, # 'Ð’' + 131: 46, # 'Г' + 132: 41, # 'Д' + 133: 48, # 'Е' + 134: 56, # 'Ж' + 135: 51, # 'З' + 136: 42, # 'И' + 137: 60, # 'Й' + 138: 36, # 'К' + 139: 49, # 'Л' + 140: 38, # 'М' + 141: 31, # 'Ð' + 142: 34, # 'О' + 143: 35, # 'П' + 144: 45, # 'Р' + 145: 32, # 'С' + 146: 40, # 'Т' + 147: 52, # 'У' + 148: 53, # 'Ф' + 149: 55, # 'Ð¥' + 150: 58, # 'Ц' + 151: 50, # 'Ч' + 152: 57, # 'Ш' + 153: 63, # 'Щ' + 154: 70, # 'Ъ' + 155: 62, # 'Ы' + 156: 61, # 'Ь' + 157: 47, # 'Э' + 158: 59, # 'Ю' + 159: 43, # 'Я' + 160: 3, # 'а' + 161: 21, # 'б' + 162: 10, # 'в' + 163: 19, # 'г' + 164: 13, # 'д' + 165: 2, # 'е' + 166: 24, # 'ж' + 167: 20, # 'з' + 168: 4, # 'и' + 169: 23, # 'й' + 170: 11, # 'к' + 171: 8, # 'л' + 172: 12, # 'м' + 173: 5, # 'н' + 174: 1, # 'о' + 175: 15, # 'п' + 176: 191, # 'â–‘' + 177: 192, # 'â–’' + 178: 193, # 'â–“' + 179: 194, # '│' + 180: 195, # '┤' + 181: 196, # 'â•¡' + 182: 197, # 'â•¢' + 183: 198, # 'â•–' + 184: 199, # 'â••' + 185: 200, # 'â•£' + 186: 201, # 'â•‘' + 187: 202, # 'â•—' + 188: 203, # 'â•' + 189: 204, # '╜' + 190: 205, # 'â•›' + 191: 206, # 'â”' + 192: 207, # 'â””' + 193: 208, # 'â”´' + 194: 209, # '┬' + 195: 210, # '├' + 196: 211, # '─' + 197: 212, # '┼' + 198: 213, # '╞' + 199: 214, # '╟' + 200: 215, # '╚' + 201: 216, # 'â•”' + 202: 217, # 'â•©' + 203: 218, # '╦' + 204: 219, # 'â• ' + 205: 220, # 'â•' + 206: 221, # '╬' + 207: 222, # 'â•§' + 208: 223, # '╨' + 209: 224, # '╤' + 210: 225, # 'â•¥' + 211: 226, # 'â•™' + 212: 227, # '╘' + 213: 228, # 'â•’' + 214: 229, # 'â•“' + 215: 230, # 'â•«' + 216: 231, # '╪' + 217: 232, # '┘' + 218: 233, # '┌' + 219: 234, # 'â–ˆ' + 220: 235, # 'â–„' + 221: 236, # 'â–Œ' + 222: 237, # 'â–' + 223: 238, # 'â–€' + 224: 9, # 'Ñ€' + 225: 7, # 'Ñ' + 226: 6, # 'Ñ‚' + 227: 14, # 'у' + 228: 39, # 'Ñ„' + 229: 26, # 'Ñ…' + 230: 28, # 'ц' + 231: 22, # 'ч' + 232: 25, # 'ш' + 233: 29, # 'щ' + 234: 54, # 'ÑŠ' + 235: 18, # 'Ñ‹' 
+ 236: 17, # 'ÑŒ' + 237: 30, # 'Ñ' + 238: 27, # 'ÑŽ' + 239: 16, # 'Ñ' + 240: 239, # 'Ð' + 241: 68, # 'Ñ‘' + 242: 240, # 'Є' + 243: 241, # 'Ñ”' + 244: 242, # 'Ї' + 245: 243, # 'Ñ—' + 246: 244, # 'ÐŽ' + 247: 245, # 'Ñž' + 248: 246, # '°' + 249: 247, # '∙' + 250: 248, # '·' + 251: 249, # '√' + 252: 250, # 'â„–' + 253: 251, # '¤' + 254: 252, # 'â– ' + 255: 255, # '\xa0' +} + +IBM866_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM866', + language='Russian', + char_to_order_map=IBM866_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + +WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # 'Ђ' + 129: 192, # 'Ѓ' + 130: 193, # '‚' + 131: 194, # 'Ñ“' + 132: 195, # '„' + 133: 196, # '…' + 134: 197, # '†' + 135: 198, # '‡' + 136: 199, # '€' + 137: 200, # '‰' + 138: 201, # 'Љ' + 139: 202, # '‹' + 140: 203, # 'Њ' + 141: 204, # 'ÐŒ' + 142: 205, # 'Ћ' + 143: 206, # 'Ð' + 144: 207, # 'Ñ’' + 145: 208, # '‘' + 146: 209, # '’' + 147: 210, # '“' + 148: 211, # 'â€' + 149: 212, # '•' + 150: 213, # '–' + 151: 214, # '—' + 152: 215, # None + 153: 216, # 'â„¢' + 154: 217, # 'Ñ™' + 155: 218, # '›' + 156: 219, # 'Ñš' + 157: 220, # 'Ñœ' + 158: 221, # 'Ñ›' + 159: 222, # 'ÑŸ' + 160: 223, # '\xa0' + 161: 224, # 'ÐŽ' + 162: 225, # 'Ñž' + 163: 226, # 'Ј' + 164: 227, # '¤' + 165: 228, # 'Ò' + 166: 229, # '¦' + 167: 230, # '§' + 168: 231, # 'Ð' + 169: 232, # '©' + 170: 233, # 'Є' + 171: 234, # '«' + 172: 235, # '¬' + 173: 236, # '\xad' + 174: 237, # '®' + 175: 238, # 'Ї' + 176: 239, # '°' + 177: 240, # '±' + 178: 241, # 'І' + 179: 242, # 'Ñ–' + 180: 243, # 'Ò‘' + 181: 244, # 'µ' + 182: 245, # '¶' + 183: 246, # '·' + 184: 68, # 'Ñ‘' + 185: 247, # 'â„–' + 186: 248, # 'Ñ”' + 187: 249, # '»' + 188: 250, # 'ј' + 189: 251, # 'Ð…' + 190: 252, # 'Ñ•' + 191: 253, # 'Ñ—' + 192: 37, # 'Ð' + 193: 44, # 'Б' + 194: 33, # 'Ð’' + 195: 46, # 'Г' + 196: 41, # 'Д' + 197: 48, # 'Е' + 198: 56, # 'Ж' + 199: 51, # 'З' + 200: 42, # 'И' + 201: 60, # 'Й' + 202: 36, # 'К' + 203: 49, # 'Л' + 204: 38, # 'М' + 205: 31, # 'Ð' + 206: 34, # 'О' + 207: 35, # 'П' + 208: 45, # 'Р' + 209: 32, # 'С' + 210: 40, # 'Т' + 211: 52, # 'У' + 212: 53, # 'Ф' + 213: 55, # 'Ð¥' + 214: 58, # 'Ц' + 215: 50, # 'Ч' + 216: 57, # 'Ш' + 217: 63, # 'Щ' + 218: 70, # 'Ъ' + 219: 62, # 'Ы' + 220: 61, # 'Ь' + 221: 47, # 'Э' + 222: 59, # 'Ю' + 223: 43, # 'Я' + 224: 3, # 'а' + 225: 21, # 'б' + 226: 10, # 'в' + 227: 19, # 'г' + 228: 13, # 'д' + 229: 2, # 'е' + 230: 24, # 'ж' + 231: 20, # 'з' + 232: 4, # 'и' + 233: 23, # 'й' + 234: 11, # 'к' + 235: 8, # 'л' + 236: 12, # 'м' + 237: 5, # 'н' + 238: 1, # 'о' + 239: 15, # 'п' + 240: 9, # 'Ñ€' + 241: 7, # 'Ñ' + 242: 6, # 'Ñ‚' + 243: 14, # 'у' + 244: 39, # 'Ñ„' + 245: 26, # 'Ñ…' + 246: 28, # 'ц' + 247: 22, # 'ч' + 248: 25, # 'ш' + 249: 29, # 'щ' + 250: 54, # 'ÑŠ' + 251: 18, # 'Ñ‹' + 252: 17, # 'ÑŒ' + 253: 30, # 'Ñ' + 254: 27, # 'ÑŽ' + 255: 16, # 'Ñ' +} + +WINDOWS_1251_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251', + language='Russian', + char_to_order_map=WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER, + 
language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + +IBM855_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # 'Ñ’' + 129: 192, # 'Ђ' + 130: 193, # 'Ñ“' + 131: 194, # 'Ѓ' + 132: 68, # 'Ñ‘' + 133: 195, # 'Ð' + 134: 196, # 'Ñ”' + 135: 197, # 'Є' + 136: 198, # 'Ñ•' + 137: 199, # 'Ð…' + 138: 200, # 'Ñ–' + 139: 201, # 'І' + 140: 202, # 'Ñ—' + 141: 203, # 'Ї' + 142: 204, # 'ј' + 143: 205, # 'Ј' + 144: 206, # 'Ñ™' + 145: 207, # 'Љ' + 146: 208, # 'Ñš' + 147: 209, # 'Њ' + 148: 210, # 'Ñ›' + 149: 211, # 'Ћ' + 150: 212, # 'Ñœ' + 151: 213, # 'ÐŒ' + 152: 214, # 'Ñž' + 153: 215, # 'ÐŽ' + 154: 216, # 'ÑŸ' + 155: 217, # 'Ð' + 156: 27, # 'ÑŽ' + 157: 59, # 'Ю' + 158: 54, # 'ÑŠ' + 159: 70, # 'Ъ' + 160: 3, # 'а' + 161: 37, # 'Ð' + 162: 21, # 'б' + 163: 44, # 'Б' + 164: 28, # 'ц' + 165: 58, # 'Ц' + 166: 13, # 'д' + 167: 41, # 'Д' + 168: 2, # 'е' + 169: 48, # 'Е' + 170: 39, # 'Ñ„' + 171: 53, # 'Ф' + 172: 19, # 'г' + 173: 46, # 'Г' + 174: 218, # '«' + 175: 219, # '»' + 176: 220, # 'â–‘' + 177: 221, # 'â–’' + 178: 222, # 'â–“' + 179: 223, # '│' + 180: 224, # '┤' + 181: 26, # 'Ñ…' + 182: 55, # 'Ð¥' + 183: 4, # 'и' + 184: 42, # 'И' + 185: 225, # 'â•£' + 186: 226, # 
'â•‘' + 187: 227, # 'â•—' + 188: 228, # 'â•' + 189: 23, # 'й' + 190: 60, # 'Й' + 191: 229, # 'â”' + 192: 230, # 'â””' + 193: 231, # 'â”´' + 194: 232, # '┬' + 195: 233, # '├' + 196: 234, # '─' + 197: 235, # '┼' + 198: 11, # 'к' + 199: 36, # 'К' + 200: 236, # '╚' + 201: 237, # 'â•”' + 202: 238, # 'â•©' + 203: 239, # '╦' + 204: 240, # 'â• ' + 205: 241, # 'â•' + 206: 242, # '╬' + 207: 243, # '¤' + 208: 8, # 'л' + 209: 49, # 'Л' + 210: 12, # 'м' + 211: 38, # 'М' + 212: 5, # 'н' + 213: 31, # 'Ð' + 214: 1, # 'о' + 215: 34, # 'О' + 216: 15, # 'п' + 217: 244, # '┘' + 218: 245, # '┌' + 219: 246, # 'â–ˆ' + 220: 247, # 'â–„' + 221: 35, # 'П' + 222: 16, # 'Ñ' + 223: 248, # 'â–€' + 224: 43, # 'Я' + 225: 9, # 'Ñ€' + 226: 45, # 'Р' + 227: 7, # 'Ñ' + 228: 32, # 'С' + 229: 6, # 'Ñ‚' + 230: 40, # 'Т' + 231: 14, # 'у' + 232: 52, # 'У' + 233: 24, # 'ж' + 234: 56, # 'Ж' + 235: 10, # 'в' + 236: 33, # 'Ð’' + 237: 17, # 'ÑŒ' + 238: 61, # 'Ь' + 239: 249, # 'â„–' + 240: 250, # '\xad' + 241: 18, # 'Ñ‹' + 242: 62, # 'Ы' + 243: 20, # 'з' + 244: 51, # 'З' + 245: 25, # 'ш' + 246: 57, # 'Ш' + 247: 30, # 'Ñ' + 248: 47, # 'Э' + 249: 29, # 'щ' + 250: 63, # 'Щ' + 251: 22, # 'ч' + 252: 50, # 'Ч' + 253: 251, # '§' + 254: 252, # 'â– ' + 255: 255, # '\xa0' +} + +IBM855_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM855', + language='Russian', + char_to_order_map=IBM855_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + +KOI8_R_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # '─' + 129: 192, # '│' + 130: 193, # '┌' + 131: 194, # 'â”' + 132: 195, # 'â””' + 133: 196, # '┘' + 134: 197, # '├' + 135: 198, # '┤' + 136: 199, # '┬' + 137: 200, # 'â”´' + 138: 201, # '┼' + 139: 202, # 'â–€' + 140: 203, # 'â–„' + 141: 204, # 'â–ˆ' + 142: 205, # 'â–Œ' + 143: 206, # 'â–' + 144: 207, # 'â–‘' + 145: 208, # 'â–’' + 146: 209, # 'â–“' + 147: 210, # '⌠' + 148: 211, # 'â– ' + 149: 212, # '∙' + 150: 213, # '√' + 151: 214, # '≈' + 152: 215, # '≤' + 153: 216, # '≥' + 154: 217, # '\xa0' + 155: 218, # '⌡' + 156: 219, # '°' + 157: 220, # '²' + 158: 221, # '·' + 159: 222, # '÷' + 160: 223, # 'â•' + 161: 224, # 'â•‘' + 162: 225, # 'â•’' + 163: 68, # 'Ñ‘' + 164: 226, # 'â•“' + 165: 227, # 'â•”' + 166: 228, # 'â••' + 167: 229, # 'â•–' + 168: 230, # 'â•—' + 169: 231, # '╘' + 170: 232, # 'â•™' + 171: 233, # '╚' + 172: 234, # 'â•›' + 173: 235, # '╜' + 174: 236, # 'â•' + 175: 237, # '╞' + 176: 238, # '╟' + 177: 239, # 'â• ' + 178: 240, # 'â•¡' + 179: 241, # 'Ð' + 180: 242, # 'â•¢' + 181: 243, # 'â•£' + 182: 244, # '╤' + 183: 245, # 'â•¥' + 184: 246, # '╦' + 185: 247, # 'â•§' + 186: 248, # '╨' + 187: 249, # 'â•©' + 188: 250, # '╪' + 189: 251, # 'â•«' + 190: 252, # '╬' + 191: 253, # '©' + 192: 27, # 'ÑŽ' + 193: 3, # 'а' + 194: 21, # 'б' + 195: 28, # 'ц' + 196: 13, # 'д' + 197: 2, # 'е' + 198: 39, # 'Ñ„' + 199: 19, # 'г' + 200: 26, # 'Ñ…' + 201: 4, # 'и' + 202: 23, # 'й' + 203: 11, # 'к' + 204: 8, # 'л' + 205: 12, # 'м' + 206: 5, # 'н' + 207: 1, # 'о' + 208: 15, # 'п' + 209: 16, # 'Ñ' + 210: 9, # 'Ñ€' + 211: 7, # 'Ñ' + 212: 6, # 'Ñ‚' + 213: 14, # 'у' + 214: 24, # 'ж' + 215: 10, # 'в' + 216: 17, # 'ÑŒ' + 217: 18, # 'Ñ‹' + 218: 20, # 'з' + 219: 25, # 'ш' + 220: 30, # 'Ñ' + 221: 29, # 'щ' + 222: 22, # 'ч' + 223: 54, # 'ÑŠ' + 224: 59, # 'Ю' + 225: 37, # 'Ð' + 226: 44, # 'Б' + 227: 58, # 'Ц' + 228: 41, # 'Д' + 229: 48, # 'Е' + 230: 53, # 'Ф' + 231: 46, # 'Г' + 232: 55, # 'Ð¥' + 233: 42, # 'И' + 234: 60, # 'Й' + 235: 36, # 'К' + 236: 49, # 'Л' + 237: 38, # 'М' + 238: 31, # 'Ð' + 239: 34, # 'О' + 240: 35, # 'П' + 241: 43, # 'Я' + 242: 45, # 'Р' + 243: 32, # 'С' + 244: 40, # 'Т' + 245: 52, # 'У' + 246: 56, # 'Ж' + 247: 33, # 'Ð’' + 248: 61, # 'Ь' + 249: 62, # 'Ы' + 250: 51, # 'З' + 251: 57, # 'Ш' + 252: 47, # 'Э' + 253: 63, # 'Щ' + 254: 50, # 'Ч' + 255: 70, # 'Ъ' +} + +KOI8_R_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='KOI8-R', + language='Russian', + char_to_order_map=KOI8_R_RUSSIAN_CHAR_TO_ORDER, 
+ language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + +MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 37, # 'Ð' + 129: 44, # 'Б' + 130: 33, # 'Ð’' + 131: 46, # 'Г' + 132: 41, # 'Д' + 133: 48, # 'Е' + 134: 56, # 'Ж' + 135: 51, # 'З' + 136: 42, # 'И' + 137: 60, # 'Й' + 138: 36, # 'К' + 139: 49, # 'Л' + 140: 38, # 'М' + 141: 31, # 'Ð' + 142: 34, # 'О' + 143: 35, # 'П' + 144: 45, # 'Р' + 145: 32, # 'С' + 146: 40, # 'Т' + 147: 52, # 'У' + 148: 53, # 'Ф' + 149: 55, # 'Ð¥' + 150: 58, # 'Ц' + 151: 50, # 'Ч' + 152: 57, # 'Ш' + 153: 63, # 'Щ' + 154: 70, # 'Ъ' + 155: 62, # 'Ы' + 156: 61, # 'Ь' + 157: 47, # 'Э' + 158: 59, # 'Ю' + 159: 43, # 'Я' + 160: 191, # '†' + 161: 192, # '°' + 162: 193, # 'Ò' + 163: 194, # '£' + 164: 195, # '§' + 165: 196, # '•' + 166: 197, # '¶' + 167: 198, # 'І' + 168: 199, # '®' + 169: 200, # '©' + 170: 201, # 'â„¢' + 171: 202, # 'Ђ' + 172: 203, # 'Ñ’' + 173: 204, # '≠' + 174: 205, # 'Ѓ' + 175: 206, # 'Ñ“' + 176: 207, # '∞' + 177: 208, # '±' + 178: 209, # '≤' + 179: 210, # '≥' + 180: 211, # 'Ñ–' + 181: 212, # 'µ' + 182: 213, # 'Ò‘' + 183: 214, # 'Ј' + 184: 215, # 'Є' + 185: 216, # 'Ñ”' + 186: 217, # 'Ї' + 187: 218, # 
'Ñ—' + 188: 219, # 'Љ' + 189: 220, # 'Ñ™' + 190: 221, # 'Њ' + 191: 222, # 'Ñš' + 192: 223, # 'ј' + 193: 224, # 'Ð…' + 194: 225, # '¬' + 195: 226, # '√' + 196: 227, # 'Æ’' + 197: 228, # '≈' + 198: 229, # '∆' + 199: 230, # '«' + 200: 231, # '»' + 201: 232, # '…' + 202: 233, # '\xa0' + 203: 234, # 'Ћ' + 204: 235, # 'Ñ›' + 205: 236, # 'ÐŒ' + 206: 237, # 'Ñœ' + 207: 238, # 'Ñ•' + 208: 239, # '–' + 209: 240, # '—' + 210: 241, # '“' + 211: 242, # 'â€' + 212: 243, # '‘' + 213: 244, # '’' + 214: 245, # '÷' + 215: 246, # '„' + 216: 247, # 'ÐŽ' + 217: 248, # 'Ñž' + 218: 249, # 'Ð' + 219: 250, # 'ÑŸ' + 220: 251, # 'â„–' + 221: 252, # 'Ð' + 222: 68, # 'Ñ‘' + 223: 16, # 'Ñ' + 224: 3, # 'а' + 225: 21, # 'б' + 226: 10, # 'в' + 227: 19, # 'г' + 228: 13, # 'д' + 229: 2, # 'е' + 230: 24, # 'ж' + 231: 20, # 'з' + 232: 4, # 'и' + 233: 23, # 'й' + 234: 11, # 'к' + 235: 8, # 'л' + 236: 12, # 'м' + 237: 5, # 'н' + 238: 1, # 'о' + 239: 15, # 'п' + 240: 9, # 'Ñ€' + 241: 7, # 'Ñ' + 242: 6, # 'Ñ‚' + 243: 14, # 'у' + 244: 39, # 'Ñ„' + 245: 26, # 'Ñ…' + 246: 28, # 'ц' + 247: 22, # 'ч' + 248: 25, # 'ш' + 249: 29, # 'щ' + 250: 54, # 'ÑŠ' + 251: 18, # 'Ñ‹' + 252: 17, # 'ÑŒ' + 253: 30, # 'Ñ' + 254: 27, # 'ÑŽ' + 255: 255, # '€' +} + +MACCYRILLIC_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='MacCyrillic', + language='Russian', + char_to_order_map=MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + +ISO_8859_5_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # '\x80' + 129: 192, # '\x81' + 130: 193, # '\x82' + 131: 194, # '\x83' + 132: 195, # '\x84' + 133: 196, # '\x85' + 134: 197, # '\x86' + 135: 198, # '\x87' + 136: 199, # '\x88' + 137: 200, # '\x89' + 138: 201, # '\x8a' + 139: 202, # '\x8b' + 140: 203, # '\x8c' + 141: 204, # '\x8d' + 142: 205, # '\x8e' + 143: 206, # '\x8f' + 144: 207, # '\x90' + 145: 208, # '\x91' + 146: 209, # '\x92' + 147: 210, # '\x93' + 148: 211, # '\x94' + 149: 212, # '\x95' + 150: 213, # '\x96' + 151: 214, # '\x97' + 152: 215, # '\x98' + 153: 216, # '\x99' + 154: 217, # '\x9a' + 155: 218, # '\x9b' + 156: 219, # '\x9c' + 157: 220, # '\x9d' + 158: 221, # '\x9e' + 159: 222, # '\x9f' + 160: 223, # '\xa0' + 161: 224, # 'Ð' + 162: 225, # 'Ђ' + 163: 226, # 'Ѓ' + 164: 227, # 'Є' + 165: 228, # 'Ð…' + 166: 229, # 'І' + 167: 230, # 'Ї' + 168: 231, # 'Ј' + 169: 232, # 'Љ' + 170: 233, # 'Њ' + 171: 234, # 'Ћ' + 172: 235, # 'ÐŒ' + 173: 236, # '\xad' + 174: 237, # 'ÐŽ' + 175: 238, # 'Ð' + 176: 37, # 'Ð' + 177: 44, # 'Б' + 178: 33, # 'Ð’' + 179: 46, # 'Г' + 180: 41, # 'Д' + 181: 48, # 'Е' + 182: 56, # 'Ж' + 183: 51, # 'З' + 184: 42, # 'И' + 185: 60, # 'Й' + 186: 36, # 'К' + 187: 49, # 'Л' + 188: 38, # 'М' + 189: 31, # 'Ð' + 190: 34, # 'О' + 191: 35, # 'П' + 192: 45, # 'Р' + 193: 32, # 'С' + 194: 40, # 'Т' + 195: 52, # 'У' + 196: 53, # 'Ф' + 197: 55, # 'Ð¥' + 198: 58, # 'Ц' + 199: 50, # 'Ч' + 200: 57, # 'Ш' + 201: 63, # 'Щ' + 202: 70, # 'Ъ' + 203: 62, # 'Ы' + 204: 61, # 'Ь' + 205: 47, # 'Э' + 206: 59, # 'Ю' + 207: 43, # 'Я' + 208: 3, # 'а' + 209: 21, # 'б' + 210: 10, # 'в' + 211: 19, # 'г' + 212: 13, # 'д' + 213: 2, # 'е' + 214: 24, # 'ж' + 215: 20, # 'з' + 216: 4, # 'и' + 217: 23, # 'й' + 218: 11, # 'к' + 219: 8, # 'л' + 220: 12, # 'м' + 221: 5, # 'н' + 222: 1, # 'о' + 223: 15, # 'п' + 224: 9, # 'Ñ€' + 225: 7, # 'Ñ' + 226: 6, # 'Ñ‚' + 227: 14, # 'у' + 228: 39, # 'Ñ„' + 229: 26, # 'Ñ…' + 230: 28, # 'ц' + 231: 22, # 'ч' + 232: 25, # 'ш' + 233: 29, # 'щ' + 234: 54, # 'ÑŠ' + 235: 18, # 'Ñ‹' + 236: 17, # 'ÑŒ' + 237: 30, # 'Ñ' + 238: 27, # 'ÑŽ' + 239: 16, # 'Ñ' + 240: 239, # 'â„–' + 241: 68, # 'Ñ‘' + 242: 240, # 'Ñ’' + 243: 241, # 'Ñ“' + 244: 242, # 'Ñ”' + 245: 243, # 'Ñ•' + 246: 244, # 'Ñ–' + 247: 245, # 'Ñ—' + 248: 246, # 'ј' + 249: 247, # 'Ñ™' + 250: 248, # 'Ñš' + 251: 249, # 'Ñ›' + 252: 250, # 'Ñœ' + 253: 251, # '§' + 254: 252, # 'Ñž' + 255: 255, # 'ÑŸ' +} + +ISO_8859_5_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5', + 
language='Russian', + char_to_order_map=ISO_8859_5_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + diff --git a/minor_project/lib/python3.6/site-packages/chardet/langthaimodel.py b/minor_project/lib/python3.6/site-packages/chardet/langthaimodel.py new file mode 100644 index 0000000..d0191f2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/langthaimodel.py @@ -0,0 +1,4383 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +THAI_LANG_MODEL = { + 5: { # 'à¸' + 5: 2, # 'à¸' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 3, # 'ฎ' + 57: 2, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 2, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 2, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 3, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 1, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 30: { # 'ข' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 0, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 2, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 2, # 'ี' + 40: 3, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 24: { # 'ค' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 2, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'à¹' + 41: 3, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # 
'่' + 7: 3, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 8: { # 'ง' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 1, # 'ฉ' + 34: 2, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'à¸' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 2, # 'ศ' + 46: 1, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 3, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 26: { # 'จ' + 5: 2, # 'à¸' + 30: 1, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 3, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 52: { # 'ฉ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 3, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 1, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 34: { # 'ช' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 
2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 1, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 51: { # 'ซ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 3, # 'ึ' + 27: 2, # 'ื' + 32: 1, # 'ุ' + 35: 1, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 1, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 47: { # 'à¸' + 5: 1, # 'à¸' + 30: 1, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 3, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 2, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 58: { # 'ฎ' + 5: 2, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 1, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 57: { # 'à¸' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 
'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 49: { # 'à¸' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 53: { # 'ฑ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 55: { # 'ฒ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 43: 
{ # 'ณ' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 3, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 3, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 3, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 20: { # 'ด' + 5: 2, # 'à¸' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 2, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 1, # 'ึ' + 27: 2, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 2, # 'ๆ' + 37: 2, # '็' + 6: 1, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 19: { # 'ต' + 5: 2, # 'à¸' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 2, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 2, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 1, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 2, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 44: { # 'ถ' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 
2, # 'ิ' + 13: 1, # 'ี' + 40: 3, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 14: { # 'ท' + 5: 1, # 'à¸' + 30: 1, # 'ข' + 24: 3, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 3, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 1, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 3, # 'ศ' + 46: 1, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 48: { # 'ธ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 2, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 2, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 3: { # 'น' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 1, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 2, # 'ถ' + 14: 3, # 'ท' + 48: 3, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'à¸' + 31: 2, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 1, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 3, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'à¹' + 41: 3, # 'โ' + 29: 3, # 'ใ' + 33: 3, # 'ไ' + 50: 2, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 17: { # 'บ' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 1, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 
'à¸' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 2, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 2, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 25: { # 'ป' + 5: 2, # 'à¸' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 1, # 'ฎ' + 57: 3, # 'à¸' + 49: 1, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 1, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 2, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 1, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 39: { # 'ผ' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 1, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 0, # 'ุ' + 35: 3, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 1, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 62: { # 'à¸' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 1, # 'ี' + 40: 2, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 1, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 31: { # 'พ' + 5: 1, # 'à¸' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 
0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 2, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 1, # 'ึ' + 27: 3, # 'ื' + 32: 1, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 0, # '่' + 7: 1, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 54: { # 'ฟ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 45: { # 'ภ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 2, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 9: { # 'ม' + 5: 2, # 'à¸' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 3, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 2, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 1, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 
11: 2, # 'เ' + 28: 2, # 'à¹' + 41: 2, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 16: { # 'ย' + 5: 3, # 'à¸' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 2, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 1, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 1, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 2, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 2: { # 'ร' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 2, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 3, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 3, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 3, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'à¸' + 31: 2, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 2, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 3, # 'เ' + 28: 3, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 61: { # 'ฤ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 2, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 15: { # 'ล' + 5: 2, # 'à¸' + 30: 3, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 3, # 'ย' + 2: 1, # 
'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 2, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 1, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 2, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 12: { # 'ว' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 2, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 42: { # 'ศ' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 2, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 3, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 2, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 46: { # 'ษ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 2, # 'ฎ' + 57: 1, # 'à¸' + 49: 2, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 18: { # 'ส' + 5: 2, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 
0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 3, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 2, # 'ภ' + 9: 3, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 3, # 'ู' + 11: 2, # 'เ' + 28: 0, # 'à¹' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 1, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 21: { # 'ห' + 5: 3, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 4: { # 'อ' + 5: 3, # 'à¸' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 1, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 63: { # 'ฯ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 
0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 22: { # 'ะ' + 5: 3, # 'à¸' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 1, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 10: { # 'ั' + 5: 3, # 'à¸' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 3, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 2, # 'à¸' + 53: 0, # 'ฑ' + 55: 3, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 1: { # 'า' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 1, # 'ซ' + 47: 2, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 2, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'à¸' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 3, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 3, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 36: { # 'ำ' + 5: 2, # 'à¸' + 30: 1, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 1, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 
21: 3, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 23: { # 'ิ' + 5: 3, # 'à¸' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 3, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'à¸' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 2, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 3, # 'ศ' + 46: 2, # 'ษ' + 18: 2, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 1, # 'à¹' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 13: { # 'ี' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 40: { # 'ึ' + 5: 3, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 3, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 27: { # 'ื' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 0, # 'ต' + 44: 0, # 
'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 32: { # 'ุ' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 3, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 1, # 'ฒ' + 43: 3, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 3, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 1, # 'ศ' + 46: 2, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'à¹' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 35: { # 'ู' + 5: 3, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 2, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 0, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 1, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 11: { # 'เ' + 5: 3, # 'à¸' + 30: 3, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 3, # 'ฉ' + 34: 3, # 'ช' + 51: 2, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 1, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 3, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'à¸' + 31: 3, # 'พ' + 54: 1, # 'ฟ' + 45: 3, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # 
'๕' + }, + 28: { # 'à¹' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 3, # 'ต' + 44: 2, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' + 39: 3, # 'ผ' + 62: 0, # 'à¸' + 31: 2, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 41: { # 'โ' + 5: 2, # 'à¸' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 1, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 1, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 29: { # 'ใ' + 5: 2, # 'à¸' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 3, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 33: { # 'ไ' + 5: 1, # 'à¸' + 30: 2, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 1, # 'บ' + 25: 3, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 2, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 0, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 2, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 
36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 50: { # 'ๆ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 37: { # '็' + 5: 2, # 'à¸' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 6: { # '่' + 5: 2, # 'à¸' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 1, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 7: { # '้' + 5: 2, # 'à¸' + 30: 1, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 
'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 3, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 38: { # '์' + 5: 2, # 'à¸' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 1, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 1, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 1, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 56: { # '๑' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 2, # '๑' + 59: 1, # '๒' + 60: 1, # '๕' + }, + 59: { # '๒' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 1, # '๑' + 59: 1, # '๒' + 60: 3, # '๕' + }, + 60: { # '๕' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, 
# 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 2, # '๑' + 59: 1, # '๒' + 60: 0, # '๕' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +TIS_620_THAI_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 182, # 'A' + 66: 106, # 'B' + 67: 107, # 'C' + 68: 100, # 'D' + 69: 183, # 'E' + 70: 184, # 'F' + 71: 185, # 'G' + 72: 101, # 'H' + 73: 94, # 'I' + 74: 186, # 'J' + 75: 187, # 'K' + 76: 108, # 'L' + 77: 109, # 'M' + 78: 110, # 'N' + 79: 111, # 'O' + 80: 188, # 'P' + 81: 189, # 'Q' + 82: 190, # 'R' + 83: 89, # 'S' + 84: 95, # 'T' + 85: 112, # 'U' + 86: 113, # 'V' + 87: 191, # 'W' + 88: 192, # 'X' + 89: 193, # 'Y' + 90: 194, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 64, # 'a' + 98: 72, # 'b' + 99: 73, # 'c' + 100: 114, # 'd' + 101: 74, # 'e' + 102: 115, # 'f' + 103: 116, # 'g' + 104: 102, # 'h' + 105: 81, # 'i' + 106: 201, # 'j' + 107: 117, # 'k' + 108: 90, # 'l' + 109: 103, # 'm' + 110: 78, # 'n' + 111: 82, # 'o' + 112: 96, # 'p' + 113: 202, # 'q' + 114: 91, # 'r' + 115: 79, # 's' + 116: 84, # 't' + 117: 104, # 'u' + 118: 105, # 'v' + 119: 97, # 'w' + 120: 98, # 'x' + 121: 92, # 'y' + 122: 203, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 209, # '\x80' + 129: 210, # '\x81' + 130: 211, # '\x82' + 131: 212, # '\x83' + 132: 213, # '\x84' + 133: 88, # '\x85' + 134: 214, # '\x86' + 135: 215, # '\x87' + 136: 216, # '\x88' + 137: 217, # '\x89' + 138: 218, # '\x8a' + 139: 219, # '\x8b' + 140: 220, # '\x8c' + 141: 118, # '\x8d' + 142: 221, # '\x8e' + 143: 222, # '\x8f' + 144: 223, # '\x90' + 145: 224, # '\x91' + 146: 99, # '\x92' + 147: 85, # '\x93' + 148: 83, # '\x94' + 149: 225, # '\x95' + 150: 226, # '\x96' + 151: 227, # '\x97' + 152: 228, # '\x98' + 153: 229, # '\x99' + 154: 230, # '\x9a' + 155: 231, # '\x9b' + 156: 232, # '\x9c' + 157: 233, # '\x9d' + 158: 234, # '\x9e' + 159: 235, # '\x9f' + 160: 236, # None + 161: 5, # 'à¸' + 162: 30, # 'ข' + 163: 237, # 'ฃ' + 164: 24, # 'ค' + 165: 238, # 'ฅ' + 166: 75, # 'ฆ' + 167: 8, # 'ง' + 168: 26, # 'จ' + 169: 52, # 'ฉ' + 170: 34, # 'ช' + 171: 51, # 'ซ' + 172: 119, # 'ฌ' + 173: 47, # 'à¸' + 174: 58, # 'ฎ' + 175: 57, # 'à¸' + 176: 49, # 'à¸' + 177: 53, # 'ฑ' + 178: 55, # 'ฒ' + 179: 43, # 'ณ' + 180: 20, # 'ด' + 181: 19, # 'ต' + 182: 44, # 'ถ' + 183: 14, # 'ท' + 184: 48, # 'ธ' + 185: 3, # 'น' + 186: 17, # 'บ' + 187: 25, # 'ป' + 188: 39, # 'ผ' + 189: 62, # 'à¸' + 190: 31, # 'พ' + 191: 54, # 'ฟ' + 192: 45, # 'ภ' + 193: 9, # 'ม' + 194: 16, # 'ย' + 195: 2, # 'ร' + 196: 61, # 'ฤ' + 197: 15, # 'ล' + 198: 239, # 'ฦ' + 199: 12, # 'ว' + 200: 42, # 'ศ' + 201: 46, # 'ษ' + 202: 18, # 'ส' + 203: 21, # 'ห' + 204: 76, # 'ฬ' + 205: 4, # 'อ' + 206: 66, # 'ฮ' + 207: 63, # 'ฯ' + 208: 22, # 'ะ' + 209: 10, # 'ั' + 210: 1, # 'า' + 211: 36, # 'ำ' + 212: 23, # 'ิ' + 213: 13, # 'ี' + 214: 40, # 'ึ' + 215: 27, # 'ื' + 216: 32, # 'ุ' + 217: 35, # 'ู' + 218: 86, # 'ฺ' + 219: 240, # None + 220: 241, # None + 221: 242, # None + 222: 243, # None + 223: 244, # '฿' + 224: 11, # 'เ' + 225: 28, # 'à¹' + 226: 41, # 'โ' + 227: 29, # 'ใ' + 228: 33, # 'ไ' + 229: 245, # 'ๅ' + 230: 50, # 'ๆ' + 231: 37, # '็' + 232: 6, # '่' + 233: 7, # '้' + 234: 67, # '๊' + 235: 77, # '๋' + 236: 38, # '์' + 237: 93, # 'à¹' + 238: 246, # '๎' + 239: 247, # 'à¹' + 240: 68, # 'à¹' + 241: 56, # '๑' + 242: 59, # '๒' + 243: 65, # '๓' + 244: 69, # '๔' + 245: 60, # '๕' + 246: 70, # '๖' + 247: 80, # '๗' + 248: 71, # '๘' + 249: 87, # '๙' + 250: 248, # '๚' + 251: 249, # '๛' + 252: 250, # None + 253: 251, # None + 254: 252, # None + 255: 253, # None +} + +TIS_620_THAI_MODEL = SingleByteCharSetModel(charset_name='TIS-620', + language='Thai', + 
char_to_order_map=TIS_620_THAI_CHAR_TO_ORDER, + language_model=THAI_LANG_MODEL, + typical_positive_ratio=0.926386, + keep_ascii_letters=False, + alphabet='à¸à¸‚ฃคฅฆงจฉชซฌà¸à¸Žà¸à¸à¸‘ฒณดตถทธนบปผà¸à¸žà¸Ÿà¸ à¸¡à¸¢à¸£à¸¤à¸¥à¸¦à¸§à¸¨à¸©à¸ªà¸«à¸¬à¸­à¸®à¸¯à¸°à¸±à¸²à¸³à¸´à¸µà¸¶à¸·à¸¸à¸¹à¸ºà¸¿à¹€à¹à¹‚ใไๅๆ็่้๊๋์à¹à¹Žà¹à¹à¹‘๒๓๔๕๖๗๘๙๚๛') + diff --git a/minor_project/lib/python3.6/site-packages/chardet/langturkishmodel.py b/minor_project/lib/python3.6/site-packages/chardet/langturkishmodel.py new file mode 100644 index 0000000..8ba9322 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/langturkishmodel.py @@ -0,0 +1,4383 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +TURKISH_LANG_MODEL = { + 23: { # 'A' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 37: { # 'B' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 1, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 47: { # 'C' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 
59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 39: { # 'D' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 29: { # 'E' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 52: { # 'F' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 1, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 2, # 'ÅŸ' + }, + 36: { # 'G' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 2, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 
10: 1, # 'k' + 5: 0, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 1, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 45: { # 'H' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 2, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 2, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ÄŸ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 53: { # 'I' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 1, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 60: { # 'J' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 16: { # 'K' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 
0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 1, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 49: { # 'L' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 2, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 20: { # 'M' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 0, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 46: { # 'N' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 
40: 1, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 42: { # 'O' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 2, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 48: { # 'P' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 44: { # 'R' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 35: { # 'S' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 1, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 2, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, 
# 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 31: { # 'T' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 2, # 't' + 14: 2, # 'u' + 32: 1, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 51: { # 'U' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 1, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 38: { # 'V' + 23: 1, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 1, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 62: { # 'W' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, 
# 'Z' + 1: 0, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 43: { # 'Y' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 0, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 1, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 56: { # 'Z' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 1: { # 'a' + 23: 3, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 1, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 21: { # 'b' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 
52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 3, # 'g' + 25: 1, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 2, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 28: { # 'c' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 2, # 'T' + 51: 2, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 3, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 1, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 1, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 2, # 'ÄŸ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 2, # 'ÅŸ' + }, + 12: { # 'd' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 2: { # 'e' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 
50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 18: { # 'f' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 1, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 1, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 27: { # 'g' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 25: { # 'h' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 3: { # 'i' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 1, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 3, # 'g' + 25: 1, # 'h' + 3: 
3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 24: { # 'j' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 10: { # 'k' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 2, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 5: { # 'l' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 1, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 2, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 13: { # 'm' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 
'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 2, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 4: { # 'n' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 15: { # 'o' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 2, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ÄŸ' + 41: 2, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Åž' + 19: 2, # 'ÅŸ' + }, + 26: { # 'p' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, 
# 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 7: { # 'r' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 1, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 8: { # 's' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 9: { # 't' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 14: { # 'u' + 23: 3, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 2, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 2, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 
8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 32: { # 'v' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 57: { # 'w' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 1, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 58: { # 'x' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 11: { # 'y' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 
43: 1, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 22: { # 'z' + 23: 2, # 'A' + 37: 2, # 'B' + 47: 1, # 'C' + 39: 2, # 'D' + 29: 3, # 'E' + 52: 1, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 2, # 'N' + 42: 2, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 3, # 'T' + 51: 2, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 1, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 2, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 2, # 'ÄŸ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 1, # 'Åž' + 19: 2, # 'ÅŸ' + }, + 63: { # '·' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 54: { # 'Ç' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 3, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 50: { # 'Ö' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, 
# 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 2, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 1, # 'N' + 42: 2, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 1, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 1, # 's' + 9: 2, # 't' + 14: 0, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 55: { # 'Ü' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 59: { # 'â' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 33: { # 'ç' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 0, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 
0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 61: { # 'î' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 1, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 34: { # 'ö' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 1, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 3, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ÄŸ' + 41: 1, # 'İ' + 6: 1, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 17: { # 'ü' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 30: { # 'ÄŸ' + 23: 0, # 'A' + 37: 2, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 2, # 'N' + 42: 2, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 
0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 3, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 2, # 'İ' + 6: 2, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 41: { # 'İ' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 2, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 6: { # 'ı' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 40: { # 'Åž' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 2, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 0, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 3, # 'f' + 27: 0, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 1, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 1, # 'ü' + 30: 2, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Åž' + 19: 2, # 'ÅŸ' + }, + 19: { # 'ÅŸ' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 2, # 
'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 1, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Åž' + 19: 1, # 'ÅŸ' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +ISO_8859_9_TURKISH_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 255, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 255, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 255, # ' ' + 33: 255, # '!' + 34: 255, # '"' + 35: 255, # '#' + 36: 255, # '$' + 37: 255, # '%' + 38: 255, # '&' + 39: 255, # "'" + 40: 255, # '(' + 41: 255, # ')' + 42: 255, # '*' + 43: 255, # '+' + 44: 255, # ',' + 45: 255, # '-' + 46: 255, # '.' + 47: 255, # '/' + 48: 255, # '0' + 49: 255, # '1' + 50: 255, # '2' + 51: 255, # '3' + 52: 255, # '4' + 53: 255, # '5' + 54: 255, # '6' + 55: 255, # '7' + 56: 255, # '8' + 57: 255, # '9' + 58: 255, # ':' + 59: 255, # ';' + 60: 255, # '<' + 61: 255, # '=' + 62: 255, # '>' + 63: 255, # '?' 
+ 64: 255, # '@' + 65: 23, # 'A' + 66: 37, # 'B' + 67: 47, # 'C' + 68: 39, # 'D' + 69: 29, # 'E' + 70: 52, # 'F' + 71: 36, # 'G' + 72: 45, # 'H' + 73: 53, # 'I' + 74: 60, # 'J' + 75: 16, # 'K' + 76: 49, # 'L' + 77: 20, # 'M' + 78: 46, # 'N' + 79: 42, # 'O' + 80: 48, # 'P' + 81: 69, # 'Q' + 82: 44, # 'R' + 83: 35, # 'S' + 84: 31, # 'T' + 85: 51, # 'U' + 86: 38, # 'V' + 87: 62, # 'W' + 88: 65, # 'X' + 89: 43, # 'Y' + 90: 56, # 'Z' + 91: 255, # '[' + 92: 255, # '\\' + 93: 255, # ']' + 94: 255, # '^' + 95: 255, # '_' + 96: 255, # '`' + 97: 1, # 'a' + 98: 21, # 'b' + 99: 28, # 'c' + 100: 12, # 'd' + 101: 2, # 'e' + 102: 18, # 'f' + 103: 27, # 'g' + 104: 25, # 'h' + 105: 3, # 'i' + 106: 24, # 'j' + 107: 10, # 'k' + 108: 5, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 15, # 'o' + 112: 26, # 'p' + 113: 64, # 'q' + 114: 7, # 'r' + 115: 8, # 's' + 116: 9, # 't' + 117: 14, # 'u' + 118: 32, # 'v' + 119: 57, # 'w' + 120: 58, # 'x' + 121: 11, # 'y' + 122: 22, # 'z' + 123: 255, # '{' + 124: 255, # '|' + 125: 255, # '}' + 126: 255, # '~' + 127: 255, # '\x7f' + 128: 180, # '\x80' + 129: 179, # '\x81' + 130: 178, # '\x82' + 131: 177, # '\x83' + 132: 176, # '\x84' + 133: 175, # '\x85' + 134: 174, # '\x86' + 135: 173, # '\x87' + 136: 172, # '\x88' + 137: 171, # '\x89' + 138: 170, # '\x8a' + 139: 169, # '\x8b' + 140: 168, # '\x8c' + 141: 167, # '\x8d' + 142: 166, # '\x8e' + 143: 165, # '\x8f' + 144: 164, # '\x90' + 145: 163, # '\x91' + 146: 162, # '\x92' + 147: 161, # '\x93' + 148: 160, # '\x94' + 149: 159, # '\x95' + 150: 101, # '\x96' + 151: 158, # '\x97' + 152: 157, # '\x98' + 153: 156, # '\x99' + 154: 155, # '\x9a' + 155: 154, # '\x9b' + 156: 153, # '\x9c' + 157: 152, # '\x9d' + 158: 151, # '\x9e' + 159: 106, # '\x9f' + 160: 150, # '\xa0' + 161: 149, # '¡' + 162: 148, # '¢' + 163: 147, # '£' + 164: 146, # '¤' + 165: 145, # 'Â¥' + 166: 144, # '¦' + 167: 100, # '§' + 168: 143, # '¨' + 169: 142, # '©' + 170: 141, # 'ª' + 171: 140, # '«' + 172: 139, # '¬' + 173: 138, # '\xad' + 174: 137, # '®' + 175: 136, # '¯' + 176: 94, # '°' + 177: 80, # '±' + 178: 93, # '²' + 179: 135, # '³' + 180: 105, # '´' + 181: 134, # 'µ' + 182: 133, # '¶' + 183: 63, # '·' + 184: 132, # '¸' + 185: 131, # '¹' + 186: 130, # 'º' + 187: 129, # '»' + 188: 128, # '¼' + 189: 127, # '½' + 190: 126, # '¾' + 191: 125, # '¿' + 192: 124, # 'À' + 193: 104, # 'Ã' + 194: 73, # 'Â' + 195: 99, # 'Ã' + 196: 79, # 'Ä' + 197: 85, # 'Ã…' + 198: 123, # 'Æ' + 199: 54, # 'Ç' + 200: 122, # 'È' + 201: 98, # 'É' + 202: 92, # 'Ê' + 203: 121, # 'Ë' + 204: 120, # 'ÃŒ' + 205: 91, # 'Ã' + 206: 103, # 'ÃŽ' + 207: 119, # 'Ã' + 208: 68, # 'Äž' + 209: 118, # 'Ñ' + 210: 117, # 'Ã’' + 211: 97, # 'Ó' + 212: 116, # 'Ô' + 213: 115, # 'Õ' + 214: 50, # 'Ö' + 215: 90, # '×' + 216: 114, # 'Ø' + 217: 113, # 'Ù' + 218: 112, # 'Ú' + 219: 111, # 'Û' + 220: 55, # 'Ü' + 221: 41, # 'İ' + 222: 40, # 'Åž' + 223: 86, # 'ß' + 224: 89, # 'à' + 225: 70, # 'á' + 226: 59, # 'â' + 227: 78, # 'ã' + 228: 71, # 'ä' + 229: 82, # 'Ã¥' + 230: 88, # 'æ' + 231: 33, # 'ç' + 232: 77, # 'è' + 233: 66, # 'é' + 234: 84, # 'ê' + 235: 83, # 'ë' + 236: 110, # 'ì' + 237: 75, # 'í' + 238: 61, # 'î' + 239: 96, # 'ï' + 240: 30, # 'ÄŸ' + 241: 67, # 'ñ' + 242: 109, # 'ò' + 243: 74, # 'ó' + 244: 87, # 'ô' + 245: 102, # 'õ' + 246: 34, # 'ö' + 247: 95, # '÷' + 248: 81, # 'ø' + 249: 108, # 'ù' + 250: 76, # 'ú' + 251: 72, # 'û' + 252: 17, # 'ü' + 253: 6, # 'ı' + 254: 19, # 'ÅŸ' + 255: 107, # 'ÿ' +} + +ISO_8859_9_TURKISH_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-9', + language='Turkish', + 
char_to_order_map=ISO_8859_9_TURKISH_CHAR_TO_ORDER, + language_model=TURKISH_LANG_MODEL, + typical_positive_ratio=0.97029, + keep_ascii_letters=True, + alphabet='ABCDEFGHIJKLMNOPRSTUVYZabcdefghijklmnoprstuvyzÂÇÎÖÛÜâçîöûüĞğİıŞş') + diff --git a/minor_project/lib/python3.6/site-packages/chardet/latin1prober.py b/minor_project/lib/python3.6/site-packages/chardet/latin1prober.py new file mode 100644 index 0000000..7d1e8c2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/latin1prober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 + 
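The ISO_8859_9_TURKISH_MODEL above bundles two lookup tables: ISO_8859_9_TURKISH_CHAR_TO_ORDER maps each raw byte to a frequency-rank "order", and TURKISH_LANG_MODEL assigns each order pair a likelihood category from 0 (negative) to 3 (positive). The sketch below shows roughly how a prober tallies those categories over a byte string; it is a simplified illustration, not the library's actual scorer (that logic lives in sbcharsetprober.py, added later in this diff).

    # Simplified sketch of how the two tables above are consulted together.
    # Orders of 64 and above fall outside the sampled range and are skipped,
    # mirroring the SAMPLE_SIZE guard in sbcharsetprober.py.
    def tally_bigram_categories(byte_str, char_to_order_map, language_model, sample_size=64):
        counts = [0, 0, 0, 0]       # one counter per likelihood category 0-3
        last_order = 255            # 255 = "no usable previous character"
        for byte in byte_str:
            order = char_to_order_map.get(byte, 255)
            if last_order < sample_size and order < sample_size:
                category = language_model.get(last_order, {}).get(order, 0)
                counts[category] += 1
            last_order = order
        return counts

    # Usage (byte values resolved via the mapping table above):
    # tally_bigram_categories(b'bir elma', ISO_8859_9_TURKISH_CHAR_TO_ORDER, TURKISH_LANG_MODEL)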
ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) + + +class Latin1Prober(CharSetProber): + def __init__(self): + super(Latin1Prober, self).__init__() + self._last_char_class = None + self._freq_counter = None + self.reset() + + def reset(self): + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + CharSetProber.reset(self) + + @property + def charset_name(self): + return "ISO-8859-1" + + @property + def language(self): + return "" + + def feed(self, byte_str): + byte_str = self.filter_with_english_letters(byte_str) + for c in byte_str: + char_class = Latin1_CharToClass[c] + freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self): + if self.state == ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + if total < 0.01: + confidence = 0.0 + else: + confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) + / total) + if confidence < 0.0: + confidence = 0.0 + # lower the confidence of latin1 so that other more accurate + # detector can take priority. + confidence = confidence * 0.73 + return confidence diff --git a/minor_project/lib/python3.6/site-packages/chardet/mbcharsetprober.py b/minor_project/lib/python3.6/site-packages/chardet/mbcharsetprober.py new file mode 100644 index 0000000..6256ecf --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/mbcharsetprober.py @@ -0,0 +1,91 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
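Latin1Prober.get_confidence above rewards category-3 ("very likely") class transitions, penalises category-1 ("very unlikely") ones twenty-fold, and then scales the result by 0.73 so that more specific probers win ties. A quick trace with made-up counter values:

    # Illustrative numbers only; they are not taken from any real input.
    freq_counter = [0, 2, 60, 138]                                    # counts per frequency category
    total = sum(freq_counter)                                         # 200
    confidence = (freq_counter[3] - freq_counter[1] * 20.0) / total   # (138 - 40) / 200 = 0.49
    confidence = max(confidence, 0.0) * 0.73                          # 0.49 * 0.73 ≈ 0.36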
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState + + +class MultiByteCharSetProber(CharSetProber): + """ + MultiByteCharSetProber + """ + + def __init__(self, lang_filter=None): + super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) + self.distribution_analyzer = None + self.coding_sm = None + self._last_char = [0, 0] + + def reset(self): + super(MultiByteCharSetProber, self).reset() + if self.coding_sm: + self.coding_sm.reset() + if self.distribution_analyzer: + self.distribution_analyzer.reset() + self._last_char = [0, 0] + + @property + def charset_name(self): + raise NotImplementedError + + @property + def language(self): + raise NotImplementedError + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.distribution_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + return self.distribution_analyzer.get_confidence() diff --git a/minor_project/lib/python3.6/site-packages/chardet/mbcsgroupprober.py b/minor_project/lib/python3.6/site-packages/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..530abe7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/mbcsgroupprober.py @@ -0,0 +1,54 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/minor_project/lib/python3.6/site-packages/chardet/mbcssm.py b/minor_project/lib/python3.6/site-packages/chardet/mbcssm.py new file mode 100644 index 0000000..8360d0f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/mbcssm.py @@ -0,0 +1,572 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
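Each *_SM_MODEL dict in this file pairs a per-byte class table with a flattened transition table whose rows are class_factor entries wide. The consumer, chardet's CodingStateMachine (defined in codingstatemachine.py, which is not part of this hunk), indexes them roughly as sketched below; character-length bookkeeping is omitted here for brevity.

    # Rough sketch of the indexing scheme implied by the tables in this file.
    from chardet.enums import MachineState

    def run_state_machine(sm_model, byte_str):
        state = MachineState.START
        for byte in byte_str:
            byte_class = sm_model['class_table'][byte]
            state = sm_model['state_table'][state * sm_model['class_factor'] + byte_class]
            if state in (MachineState.ERROR, MachineState.ITS_ME):
                break
        return state

    # Feeding a complete multi-byte character should land back on MachineState.START,
    # e.g. run_state_machine(UTF8_SM_MODEL, b'\xe4\xbd\xa0') with the UTF-8 model below.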
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +# BIG5 + +BIG5_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL = {'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': 'Big5'} + +# CP949 + +CP949_CLS = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL = {'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # 
a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_CLS = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': 'x-euc-tw'} + +# GB2312 + +GB2312_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 3,3,3,3,3,3,3,3, # 30 - 37 + 3,3,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,4, # 78 - 7f + 5,6,6,6,6,6,6,6, # 80 - 87 + 6,6,6,6,6,6,6,6, # 88 - 8f + 6,6,6,6,6,6,6,6, # 90 - 97 + 6,6,6,6,6,6,6,6, # 98 - 9f + 6,6,6,6,6,6,6,6, # a0 - a7 + 
6,6,6,6,6,6,6,6, # a8 - af + 6,6,6,6,6,6,6,6, # b0 - b7 + 6,6,6,6,6,6,6,6, # b8 - bf + 6,6,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 6,6,6,6,6,6,6,6, # e0 - e7 + 6,6,6,6,6,6,6,6, # e8 - ef + 6,6,6,6,6,6,6,6, # f0 - f7 + 6,6,6,6,6,6,6,0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL = {'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': 'GB2312'} + +# Shift_JIS + +SJIS_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 3,3,3,3,3,2,2,3, # 80 - 87 + 3,3,3,3,3,3,3,3, # 88 - 8f + 3,3,3,3,3,3,3,3, # 90 - 97 + 3,3,3,3,3,3,3,3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff + + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL = {'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 
0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL = {'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': 'UTF-8'} diff --git a/minor_project/lib/python3.6/site-packages/chardet/metadata/__init__.py b/minor_project/lib/python3.6/site-packages/chardet/metadata/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/minor_project/lib/python3.6/site-packages/chardet/metadata/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/metadata/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..019336a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/metadata/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/metadata/__pycache__/languages.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/chardet/metadata/__pycache__/languages.cpython-36.pyc new file mode 100644 index 0000000..2f704b1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/chardet/metadata/__pycache__/languages.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/chardet/metadata/languages.py 
b/minor_project/lib/python3.6/site-packages/chardet/metadata/languages.py new file mode 100644 index 0000000..3237d5a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/metadata/languages.py @@ -0,0 +1,310 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" +Metadata about languages used by our model training code for our +SingleByteCharSetProbers. Could be used for other things in the future. + +This code is based on the language metadata from the uchardet project. +""" +from __future__ import absolute_import, print_function + +from string import ascii_letters + + +# TODO: Add Ukranian (KOI8-U) + +class Language(object): + """Metadata about a language useful for training models + + :ivar name: The human name for the language, in English. + :type name: str + :ivar iso_code: 2-letter ISO 639-1 if possible, 3-letter ISO code otherwise, + or use another catalog as a last resort. + :type iso_code: str + :ivar use_ascii: Whether or not ASCII letters should be included in trained + models. + :type use_ascii: bool + :ivar charsets: The charsets we want to support and create data for. + :type charsets: list of str + :ivar alphabet: The characters in the language's alphabet. If `use_ascii` is + `True`, you only need to add those not in the ASCII set. + :type alphabet: str + :ivar wiki_start_pages: The Wikipedia pages to start from if we're crawling + Wikipedia for training data. + :type wiki_start_pages: list of str + """ + def __init__(self, name=None, iso_code=None, use_ascii=True, charsets=None, + alphabet=None, wiki_start_pages=None): + super(Language, self).__init__() + self.name = name + self.iso_code = iso_code + self.use_ascii = use_ascii + self.charsets = charsets + if self.use_ascii: + if alphabet: + alphabet += ascii_letters + else: + alphabet = ascii_letters + elif not alphabet: + raise ValueError('Must supply alphabet if use_ascii is False') + self.alphabet = ''.join(sorted(set(alphabet))) if alphabet else None + self.wiki_start_pages = wiki_start_pages + + def __repr__(self): + return '{}({})'.format(self.__class__.__name__, + ', '.join('{}={!r}'.format(k, v) + for k, v in self.__dict__.items() + if not k.startswith('_'))) + + +LANGUAGES = {'Arabic': Language(name='Arabic', + iso_code='ar', + use_ascii=False, + # We only support encodings that use isolated + # forms, because the current recommendation is + # that the rendering system handles presentation + # forms. This means we purposefully skip IBM864. 
+ charsets=['ISO-8859-6', 'WINDOWS-1256', + 'CP720', 'CP864'], + alphabet=u'ءآأؤإئابةتثجحخدذرزسشصضطظعغػؼؽؾؿـÙقكلمنهوىيًٌÙÙŽÙÙÙ‘', + wiki_start_pages=[u'Ø§Ù„ØµÙØ­Ø©_الرئيسية']), + 'Belarusian': Language(name='Belarusian', + iso_code='be', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'IBM866', 'MacCyrillic'], + alphabet=(u'ÐБВГДЕÐЖЗІЙКЛМÐОПРСТУЎФХЦЧШЫЬЭЮЯ' + u'абвгдеёжзійклмнопрÑтуўфхцчшыьÑÑŽÑʼ'), + wiki_start_pages=[u'ГалоўнаÑ_Ñтаронка']), + 'Bulgarian': Language(name='Bulgarian', + iso_code='bg', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'IBM855'], + alphabet=(u'ÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЬЮЯ' + u'абвгдежзийклмнопрÑтуфхцчшщъьюÑ'), + wiki_start_pages=[u'Ðачална_Ñтраница']), + 'Czech': Language(name='Czech', + iso_code='cz', + use_ascii=True, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=u'áÄÄéěíňóřšťúůýžÃČĎÉĚÃŇÓŘŠŤÚŮÃŽ', + wiki_start_pages=[u'Hlavní_strana']), + 'Danish': Language(name='Danish', + iso_code='da', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'æøåÆØÅ', + wiki_start_pages=[u'Forside']), + 'German': Language(name='German', + iso_code='de', + use_ascii=True, + charsets=['ISO-8859-1', 'WINDOWS-1252'], + alphabet=u'äöüßÄÖÜ', + wiki_start_pages=[u'Wikipedia:Hauptseite']), + 'Greek': Language(name='Greek', + iso_code='el', + use_ascii=False, + charsets=['ISO-8859-7', 'WINDOWS-1253'], + alphabet=(u'αβγδεζηθικλμνξοπÏσςτυφχψωάέήίόÏÏŽ' + u'ΑΒΓΔΕΖΗΘΙΚΛΜÎΞΟΠΡΣΣΤΥΦΧΨΩΆΈΉΊΌΎÎ'), + wiki_start_pages=[u'ΠÏλη:ΚÏÏια']), + 'English': Language(name='English', + iso_code='en', + use_ascii=True, + charsets=['ISO-8859-1', 'WINDOWS-1252'], + wiki_start_pages=[u'Main_Page']), + 'Esperanto': Language(name='Esperanto', + iso_code='eo', + # Q, W, X, and Y not used at all + use_ascii=False, + charsets=['ISO-8859-3'], + alphabet=(u'abcĉdefgÄhÄ¥ijĵklmnoprsÅtuÅ­vz' + u'ABCĈDEFGÄœHĤIJÄ´KLMNOPRSÅœTUŬVZ'), + wiki_start_pages=[u'Vikipedio:ĈefpaÄo']), + 'Spanish': Language(name='Spanish', + iso_code='es', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ñáéíóúüÑÃÉÃÓÚÜ', + wiki_start_pages=[u'Wikipedia:Portada']), + 'Estonian': Language(name='Estonian', + iso_code='et', + use_ascii=False, + charsets=['ISO-8859-4', 'ISO-8859-13', + 'WINDOWS-1257'], + # C, F, Å , Q, W, X, Y, Z, Ž are only for + # loanwords + alphabet=(u'ABDEGHIJKLMNOPRSTUVÕÄÖÜ' + u'abdeghijklmnoprstuvõäöü'), + wiki_start_pages=[u'Esileht']), + 'Finnish': Language(name='Finnish', + iso_code='fi', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ÅÄÖŠŽåäöšž', + wiki_start_pages=[u'Wikipedia:Etusivu']), + 'French': Language(name='French', + iso_code='fr', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'œàâçèéîïùûêŒÀÂÇÈÉÎÃÙÛÊ', + wiki_start_pages=[u'Wikipédia:Accueil_principal', + u'BÅ“uf (animal)']), + 'Hebrew': Language(name='Hebrew', + iso_code='he', + use_ascii=False, + charsets=['ISO-8859-8', 'WINDOWS-1255'], + alphabet=u'×בגדהוזחטיךכל×מןנסעףפץצקרשתװױײ', + wiki_start_pages=[u'עמוד_ר×שי']), + 'Croatian': Language(name='Croatian', + iso_code='hr', + # Q, W, X, Y are only used for foreign words. + use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'abcÄćdÄ‘efghijklmnoprsÅ¡tuvzž' + u'ABCČĆDÄEFGHIJKLMNOPRSÅ TUVZŽ'), + wiki_start_pages=[u'Glavna_stranica']), + 'Hungarian': Language(name='Hungarian', + iso_code='hu', + # Q, W, X, Y are only used for foreign words. 
+ use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'abcdefghijklmnoprstuvzáéíóöőúüű' + u'ABCDEFGHIJKLMNOPRSTUVZÃÉÃÓÖÅÚÜŰ'), + wiki_start_pages=[u'KezdÅ‘lap']), + 'Italian': Language(name='Italian', + iso_code='it', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ÀÈÉÌÒÓÙàèéìòóù', + wiki_start_pages=[u'Pagina_principale']), + 'Lithuanian': Language(name='Lithuanian', + iso_code='lt', + use_ascii=False, + charsets=['ISO-8859-13', 'WINDOWS-1257', + 'ISO-8859-4'], + # Q, W, and X not used at all + alphabet=(u'AÄ„BCÄŒDEĘĖFGHIÄ®YJKLMNOPRSÅ TUŲŪVZŽ' + u'aÄ…bcÄdeęėfghiįyjklmnoprsÅ¡tuųūvzž'), + wiki_start_pages=[u'Pagrindinis_puslapis']), + 'Latvian': Language(name='Latvian', + iso_code='lv', + use_ascii=False, + charsets=['ISO-8859-13', 'WINDOWS-1257', + 'ISO-8859-4'], + # Q, W, X, Y are only for loanwords + alphabet=(u'AÄ€BCÄŒDEÄ’FGÄ¢HIĪJKĶLÄ»MNÅ…OPRSÅ TUŪVZŽ' + u'aÄbcÄdeÄ“fgÄ£hiÄ«jkÄ·lļmnņoprsÅ¡tuÅ«vzž'), + wiki_start_pages=[u'SÄkumlapa']), + 'Macedonian': Language(name='Macedonian', + iso_code='mk', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'MacCyrillic', 'IBM855'], + alphabet=(u'ÐБВГДЃЕЖЗЅИЈКЛЉМÐЊОПРСТЌУФХЦЧÐШ' + u'абвгдѓежзѕијклљмнњопрÑтќуфхцчџш'), + wiki_start_pages=[u'Главна_Ñтраница']), + 'Dutch': Language(name='Dutch', + iso_code='nl', + use_ascii=True, + charsets=['ISO-8859-1', 'WINDOWS-1252'], + wiki_start_pages=[u'Hoofdpagina']), + 'Polish': Language(name='Polish', + iso_code='pl', + # Q and X are only used for foreign words. + use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'AÄ„BCĆDEĘFGHIJKLÅMNŃOÓPRSÅšTUWYZŹŻ' + u'aÄ…bcćdeÄ™fghijklÅ‚mnÅ„oóprsÅ›tuwyzźż'), + wiki_start_pages=[u'Wikipedia:Strona_główna']), + 'Portuguese': Language(name='Portuguese', + iso_code='pt', + use_ascii=True, + charsets=['ISO-8859-1', 'ISO-8859-15', + 'WINDOWS-1252'], + alphabet=u'ÃÂÃÀÇÉÊÃÓÔÕÚáâãàçéêíóôõú', + wiki_start_pages=[u'Wikipédia:Página_principal']), + 'Romanian': Language(name='Romanian', + iso_code='ro', + use_ascii=True, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=u'ăâîșțĂÂÎȘȚ', + wiki_start_pages=[u'Pagina_principală']), + 'Russian': Language(name='Russian', + iso_code='ru', + use_ascii=False, + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'KOI8-R', 'MacCyrillic', 'IBM866', + 'IBM855'], + alphabet=(u'абвгдеёжзийклмнопрÑтуфхцчшщъыьÑÑŽÑ' + u'ÐБВГДЕÐЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯ'), + wiki_start_pages=[u'ЗаглавнаÑ_Ñтраница']), + 'Slovak': Language(name='Slovak', + iso_code='sk', + use_ascii=True, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=u'áäÄÄéíĺľňóôŕšťúýžÃÄČĎÉÃĹĽŇÓÔŔŠŤÚÃŽ', + wiki_start_pages=[u'Hlavná_stránka']), + 'Slovene': Language(name='Slovene', + iso_code='sl', + # Q, W, X, Y are only used for foreign words. + use_ascii=False, + charsets=['ISO-8859-2', 'WINDOWS-1250'], + alphabet=(u'abcÄdefghijklmnoprsÅ¡tuvzž' + u'ABCÄŒDEFGHIJKLMNOPRSÅ TUVZŽ'), + wiki_start_pages=[u'Glavna_stran']), + # Serbian can be written in both Latin and Cyrillic, but there's no + # simple way to get the Latin alphabet pages from Wikipedia through + # the API, so for now we just support Cyrillic. 
+ 'Serbian': Language(name='Serbian', + iso_code='sr', + alphabet=(u'ÐБВГДЂЕЖЗИЈКЛЉМÐЊОПРСТЋУФХЦЧÐШ' + u'абвгдђежзијклљмнњопрÑтћуфхцчџш'), + charsets=['ISO-8859-5', 'WINDOWS-1251', + 'MacCyrillic', 'IBM855'], + wiki_start_pages=[u'Главна_Ñтрана']), + 'Thai': Language(name='Thai', + iso_code='th', + use_ascii=False, + charsets=['ISO-8859-11', 'TIS-620', 'CP874'], + alphabet=u'à¸à¸‚ฃคฅฆงจฉชซฌà¸à¸Žà¸à¸à¸‘ฒณดตถทธนบปผà¸à¸žà¸Ÿà¸ à¸¡à¸¢à¸£à¸¤à¸¥à¸¦à¸§à¸¨à¸©à¸ªà¸«à¸¬à¸­à¸®à¸¯à¸°à¸±à¸²à¸³à¸´à¸µà¸¶à¸·à¸ºà¸¸à¸¹à¸¿à¹€à¹à¹‚ใไๅๆ็่้๊๋์à¹à¹Žà¹à¹à¹‘๒๓๔๕๖๗๘๙๚๛', + wiki_start_pages=[u'หน้าหลัà¸']), + 'Turkish': Language(name='Turkish', + iso_code='tr', + # Q, W, and X are not used by Turkish + use_ascii=False, + charsets=['ISO-8859-3', 'ISO-8859-9', + 'WINDOWS-1254'], + alphabet=(u'abcçdefgÄŸhıijklmnoöprsÅŸtuüvyzâîû' + u'ABCÇDEFGÄžHIİJKLMNOÖPRSÅžTUÜVYZÂÎÛ'), + wiki_start_pages=[u'Ana_Sayfa']), + 'Vietnamese': Language(name='Vietnamese', + iso_code='vi', + use_ascii=False, + # Windows-1258 is the only common 8-bit + # Vietnamese encoding supported by Python. + # From Wikipedia: + # For systems that lack support for Unicode, + # dozens of 8-bit Vietnamese code pages are + # available.[1] The most common are VISCII + # (TCVN 5712:1993), VPS, and Windows-1258.[3] + # Where ASCII is required, such as when + # ensuring readability in plain text e-mail, + # Vietnamese letters are often encoded + # according to Vietnamese Quoted-Readable + # (VIQR) or VSCII Mnemonic (VSCII-MNEM),[4] + # though usage of either variable-width + # scheme has declined dramatically following + # the adoption of Unicode on the World Wide + # Web. + charsets=['WINDOWS-1258'], + alphabet=(u'aăâbcdÄ‘eêghiklmnoôơpqrstuưvxy' + u'AĂÂBCDÄEÊGHIKLMNOÔƠPQRSTUƯVXY'), + wiki_start_pages=[u'Chữ_Quốc_ngữ']), + } diff --git a/minor_project/lib/python3.6/site-packages/chardet/sbcharsetprober.py b/minor_project/lib/python3.6/site-packages/chardet/sbcharsetprober.py new file mode 100644 index 0000000..46ba835 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/sbcharsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
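The Language wrapper above mostly normalises its inputs: when use_ascii is true it appends string.ascii_letters to the alphabet, and it raises ValueError if use_ascii is false and no alphabet is supplied. The TODO near the top of languages.py mentions Ukrainian (KOI8-U) as a missing entry; a hypothetical entry of that shape, purely for illustration, might look like this:

    # Hypothetical, illustrative entry only; not part of this diff and not a
    # vetted training configuration.
    from chardet.metadata.languages import Language

    ukrainian = Language(name='Ukrainian',
                         iso_code='uk',
                         use_ascii=False,
                         charsets=['KOI8-U', 'WINDOWS-1251'],
                         alphabet=(u'абвгґдеєжзиіїйклмнопрстуфхцчшщьюя'
                                   u'АБВГҐДЕЄЖЗИІЇЙКЛМНОПРСТУФХЦЧШЩЬЮЯ'),
                         wiki_start_pages=[u'Головна_сторінка'])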
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from collections import namedtuple + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +SingleByteCharSetModel = namedtuple('SingleByteCharSetModel', + ['charset_name', + 'language', + 'char_to_order_map', + 'language_model', + 'typical_positive_ratio', + 'keep_ascii_letters', + 'alphabet']) + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + self.reset() + + def reset(self): + super(SingleByteCharSetProber, self).reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + else: + return self._model.charset_name + + @property + def language(self): + if self._name_prober: + return self._name_prober.language + else: + return self._model.language + + def feed(self, byte_str): + # TODO: Make filter_international_words keep things in self.alphabet + if not self._model.keep_ascii_letters: + byte_str = self.filter_international_words(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model.char_to_order_map + language_model = self._model.language_model + for char in byte_str: + order = char_to_order_map.get(char, CharacterCategory.UNDEFINED) + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. + if order < CharacterCategory.CONTROL: + self._total_char += 1 + # TODO: Follow uchardet's lead and discount confidence for frequent + # control characters. 
+ # See https://github.com/BYVoid/uchardet/commit/55b4f23971db61 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + lm_cat = language_model[self._last_order][order] + else: + lm_cat = language_model[order][self._last_order] + self._seq_counters[lm_cat] += 1 + self._last_order = order + + charset_name = self._model.charset_name + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / + self._total_seqs / self._model.typical_positive_ratio) + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/minor_project/lib/python3.6/site-packages/chardet/sbcsgroupprober.py b/minor_project/lib/python3.6/site-packages/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..bdeef4e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/chardet/sbcsgroupprober.py @@ -0,0 +1,83 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
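SingleByteCharSetProber.get_confidence above multiplies two ratios: how often adjacent characters formed a "positive" bigram (normalised by the model's typical_positive_ratio) and how much of the input fell inside the model's sampled alphabet at all. Tracing it with made-up counters:

    # Made-up counter values, only to walk through the arithmetic above.
    positive_seqs, total_seqs = 900, 1100        # _seq_counters[POSITIVE], _total_seqs
    typical_positive_ratio = 0.97029             # from ISO_8859_9_TURKISH_MODEL earlier in this diff
    freq_char, total_char = 1150, 1200           # sampled vs. all counted characters

    r = (1.0 * positive_seqs) / total_seqs / typical_positive_ratio   # ≈ 0.843
    r = r * freq_char / total_char                                    # ≈ 0.808
    r = min(r, 0.99)                                                  # capped just below certainty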
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetgroupprober import CharSetGroupProber
+from .hebrewprober import HebrewProber
+from .langbulgarianmodel import (ISO_8859_5_BULGARIAN_MODEL,
+                                 WINDOWS_1251_BULGARIAN_MODEL)
+from .langgreekmodel import ISO_8859_7_GREEK_MODEL, WINDOWS_1253_GREEK_MODEL
+from .langhebrewmodel import WINDOWS_1255_HEBREW_MODEL
+# from .langhungarianmodel import (ISO_8859_2_HUNGARIAN_MODEL,
+#                                  WINDOWS_1250_HUNGARIAN_MODEL)
+from .langrussianmodel import (IBM855_RUSSIAN_MODEL, IBM866_RUSSIAN_MODEL,
+                               ISO_8859_5_RUSSIAN_MODEL, KOI8_R_RUSSIAN_MODEL,
+                               MACCYRILLIC_RUSSIAN_MODEL,
+                               WINDOWS_1251_RUSSIAN_MODEL)
+from .langthaimodel import TIS_620_THAI_MODEL
+from .langturkishmodel import ISO_8859_9_TURKISH_MODEL
+from .sbcharsetprober import SingleByteCharSetProber
+
+
+class SBCSGroupProber(CharSetGroupProber):
+    def __init__(self):
+        super(SBCSGroupProber, self).__init__()
+        hebrew_prober = HebrewProber()
+        logical_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL,
+                                                        False, hebrew_prober)
+        # TODO: See if using ISO-8859-8 Hebrew model works better here, since
+        #       it's actually the visual one
+        visual_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL,
+                                                       True, hebrew_prober)
+        hebrew_prober.set_model_probers(logical_hebrew_prober,
+                                        visual_hebrew_prober)
+        # TODO: ORDER MATTERS HERE. I changed the order vs what was in master
+        #       and several tests failed that did not before. Some thought
+        #       should be put into the ordering, and we should consider making
+        #       order not matter here, because that is very counter-intuitive.
+        self.probers = [
+            SingleByteCharSetProber(WINDOWS_1251_RUSSIAN_MODEL),
+            SingleByteCharSetProber(KOI8_R_RUSSIAN_MODEL),
+            SingleByteCharSetProber(ISO_8859_5_RUSSIAN_MODEL),
+            SingleByteCharSetProber(MACCYRILLIC_RUSSIAN_MODEL),
+            SingleByteCharSetProber(IBM866_RUSSIAN_MODEL),
+            SingleByteCharSetProber(IBM855_RUSSIAN_MODEL),
+            SingleByteCharSetProber(ISO_8859_7_GREEK_MODEL),
+            SingleByteCharSetProber(WINDOWS_1253_GREEK_MODEL),
+            SingleByteCharSetProber(ISO_8859_5_BULGARIAN_MODEL),
+            SingleByteCharSetProber(WINDOWS_1251_BULGARIAN_MODEL),
+            # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250)
+            #       after we retrain model.
+            # SingleByteCharSetProber(ISO_8859_2_HUNGARIAN_MODEL),
+            # SingleByteCharSetProber(WINDOWS_1250_HUNGARIAN_MODEL),
+            SingleByteCharSetProber(TIS_620_THAI_MODEL),
+            SingleByteCharSetProber(ISO_8859_9_TURKISH_MODEL),
+            hebrew_prober,
+            logical_hebrew_prober,
+            visual_hebrew_prober,
+        ]
+        self.reset()
diff --git a/minor_project/lib/python3.6/site-packages/chardet/sjisprober.py b/minor_project/lib/python3.6/site-packages/chardet/sjisprober.py
new file mode 100644
index 0000000..9e29623
--- /dev/null
+++ b/minor_project/lib/python3.6/site-packages/chardet/sjisprober.py
@@ -0,0 +1,92 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import SJISDistributionAnalysis
+from .jpcntx import SJISContextAnalysis
+from .mbcssm import SJIS_SM_MODEL
+from .enums import ProbingState, MachineState
+
+
+class SJISProber(MultiByteCharSetProber):
+    def __init__(self):
+        super(SJISProber, self).__init__()
+        self.coding_sm = CodingStateMachine(SJIS_SM_MODEL)
+        self.distribution_analyzer = SJISDistributionAnalysis()
+        self.context_analyzer = SJISContextAnalysis()
+        self.reset()
+
+    def reset(self):
+        super(SJISProber, self).reset()
+        self.context_analyzer.reset()
+
+    @property
+    def charset_name(self):
+        return self.context_analyzer.charset_name
+
+    @property
+    def language(self):
+        return "Japanese"
+
+    def feed(self, byte_str):
+        for i in range(len(byte_str)):
+            coding_state = self.coding_sm.next_state(byte_str[i])
+            if coding_state == MachineState.ERROR:
+                self.logger.debug('%s %s prober hit error at byte %s',
+                                  self.charset_name, self.language, i)
+                self._state = ProbingState.NOT_ME
+                break
+            elif coding_state == MachineState.ITS_ME:
+                self._state = ProbingState.FOUND_IT
+                break
+            elif coding_state == MachineState.START:
+                char_len = self.coding_sm.get_current_charlen()
+                if i == 0:
+                    self._last_char[1] = byte_str[0]
+                    self.context_analyzer.feed(self._last_char[2 - char_len:],
+                                               char_len)
+                    self.distribution_analyzer.feed(self._last_char, char_len)
+                else:
+                    self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3
+                                                        - char_len], char_len)
+                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
+                                                    char_len)
+
+        self._last_char[0] = byte_str[-1]
+
+        if self.state == ProbingState.DETECTING:
+            if (self.context_analyzer.got_enough_data() and
+                    (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
+                self._state = ProbingState.FOUND_IT
+
+        return self.state
+
+    def get_confidence(self):
+        context_conf = self.context_analyzer.get_confidence()
+        distrib_conf = self.distribution_analyzer.get_confidence()
+        return max(context_conf, distrib_conf)
diff --git a/minor_project/lib/python3.6/site-packages/chardet/universaldetector.py b/minor_project/lib/python3.6/site-packages/chardet/universaldetector.py
new file mode 100644
index 0000000..055a8ac
--- /dev/null
+++ b/minor_project/lib/python3.6/site-packages/chardet/universaldetector.py
@@ -0,0 +1,286 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+"""
+Module containing the UniversalDetector detector class, which is the primary
+class a user of ``chardet`` should use.
+
+:author: Mark Pilgrim (initial port to Python)
+:author: Shy Shalom (original C code)
+:author: Dan Blanchard (major refactoring for 3.0)
+:author: Ian Cordasco
+"""
+
+
+import codecs
+import logging
+import re
+
+from .charsetgroupprober import CharSetGroupProber
+from .enums import InputState, LanguageFilter, ProbingState
+from .escprober import EscCharSetProber
+from .latin1prober import Latin1Prober
+from .mbcsgroupprober import MBCSGroupProber
+from .sbcsgroupprober import SBCSGroupProber
+
+
+class UniversalDetector(object):
+    """
+    The ``UniversalDetector`` class underlies the ``chardet.detect`` function
+    and coordinates all of the different charset probers.
+
+    To get a ``dict`` containing an encoding and its confidence, you can simply
+    run:
+
+    .. code::
+
+            u = UniversalDetector()
+            u.feed(some_bytes)
+            u.close()
+            detected = u.result
+
+    """
+
+    MINIMUM_THRESHOLD = 0.20
+    HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]')
+    ESC_DETECTOR = re.compile(b'(\033|~{)')
+    WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]')
+    ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252',
+                   'iso-8859-2': 'Windows-1250',
+                   'iso-8859-5': 'Windows-1251',
+                   'iso-8859-6': 'Windows-1256',
+                   'iso-8859-7': 'Windows-1253',
+                   'iso-8859-8': 'Windows-1255',
+                   'iso-8859-9': 'Windows-1254',
+                   'iso-8859-13': 'Windows-1257'}
+
+    def __init__(self, lang_filter=LanguageFilter.ALL):
+        self._esc_charset_prober = None
+        self._charset_probers = []
+        self.result = None
+        self.done = None
+        self._got_data = None
+        self._input_state = None
+        self._last_char = None
+        self.lang_filter = lang_filter
+        self.logger = logging.getLogger(__name__)
+        self._has_win_bytes = None
+        self.reset()
+
+    def reset(self):
+        """
+        Reset the UniversalDetector and all of its probers back to their
+        initial states. This is called by ``__init__``, so you only need to
+        call this directly in between analyses of different documents.
+        """
+        self.result = {'encoding': None, 'confidence': 0.0, 'language': None}
+        self.done = False
+        self._got_data = False
+        self._has_win_bytes = False
+        self._input_state = InputState.PURE_ASCII
+        self._last_char = b''
+        if self._esc_charset_prober:
+            self._esc_charset_prober.reset()
+        for prober in self._charset_probers:
+            prober.reset()
+
+    def feed(self, byte_str):
+        """
+        Takes a chunk of a document and feeds it through all of the relevant
+        charset probers.
+
+        After calling ``feed``, you can check the value of the ``done``
+        attribute to see if you need to continue feeding the
+        ``UniversalDetector`` more data, or if it has made a prediction
+        (in the ``result`` attribute).
+
+        .. note::
+           You should always call ``close`` when you're done feeding in your
+           document if ``done`` is not already ``True``.
+        """
+        if self.done:
+            return
+
+        if not len(byte_str):
+            return
+
+        if not isinstance(byte_str, bytearray):
+            byte_str = bytearray(byte_str)
+
+        # First check for known BOMs, since these are guaranteed to be correct
+        if not self._got_data:
+            # If the data starts with BOM, we know it is UTF
+            if byte_str.startswith(codecs.BOM_UTF8):
+                # EF BB BF  UTF-8 with BOM
+                self.result = {'encoding': "UTF-8-SIG",
+                               'confidence': 1.0,
+                               'language': ''}
+            elif byte_str.startswith((codecs.BOM_UTF32_LE,
+                                      codecs.BOM_UTF32_BE)):
+                # FF FE 00 00  UTF-32, little-endian BOM
+                # 00 00 FE FF  UTF-32, big-endian BOM
+                self.result = {'encoding': "UTF-32",
+                               'confidence': 1.0,
+                               'language': ''}
+            elif byte_str.startswith(b'\xFE\xFF\x00\x00'):
+                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
+                self.result = {'encoding': "X-ISO-10646-UCS-4-3412",
+                               'confidence': 1.0,
+                               'language': ''}
+            elif byte_str.startswith(b'\x00\x00\xFF\xFE'):
+                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
+                self.result = {'encoding': "X-ISO-10646-UCS-4-2143",
+                               'confidence': 1.0,
+                               'language': ''}
+            elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)):
+                # FF FE  UTF-16, little endian BOM
+                # FE FF  UTF-16, big endian BOM
+                self.result = {'encoding': "UTF-16",
+                               'confidence': 1.0,
+                               'language': ''}
+
+            self._got_data = True
+            if self.result['encoding'] is not None:
+                self.done = True
+                return
+
+        # If none of those matched and we've only seen ASCII so far, check
+        # for high bytes and escape sequences
+        if self._input_state == InputState.PURE_ASCII:
+            if self.HIGH_BYTE_DETECTOR.search(byte_str):
+                self._input_state = InputState.HIGH_BYTE
+            elif self._input_state == InputState.PURE_ASCII and \
+                    self.ESC_DETECTOR.search(self._last_char + byte_str):
+                self._input_state = InputState.ESC_ASCII
+
+        self._last_char = byte_str[-1:]
+
+        # If we've seen escape sequences, use the EscCharSetProber, which
+        # uses a simple state machine to check for known escape sequences in
+        # HZ and ISO-2022 encodings, since those are the only encodings that
+        # use such sequences.
+        if self._input_state == InputState.ESC_ASCII:
+            if not self._esc_charset_prober:
+                self._esc_charset_prober = EscCharSetProber(self.lang_filter)
+            if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT:
+                self.result = {'encoding':
+                               self._esc_charset_prober.charset_name,
+                               'confidence':
+                               self._esc_charset_prober.get_confidence(),
+                               'language':
+                               self._esc_charset_prober.language}
+                self.done = True
+        # If we've seen high bytes (i.e., those with values greater than 127),
+        # we need to do more complicated checks using all our multi-byte and
+        # single-byte probers that are left. The single-byte probers
+        # use character bigram distributions to determine the encoding, whereas
+        # the multi-byte probers use a combination of character unigram and
+        # bigram distributions.
+        elif self._input_state == InputState.HIGH_BYTE:
+            if not self._charset_probers:
+                self._charset_probers = [MBCSGroupProber(self.lang_filter)]
+                # If we're checking non-CJK encodings, use single-byte prober
+                if self.lang_filter & LanguageFilter.NON_CJK:
+                    self._charset_probers.append(SBCSGroupProber())
+                self._charset_probers.append(Latin1Prober())
+            for prober in self._charset_probers:
+                if prober.feed(byte_str) == ProbingState.FOUND_IT:
+                    self.result = {'encoding': prober.charset_name,
+                                   'confidence': prober.get_confidence(),
+                                   'language': prober.language}
+                    self.done = True
+                    break
+            if self.WIN_BYTE_DETECTOR.search(byte_str):
+                self._has_win_bytes = True
+
+    def close(self):
+        """
+        Stop analyzing the current document and come up with a final
+        prediction.
+
+        :returns: The ``result`` attribute, a ``dict`` with the keys
+                  `encoding`, `confidence`, and `language`.
+        """
+        # Don't bother with checks if we're already done
+        if self.done:
+            return self.result
+        self.done = True
+
+        if not self._got_data:
+            self.logger.debug('no data received!')
+
+        # Default to ASCII if it is all we've seen so far
+        elif self._input_state == InputState.PURE_ASCII:
+            self.result = {'encoding': 'ascii',
+                           'confidence': 1.0,
+                           'language': ''}
+
+        # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD
+        elif self._input_state == InputState.HIGH_BYTE:
+            prober_confidence = None
+            max_prober_confidence = 0.0
+            max_prober = None
+            for prober in self._charset_probers:
+                if not prober:
+                    continue
+                prober_confidence = prober.get_confidence()
+                if prober_confidence > max_prober_confidence:
+                    max_prober_confidence = prober_confidence
+                    max_prober = prober
+            if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD):
+                charset_name = max_prober.charset_name
+                lower_charset_name = max_prober.charset_name.lower()
+                confidence = max_prober.get_confidence()
+                # Use Windows encoding name instead of ISO-8859 if we saw any
+                # extra Windows-specific bytes
+                if lower_charset_name.startswith('iso-8859'):
+                    if self._has_win_bytes:
+                        charset_name = self.ISO_WIN_MAP.get(lower_charset_name,
+                                                            charset_name)
+                self.result = {'encoding': charset_name,
+                               'confidence': confidence,
+                               'language': max_prober.language}
+
+        # Log all prober confidences if none met MINIMUM_THRESHOLD
+        if self.logger.getEffectiveLevel() <= logging.DEBUG:
+            if self.result['encoding'] is None:
+                self.logger.debug('no probers hit minimum threshold')
+                for group_prober in self._charset_probers:
+                    if not group_prober:
+                        continue
+                    if isinstance(group_prober, CharSetGroupProber):
+                        for prober in group_prober.probers:
+                            self.logger.debug('%s %s confidence = %s',
+                                              prober.charset_name,
+                                              prober.language,
+                                              prober.get_confidence())
+                    else:
+                        self.logger.debug('%s %s confidence = %s',
+                                          group_prober.charset_name,
+                                          group_prober.language,
+                                          group_prober.get_confidence())
+        return self.result
diff --git a/minor_project/lib/python3.6/site-packages/chardet/utf8prober.py b/minor_project/lib/python3.6/site-packages/chardet/utf8prober.py
new file mode 100644
index 0000000..6c3196c
--- /dev/null
+++ b/minor_project/lib/python3.6/site-packages/chardet/utf8prober.py
@@ -0,0 +1,82 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState, MachineState
+from .codingstatemachine import CodingStateMachine
+from .mbcssm import UTF8_SM_MODEL
+
+
+
+class UTF8Prober(CharSetProber):
+    ONE_CHAR_PROB = 0.5
+
+    def __init__(self):
+        super(UTF8Prober, self).__init__()
+        self.coding_sm = CodingStateMachine(UTF8_SM_MODEL)
+        self._num_mb_chars = None
+        self.reset()
+
+    def reset(self):
+        super(UTF8Prober, self).reset()
+        self.coding_sm.reset()
+        self._num_mb_chars = 0
+
+    @property
+    def charset_name(self):
+        return "utf-8"
+
+    @property
+    def language(self):
+        return ""
+
+    def feed(self, byte_str):
+        for c in byte_str:
+            coding_state = self.coding_sm.next_state(c)
+            if coding_state == MachineState.ERROR:
+                self._state = ProbingState.NOT_ME
+                break
+            elif coding_state == MachineState.ITS_ME:
+                self._state = ProbingState.FOUND_IT
+                break
+            elif coding_state == MachineState.START:
+                if self.coding_sm.get_current_charlen() >= 2:
+                    self._num_mb_chars += 1
+
+        if self.state == ProbingState.DETECTING:
+            if self.get_confidence() > self.SHORTCUT_THRESHOLD:
+                self._state = ProbingState.FOUND_IT
+
+        return self.state
+
+    def get_confidence(self):
+        unlike = 0.99
+        if self._num_mb_chars < 6:
+            unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars
+            return 1.0 - unlike
+        else:
+            return unlike
diff --git a/minor_project/lib/python3.6/site-packages/chardet/version.py b/minor_project/lib/python3.6/site-packages/chardet/version.py
new file mode 100644
index 0000000..70369b9
--- /dev/null
+++ b/minor_project/lib/python3.6/site-packages/chardet/version.py
@@ -0,0 +1,9 @@
+"""
+This module exists only to simplify retrieving the version number of chardet
+from within setup.py and from chardet subpackages.
+
+:author: Dan Blanchard (dan.blanchard@gmail.com)
+"""
+
+__version__ = "4.0.0"
+VERSION = __version__.split('.')
diff --git a/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/INSTALLER b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/LICENSE.rst b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/LICENSE.rst
new file mode 100644
index 0000000..d12a849
--- /dev/null
+++ b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2014 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.
Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/METADATA b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/METADATA new file mode 100644 index 0000000..00d6974 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.1 +Name: click +Version: 7.1.2 +Summary: Composable command line interface toolkit +Home-page: https://palletsprojects.com/p/click/ +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/click +Project-URL: Issue tracker, https://github.com/pallets/click/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* + +\$ click\_ +========== + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U click + +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + +A Simple Example +---------------- + +.. 
code-block:: python + + import click + + @click.command() + @click.option("--count", default=1, help="Number of greetings.") + @click.option("--name", prompt="Your name", help="The person to greet.") + def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo(f"Hello, {name}!") + + if __name__ == '__main__': + hello() + +.. code-block:: text + + $ python hello.py --count=3 + Your name: Click + Hello, Click! + Hello, Click! + Hello, Click! + + +Donate +------ + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Website: https://palletsprojects.com/p/click/ +- Documentation: https://click.palletsprojects.com/ +- Releases: https://pypi.org/project/click/ +- Code: https://github.com/pallets/click +- Issue tracker: https://github.com/pallets/click/issues +- Test status: https://dev.azure.com/pallets/click/_build +- Official chat: https://discord.gg/t6rrQZH + + diff --git a/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/RECORD b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/RECORD new file mode 100644 index 0000000..b0f72ad --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/RECORD @@ -0,0 +1,40 @@ +click-7.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-7.1.2.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click-7.1.2.dist-info/METADATA,sha256=LrRgakZKV7Yg3qJqX_plu2WhFW81MzP3EqQmZhHIO8M,2868 +click-7.1.2.dist-info/RECORD,, +click-7.1.2.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +click-7.1.2.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 +click/__init__.py,sha256=FkyGDQ-cbiQxP_lxgUspyFYS48f2S_pTcfKPz-d_RMo,2463 +click/__pycache__/__init__.cpython-36.pyc,, +click/__pycache__/_bashcomplete.cpython-36.pyc,, +click/__pycache__/_compat.cpython-36.pyc,, +click/__pycache__/_termui_impl.cpython-36.pyc,, +click/__pycache__/_textwrap.cpython-36.pyc,, +click/__pycache__/_unicodefun.cpython-36.pyc,, +click/__pycache__/_winconsole.cpython-36.pyc,, +click/__pycache__/core.cpython-36.pyc,, +click/__pycache__/decorators.cpython-36.pyc,, +click/__pycache__/exceptions.cpython-36.pyc,, +click/__pycache__/formatting.cpython-36.pyc,, +click/__pycache__/globals.cpython-36.pyc,, +click/__pycache__/parser.cpython-36.pyc,, +click/__pycache__/termui.cpython-36.pyc,, +click/__pycache__/testing.cpython-36.pyc,, +click/__pycache__/types.cpython-36.pyc,, +click/__pycache__/utils.cpython-36.pyc,, +click/_bashcomplete.py,sha256=9J98IHQYmCAr2Jup6TDshUr5FJEen-AoQCZR0K5nKxQ,12309 +click/_compat.py,sha256=AoMaYnZ-3pwtNXuHtlb6_UXsayoG0QZiHKIRy2VFezc,24169 +click/_termui_impl.py,sha256=yNktUMAdjYOU1HMkq915jR3zgAzUNtGSQqSTSSMn3eQ,20702 +click/_textwrap.py,sha256=ajCzkzFly5tjm9foQ5N9_MOeaYJMBjAltuFa69n4iXY,1197 +click/_unicodefun.py,sha256=apLSNEBZgUsQNPMUv072zJ1swqnm0dYVT5TqcIWTt6w,4201 +click/_winconsole.py,sha256=6YDu6Rq1Wxx4w9uinBMK2LHvP83aerZM9GQurlk3QDo,10010 +click/core.py,sha256=V6DJzastGhrC6WTDwV9MSLwcJUdX2Uf1ypmgkjBdn_Y,77650 +click/decorators.py,sha256=3TvEO_BkaHl7k6Eh1G5eC7JK4LKPdpFqH9JP0QDyTlM,11215 +click/exceptions.py,sha256=3pQAyyMFzx5A3eV0Y27WtDTyGogZRbrC6_o5DjjKBbw,8118 
+click/formatting.py,sha256=Wb4gqFEpWaKPgAbOvnkCl8p-bEZx5KpM5ZSByhlnJNk,9281 +click/globals.py,sha256=ht7u2kUGI08pAarB4e4yC8Lkkxy6gJfRZyzxEj8EbWQ,1501 +click/parser.py,sha256=mFK-k58JtPpqO0AC36WAr0t5UfzEw1mvgVSyn7WCe9M,15691 +click/termui.py,sha256=G7QBEKIepRIGLvNdGwBTYiEtSImRxvTO_AglVpyHH2s,23998 +click/testing.py,sha256=EUEsDUqNXFgCLhZ0ZFOROpaVDA5I_rijwnNPE6qICgA,12854 +click/types.py,sha256=wuubik4VqgqAw5dvbYFkDt-zSAx97y9TQXuXcVaRyQA,25045 +click/utils.py,sha256=4VEcJ7iEHwjnFuzEuRtkT99o5VG3zqSD7Q2CVzv13WU,15940 diff --git a/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/WHEEL b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/WHEEL new file mode 100644 index 0000000..ef99c6c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/top_level.txt b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/top_level.txt new file mode 100644 index 0000000..dca9a90 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click-7.1.2.dist-info/top_level.txt @@ -0,0 +1 @@ +click diff --git a/minor_project/lib/python3.6/site-packages/click/__init__.py b/minor_project/lib/python3.6/site-packages/click/__init__.py new file mode 100644 index 0000000..2b6008f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/__init__.py @@ -0,0 +1,79 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. +""" +from .core import Argument +from .core import BaseCommand +from .core import Command +from .core import CommandCollection +from .core import Context +from .core import Group +from .core import MultiCommand +from .core import Option +from .core import Parameter +from .decorators import argument +from .decorators import command +from .decorators import confirmation_option +from .decorators import group +from .decorators import help_option +from .decorators import make_pass_decorator +from .decorators import option +from .decorators import pass_context +from .decorators import pass_obj +from .decorators import password_option +from .decorators import version_option +from .exceptions import Abort +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import FileError +from .exceptions import MissingParameter +from .exceptions import NoSuchOption +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import wrap_text +from .globals import get_current_context +from .parser import OptionParser +from .termui import clear +from .termui import confirm +from .termui import echo_via_pager +from .termui import edit +from .termui import get_terminal_size +from .termui import getchar +from .termui import launch +from .termui import pause +from .termui import progressbar +from .termui import prompt +from .termui import secho +from .termui import style +from .termui import unstyle +from .types import BOOL +from .types import Choice +from .types import DateTime +from .types import File +from .types import FLOAT +from .types import FloatRange +from .types import INT +from .types import IntRange +from 
.types import ParamType +from .types import Path +from .types import STRING +from .types import Tuple +from .types import UNPROCESSED +from .types import UUID +from .utils import echo +from .utils import format_filename +from .utils import get_app_dir +from .utils import get_binary_stream +from .utils import get_os_args +from .utils import get_text_stream +from .utils import open_file + +# Controls if click should emit the warning about the use of unicode +# literals. +disable_unicode_literals_warning = False + +__version__ = "7.1.2" diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..7a7d1dd Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/_bashcomplete.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/_bashcomplete.cpython-36.pyc new file mode 100644 index 0000000..f79cc84 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/_bashcomplete.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/_compat.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/_compat.cpython-36.pyc new file mode 100644 index 0000000..a726a5b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/_compat.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/_termui_impl.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/_termui_impl.cpython-36.pyc new file mode 100644 index 0000000..0b093e3 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/_termui_impl.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/_textwrap.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/_textwrap.cpython-36.pyc new file mode 100644 index 0000000..6d2bb20 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/_textwrap.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/_unicodefun.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/_unicodefun.cpython-36.pyc new file mode 100644 index 0000000..dd52310 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/_unicodefun.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/_winconsole.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/_winconsole.cpython-36.pyc new file mode 100644 index 0000000..ac338b4 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/_winconsole.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/core.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/core.cpython-36.pyc new file mode 100644 index 0000000..dd8e3a2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/core.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/decorators.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/decorators.cpython-36.pyc new file mode 
100644 index 0000000..5543aa2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/decorators.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/exceptions.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/exceptions.cpython-36.pyc new file mode 100644 index 0000000..0269622 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/exceptions.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/formatting.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/formatting.cpython-36.pyc new file mode 100644 index 0000000..e624bb1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/formatting.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/globals.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/globals.cpython-36.pyc new file mode 100644 index 0000000..cd9d5c7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/globals.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/parser.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/parser.cpython-36.pyc new file mode 100644 index 0000000..978150a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/parser.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/termui.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/termui.cpython-36.pyc new file mode 100644 index 0000000..d24c9b0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/termui.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/testing.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/testing.cpython-36.pyc new file mode 100644 index 0000000..68168a7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/testing.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/types.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/types.cpython-36.pyc new file mode 100644 index 0000000..077f705 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/types.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/__pycache__/utils.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/click/__pycache__/utils.cpython-36.pyc new file mode 100644 index 0000000..93364b8 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/click/__pycache__/utils.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/click/_bashcomplete.py b/minor_project/lib/python3.6/site-packages/click/_bashcomplete.py new file mode 100644 index 0000000..8bca244 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/_bashcomplete.py @@ -0,0 +1,375 @@ +import copy +import os +import re + +from .core import Argument +from .core import MultiCommand +from .core import Option +from .parser import split_arg_string +from .types import Choice +from .utils import echo + +try: + from collections import abc +except ImportError: + import collections as abc + +WORDBREAK = "=" + +# Note, only BASH version 4.4 and 
later have the nosort option. +COMPLETION_SCRIPT_BASH = """ +%(complete_func)s() { + local IFS=$'\n' + COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\ + COMP_CWORD=$COMP_CWORD \\ + %(autocomplete_var)s=complete $1 ) ) + return 0 +} + +%(complete_func)setup() { + local COMPLETION_OPTIONS="" + local BASH_VERSION_ARR=(${BASH_VERSION//./ }) + # Only BASH version 4.4 and later have the nosort option. + if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] \ +&& [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then + COMPLETION_OPTIONS="-o nosort" + fi + + complete $COMPLETION_OPTIONS -F %(complete_func)s %(script_names)s +} + +%(complete_func)setup +""" + +COMPLETION_SCRIPT_ZSH = """ +#compdef %(script_names)s + +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + (( ! $+commands[%(script_names)s] )) && return 1 + + response=("${(@f)$( env COMP_WORDS=\"${words[*]}\" \\ + COMP_CWORD=$((CURRENT-1)) \\ + %(autocomplete_var)s=\"complete_zsh\" \\ + %(script_names)s )}") + + for key descr in ${(kv)response}; do + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -a completions + fi + compstate[insert]="automenu" +} + +compdef %(complete_func)s %(script_names)s +""" + +COMPLETION_SCRIPT_FISH = ( + "complete --no-files --command %(script_names)s --arguments" + ' "(env %(autocomplete_var)s=complete_fish' + " COMP_WORDS=(commandline -cp) COMP_CWORD=(commandline -t)" + ' %(script_names)s)"' +) + +_completion_scripts = { + "bash": COMPLETION_SCRIPT_BASH, + "zsh": COMPLETION_SCRIPT_ZSH, + "fish": COMPLETION_SCRIPT_FISH, +} + +_invalid_ident_char_re = re.compile(r"[^a-zA-Z0-9_]") + + +def get_completion_script(prog_name, complete_var, shell): + cf_name = _invalid_ident_char_re.sub("", prog_name.replace("-", "_")) + script = _completion_scripts.get(shell, COMPLETION_SCRIPT_BASH) + return ( + script + % { + "complete_func": "_{}_completion".format(cf_name), + "script_names": prog_name, + "autocomplete_var": complete_var, + } + ).strip() + ";" + + +def resolve_ctx(cli, prog_name, args): + """Parse into a hierarchy of contexts. Contexts are connected + through the parent variable. + + :param cli: command definition + :param prog_name: the program that is running + :param args: full list of args + :return: the final context/command parsed + """ + ctx = cli.make_context(prog_name, args, resilient_parsing=True) + args = ctx.protected_args + ctx.args + while args: + if isinstance(ctx.command, MultiCommand): + if not ctx.command.chain: + cmd_name, cmd, args = ctx.command.resolve_command(ctx, args) + if cmd is None: + return ctx + ctx = cmd.make_context( + cmd_name, args, parent=ctx, resilient_parsing=True + ) + args = ctx.protected_args + ctx.args + else: + # Walk chained subcommand contexts saving the last one. 
+ while args: + cmd_name, cmd, args = ctx.command.resolve_command(ctx, args) + if cmd is None: + return ctx + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) + args = sub_ctx.args + ctx = sub_ctx + args = sub_ctx.protected_args + sub_ctx.args + else: + break + return ctx + + +def start_of_option(param_str): + """ + :param param_str: param_str to check + :return: whether or not this is the start of an option declaration + (i.e. starts "-" or "--") + """ + return param_str and param_str[:1] == "-" + + +def is_incomplete_option(all_args, cmd_param): + """ + :param all_args: the full original list of args supplied + :param cmd_param: the current command paramter + :return: whether or not the last option declaration (i.e. starts + "-" or "--") is incomplete and corresponds to this cmd_param. In + other words whether this cmd_param option can still accept + values + """ + if not isinstance(cmd_param, Option): + return False + if cmd_param.is_flag: + return False + last_option = None + for index, arg_str in enumerate( + reversed([arg for arg in all_args if arg != WORDBREAK]) + ): + if index + 1 > cmd_param.nargs: + break + if start_of_option(arg_str): + last_option = arg_str + + return True if last_option and last_option in cmd_param.opts else False + + +def is_incomplete_argument(current_params, cmd_param): + """ + :param current_params: the current params and values for this + argument as already entered + :param cmd_param: the current command parameter + :return: whether or not the last argument is incomplete and + corresponds to this cmd_param. In other words whether or not the + this cmd_param argument can still accept values + """ + if not isinstance(cmd_param, Argument): + return False + current_param_values = current_params[cmd_param.name] + if current_param_values is None: + return True + if cmd_param.nargs == -1: + return True + if ( + isinstance(current_param_values, abc.Iterable) + and cmd_param.nargs > 1 + and len(current_param_values) < cmd_param.nargs + ): + return True + return False + + +def get_user_autocompletions(ctx, args, incomplete, cmd_param): + """ + :param ctx: context associated with the parsed command + :param args: full list of args + :param incomplete: the incomplete text to autocomplete + :param cmd_param: command definition + :return: all the possible user-specified completions for the param + """ + results = [] + if isinstance(cmd_param.type, Choice): + # Choices don't support descriptions. + results = [ + (c, None) for c in cmd_param.type.choices if str(c).startswith(incomplete) + ] + elif cmd_param.autocompletion is not None: + dynamic_completions = cmd_param.autocompletion( + ctx=ctx, args=args, incomplete=incomplete + ) + results = [ + c if isinstance(c, tuple) else (c, None) for c in dynamic_completions + ] + return results + + +def get_visible_commands_starting_with(ctx, starts_with): + """ + :param ctx: context associated with the parsed command + :starts_with: string that visible commands must start with. + :return: all visible (not hidden) commands that start with starts_with. + """ + for c in ctx.command.list_commands(ctx): + if c.startswith(starts_with): + command = ctx.command.get_command(ctx, c) + if not command.hidden: + yield command + + +def add_subcommand_completions(ctx, incomplete, completions_out): + # Add subcommand completions. 
+ if isinstance(ctx.command, MultiCommand): + completions_out.extend( + [ + (c.name, c.get_short_help_str()) + for c in get_visible_commands_starting_with(ctx, incomplete) + ] + ) + + # Walk up the context list and add any other completion + # possibilities from chained commands + while ctx.parent is not None: + ctx = ctx.parent + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + remaining_commands = [ + c + for c in get_visible_commands_starting_with(ctx, incomplete) + if c.name not in ctx.protected_args + ] + completions_out.extend( + [(c.name, c.get_short_help_str()) for c in remaining_commands] + ) + + +def get_choices(cli, prog_name, args, incomplete): + """ + :param cli: command definition + :param prog_name: the program that is running + :param args: full list of args + :param incomplete: the incomplete text to autocomplete + :return: all the possible completions for the incomplete + """ + all_args = copy.deepcopy(args) + + ctx = resolve_ctx(cli, prog_name, args) + if ctx is None: + return [] + + has_double_dash = "--" in all_args + + # In newer versions of bash long opts with '='s are partitioned, but + # it's easier to parse without the '=' + if start_of_option(incomplete) and WORDBREAK in incomplete: + partition_incomplete = incomplete.partition(WORDBREAK) + all_args.append(partition_incomplete[0]) + incomplete = partition_incomplete[2] + elif incomplete == WORDBREAK: + incomplete = "" + + completions = [] + if not has_double_dash and start_of_option(incomplete): + # completions for partial options + for param in ctx.command.params: + if isinstance(param, Option) and not param.hidden: + param_opts = [ + param_opt + for param_opt in param.opts + param.secondary_opts + if param_opt not in all_args or param.multiple + ] + completions.extend( + [(o, param.help) for o in param_opts if o.startswith(incomplete)] + ) + return completions + # completion for option values from user supplied values + for param in ctx.command.params: + if is_incomplete_option(all_args, param): + return get_user_autocompletions(ctx, all_args, incomplete, param) + # completion for argument values from user supplied values + for param in ctx.command.params: + if is_incomplete_argument(ctx.params, param): + return get_user_autocompletions(ctx, all_args, incomplete, param) + + add_subcommand_completions(ctx, incomplete, completions) + # Sort before returning so that proper ordering can be enforced in custom types. + return sorted(completions) + + +def do_complete(cli, prog_name, include_descriptions): + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + for item in get_choices(cli, prog_name, args, incomplete): + echo(item[0]) + if include_descriptions: + # ZSH has trouble dealing with empty array parameters when + # returned from commands, use '_' to indicate no description + # is present. 
+ echo(item[1] if item[1] else "_") + + return True + + +def do_complete_fish(cli, prog_name): + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + args = cwords[1:] + + for item in get_choices(cli, prog_name, args, incomplete): + if item[1]: + echo("{arg}\t{desc}".format(arg=item[0], desc=item[1])) + else: + echo(item[0]) + + return True + + +def bashcomplete(cli, prog_name, complete_var, complete_instr): + if "_" in complete_instr: + command, shell = complete_instr.split("_", 1) + else: + command = complete_instr + shell = "bash" + + if command == "source": + echo(get_completion_script(prog_name, complete_var, shell)) + return True + elif command == "complete": + if shell == "fish": + return do_complete_fish(cli, prog_name) + elif shell in {"bash", "zsh"}: + return do_complete(cli, prog_name, shell == "zsh") + + return False diff --git a/minor_project/lib/python3.6/site-packages/click/_compat.py b/minor_project/lib/python3.6/site-packages/click/_compat.py new file mode 100644 index 0000000..60cb115 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/_compat.py @@ -0,0 +1,786 @@ +# flake8: noqa +import codecs +import io +import os +import re +import sys +from weakref import WeakKeyDictionary + +PY2 = sys.version_info[0] == 2 +CYGWIN = sys.platform.startswith("cygwin") +MSYS2 = sys.platform.startswith("win") and ("GCC" in sys.version) +# Determine local App Engine environment, per Google's own suggestion +APP_ENGINE = "APPENGINE_RUNTIME" in os.environ and "Development/" in os.environ.get( + "SERVER_SOFTWARE", "" +) +WIN = sys.platform.startswith("win") and not APP_ENGINE and not MSYS2 +DEFAULT_COLUMNS = 80 + + +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def get_filesystem_encoding(): + return sys.getfilesystemencoding() or sys.getdefaultencoding() + + +def _make_text_stream( + stream, encoding, errors, force_readable=False, force_writable=False +): + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding): + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream): + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream, + encoding, + errors, + force_readable=False, + force_writable=False, + **extra + ): + self._stream = stream = _FixupStream(stream, force_readable, force_writable) + io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra) + + # The io module is a place where the Python 3 text behavior + # was forced upon Python 2, so we need to unbreak + # it to look like Python 2. 
+ if PY2: + + def write(self, x): + if isinstance(x, str) or is_bytes(x): + try: + self.flush() + except Exception: + pass + return self.buffer.write(str(x)) + return io.TextIOWrapper.write(self, x) + + def writelines(self, lines): + for line in lines: + self.write(line) + + def __del__(self): + try: + self.detach() + except Exception: + pass + + def isatty(self): + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream(object): + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). + """ + + def __init__(self, stream, force_readable=False, force_writable=False): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name): + return getattr(self._stream, name) + + def read1(self, size): + f = getattr(self._stream, "read1", None) + if f is not None: + return f(size) + # We only dispatch to readline instead of read in Python 2 as we + # do not want cause problems with the different implementation + # of line buffering. + if PY2: + return self._stream.readline(size) + return self._stream.read(size) + + def readable(self): + if self._force_readable: + return True + x = getattr(self._stream, "readable", None) + if x is not None: + return x() + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self): + if self._force_writable: + return True + x = getattr(self._stream, "writable", None) + if x is not None: + return x() + try: + self._stream.write("") + except Exception: + try: + self._stream.write(b"") + except Exception: + return False + return True + + def seekable(self): + x = getattr(self._stream, "seekable", None) + if x is not None: + return x() + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +if PY2: + text_type = unicode + raw_input = raw_input + string_types = (str, unicode) + int_types = (int, long) + iteritems = lambda x: x.iteritems() + range_type = xrange + + def is_bytes(x): + return isinstance(x, (buffer, bytearray)) + + _identifier_re = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$") + + # For Windows, we need to force stdout/stdin/stderr to binary if it's + # fetched for that. This obviously is not the most correct way to do + # it as it changes global state. Unfortunately, there does not seem to + # be a clear better way to do it as just reopening the file in binary + # mode does not change anything. + # + # An option would be to do what Python 3 does and to open the file as + # binary only, patch it back to the system, and then use a wrapper + # stream that converts newlines. It's not quite clear what's the + # correct option here. + # + # This code also lives in _winconsole for the fallback to the console + # emulation stream. + # + # There are also Windows environments where the `msvcrt` module is not + # available (which is why we use try-catch instead of the WIN variable + # here), such as the Google App Engine development server on Windows. In + # those cases there is just nothing we can do. 
+ def set_binary_mode(f): + return f + + try: + import msvcrt + except ImportError: + pass + else: + + def set_binary_mode(f): + try: + fileno = f.fileno() + except Exception: + pass + else: + msvcrt.setmode(fileno, os.O_BINARY) + return f + + try: + import fcntl + except ImportError: + pass + else: + + def set_binary_mode(f): + try: + fileno = f.fileno() + except Exception: + pass + else: + flags = fcntl.fcntl(fileno, fcntl.F_GETFL) + fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK) + return f + + def isidentifier(x): + return _identifier_re.search(x) is not None + + def get_binary_stdin(): + return set_binary_mode(sys.stdin) + + def get_binary_stdout(): + _wrap_std_stream("stdout") + return set_binary_mode(sys.stdout) + + def get_binary_stderr(): + _wrap_std_stream("stderr") + return set_binary_mode(sys.stderr) + + def get_text_stdin(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _make_text_stream(sys.stdin, encoding, errors, force_readable=True) + + def get_text_stdout(encoding=None, errors=None): + _wrap_std_stream("stdout") + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _make_text_stream(sys.stdout, encoding, errors, force_writable=True) + + def get_text_stderr(encoding=None, errors=None): + _wrap_std_stream("stderr") + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _make_text_stream(sys.stderr, encoding, errors, force_writable=True) + + def filename_to_ui(value): + if isinstance(value, bytes): + value = value.decode(get_filesystem_encoding(), "replace") + return value + + +else: + import io + + text_type = str + raw_input = input + string_types = (str,) + int_types = (int,) + range_type = range + isidentifier = lambda x: x.isidentifier() + iteritems = lambda x: iter(x.items()) + + def is_bytes(x): + return isinstance(x, (bytes, memoryview, bytearray)) + + def _is_binary_reader(stream, default=False): + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + def _is_binary_writer(stream, default=False): + try: + stream.write(b"") + except Exception: + try: + stream.write("") + return False + except Exception: + pass + return default + return True + + def _find_binary_reader(stream): + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return stream + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return buf + + def _find_binary_writer(stream): + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detatching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return stream + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. 
+ if buf is not None and _is_binary_writer(buf, True): + return buf + + def _stream_is_misconfigured(stream): + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. + return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + def _is_compat_stream_attr(stream, attr, value): + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + def _is_compatible_text_stream(stream, encoding, errors): + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + def _force_correct_text_stream( + text_stream, + encoding, + errors, + is_binary, + find_binary, + force_readable=False, + force_writable=False, + ): + if is_binary(text_stream, False): + binary_reader = text_stream + else: + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. + binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if binary_reader is None: + return text_stream + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. + return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + def _force_correct_text_reader(text_reader, encoding, errors, force_readable=False): + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + def _force_correct_text_writer(text_writer, encoding, errors, force_writable=False): + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + def get_binary_stdin(): + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + def get_binary_stdout(): + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError( + "Was not able to determine binary stream for sys.stdout." + ) + return writer + + def get_binary_stderr(): + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError( + "Was not able to determine binary stream for sys.stderr." 
+ ) + return writer + + def get_text_stdin(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader( + sys.stdin, encoding, errors, force_readable=True + ) + + def get_text_stdout(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer( + sys.stdout, encoding, errors, force_writable=True + ) + + def get_text_stderr(encoding=None, errors=None): + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer( + sys.stderr, encoding, errors, force_writable=True + ) + + def filename_to_ui(value): + if isinstance(value, bytes): + value = value.decode(get_filesystem_encoding(), "replace") + else: + value = value.encode("utf-8", "surrogateescape").decode("utf-8", "replace") + return value + + +def get_streerror(e, default=None): + if hasattr(e, "strerror"): + msg = e.strerror + else: + if default is not None: + msg = default + else: + msg = str(e) + if isinstance(msg, bytes): + msg = msg.decode("utf-8", "replace") + return msg + + +def _wrap_io_open(file, mode, encoding, errors): + """On Python 2, :func:`io.open` returns a text file wrapper that + requires passing ``unicode`` to ``write``. Need to open the file in + binary mode then wrap it in a subclass that can write ``str`` and + ``unicode``. + + Also handles not passing ``encoding`` and ``errors`` in binary mode. + """ + binary = "b" in mode + + if binary: + kwargs = {} + else: + kwargs = {"encoding": encoding, "errors": errors} + + if not PY2 or binary: + return io.open(file, mode, **kwargs) + + f = io.open(file, "{}b".format(mode.replace("t", ""))) + return _make_text_stream(f, **kwargs) + + +def open_stream(filename, mode="r", encoding=None, errors="strict", atomic=False): + binary = "b" in mode + + # Standard streams first. These are simple because they don't need + # special handling for the atomic flag. It's entirely ignored. + if filename == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. + if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. 
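# A minimal sketch of the write-then-rename pattern described in the comment
# above, assuming a same-directory temporary file and that os.replace is an
# atomic swap on the platform; the full Click implementation with permission
# handling and the _AtomicFile wrapper follows below.
import os
import tempfile

def atomic_write_text(path, data):
    directory = os.path.dirname(os.path.abspath(path))
    fd, tmp = tempfile.mkstemp(dir=directory, prefix=".__atomic-write")
    try:
        with os.fdopen(fd, "w") as f:
            f.write(data)
        os.replace(tmp, path)  # readers see either the old or the new file
    except BaseException:
        os.unlink(tmp)
        raise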
+ import errno + import random + + try: + perm = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + ".__atomic-write{:08x}".format(random.randrange(1 << 32)), + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + return _AtomicFile(f, tmp_filename, os.path.realpath(filename)), True + + +# Used in a destructor call, needs extra protection from interpreter cleanup. +if hasattr(os, "replace"): + _replace = os.replace + _can_replace = True +else: + _replace = os.rename + _can_replace = not WIN + + +class _AtomicFile(object): + def __init__(self, f, tmp_filename, real_filename): + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self): + return self._real_filename + + def close(self, delete=False): + if self.closed: + return + self._f.close() + if not _can_replace: + try: + os.remove(self._real_filename) + except OSError: + pass + _replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name): + return getattr(self._f, name) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + self.close(delete=exc_type is not None) + + def __repr__(self): + return repr(self._f) + + +auto_wrap_for_ansi = None +colorama = None +get_winterm_size = None + + +def strip_ansi(value): + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream): + if WIN: + # TODO: Couldn't test on Windows, should't try to support until + # someone tests the details wrt colorama. + return + + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi(stream=None, color=None): + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# If we're on Windows, we provide transparent integration through +# colorama. This will make ANSI colors through the echo function +# work automatically. +if WIN: + # Windows has a smaller terminal + DEFAULT_COLUMNS = 79 + + from ._winconsole import _get_windows_console_stream, _wrap_std_stream + + def _get_argv_encoding(): + import locale + + return locale.getpreferredencoding() + + if PY2: + + def raw_input(prompt=""): + sys.stderr.flush() + if prompt: + stdout = _default_text_stdout() + stdout.write(prompt) + stdin = _default_text_stdin() + return stdin.readline().rstrip("\r\n") + + try: + import colorama + except ImportError: + pass + else: + _ansi_stream_wrappers = WeakKeyDictionary() + + def auto_wrap_for_ansi(stream, color=None): + """This function wraps a stream so that calls through colorama + are issued to the win32 console API to recolor on demand. It + also ensures to reset the colors if a write call is interrupted + to not destroy the console afterwards. 
+ """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + if cached is not None: + return cached + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = ansi_wrapper.stream + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + return rv + + def get_winterm_size(): + win = colorama.win32.GetConsoleScreenBufferInfo( + colorama.win32.STDOUT + ).srWindow + return win.Right - win.Left, win.Bottom - win.Top + + +else: + + def _get_argv_encoding(): + return getattr(sys.stdin, "encoding", None) or get_filesystem_encoding() + + _get_windows_console_stream = lambda *x: None + _wrap_std_stream = lambda *x: None + + +def term_len(x): + return len(strip_ansi(x)) + + +def isatty(stream): + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func(src_func, wrapper_func): + cache = WeakKeyDictionary() + + def func(): + stream = src_func() + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + stream = src_func() # In case wrapper_func() modified the stream + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/minor_project/lib/python3.6/site-packages/click/_termui_impl.py b/minor_project/lib/python3.6/site-packages/click/_termui_impl.py new file mode 100644 index 0000000..88bec37 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/_termui_impl.py @@ -0,0 +1,657 @@ +# -*- coding: utf-8 -*- +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
+""" +import contextlib +import math +import os +import sys +import time + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import int_types +from ._compat import isatty +from ._compat import open_stream +from ._compat import range_type +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +def _length_hint(obj): + """Returns the length hint of an object.""" + try: + return len(obj) + except (AttributeError, TypeError): + try: + get_hint = type(obj).__length_hint__ + except AttributeError: + return None + try: + hint = get_hint(obj) + except TypeError: + return None + if hint is NotImplemented or not isinstance(hint, int_types) or hint < 0: + return None + return hint + + +class ProgressBar(object): + def __init__( + self, + iterable, + length=None, + fill_char="#", + empty_char=" ", + bar_template="%(bar)s", + info_sep=" ", + show_eta=True, + show_percent=None, + show_pos=False, + item_show_func=None, + label=None, + file=None, + color=None, + width=30, + ): + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label = label or "" + if file is None: + file = _default_text_stdout() + self.file = file + self.color = color + self.width = width + self.autowidth = width == 0 + + if length is None: + length = _length_hint(iterable) + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = range_type(length) + self.iter = iter(iterable) + self.length = length + self.length_known = length is not None + self.pos = 0 + self.avg = [] + self.start = self.last_eta = time.time() + self.eta_known = False + self.finished = False + self.max_width = None + self.entered = False + self.current_item = None + self.is_hidden = not isatty(self.file) + self._last_line = None + self.short_limit = 0.5 + + def __enter__(self): + self.entered = True + self.render_progress() + return self + + def __exit__(self, exc_type, exc_value, tb): + self.render_finish() + + def __iter__(self): + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self): + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + # Python 2 compat + next = __next__ + + def is_fast(self): + return time.time() - self.start <= self.short_limit + + def render_finish(self): + if self.is_hidden or self.is_fast(): + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self): + if self.finished: + return 1.0 + return min(self.pos / (float(self.length) or 1), 1.0) + + @property + def time_per_iteration(self): + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self): + if self.length_known and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self): + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return "{}d {:02}:{:02}:{:02}".format(t, hours, minutes, seconds) + else: + return "{:02}:{:02}:{:02}".format(hours, minutes, seconds) + return "" + + def format_pos(self): + pos = str(self.pos) + if self.length_known: + pos += "/{}".format(self.length) + return pos + + def format_pct(self): + return "{: 4}%".format(int(self.pct * 100))[1:] + + def format_bar(self): + if self.length_known: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + bar = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + bar[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(bar) + return bar + + def format_progress_line(self): + show_percent = self.show_percent + + info_bits = [] + if self.length_known and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self): + from .termui import get_terminal_size + + if self.is_hidden: + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, get_terminal_size()[0] - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
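# Worked example of the fixed-width bar arithmetic used by format_bar above
# (stand-alone sketch): at 40% progress with width 30, int(0.4 * 30) = 12
# fill characters are followed by 18 empty characters.
def format_bar(pct, width=30, fill_char="#", empty_char=" "):
    filled = int(pct * width)
    return fill_char * filled + empty_char * (width - filled)

print(repr(format_bar(0.4)))   # 12 '#' followed by 18 spaces
print(len(format_bar(0.4)))    # 30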
+ + if line != self._last_line and not self.is_fast(): + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps): + self.pos += n_steps + if self.length_known and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length_known + + def update(self, n_steps): + self.make_step(n_steps) + self.render_progress() + + def finish(self): + self.eta_known = 0 + self.current_item = None + self.finished = True + + def generator(self): + """Return a generator which yields the items added to the bar + during construction, and updates the progress bar *after* the + yielded block returns. + """ + # WARNING: the iterator interface for `ProgressBar` relies on + # this and only works because this is a simple generator which + # doesn't create or manage additional state. If this function + # changes, the impact should be evaluated both against + # `iter(bar)` and `next(bar)`. `next()` in particular may call + # `self.generator()` repeatedly, and this must remain safe in + # order for that interface to work. + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + + if self.is_hidden: + for rv in self.iter: + yield rv + else: + for rv in self.iter: + self.current_item = rv + yield rv + self.update(1) + self.finish() + self.render_progress() + + +def pager(generator, color=None): + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + pager_cmd = (os.environ.get("PAGER", None) or "").strip() + if pager_cmd: + if WIN: + return _tempfilepager(generator, pager_cmd, color) + return _pipepager(generator, pager_cmd, color) + if os.environ.get("TERM") in ("dumb", "emacs"): + return _nullpager(stdout, generator, color) + if WIN or sys.platform.startswith("os2"): + return _tempfilepager(generator, "more <", color) + if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: + return _pipepager(generator, "less", color) + + import tempfile + + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if hasattr(os, "system") and os.system('more "{}"'.format(filename)) == 0: + return _pipepager(generator, "more", color) + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager(generator, cmd, color): + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. 
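# Numeric illustration of the ETA bookkeeping in make_step and eta above:
# with 25 of 100 items finished after 5 seconds, each item took 0.2s on
# average, so the remaining 75 items are estimated at 15 seconds.
elapsed, pos, length = 5.0, 25, 100
step = elapsed / pos                       # 0.2 seconds per item
avg = [step]                               # rolling window of at most 7 samples
time_per_iteration = sum(avg) / len(avg)   # 0.2
eta = time_per_iteration * (length - pos)
print(eta)                                 # 15.0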
+ """ + import subprocess + + env = dict(os.environ) + + # If we're piping to less we might support colors under the + # condition that + cmd_detail = cmd.rsplit("/", 1)[-1].split() + if color is None and cmd_detail[0] == "less": + less_flags = "{}{}".format(os.environ.get("LESS", ""), " ".join(cmd_detail[1:])) + if not less_flags: + env["LESS"] = "-R" + color = True + elif "r" in less_flags or "R" in less_flags: + color = True + + c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) + encoding = get_best_encoding(c.stdin) + try: + for text in generator: + if not color: + text = strip_ansi(text) + + c.stdin.write(text.encode(encoding, "replace")) + except (IOError, KeyboardInterrupt): + pass + else: + c.stdin.close() + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. + while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + +def _tempfilepager(generator, cmd, color): + """Page through text by invoking a program on a temporary file.""" + import tempfile + + filename = tempfile.mktemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, "wb")[0] as f: + f.write(text.encode(encoding)) + try: + os.system('{} "{}"'.format(cmd, filename)) + finally: + os.unlink(filename) + + +def _nullpager(stream, generator, color): + """Simply print unformatted text. 
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor(object): + def __init__(self, editor=None, env=None, require_save=True, extension=".txt"): + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self): + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + for editor in "sensible-editor", "vim", "nano": + if os.system("which {} >/dev/null 2>&1".format(editor)) == 0: + return editor + return "vi" + + def edit_file(self, filename): + import subprocess + + editor = self.get_editor() + if self.env: + environ = os.environ.copy() + environ.update(self.env) + else: + environ = None + try: + c = subprocess.Popen( + '{} "{}"'.format(editor, filename), env=environ, shell=True, + ) + exit_code = c.wait() + if exit_code != 0: + raise ClickException("{}: Editing failed!".format(editor)) + except OSError as e: + raise ClickException("{}: Editing failed: {}".format(editor, e)) + + def edit(self, text): + import tempfile + + text = text or "" + if text and not text.endswith("\n"): + text += "\n" + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + try: + if WIN: + encoding = "utf-8-sig" + text = text.replace("\n", "\r\n") + else: + encoding = "utf-8" + text = text.encode(encoding) + + f = os.fdopen(fd, "wb") + f.write(text) + f.close() + timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + f = open(name, "rb") + try: + rv = f.read() + finally: + f.close() + return rv.decode("utf-8-sig").replace("\r\n", "\n") + finally: + os.unlink(name) + + +def open_url(url, wait=False, locate=False): + import subprocess + + def _unquote_file(url): + try: + import urllib + except ImportError: + import urllib + if url.startswith("file://"): + url = urllib.unquote(url[7:]) + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url) + args = 'explorer /select,"{}"'.format(_unquote_file(url.replace('"', ""))) + else: + args = 'start {} "" "{}"'.format( + "/WAIT" if wait else "", url.replace('"', "") + ) + return os.system(args) + elif CYGWIN: + if locate: + url = _unquote_file(url) + args = 'cygstart "{}"'.format(os.path.dirname(url).replace('"', "")) + else: + args = 'cygstart {} "{}"'.format("-w" if wait else "", url.replace('"', "")) + return os.system(args) + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." 
+ else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch): + if ch == u"\x03": + raise KeyboardInterrupt() + if ch == u"\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + if ch == u"\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal(): + yield + + def getchar(echo): + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + if echo: + func = msvcrt.getwche + else: + func = msvcrt.getwch + + rv = func() + if rv in (u"\x00", u"\xe0"): + # \x00 and \xe0 are control characters that indicate special key, + # see above. 
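# Stand-alone adaptation of the control-character mapping performed by
# _translate_ch_to_exc above (the "windows" parameter stands in for the
# module-level WIN constant): Ctrl+C becomes KeyboardInterrupt, Ctrl+D
# (Unix) and Ctrl+Z (Windows) become EOFError.
def translate_ch(ch, windows=False):
    if ch == "\x03":
        raise KeyboardInterrupt()
    if ch == "\x04" and not windows:
        raise EOFError()
    if ch == "\x1a" and windows:
        raise EOFError()

try:
    translate_ch("\x03")
except KeyboardInterrupt:
    print("Ctrl+C mapped to KeyboardInterrupt")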
+ rv += func() + _translate_ch_to_exc(rv) + return rv + + +else: + import tty + import termios + + @contextlib.contextmanager + def raw_terminal(): + if not isatty(sys.stdin): + f = open("/dev/tty") + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + try: + old_settings = termios.tcgetattr(fd) + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo): + with raw_terminal() as fd: + ch = os.read(fd, 32) + ch = ch.decode(get_best_encoding(sys.stdin), "replace") + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + _translate_ch_to_exc(ch) + return ch diff --git a/minor_project/lib/python3.6/site-packages/click/_textwrap.py b/minor_project/lib/python3.6/site-packages/click/_textwrap.py new file mode 100644 index 0000000..6959087 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/_textwrap.py @@ -0,0 +1,37 @@ +import textwrap +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent): + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text): + rv = [] + for idx, line in enumerate(text.splitlines()): + indent = self.initial_indent + if idx > 0: + indent = self.subsequent_indent + rv.append(indent + line) + return "\n".join(rv) diff --git a/minor_project/lib/python3.6/site-packages/click/_unicodefun.py b/minor_project/lib/python3.6/site-packages/click/_unicodefun.py new file mode 100644 index 0000000..781c365 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/_unicodefun.py @@ -0,0 +1,131 @@ +import codecs +import os +import sys + +from ._compat import PY2 + + +def _find_unicode_literals_frame(): + import __future__ + + if not hasattr(sys, "_getframe"): # not all Python implementations have it + return 0 + frm = sys._getframe(1) + idx = 1 + while frm is not None: + if frm.f_globals.get("__name__", "").startswith("click."): + frm = frm.f_back + idx += 1 + elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag: + return idx + else: + break + return 0 + + +def _check_for_unicode_literals(): + if not __debug__: + return + + from . import disable_unicode_literals_warning + + if not PY2 or disable_unicode_literals_warning: + return + bad_frame = _find_unicode_literals_frame() + if bad_frame <= 0: + return + from warnings import warn + + warn( + Warning( + "Click detected the use of the unicode_literals __future__" + " import. This is heavily discouraged because it can" + " introduce subtle bugs in your code. You should instead" + ' use explicit u"" literals for your unicode strings. 
For' + " more information see" + " https://click.palletsprojects.com/python3/" + ), + stacklevel=bad_frame, + ) + + +def _verify_python3_env(): + """Ensures that the environment is good for unicode on Python 3.""" + if PY2: + return + try: + import locale + + fs_enc = codecs.lookup(locale.getpreferredencoding()).name + except Exception: + fs_enc = "ascii" + if fs_enc != "ascii": + return + + extra = "" + if os.name == "posix": + import subprocess + + try: + rv = subprocess.Popen( + ["locale", "-a"], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ).communicate()[0] + except OSError: + rv = b"" + good_locales = set() + has_c_utf8 = False + + # Make sure we're operating on text here. + if isinstance(rv, bytes): + rv = rv.decode("ascii", "replace") + + for line in rv.splitlines(): + locale = line.strip() + if locale.lower().endswith((".utf-8", ".utf8")): + good_locales.add(locale) + if locale.lower() in ("c.utf8", "c.utf-8"): + has_c_utf8 = True + + extra += "\n\n" + if not good_locales: + extra += ( + "Additional information: on this system no suitable" + " UTF-8 locales were discovered. This most likely" + " requires resolving by reconfiguring the locale" + " system." + ) + elif has_c_utf8: + extra += ( + "This system supports the C.UTF-8 locale which is" + " recommended. You might be able to resolve your issue" + " by exporting the following environment variables:\n\n" + " export LC_ALL=C.UTF-8\n" + " export LANG=C.UTF-8" + ) + else: + extra += ( + "This system lists a couple of UTF-8 supporting locales" + " that you can pick from. The following suitable" + " locales were discovered: {}".format(", ".join(sorted(good_locales))) + ) + + bad_locale = None + for locale in os.environ.get("LC_ALL"), os.environ.get("LANG"): + if locale and locale.lower().endswith((".utf-8", ".utf8")): + bad_locale = locale + if locale is not None: + break + if bad_locale is not None: + extra += ( + "\n\nClick discovered that you exported a UTF-8 locale" + " but the locale system could not pick up from it" + " because it does not exist. The exported locale is" + " '{}' but it is not supported".format(bad_locale) + ) + + raise RuntimeError( + "Click will abort further execution because Python 3 was" + " configured to use ASCII as encoding for the environment." + " Consult https://click.palletsprojects.com/python3/ for" + " mitigation steps.{}".format(extra) + ) diff --git a/minor_project/lib/python3.6/site-packages/click/_winconsole.py b/minor_project/lib/python3.6/site-packages/click/_winconsole.py new file mode 100644 index 0000000..b6c4274 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/_winconsole.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +# This module is based on the excellent work by Adam BartoÅ¡ who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prmopt. 
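# Hedged sketch of the encoding probe performed by _verify_python3_env
# above: resolve the preferred locale encoding and flag the ASCII case,
# which on POSIX is typically addressed by exporting LC_ALL=C.UTF-8 and
# LANG=C.UTF-8 as the error message suggests.
import codecs
import locale

def preferred_encoding_is_ascii():
    try:
        enc = codecs.lookup(locale.getpreferredencoding()).name
    except Exception:
        enc = "ascii"
    return enc == "ascii"

print(preferred_encoding_is_ascii())  # False on a correctly configured system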
+import ctypes +import io +import os +import sys +import time +import zlib +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import windll +from ctypes import WinError +from ctypes import WINFUNCTYPE +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +import msvcrt + +from ._compat import _NonClosingTextIOWrapper +from ._compat import PY2 +from ._compat import text_type + +try: + from ctypes import pythonapi + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release +except ImportError: + pythonapi = None + + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p)( + ("LocalFree", windll.kernel32) +) + + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + + +class Py_buffer(ctypes.Structure): + _fields_ = [ + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + if PY2: + _fields_.insert(-1, ("smalltable", c_ssize_t * 2)) + + +# On PyPy we cannot get buffers so our ability to operate here is +# serverly limited. 
+if pythonapi is None: + get_buffer = None +else: + + def get_buffer(obj, writable=False): + buf = Py_buffer() + flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + try: + buffer_type = c_char * buf.len + return buffer_type.from_address(buf.buf) + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + def __init__(self, handle): + self.handle = handle + + def isatty(self): + io.RawIOBase.isatty(self) + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + def readable(self): + return True + + def readinto(self, b): + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError( + "cannot read odd number of bytes from UTF-16-LE encoded console" + ) + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW( + HANDLE(self.handle), + buffer, + code_units_to_be_read, + byref(code_units_read), + None, + ) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError("Windows error: {}".format(GetLastError())) + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + def writable(self): + return True + + @staticmethod + def _get_error_message(errno): + if errno == ERROR_SUCCESS: + return "ERROR_SUCCESS" + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return "ERROR_NOT_ENOUGH_MEMORY" + return "Windows error {}".format(errno) + + def write(self, b): + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW( + HANDLE(self.handle), + buf, + code_units_to_be_written, + byref(code_units_written), + None, + ) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream(object): + def __init__(self, text_stream, byte_stream): + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self): + return self.buffer.name + + def write(self, x): + if isinstance(x, text_type): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines): + for line in lines: + self.write(line) + + def __getattr__(self, name): + return getattr(self._text_stream, name) + + def isatty(self): + return self.buffer.isatty() + + def __repr__(self): + return "".format( + self.name, self.encoding + ) + + +class WindowsChunkedWriter(object): + """ + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()' which we wrap to write in + limited chunks due to a Windows limitation on binary console streams. + """ + + def __init__(self, wrapped): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. 
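# Why _WindowsConsoleReader and _WindowsConsoleWriter above convert between
# byte counts and code-unit counts with a factor of two: ReadConsoleW and
# WriteConsoleW count UTF-16-LE code units, and each code unit is two bytes.
text = u"h\u00e9llo"                 # five BMP characters
encoded = text.encode("utf-16-le")
print(len(encoded))                  # 10 bytes
print(len(encoded) // 2)             # 5 code units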
+ self.__wrapped = wrapped + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def write(self, text): + total_to_write = len(text) + written = 0 + + while written < total_to_write: + to_write = min(total_to_write - written, MAX_BYTES_WRITTEN) + self.__wrapped.write(text[written : written + to_write]) + written += to_write + + +_wrapped_std_streams = set() + + +def _wrap_std_stream(name): + # Python 2 & Windows 7 and below + if ( + PY2 + and sys.getwindowsversion()[:2] <= (6, 1) + and name not in _wrapped_std_streams + ): + setattr(sys, name, WindowsChunkedWriter(getattr(sys, name))) + _wrapped_std_streams.add(name) + + +def _get_text_stdin(buffer_stream): + text_stream = _NonClosingTextIOWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return ConsoleStream(text_stream, buffer_stream) + + +def _get_text_stdout(buffer_stream): + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return ConsoleStream(text_stream, buffer_stream) + + +def _get_text_stderr(buffer_stream): + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return ConsoleStream(text_stream, buffer_stream) + + +if PY2: + + def _hash_py_argv(): + return zlib.crc32("\x00".join(sys.argv[1:])) + + _initial_argv_hash = _hash_py_argv() + + def _get_windows_argv(): + argc = c_int(0) + argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc)) + if not argv_unicode: + raise WinError() + try: + argv = [argv_unicode[i] for i in range(0, argc.value)] + finally: + LocalFree(argv_unicode) + del argv_unicode + + if not hasattr(sys, "frozen"): + argv = argv[1:] + while len(argv) > 0: + arg = argv[0] + if not arg.startswith("-") or arg == "-": + break + argv = argv[1:] + if arg.startswith(("-c", "-m")): + break + + return argv[1:] + + +_stream_factories = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f): + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except OSError: + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream(f, encoding, errors): + if ( + get_buffer is not None + and encoding in ("utf-16-le", None) + and errors in ("strict", None) + and _is_console(f) + ): + func = _stream_factories.get(f.fileno()) + if func is not None: + if not PY2: + f = getattr(f, "buffer", None) + if f is None: + return None + else: + # If we are on Python 2 we need to set the stream that we + # deal with to binary mode as otherwise the exercise if a + # bit moot. The same problems apply as for + # get_binary_stdin and friends from _compat. 
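# Stand-alone illustration of the chunked write loop in
# WindowsChunkedWriter.write above: a long payload is emitted in slices no
# larger than the 32767-byte limit (MAX_BYTES_WRITTEN in this module).
import io

def write_chunked(stream, text, chunk=32767):
    written = 0
    while written < len(text):
        step = min(len(text) - written, chunk)
        stream.write(text[written:written + step])
        written += step
    return written

buf = io.StringIO()
print(write_chunked(buf, "x" * 100000))  # 100000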
+ msvcrt.setmode(f.fileno(), os.O_BINARY) + return func(f) diff --git a/minor_project/lib/python3.6/site-packages/click/core.py b/minor_project/lib/python3.6/site-packages/click/core.py new file mode 100644 index 0000000..f58bf26 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/core.py @@ -0,0 +1,2030 @@ +import errno +import inspect +import os +import sys +from contextlib import contextmanager +from functools import update_wrapper +from itertools import repeat + +from ._compat import isidentifier +from ._compat import iteritems +from ._compat import PY2 +from ._compat import string_types +from ._unicodefun import _check_for_unicode_literals +from ._unicodefun import _verify_python3_env +from .exceptions import Abort +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import Exit +from .exceptions import MissingParameter +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import join_options +from .globals import pop_context +from .globals import push_context +from .parser import OptionParser +from .parser import split_opt +from .termui import confirm +from .termui import prompt +from .termui import style +from .types import BOOL +from .types import convert_type +from .types import IntRange +from .utils import echo +from .utils import get_os_args +from .utils import make_default_short_help +from .utils import make_str +from .utils import PacifyFlushWrapper + +_missing = object() + +SUBCOMMAND_METAVAR = "COMMAND [ARGS]..." +SUBCOMMANDS_METAVAR = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + +DEPRECATED_HELP_NOTICE = " (DEPRECATED)" +DEPRECATED_INVOKE_NOTICE = "DeprecationWarning: The command %(name)s is deprecated." + + +def _maybe_show_deprecated_notice(cmd): + if cmd.deprecated: + echo(style(DEPRECATED_INVOKE_NOTICE % {"name": cmd.name}, fg="red"), err=True) + + +def fast_exit(code): + """Exit without garbage collection, this speeds up exit by about 10ms for + things like bash completion. + """ + sys.stdout.flush() + sys.stderr.flush() + os._exit(code) + + +def _bashcomplete(cmd, prog_name, complete_var=None): + """Internal handler for the bash completion support.""" + if complete_var is None: + complete_var = "_{}_COMPLETE".format(prog_name.replace("-", "_").upper()) + complete_instr = os.environ.get(complete_var) + if not complete_instr: + return + + from ._bashcomplete import bashcomplete + + if bashcomplete(cmd, prog_name, complete_var, complete_instr): + fast_exit(1) + + +def _check_multicommand(base_command, cmd_name, cmd, register=False): + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = ( + "It is not possible to add multi commands as children to" + " another multi command that is in chain mode." + ) + else: + hint = ( + "Found a multi command as subcommand to a multi command" + " that is in chain mode. This is not supported." + ) + raise RuntimeError( + "{}. Command '{}' is set to chain and '{}' was added as" + " subcommand but it in itself is a multi command. 
('{}' is a {}" + " within a chained {} named '{}').".format( + hint, + base_command.name, + cmd_name, + cmd_name, + cmd.__class__.__name__, + base_command.__class__.__name__, + base_command.name, + ) + ) + + +def batch(iterable, batch_size): + return list(zip(*repeat(iter(iterable), batch_size))) + + +def invoke_param_callback(callback, ctx, param, value): + code = getattr(callback, "__code__", None) + args = getattr(code, "co_argcount", 3) + + if args < 3: + from warnings import warn + + warn( + "Parameter callbacks take 3 args, (ctx, param, value). The" + " 2-arg style is deprecated and will be removed in 8.0.".format(callback), + DeprecationWarning, + stacklevel=3, + ) + return callback(ctx, value) + + return callback(ctx, param, value) + + +@contextmanager +def augment_usage_errors(ctx, param=None): + """Context manager that attaches extra information to exceptions.""" + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing(invocation_order, declaration_order): + """Given a sequence of parameters in the order as should be considered + for processing and an iterable of parameters that exist, this returns + a list in the correct order as they should be processed. + """ + + def sort_key(item): + try: + idx = invocation_order.index(item) + except ValueError: + idx = float("inf") + return (not item.is_eager, idx) + + return sorted(declaration_order, key=sort_key) + + +class Context(object): + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + .. versionadded:: 2.0 + Added the `resilient_parsing`, `help_option_names`, + `token_normalize_func` parameters. + + .. versionadded:: 3.0 + Added the `allow_extra_args` and `allow_interspersed_args` + parameters. + + .. versionadded:: 4.0 + Added the `color`, `ignore_unknown_options`, and + `max_content_width` parameters. + + .. versionadded:: 7.1 + Added the `show_default` parameter. + + :param command: the command class for this context. + :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. 
In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. + :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. + :param help_option_names: optionally a list of strings that define how + the default help parameter is named. The + default is ``['--help']``. + :param token_normalize_func: an optional function that is used to + normalize tokens (options, choices, + etc.). This for instance can be used to + implement case insensitive behavior. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are used in texts that Click prints which is by + default not the case. This for instance would affect + help output. + :param show_default: if True, shows defaults for all options. + Even if an option is later created with show_default=False, + this command-level setting overrides it. + """ + + def __init__( + self, + command, + parent=None, + info_name=None, + obj=None, + auto_envvar_prefix=None, + default_map=None, + terminal_width=None, + max_content_width=None, + resilient_parsing=False, + allow_extra_args=None, + allow_interspersed_args=None, + ignore_unknown_options=None, + help_option_names=None, + token_normalize_func=None, + color=None, + show_default=None, + ): + #: the parent context or `None` if none exists. + self.parent = parent + #: the :class:`Command` for this context. + self.command = command + #: the descriptive information name + self.info_name = info_name + #: the parsed parameters except if the value is hidden in which + #: case it's not remembered. + self.params = {} + #: the leftover arguments. + self.args = [] + #: protected arguments. These are arguments that are prepended + #: to `args` when certain parsing scenarios are encountered but + #: must be never propagated to another arguments. This is used + #: to implement nested parsing. + self.protected_args = [] + if obj is None and parent is not None: + obj = parent.obj + #: the user object stored. + self.obj = obj + self._meta = getattr(parent, "meta", {}) + + #: A dictionary (-like object) with defaults for parameters. + if ( + default_map is None + and parent is not None + and parent.default_map is not None + ): + default_map = parent.default_map.get(info_name) + self.default_map = default_map + + #: This flag indicates if a subcommand is going to be executed. A + #: group callback can use this information to figure out if it's + #: being executed directly or because the execution flow passes + #: onwards to a subcommand. By default it's None, but it can be + #: the name of the subcommand to execute. + #: + #: If chaining is enabled this will be set to ``'*'`` in case + #: any commands are executed. 
It is however not possible to + #: figure out which ones. If you require this knowledge you + #: should use a :func:`resultcallback`. + self.invoked_subcommand = None + + if terminal_width is None and parent is not None: + terminal_width = parent.terminal_width + #: The width of the terminal (None is autodetection). + self.terminal_width = terminal_width + + if max_content_width is None and parent is not None: + max_content_width = parent.max_content_width + #: The maximum width of formatted content (None implies a sensible + #: default which is 80 for most things). + self.max_content_width = max_content_width + + if allow_extra_args is None: + allow_extra_args = command.allow_extra_args + #: Indicates if the context allows extra args or if it should + #: fail on parsing. + #: + #: .. versionadded:: 3.0 + self.allow_extra_args = allow_extra_args + + if allow_interspersed_args is None: + allow_interspersed_args = command.allow_interspersed_args + #: Indicates if the context allows mixing of arguments and + #: options or not. + #: + #: .. versionadded:: 3.0 + self.allow_interspersed_args = allow_interspersed_args + + if ignore_unknown_options is None: + ignore_unknown_options = command.ignore_unknown_options + #: Instructs click to ignore options that a command does not + #: understand and will store it on the context for later + #: processing. This is primarily useful for situations where you + #: want to call into external programs. Generally this pattern is + #: strongly discouraged because it's not possibly to losslessly + #: forward all arguments. + #: + #: .. versionadded:: 4.0 + self.ignore_unknown_options = ignore_unknown_options + + if help_option_names is None: + if parent is not None: + help_option_names = parent.help_option_names + else: + help_option_names = ["--help"] + + #: The names for the help options. + self.help_option_names = help_option_names + + if token_normalize_func is None and parent is not None: + token_normalize_func = parent.token_normalize_func + + #: An optional normalization function for tokens. This is + #: options, choices, commands etc. + self.token_normalize_func = token_normalize_func + + #: Indicates if resilient parsing is enabled. In that case Click + #: will do its best to not cause any failures and default values + #: will be ignored. Useful for completion. + self.resilient_parsing = resilient_parsing + + # If there is no envvar prefix yet, but the parent has one and + # the command on this level has a name, we can expand the envvar + # prefix automatically. + if auto_envvar_prefix is None: + if ( + parent is not None + and parent.auto_envvar_prefix is not None + and self.info_name is not None + ): + auto_envvar_prefix = "{}_{}".format( + parent.auto_envvar_prefix, self.info_name.upper() + ) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + if auto_envvar_prefix is not None: + auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") + self.auto_envvar_prefix = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. 
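# Worked example (hypothetical values) of the automatic envvar prefix
# expansion above: a parent prefix "MYTOOL" and a subcommand info name
# "sync-db" produce the prefix "MYTOOL_SYNC_DB".
parent_prefix = "MYTOOL"
info_name = "sync-db"
prefix = "{}_{}".format(parent_prefix, info_name.upper()).replace("-", "_")
print(prefix)  # MYTOOL_SYNC_DB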
+ self.color = color + + self.show_default = show_default + + self._close_callbacks = [] + self._depth = 0 + + def __enter__(self): + self._depth += 1 + push_context(self) + return self + + def __exit__(self, exc_type, exc_value, tb): + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup=True): + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self): + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. However what is important is + that code that places data here adheres to the general semantics of + the system. + + Example usage:: + + LANG_KEY = f'{__name__}.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self): + """Creates the formatter for the help and usage output.""" + return HelpFormatter( + width=self.terminal_width, max_width=self.max_content_width + ) + + def call_on_close(self, f): + """This decorator remembers a function as callback that should be + executed when the context tears down. This is most useful to bind + resource handling to the script execution. For instance, file objects + opened by the :class:`File` type will register their close callbacks + here. + + :param f: the function to execute on teardown. + """ + self._close_callbacks.append(f) + return f + + def close(self): + """Invokes all close callbacks.""" + for cb in self._close_callbacks: + cb() + self._close_callbacks = [] + + @property + def command_path(self): + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. 
+ """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + rv = "{} {}".format(self.parent.command_path, rv) + return rv.lstrip() + + def find_root(self): + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type): + """Finds the closest object of a given type.""" + node = self + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + node = node.parent + + def ensure_object(self, object_type): + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + def lookup_default(self, name): + """Looks up the default for a parameter name. This by default + looks into the :attr:`default_map` if available. + """ + if self.default_map is not None: + rv = self.default_map.get(name) + if callable(rv): + rv = rv() + return rv + + def fail(self, message): + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self): + """Aborts the script.""" + raise Abort() + + def exit(self, code=0): + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self): + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self): + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def invoke(*args, **kwargs): # noqa: B902 + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. + + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + """ + self, callback = args[:2] + ctx = self + + # It's also possible to invoke another command which might or + # might not have a callback. In that case we also fill + # in defaults and make a new context for this command. + if isinstance(callback, Command): + other_cmd = callback + callback = other_cmd.callback + ctx = Context(other_cmd, info_name=other_cmd.name, parent=self) + if callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.get_default(ctx) + + args = args[2:] + with augment_usage_errors(self): + with ctx: + return callback(*args, **kwargs) + + def forward(*args, **kwargs): # noqa: B902 + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. 
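+
+        An illustrative sketch (the ``cli`` group and the ``sync`` command
+        with its ``--force`` flag are assumptions, not part of Click
+        itself)::
+
+            @click.group()
+            def cli():
+                pass
+
+            @cli.command()
+            @click.option('--force', is_flag=True)
+            def sync(force):
+                click.echo('force={}'.format(force))
+
+            @cli.command()
+            @click.pass_context
+            def sync_all(ctx):
+                # forward() re-uses the parameters already parsed for
+                # the current context; invoke() fills in defaults for
+                # anything not passed explicitly.
+                ctx.forward(sync)
+                ctx.invoke(sync, force=True)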
+ """ + self, cmd = args[:2] + + # It's also possible to invoke another command which might or + # might not have a callback. + if not isinstance(cmd, Command): + raise TypeError("Callback is not a command.") + + for param in self.params: + if param not in kwargs: + kwargs[param] = self.params[param] + + return self.invoke(cmd, **kwargs) + + +class BaseCommand(object): + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. + + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__(self, name, context_settings=None): + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + if context_settings is None: + context_settings = {} + #: an optional dictionary with defaults passed to the context. + self.context_settings = context_settings + + def __repr__(self): + return "<{} {}>".format(self.__class__.__name__, self.name) + + def get_usage(self, ctx): + raise NotImplementedError("Base commands cannot get usage") + + def get_help(self, ctx): + raise NotImplementedError("Base commands cannot get help") + + def make_context(self, info_name, args, parent=None, **extra): + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + :param info_name: the info name for this invokation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it it's + the name of the script. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + """ + for key, value in iteritems(self.context_settings): + if key not in extra: + extra[key] = value + ctx = Context(self, info_name=info_name, parent=parent, **extra) + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx, args): + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. 
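+
+        A programmatic sketch (the ``cli`` command and its ``--force``
+        option are illustrative assumptions)::
+
+            @click.command()
+            @click.option('--force', is_flag=True)
+            def cli(force):
+                pass
+
+            ctx = cli.make_context('cli', ['--force'])
+            assert ctx.params == {'force': True}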
+ """ + raise NotImplementedError("Base commands do not know how to parse arguments.") + + def invoke(self, ctx): + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError("Base commands are not invokable by default") + + def main( + self, + args=None, + prog_name=None, + complete_var=None, + standalone_mode=True, + **extra + ): + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + .. versionadded:: 3.0 + Added the `standalone_mode` flag to control the standalone mode. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + """ + # If we are in Python 3, we will verify that the environment is + # sane at this point or reject further execution to avoid a + # broken script. + if not PY2: + _verify_python3_env() + else: + _check_for_unicode_literals() + + if args is None: + args = get_os_args() + else: + args = list(args) + + if prog_name is None: + prog_name = make_str( + os.path.basename(sys.argv[0] if sys.argv else __file__) + ) + + # Hook for the Bash completion. This only activates if the Bash + # completion is actually enabled, otherwise this is quite a fast + # noop. + _bashcomplete(self, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! 
+ # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt): + echo(file=sys.stderr) + raise Abort() + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except IOError as e: + if e.errno == errno.EPIPE: + sys.stdout = PacifyFlushWrapper(sys.stdout) + sys.stderr = PacifyFlushWrapper(sys.stderr) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo("Aborted!", file=sys.stderr) + sys.exit(1) + + def __call__(self, *args, **kwargs): + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + .. versionchanged:: 7.1 + Added the `no_args_is_help` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. + """ + + def __init__( + self, + name, + context_settings=None, + callback=None, + params=None, + help=None, + epilog=None, + short_help=None, + options_metavar="[OPTIONS]", + add_help_option=True, + no_args_is_help=False, + hidden=False, + deprecated=False, + ): + BaseCommand.__init__(self, name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. 
+ self.params = params or [] + # if a form feed (page break) is found in the help text, truncate help + # text to the content preceding the first form feed + if help and "\f" in help: + help = help.split("\f", 1)[0] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + + def get_usage(self, ctx): + """Formats the usage line into a string and returns it. + + Calls :meth:`format_usage` internally. + """ + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_params(self, ctx): + rv = self.params + help_option = self.get_help_option(ctx) + if help_option is not None: + rv = rv + [help_option] + return rv + + def format_usage(self, ctx, formatter): + """Writes the usage line into the formatter. + + This is a low-level method called by :meth:`get_usage`. + """ + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, " ".join(pieces)) + + def collect_usage_pieces(self, ctx): + """Returns all the pieces that go into the usage line and returns + it as a list of strings. + """ + rv = [self.options_metavar] + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + return rv + + def get_help_option_names(self, ctx): + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return all_names + + def get_help_option(self, ctx): + """Returns the help option object.""" + help_options = self.get_help_option_names(ctx) + if not help_options or not self.add_help_option: + return + + def show_help(ctx, param, value): + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + return Option( + help_options, + is_flag=True, + is_eager=True, + expose_value=False, + callback=show_help, + help="Show this message and exit.", + ) + + def make_parser(self, ctx): + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx): + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit=45): + """Gets short help for the command or makes it by shortening the + long help string. + """ + return ( + self.short_help + or self.help + and make_default_short_help(self.help, limit) + or "" + ) + + def format_help(self, ctx, formatter): + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. 
+ + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx, formatter): + """Writes the help text to the formatter if it exists.""" + if self.help: + formatter.write_paragraph() + with formatter.indentation(): + help_text = self.help + if self.deprecated: + help_text += DEPRECATED_HELP_NOTICE + formatter.write_text(help_text) + elif self.deprecated: + formatter.write_paragraph() + with formatter.indentation(): + formatter.write_text(DEPRECATED_HELP_NOTICE) + + def format_options(self, ctx, formatter): + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section("Options"): + formatter.write_dl(opts) + + def format_epilog(self, ctx, formatter): + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + formatter.write_paragraph() + with formatter.indentation(): + formatter.write_text(self.epilog) + + def parse_args(self, ctx, args): + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + "Got unexpected extra argument{} ({})".format( + "s" if len(args) != 1 else "", " ".join(map(make_str, args)) + ) + ) + + ctx.args = args + return args + + def invoke(self, ctx): + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + _maybe_show_deprecated_notice(self) + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. + + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: the result callback to attach to this multi + command. 
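+
+    A minimal custom multi command sketch (the command names below are
+    assumptions for illustration)::
+
+        class MyCLI(MultiCommand):
+            def list_commands(self, ctx):
+                return ['status', 'sync']
+
+            def get_command(self, ctx, name):
+                @click.command(name)
+                def cmd():
+                    click.echo('running {}'.format(name))
+
+                return cmd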
+ """ + + allow_extra_args = True + allow_interspersed_args = False + + def __init__( + self, + name=None, + invoke_without_command=False, + no_args_is_help=None, + subcommand_metavar=None, + chain=False, + result_callback=None, + **attrs + ): + Command.__init__(self, name, **attrs) + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + if subcommand_metavar is None: + if chain: + subcommand_metavar = SUBCOMMANDS_METAVAR + else: + subcommand_metavar = SUBCOMMAND_METAVAR + self.subcommand_metavar = subcommand_metavar + self.chain = chain + #: The result callback that is stored. This can be set or + #: overridden with the :func:`resultcallback` decorator. + self.result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "Multi commands in chain mode cannot have" + " optional arguments." + ) + + def collect_usage_pieces(self, ctx): + rv = Command.collect_usage_pieces(self, ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx, formatter): + Command.format_options(self, ctx, formatter) + self.format_commands(ctx, formatter) + + def resultcallback(self, replace=False): + """Adds a result callback to the chain command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. + + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.resultcallback() + def process_result(result, input): + return result + input + + .. versionadded:: 3.0 + + :param replace: if set to `True` an already existing result + callback will be removed. + """ + + def decorator(f): + old_callback = self.result_callback + if old_callback is None or replace: + self.result_callback = f + return f + + def function(__value, *args, **kwargs): + return f(old_callback(__value, *args, **kwargs), *args, **kwargs) + + self.result_callback = rv = update_wrapper(function, f) + return rv + + return decorator + + def format_commands(self, ctx, formatter): + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. 
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section("Commands"): + formatter.write_dl(rows) + + def parse_args(self, ctx, args): + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = Command.parse_args(self, ctx, args) + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx): + def _process_result(value): + if self.result_callback is not None: + value = ctx.invoke(self.result_callback, value, **ctx.params) + return value + + if not ctx.protected_args: + # If we are invoked without command the chain flag controls + # how this happens. If we are not in chain mode, the return + # value here is the return value of the command. + # If however we are in chain mode, the return value is the + # return value of the result processor invoked with an empty + # list (which means that no subcommand actually was executed). + if self.invoke_without_command: + if not self.chain: + return Command.invoke(self, ctx) + with ctx: + Command.invoke(self, ctx) + return _process_result([]) + ctx.fail("Missing command.") + + # Fetch args back out + args = ctx.protected_args + ctx.args + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + ctx.invoked_subcommand = cmd_name + Command.invoke(self, ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + Command.invoke(self, ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command(self, ctx, args): + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
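+        # For illustration: a ``token_normalize_func`` such as
+        # ``lambda x: x.lower()`` (an assumed setting supplied through
+        # ``context_settings``) would let ``SYNC`` resolve to a command
+        # registered as ``sync`` at this point.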
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if split_opt(cmd_name)[0]: + self.parse_args(ctx, ctx.args) + ctx.fail("No such command '{}'.".format(original_cmd_name)) + + return cmd_name, cmd, args[1:] + + def get_command(self, ctx, cmd_name): + """Given a context and a command name, this returns a + :class:`Command` object if it exists or returns `None`. + """ + raise NotImplementedError() + + def list_commands(self, ctx): + """Returns a list of subcommand names in the order they should + appear. + """ + return [] + + +class Group(MultiCommand): + """A group allows a command to have subcommands attached. This is the + most common way to implement nesting in Click. + + :param commands: a dictionary of commands. + """ + + def __init__(self, name=None, commands=None, **attrs): + MultiCommand.__init__(self, name, **attrs) + #: the registered subcommands by their exported names. + self.commands = commands or {} + + def add_command(self, cmd, name=None): + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + def command(self, *args, **kwargs): + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` but + immediately registers the created command with this instance by + calling into :meth:`add_command`. + """ + from .decorators import command + + def decorator(f): + cmd = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + return decorator + + def group(self, *args, **kwargs): + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` but + immediately registers the created command with this instance by + calling into :meth:`add_command`. + """ + from .decorators import group + + def decorator(f): + cmd = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + return decorator + + def get_command(self, ctx, cmd_name): + return self.commands.get(cmd_name) + + def list_commands(self, ctx): + return sorted(self.commands) + + +class CommandCollection(MultiCommand): + """A command collection is a multi command that merges multiple multi + commands together into one. This is a straightforward implementation + that accepts a list of different multi commands as sources and + provides all the commands for each of them. + """ + + def __init__(self, name=None, sources=None, **attrs): + MultiCommand.__init__(self, name, **attrs) + #: The list of registered multi commands. 
+ self.sources = sources or [] + + def add_source(self, multi_cmd): + """Adds a new multi command to the chain dispatcher.""" + self.sources.append(multi_cmd) + + def get_command(self, ctx, cmd_name): + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + if rv is not None: + if self.chain: + _check_multicommand(self, cmd_name, rv) + return rv + + def list_commands(self, ctx): + rv = set() + for source in self.sources: + rv.update(source.list_commands(ctx)) + return sorted(rv) + + +class Parameter(object): + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The later is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: a callback that should be executed after the parameter + was matched. This is called as ``fn(ctx, param, + value)`` and needs to return the value. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). + :param metavar: how the value is represented in the help page. + :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. + :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: a string or list of strings that are environment variables + that should be checked. + + .. versionchanged:: 7.1 + Empty environment variables are ignored rather than taking the + empty string value. This makes it possible for scripts to clear + variables if they can't unset them. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. The old callback format will still work, but it will + raise a warning to give you a chance to migrate the code easier. + """ + param_type_name = "parameter" + + def __init__( + self, + param_decls=None, + type=None, + required=False, + default=None, + callback=None, + nargs=None, + metavar=None, + expose_value=True, + is_eager=False, + envvar=None, + autocompletion=None, + ): + self.name, self.opts, self.secondary_opts = self._parse_decls( + param_decls or (), expose_value + ) + + self.type = convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. 
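+        # (Illustrative note: a composite type such as
+        # ``click.Tuple([int, str])`` has an arity of 2, so nargs
+        # defaults to 2 below.)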
+ if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = False + self.expose_value = expose_value + self.default = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self.autocompletion = autocompletion + + def __repr__(self): + return "<{} {}>".format(self.__class__.__name__, self.name) + + @property + def human_readable_name(self): + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name + + def make_metavar(self): + if self.metavar is not None: + return self.metavar + metavar = self.type.get_metavar(self) + if metavar is None: + metavar = self.type.name.upper() + if self.nargs != 1: + metavar += "..." + return metavar + + def get_default(self, ctx): + """Given a context variable this calculates the default value.""" + # Otherwise go with the regular default. + if callable(self.default): + rv = self.default() + else: + rv = self.default + return self.type_cast_value(ctx, rv) + + def add_to_parser(self, parser, ctx): + pass + + def consume_value(self, ctx, opts): + value = opts.get(self.name) + if value is None: + value = self.value_from_envvar(ctx) + if value is None: + value = ctx.lookup_default(self.name) + return value + + def type_cast_value(self, ctx, value): + """Given a value this runs it properly through the type system. + This automatically handles things like `nargs` and `multiple` as + well as composite types. + """ + if self.type.is_composite: + if self.nargs <= 1: + raise TypeError( + "Attempted to invoke composite type but nargs has" + " been set to {}. This is not supported; nargs" + " needs to be set to a fixed value > 1.".format(self.nargs) + ) + if self.multiple: + return tuple(self.type(x or (), self, ctx) for x in value or ()) + return self.type(value or (), self, ctx) + + def _convert(value, level): + if level == 0: + return self.type(value, self, ctx) + return tuple(_convert(x, level - 1) for x in value or ()) + + return _convert(value, (self.nargs != 1) + bool(self.multiple)) + + def process_value(self, ctx, value): + """Given a value and context this runs the logic to convert the + value as necessary. + """ + # If the value we were given is None we do nothing. This way + # code that calls this can easily figure out if something was + # not provided. Otherwise it would be converted into an empty + # tuple for multiple invocations which is inconvenient. 
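+        # Illustrative note: for an INT option declared with
+        # ``multiple=True`` and ``nargs=2``, a raw value of
+        # (('1', '2'), ('3', '4')) comes back from type_cast_value()
+        # as ((1, 2), (3, 4)).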
+ if value is not None: + return self.type_cast_value(ctx, value) + + def value_is_missing(self, value): + if value is None: + return True + if (self.nargs != 1 or self.multiple) and value == (): + return True + return False + + def full_process_value(self, ctx, value): + value = self.process_value(ctx, value) + + if value is None and not ctx.resilient_parsing: + value = self.get_default(ctx) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + return value + + def resolve_envvar_value(self, ctx): + if self.envvar is None: + return + if isinstance(self.envvar, (tuple, list)): + for envvar in self.envvar: + rv = os.environ.get(envvar) + if rv is not None: + return rv + else: + rv = os.environ.get(self.envvar) + + if rv != "": + return rv + + def value_from_envvar(self, ctx): + rv = self.resolve_envvar_value(ctx) + if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + return rv + + def handle_parse_result(self, ctx, opts, args): + with augment_usage_errors(ctx, param=self): + value = self.consume_value(ctx, opts) + try: + value = self.full_process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + value = None + if self.callback is not None: + try: + value = invoke_param_callback(self.callback, ctx, self, value) + except Exception: + if not ctx.resilient_parsing: + raise + + if self.expose_value: + ctx.params[self.name] = value + return value, args + + def get_help_record(self, ctx): + pass + + def get_usage_pieces(self, ctx): + return [] + + def get_error_hint(self, ctx): + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(repr(x) for x in hint_list) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: controls if the default value should be shown on the + help page. Normally, defaults are not shown. If this + value is a string, it shows the string instead of the + value. This is particularly useful for dynamic options. + :param show_envvar: controls if an environment variable should be shown on + the help page. Normally, environment variables + are not shown. + :param prompt: if set to `True` or a non empty string then the user will be + prompted for input. If set to `True` the prompt will be the + option name capitalized. + :param confirmation_prompt: if set then the value will need to be confirmed + if it was prompted for. + :param hide_input: if this is `True` then the input on the prompt will be + hidden from the user. This is useful for password + input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. 
+ :param help: the help string. + :param hidden: hide this option from help outputs. + """ + + param_type_name = "option" + + def __init__( + self, + param_decls=None, + show_default=False, + prompt=False, + confirmation_prompt=False, + hide_input=False, + is_flag=None, + flag_value=None, + multiple=False, + count=False, + allow_from_autoenv=True, + type=None, + help=None, + hidden=False, + show_choices=True, + show_envvar=False, + **attrs + ): + default_is_missing = attrs.get("default", _missing) is _missing + Parameter.__init__(self, param_decls, type=type, **attrs) + + if prompt is True: + prompt_text = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.hide_input = hide_input + self.hidden = hidden + + # Flags + if is_flag is None: + if flag_value is not None: + is_flag = True + else: + is_flag = bool(self.secondary_opts) + if is_flag and default_is_missing: + self.default = False + if flag_value is None: + flag_value = not self.default + self.is_flag = is_flag + self.flag_value = flag_value + if self.is_flag and isinstance(self.flag_value, bool) and type in [None, bool]: + self.type = BOOL + self.is_bool_flag = True + else: + self.is_bool_flag = False + + # Counting + self.count = count + if count: + if type is None: + self.type = IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.multiple = multiple + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + # Sanity check for stuff we don't support + if __debug__: + if self.nargs < 0: + raise TypeError("Options cannot have nargs < 0") + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError("Cannot prompt for flags that are not bools.") + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Got secondary option for non boolean flag.") + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError("Hidden input does not work with boolean flag prompts.") + if self.count: + if self.multiple: + raise TypeError( + "Options cannot be multiple and count at the same time." + ) + elif self.is_flag: + raise TypeError( + "Options cannot be count and flags at the same time." + ) + + def _parse_decls(self, decls, expose_value): + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if isidentifier(decl): + if name is not None: + raise TypeError("Name defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not isidentifier(name): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError("Could not determine name for option") + + if not opts and not secondary_opts: + raise TypeError( + "No options defined but a name was passed ({}). 
Did you" + " mean to declare an argument instead of an option?".format(name) + ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser, ctx): + kwargs = { + "dest": self.name, + "nargs": self.nargs, + "obj": self, + } + + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + kwargs.pop("nargs", None) + action_const = "{}_const".format(action) + if self.is_bool_flag and self.secondary_opts: + parser.add_option(self.opts, action=action_const, const=True, **kwargs) + parser.add_option( + self.secondary_opts, action=action_const, const=False, **kwargs + ) + else: + parser.add_option( + self.opts, action=action_const, const=self.flag_value, **kwargs + ) + else: + kwargs["action"] = action + parser.add_option(self.opts, **kwargs) + + def get_help_record(self, ctx): + if self.hidden: + return + any_prefix_is_slash = [] + + def _write_opts(opts): + rv, any_slashes = join_options(opts) + if any_slashes: + any_prefix_is_slash[:] = [True] + if not self.is_flag and not self.count: + rv += " {}".format(self.make_metavar()) + return rv + + rv = [_write_opts(self.opts)] + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + extra = [] + if self.show_envvar: + envvar = self.envvar + if envvar is None: + if self.allow_from_autoenv and ctx.auto_envvar_prefix is not None: + envvar = "{}_{}".format(ctx.auto_envvar_prefix, self.name.upper()) + if envvar is not None: + extra.append( + "env var: {}".format( + ", ".join(str(d) for d in envvar) + if isinstance(envvar, (list, tuple)) + else envvar + ) + ) + if self.default is not None and (self.show_default or ctx.show_default): + if isinstance(self.show_default, string_types): + default_string = "({})".format(self.show_default) + elif isinstance(self.default, (list, tuple)): + default_string = ", ".join(str(d) for d in self.default) + elif inspect.isfunction(self.default): + default_string = "(dynamic)" + else: + default_string = self.default + extra.append("default: {}".format(default_string)) + + if self.required: + extra.append("required") + if extra: + help = "{}[{}]".format( + "{} ".format(help) if help else "", "; ".join(extra) + ) + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + def get_default(self, ctx): + # If we're a non boolean flag our default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return param.flag_value + return None + return Parameter.get_default(self, ctx) + + def prompt_for_value(self, ctx): + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. 
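+        # (A boolean flag turns into a yes/no confirmation instead of a
+        # free-form value prompt.)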
+ if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt( + self.prompt, + default=default, + type=self.type, + hide_input=self.hide_input, + show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x), + ) + + def resolve_envvar_value(self, ctx): + rv = Parameter.resolve_envvar_value(self, ctx) + if rv is not None: + return rv + if self.allow_from_autoenv and ctx.auto_envvar_prefix is not None: + envvar = "{}_{}".format(ctx.auto_envvar_prefix, self.name.upper()) + return os.environ.get(envvar) + + def value_from_envvar(self, ctx): + rv = self.resolve_envvar_value(ctx) + if rv is None: + return None + value_depth = (self.nargs != 1) + bool(self.multiple) + if value_depth > 0 and rv is not None: + rv = self.type.split_envvar_value(rv) + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + return rv + + def full_process_value(self, ctx, value): + if value is None and self.prompt is not None and not ctx.resilient_parsing: + return self.prompt_for_value(ctx) + return Parameter.full_process_value(self, ctx, value) + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the parameter constructor. + """ + + param_type_name = "argument" + + def __init__(self, param_decls, required=None, **attrs): + if required is None: + if attrs.get("default") is not None: + required = False + else: + required = attrs.get("nargs", 1) > 0 + Parameter.__init__(self, param_decls, required=required, **attrs) + if self.default is not None and self.nargs < 0: + raise TypeError( + "nargs=-1 in combination with a default value is not supported." + ) + + @property + def human_readable_name(self): + if self.metavar is not None: + return self.metavar + return self.name.upper() + + def make_metavar(self): + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() + if not self.required: + var = "[{}]".format(var) + if self.nargs != 1: + var += "..." 
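+        # For example (illustrative): an optional argument named
+        # ``files`` with ``nargs=-1`` renders as ``[FILES]...`` in the
+        # usage line.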
+ return var + + def _parse_decls(self, decls, expose_value): + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Could not determine name for argument") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + " {}".format(len(decls)) + ) + return name, [arg], [] + + def get_usage_pieces(self, ctx): + return [self.make_metavar()] + + def get_error_hint(self, ctx): + return repr(self.make_metavar()) + + def add_to_parser(self, parser, ctx): + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/minor_project/lib/python3.6/site-packages/click/decorators.py b/minor_project/lib/python3.6/site-packages/click/decorators.py new file mode 100644 index 0000000..c7b5af6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/decorators.py @@ -0,0 +1,333 @@ +import inspect +import sys +from functools import update_wrapper + +from ._compat import iteritems +from ._unicodefun import _check_for_unicode_literals +from .core import Argument +from .core import Command +from .core import Group +from .core import Option +from .globals import get_current_context +from .utils import echo + + +def pass_context(f): + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args, **kwargs): + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(new_func, f) + + +def pass_obj(f): + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + + def new_func(*args, **kwargs): + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + +def make_pass_decorator(object_type, ensure=False): + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. 
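+
+    A usage sketch (the ``Repo`` class below is an assumption, not part
+    of Click)::
+
+        class Repo(object):
+            def __init__(self, home='.'):
+                self.home = home
+
+        pass_repo = make_pass_decorator(Repo, ensure=True)
+
+        @click.command()
+        @pass_repo
+        def show(repo):
+            click.echo(repo.home)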
+ """ + + def decorator(f): + def new_func(*args, **kwargs): + ctx = get_current_context() + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + " object of type '{}' existing".format(object_type.__name__) + ) + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + return decorator + + +def _make_command(f, name, attrs, cls): + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + try: + params = f.__click_params__ + params.reverse() + del f.__click_params__ + except AttributeError: + params = [] + help = attrs.get("help") + if help is None: + help = inspect.getdoc(f) + if isinstance(help, bytes): + help = help.decode("utf-8") + else: + help = inspect.cleandoc(help) + attrs["help"] = help + _check_for_unicode_literals() + return cls( + name=name or f.__name__.lower().replace("_", "-"), + callback=f, + params=params, + **attrs + ) + + +def command(name=None, cls=None, **attrs): + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function with + underscores replaced by dashes. If you want to change that, you can + pass the intended name as the first argument. + + All keyword arguments are forwarded to the underlying command class. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + """ + if cls is None: + cls = Command + + def decorator(f): + cmd = _make_command(f, name, attrs, cls) + cmd.__doc__ = f.__doc__ + return cmd + + return decorator + + +def group(name=None, **attrs): + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + """ + attrs.setdefault("cls", Group) + return command(name, **attrs) + + +def _param_memo(f, param): + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] + f.__click_params__.append(param) + + +def argument(*param_decls, **attrs): + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + :param cls: the argument class to instantiate. This defaults to + :class:`Argument`. + """ + + def decorator(f): + ArgumentClass = attrs.pop("cls", Argument) + _param_memo(f, ArgumentClass(param_decls, **attrs)) + return f + + return decorator + + +def option(*param_decls, **attrs): + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. 
+ + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + """ + + def decorator(f): + # Issue 926, copy attrs, so pre-defined options can re-use the same cls= + option_attrs = attrs.copy() + + if "help" in option_attrs: + option_attrs["help"] = inspect.cleandoc(option_attrs["help"]) + OptionClass = option_attrs.pop("cls", Option) + _param_memo(f, OptionClass(param_decls, **option_attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls, **attrs): + """Shortcut for confirmation prompts that can be ignored by passing + ``--yes`` as parameter. + + This is equivalent to decorating a function with :func:`option` with + the following parameters:: + + def callback(ctx, param, value): + if not value: + ctx.abort() + + @click.command() + @click.option('--yes', is_flag=True, callback=callback, + expose_value=False, prompt='Do you want to continue?') + def dropdb(): + pass + """ + + def decorator(f): + def callback(ctx, param, value): + if not value: + ctx.abort() + + attrs.setdefault("is_flag", True) + attrs.setdefault("callback", callback) + attrs.setdefault("expose_value", False) + attrs.setdefault("prompt", "Do you want to continue?") + attrs.setdefault("help", "Confirm the action without prompting.") + return option(*(param_decls or ("--yes",)), **attrs)(f) + + return decorator + + +def password_option(*param_decls, **attrs): + """Shortcut for password prompts. + + This is equivalent to decorating a function with :func:`option` with + the following parameters:: + + @click.command() + @click.option('--password', prompt=True, confirmation_prompt=True, + hide_input=True) + def changeadmin(password): + pass + """ + + def decorator(f): + attrs.setdefault("prompt", True) + attrs.setdefault("confirmation_prompt", True) + attrs.setdefault("hide_input", True) + return option(*(param_decls or ("--password",)), **attrs)(f) + + return decorator + + +def version_option(version=None, *param_decls, **attrs): + """Adds a ``--version`` option which immediately ends the program + printing out the version number. This is implemented as an eager + option that prints the version and exits the program in the callback. + + :param version: the version number to show. If not provided Click + attempts an auto discovery via setuptools. + :param prog_name: the name of the program (defaults to autodetection) + :param message: custom message to show instead of the default + (``'%(prog)s, version %(version)s'``) + :param others: everything else is forwarded to :func:`option`. 
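+
+    A usage sketch (the version string and program name below are
+    assumptions)::
+
+        @click.command()
+        @click.version_option('1.0.0', prog_name='mytool')
+        def cli():
+            pass
+
+        # Running "mytool --version" prints "mytool, version 1.0.0".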
+ """ + if version is None: + if hasattr(sys, "_getframe"): + module = sys._getframe(1).f_globals.get("__name__") + else: + module = "" + + def decorator(f): + prog_name = attrs.pop("prog_name", None) + message = attrs.pop("message", "%(prog)s, version %(version)s") + + def callback(ctx, param, value): + if not value or ctx.resilient_parsing: + return + prog = prog_name + if prog is None: + prog = ctx.find_root().info_name + ver = version + if ver is None: + try: + import pkg_resources + except ImportError: + pass + else: + for dist in pkg_resources.working_set: + scripts = dist.get_entry_map().get("console_scripts") or {} + for _, entry_point in iteritems(scripts): + if entry_point.module_name == module: + ver = dist.version + break + if ver is None: + raise RuntimeError("Could not determine version") + echo(message % {"prog": prog, "version": ver}, color=ctx.color) + ctx.exit() + + attrs.setdefault("is_flag", True) + attrs.setdefault("expose_value", False) + attrs.setdefault("is_eager", True) + attrs.setdefault("help", "Show the version and exit.") + attrs["callback"] = callback + return option(*(param_decls or ("--version",)), **attrs)(f) + + return decorator + + +def help_option(*param_decls, **attrs): + """Adds a ``--help`` option which immediately ends the program + printing out the help page. This is usually unnecessary to add as + this is added by default to all commands unless suppressed. + + Like :func:`version_option`, this is implemented as eager option that + prints in the callback and exits. + + All arguments are forwarded to :func:`option`. + """ + + def decorator(f): + def callback(ctx, param, value): + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + attrs.setdefault("is_flag", True) + attrs.setdefault("expose_value", False) + attrs.setdefault("help", "Show this message and exit.") + attrs.setdefault("is_eager", True) + attrs["callback"] = callback + return option(*(param_decls or ("--help",)), **attrs)(f) + + return decorator diff --git a/minor_project/lib/python3.6/site-packages/click/exceptions.py b/minor_project/lib/python3.6/site-packages/click/exceptions.py new file mode 100644 index 0000000..592ee38 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/exceptions.py @@ -0,0 +1,253 @@ +from ._compat import filename_to_ui +from ._compat import get_text_stderr +from ._compat import PY2 +from .utils import echo + + +def _join_param_hints(param_hint): + if isinstance(param_hint, (tuple, list)): + return " / ".join(repr(x) for x in param_hint) + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception + exit_code = 1 + + def __init__(self, message): + ctor_msg = message + if PY2: + if ctor_msg is not None: + ctor_msg = ctor_msg.encode("utf-8") + Exception.__init__(self, ctor_msg) + self.message = message + + def format_message(self): + return self.message + + def __str__(self): + return self.message + + if PY2: + __unicode__ = __str__ + + def __str__(self): + return self.message.encode("utf-8") + + def show(self, file=None): + if file is None: + file = get_text_stderr() + echo("Error: {}".format(self.format_message()), file=file) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. 
Click will + fill in the context automatically in some situations. + """ + + exit_code = 2 + + def __init__(self, message, ctx=None): + ClickException.__init__(self, message) + self.ctx = ctx + self.cmd = self.ctx.command if self.ctx else None + + def show(self, file=None): + if file is None: + file = get_text_stderr() + color = None + hint = "" + if self.cmd is not None and self.cmd.get_help_option(self.ctx) is not None: + hint = "Try '{} {}' for help.\n".format( + self.ctx.command_path, self.ctx.help_option_names[0] + ) + if self.ctx is not None: + color = self.ctx.color + echo("{}\n{}".format(self.ctx.get_usage(), hint), file=file, color=color) + echo("Error: {}".format(self.format_message()), file=file, color=color) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__(self, message, ctx=None, param=None, param_hint=None): + UsageError.__init__(self, message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self): + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) + else: + return "Invalid value: {}".format(self.message) + param_hint = _join_param_hints(param_hint) + + return "Invalid value for {}: {}".format(param_hint, self.message) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, message=None, ctx=None, param=None, param_hint=None, param_type=None + ): + BadParameter.__init__(self, message, ctx, param, param_hint) + self.param_type = param_type + + def format_message(self): + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) + else: + param_hint = None + param_hint = _join_param_hints(param_hint) + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += ". {}".format(msg_extra) + else: + msg = msg_extra + + return "Missing {}{}{}{}".format( + param_type, + " {}".format(param_hint) if param_hint else "", + ". 
" if msg else ".", + msg or "", + ) + + def __str__(self): + if self.message is None: + param_name = self.param.name if self.param else None + return "missing parameter: {}".format(param_name) + else: + return self.message + + if PY2: + __unicode__ = __str__ + + def __str__(self): + return self.__unicode__().encode("utf-8") + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. versionadded:: 4.0 + """ + + def __init__(self, option_name, message=None, possibilities=None, ctx=None): + if message is None: + message = "no such option: {}".format(option_name) + UsageError.__init__(self, message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self): + bits = [self.message] + if self.possibilities: + if len(self.possibilities) == 1: + bits.append("Did you mean {}?".format(self.possibilities[0])) + else: + possibilities = sorted(self.possibilities) + bits.append("(Possible options: {})".format(", ".join(possibilities))) + return " ".join(bits) + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__(self, option_name, message, ctx=None): + UsageError.__init__(self, message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + def __init__(self, message, ctx=None): + UsageError.__init__(self, message, ctx) + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename, hint=None): + ui_filename = filename_to_ui(filename) + if hint is None: + hint = "unknown error" + ClickException.__init__(self, hint) + self.ui_filename = ui_filename + self.filename = filename + + def format_message(self): + return "Could not open file {}: {}".format(self.ui_filename, self.message) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + + __slots__ = ("exit_code",) + + def __init__(self, code=0): + self.exit_code = code diff --git a/minor_project/lib/python3.6/site-packages/click/formatting.py b/minor_project/lib/python3.6/site-packages/click/formatting.py new file mode 100644 index 0000000..319c7f6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/formatting.py @@ -0,0 +1,283 @@ +from contextlib import contextmanager + +from ._compat import term_len +from .parser import split_opt +from .termui import get_terminal_size + +# Can force a width. 
This is used by the test system +FORCED_WIDTH = None + + +def measure_table(rows): + widths = {} + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows(rows, col_count): + for row in rows: + row = tuple(row) + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text, width=78, initial_indent="", subsequent_indent="", preserve_paragraphs=False +): + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p = [] + buf = [] + indent = None + + def _flush_par(): + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter(object): + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. + """ + + def __init__(self, indent_increment=2, width=None, max_width=None): + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(get_terminal_size()[0], max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer = [] + + def write(self, string): + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self): + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self): + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage(self, prog, args="", prefix="Usage: "): + """Writes a usage line into the buffer. + + :param prog: the program name. 
+ :param args: whitespace separated list of arguments. + :param prefix: the prefix for the first line. + """ + usage_prefix = "{:>{w}}{} ".format(prefix, prog, w=self.current_indent) + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading): + """Writes a heading into the buffer.""" + self.write("{:>{w}}{}:\n".format("", heading, w=self.current_indent)) + + def write_paragraph(self): + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text): + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + text_width = max(self.width - self.current_indent, 11) + indent = " " * self.current_indent + self.write( + wrap_text( + text, + text_width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl(self, rows, col_max=30, col_spacing=2): + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. + """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write("{:>{w}}{}".format("", first, w=self.current_indent)) + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write("{}\n".format(lines[0])) + + for line in lines[1:]: + self.write( + "{:>{w}}{}\n".format( + "", line, w=first_col + self.current_indent + ) + ) + + if len(lines) > 1: + # separate long help from next option + self.write("\n") + else: + self.write("\n") + + @contextmanager + def section(self, name): + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. 
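+
+        A minimal sketch of direct use, where ``formatter`` is an instance
+        of this class and the row below is an illustrative placeholder::
+
+            with formatter.section('Options'):
+                formatter.write_dl([('--count', 'Number of repetitions.')])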
+ """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self): + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self): + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options): + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + for opt in options: + prefix = split_opt(opt)[0] + if prefix == "/": + any_prefix_is_slash = True + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + + rv = ", ".join(x[1] for x in rv) + return rv, any_prefix_is_slash diff --git a/minor_project/lib/python3.6/site-packages/click/globals.py b/minor_project/lib/python3.6/site-packages/click/globals.py new file mode 100644 index 0000000..1649f9a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/globals.py @@ -0,0 +1,47 @@ +from threading import local + +_local = local() + + +def get_current_context(silent=False): + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. + """ + try: + return _local.stack[-1] + except (AttributeError, IndexError): + if not silent: + raise RuntimeError("There is no active click context.") + + +def push_context(ctx): + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context(): + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color=None): + """"Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + ctx = get_current_context(silent=True) + if ctx is not None: + return ctx.color diff --git a/minor_project/lib/python3.6/site-packages/click/parser.py b/minor_project/lib/python3.6/site-packages/click/parser.py new file mode 100644 index 0000000..f43ebfe --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/parser.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. 
Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" +import re +from collections import deque + +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + + +def _unpack_args(args, nargs_spec): + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. + """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv = [] + spos = None + + def _fetch(c): + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def _error_opt_args(nargs, opt): + if nargs == 1: + raise BadOptionUsage(opt, "{} option requires an argument".format(opt)) + raise BadOptionUsage(opt, "{} option requires {} arguments".format(opt, nargs)) + + +def split_opt(opt): + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt, ctx): + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return prefix + ctx.token_normalize_func(opt) + + +def split_arg_string(string): + """Given an argument string this attempts to split it into small parts.""" + rv = [] + for match in re.finditer( + r"('([^'\\]*(?:\\.[^'\\]*)*)'|\"([^\"\\]*(?:\\.[^\"\\]*)*)\"|\S+)\s*", + string, + re.S, + ): + arg = match.group().strip() + if arg[:1] == arg[-1:] and arg[:1] in "\"'": + arg = arg[1:-1].encode("ascii", "backslashreplace").decode("unicode-escape") + try: + arg = type(string)(arg) + except UnicodeError: + pass + rv.append(arg) + return rv + + +class Option(object): + def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None): + self._short_opts = [] + self._long_opts = [] + self.prefixes = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError("Invalid start character for option ({})".format(opt)) + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self): + return self.action in ("store", "append") + + def process(self, value, state): + 
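+        # Dispatch on the configured parser action: "store"/"store_const"
+        # assign a single value, "append"/"append_const" accumulate into a
+        # list, and "count" increments a running counter.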
if self.action == "store": + state.opts[self.dest] = value + elif self.action == "store_const": + state.opts[self.dest] = self.const + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 + else: + raise ValueError("unknown action '{}'".format(self.action)) + state.order.append(self.obj) + + +class Argument(object): + def __init__(self, dest, nargs=1, obj=None): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process(self, value, state): + if self.nargs > 1: + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + elif holes != 0: + raise BadArgumentUsage( + "argument {} takes {} values".format(self.dest, self.nargs) + ) + state.opts[self.dest] = value + state.order.append(self.obj) + + +class ParsingState(object): + def __init__(self, rargs): + self.opts = {} + self.largs = [] + self.rargs = rargs + self.order = [] + + +class OptionParser(object): + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx=None): + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. + self.allow_interspersed_args = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options = False + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + self._short_opt = {} + self._long_opt = {} + self._opt_prefixes = {"-", "--"} + self._args = [] + + def add_option(self, opts, dest, action=None, nargs=1, const=None, obj=None): + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``appnd_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + if obj is None: + obj = dest + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(opts, dest, action=action, nargs=nargs, const=const, obj=obj) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument(self, dest, nargs=1, obj=None): + """Adds a positional argument named `dest` to the parser. 
+ + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + if obj is None: + obj = dest + self._args.append(Argument(dest=dest, nargs=nargs, obj=obj)) + + def parse_args(self, args): + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state): + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state): + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). + # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt(self, opt, explicit_value, state): + if opt not in self._long_opt: + possibilities = [word for word in self._long_opt if word.startswith(opt)] + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. 
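+            # An explicit value comes from "--name=value" style input,
+            # split off earlier in _process_opts().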
+ if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + nargs = option.nargs + if len(state.rargs) < nargs: + _error_opt_args(nargs, opt) + elif nargs == 1: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + elif explicit_value is not None: + raise BadOptionUsage(opt, "{} option does not take a value".format(opt)) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg, state): + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(prefix + ch, self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + nargs = option.nargs + if len(state.rargs) < nargs: + _error_opt_args(nargs, opt) + elif nargs == 1: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we re-combinate the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append("{}{}".format(prefix, "".join(unknown_options))) + + def _process_opts(self, arg, state): + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. 
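+            # e.g. "-fval" falls through to the short-option matcher below,
+            # while "--foo" (a two-character prefix) keeps the NoSuchOption.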
+ if arg[:2] not in self._opt_prefixes: + return self._match_short_opt(arg, state) + if not self.ignore_unknown_options: + raise + state.largs.append(arg) diff --git a/minor_project/lib/python3.6/site-packages/click/termui.py b/minor_project/lib/python3.6/site-packages/click/termui.py new file mode 100644 index 0000000..02ef9e9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/termui.py @@ -0,0 +1,681 @@ +import inspect +import io +import itertools +import os +import struct +import sys + +from ._compat import DEFAULT_COLUMNS +from ._compat import get_winterm_size +from ._compat import isatty +from ._compat import raw_input +from ._compat import string_types +from ._compat import strip_ansi +from ._compat import text_type +from ._compat import WIN +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import Path +from .utils import echo +from .utils import LazyFile + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. +visible_prompt_func = raw_input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt): + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text, suffix, show_default=False, default=None, show_choices=True, type=None +): + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += " ({})".format(", ".join(map(str, type.choices))) + if default is not None and show_default: + prompt = "{} [{}]".format(prompt, _format_default(default)) + return prompt + suffix + + +def _format_default(default): + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name + + return default + + +def prompt( + text, + default=None, + hide_input=False, + confirmation_prompt=False, + type=None, + value_proc=None, + prompt_suffix=": ", + show_default=True, + err=False, + show_choices=True, +): + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending a interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. + + .. versionadded:: 7.0 + Added the show_choices parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: asks for confirmation for the value. + :param type: the type to use to check the value against. + :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. 
+ :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + """ + result = None + + def prompt_func(text): + f = hidden_prompt_func if hide_input else visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text, nl=False, err=err) + return f("") + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt( + text, prompt_suffix, show_default, default, show_choices, type + ) + + while 1: + while 1: + value = prompt_func(prompt) + if value: + break + elif default is not None: + if isinstance(value_proc, Path): + # validate Path default value(exists, dir_okay etc.) + value = default + break + return default + try: + result = value_proc(value) + except UsageError as e: + echo("Error: {}".format(e.message), err=err) # noqa: B306 + continue + if not confirmation_prompt: + return result + while 1: + value2 = prompt_func("Repeat for confirmation: ") + if value2: + break + if value == value2: + return result + echo("Error: the two entered values do not match", err=err) + + +def confirm( + text, default=False, abort=False, prompt_suffix=": ", show_default=True, err=False +): + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param text: the question to ask. + :param default: the default for the prompt. + :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + prompt = _build_prompt( + text, prompt_suffix, show_default, "Y/n" if default else "y/N" + ) + while 1: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt, nl=False, err=err) + value = visible_prompt_func("").lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif value == "": + rv = default + else: + echo("Error: invalid input", err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def get_terminal_size(): + """Returns the current size of the terminal as tuple in the form + ``(width, height)`` in columns and rows. 
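+
+    For example, the result unpacks directly::
+
+        width, height = get_terminal_size()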
+ """ + # If shutil has get_terminal_size() (Python 3.3 and later) use that + if sys.version_info >= (3, 3): + import shutil + + shutil_get_terminal_size = getattr(shutil, "get_terminal_size", None) + if shutil_get_terminal_size: + sz = shutil_get_terminal_size() + return sz.columns, sz.lines + + # We provide a sensible default for get_winterm_size() when being invoked + # inside a subprocess. Without this, it would not provide a useful input. + if get_winterm_size is not None: + size = get_winterm_size() + if size == (0, 0): + return (79, 24) + else: + return size + + def ioctl_gwinsz(fd): + try: + import fcntl + import termios + + cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234")) + except Exception: + return + return cr + + cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2) + if not cr: + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + try: + cr = ioctl_gwinsz(fd) + finally: + os.close(fd) + except Exception: + pass + if not cr or not cr[0] or not cr[1]: + cr = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", DEFAULT_COLUMNS)) + return int(cr[1]), int(cr[0]) + + +def echo_via_pager(text_or_generator, color=None): + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. + :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = text_or_generator() + elif isinstance(text_or_generator, string_types): + i = [text_or_generator] + else: + i = iter(text_or_generator) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, string_types) else text_type(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar( + iterable=None, + length=None, + label=None, + show_eta=True, + show_percent=None, + show_pos=False, + item_show_func=None, + fill_char="#", + empty_char="-", + bar_template="%(label)s [%(bar)s] %(info)s", + info_sep=" ", + width=36, + file=None, + color=None, +): + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already created. With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + Note: The progress bar is currently designed for use cases where the + total progress can be expected to take at least several seconds. + Because of this, the ProgressBar class object won't display + progress that is considered too fast, and progress where the time + between steps is less than a second. + + No printing must happen or the progress bar will be unintentionally + destroyed. 
+ + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `color` parameter. Added a `update` method to the + progressbar object. + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: a function called with the current item which + can return a string to show the current item + next to the progress bar. Note that the current + item can be `None`! + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: the file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + ) + + +def clear(): + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + # If we're on Windows and we don't have colorama available, then we + # clear the screen by shelling out. Otherwise we can use an escape + # sequence. 
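+    # "\033[2J" erases the visible screen and "\033[1;1H" moves the cursor
+    # to row 1, column 1 (top left).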
+ if WIN: + os.system("cls") + else: + sys.stdout.write("\033[2J\033[1;1H") + + +def style( + text, + fg=None, + bg=None, + bold=None, + dim=None, + underline=None, + blink=None, + reverse=None, + reset=True, +): + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + .. versionadded:: 2.0 + + .. versionadded:: 7.0 + Added support for bright colors. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + """ + bits = [] + if fg: + try: + bits.append("\033[{}m".format(_ansi_colors[fg])) + except KeyError: + raise TypeError("Unknown color '{}'".format(fg)) + if bg: + try: + bits.append("\033[{}m".format(_ansi_colors[bg] + 10)) + except KeyError: + raise TypeError("Unknown color '{}'".format(bg)) + if bold is not None: + bits.append("\033[{}m".format(1 if bold else 22)) + if dim is not None: + bits.append("\033[{}m".format(2 if dim else 22)) + if underline is not None: + bits.append("\033[{}m".format(4 if underline else 24)) + if blink is not None: + bits.append("\033[{}m".format(5 if blink else 25)) + if reverse is not None: + bits.append("\033[{}m".format(7 if reverse else 27)) + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text): + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho(message=None, file=None, nl=True, err=False, color=None, **styles): + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + .. 
versionadded:: 2.0 + """ + if message is not None: + message = style(message, **styles) + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit( + text=None, editor=None, env=None, require_save=True, extension=".txt", filename=None +): + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. + + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + + editor = Editor( + editor=editor, env=env, require_save=require_save, extension=extension + ) + if filename is None: + return editor.edit(text) + editor.edit_file(filename) + + +def launch(url, wait=False, locate=False): + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: waits for the program to stop. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. +_getchar = None + + +def getchar(echo=False): + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. 
+ This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + f = _getchar + if f is None: + from ._termui_impl import getchar as f + return f(echo) + + +def raw_terminal(): + from ._termui_impl import raw_terminal as f + + return f() + + +def pause(info="Press any key to continue ...", err=False): + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. versionadded:: 4.0 + Added the `err` parameter. + + :param info: the info string to print before pausing. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/minor_project/lib/python3.6/site-packages/click/testing.py b/minor_project/lib/python3.6/site-packages/click/testing.py new file mode 100644 index 0000000..a3dba3b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/testing.py @@ -0,0 +1,382 @@ +import contextlib +import os +import shlex +import shutil +import sys +import tempfile + +from . import formatting +from . import termui +from . import utils +from ._compat import iteritems +from ._compat import PY2 +from ._compat import string_types + + +if PY2: + from cStringIO import StringIO +else: + import io + from ._compat import _find_binary_reader + + +class EchoingStdin(object): + def __init__(self, input, output): + self._input = input + self._output = output + + def __getattr__(self, x): + return getattr(self._input, x) + + def _echo(self, rv): + self._output.write(rv) + return rv + + def read(self, n=-1): + return self._echo(self._input.read(n)) + + def readline(self, n=-1): + return self._echo(self._input.readline(n)) + + def readlines(self): + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self): + return iter(self._echo(x) for x in self._input) + + def __repr__(self): + return repr(self._input) + + +def make_input_stream(input, charset): + # Is already an input stream. + if hasattr(input, "read"): + if PY2: + return input + rv = _find_binary_reader(input) + if rv is not None: + return rv + raise TypeError("Could not find binary reader for input stream.") + + if input is None: + input = b"" + elif not isinstance(input, bytes): + input = input.encode(charset) + if PY2: + return StringIO(input) + return io.BytesIO(input) + + +class Result(object): + """Holds the captured result of an invoked CLI script.""" + + def __init__( + self, runner, stdout_bytes, stderr_bytes, exit_code, exception, exc_info=None + ): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. + self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or None if not available + self.stderr_bytes = stderr_bytes + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. 
+ self.exception = exception + #: The traceback + self.exc_info = exc_info + + @property + def output(self): + """The (standard) output as unicode string.""" + return self.stdout + + @property + def stdout(self): + """The standard output as unicode string.""" + return self.stdout_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + @property + def stderr(self): + """The standard error as unicode string.""" + if self.stderr_bytes is None: + raise ValueError("stderr not separately captured") + return self.stderr_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + def __repr__(self): + return "<{} {}>".format( + type(self).__name__, repr(self.exception) if self.exception else "okay" + ) + + +class CliRunner(object): + """The CLI runner provides functionality to invoke a Click command line + script for unittesting purposes in a isolated environment. This only + works in single-threaded systems without any concurrency as it changes the + global interpreter state. + + :param charset: the character set for the input and output data. This is + UTF-8 by default and should not be changed currently as + the reporting to Click only works in Python 2 properly. + :param env: a dictionary with environment variables for overriding. + :param echo_stdin: if this is set to `True`, then reading from stdin writes + to stdout. This is useful for showing examples in + some circumstances. Note that regular prompts + will automatically echo the input. + :param mix_stderr: if this is set to `False`, then stdout and stderr are + preserved as independent streams. This is useful for + Unix-philosophy apps that have predictable stdout and + noisy stderr, such that each may be measured + independently + """ + + def __init__(self, charset=None, env=None, echo_stdin=False, mix_stderr=True): + if charset is None: + charset = "utf-8" + self.charset = charset + self.env = env or {} + self.echo_stdin = echo_stdin + self.mix_stderr = mix_stderr + + def get_default_prog_name(self, cli): + """Given a command object it will return the default program name + for it. The default is the `name` attribute or ``"root"`` if not + set. + """ + return cli.name or "root" + + def make_env(self, overrides=None): + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation(self, input=None, env=None, color=False): + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up stdin with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + .. versionadded:: 4.0 + The ``color`` parameter was added. + + :param input: the input stream to put into sys.stdin. + :param env: the environment overrides as dictionary. + :param color: whether the output should contain color codes. The + application can still override this explicitly. 
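+
+        A rough sketch of direct use, where ``runner`` is assumed to be an
+        existing :class:`CliRunner` instance::
+
+            with runner.isolation(input='hello') as (out, err):
+                pass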
+ """ + input = make_input_stream(input, self.charset) + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = formatting.FORCED_WIDTH + formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + if PY2: + bytes_output = StringIO() + if self.echo_stdin: + input = EchoingStdin(input, bytes_output) + sys.stdout = bytes_output + if not self.mix_stderr: + bytes_error = StringIO() + sys.stderr = bytes_error + else: + bytes_output = io.BytesIO() + if self.echo_stdin: + input = EchoingStdin(input, bytes_output) + input = io.TextIOWrapper(input, encoding=self.charset) + sys.stdout = io.TextIOWrapper(bytes_output, encoding=self.charset) + if not self.mix_stderr: + bytes_error = io.BytesIO() + sys.stderr = io.TextIOWrapper(bytes_error, encoding=self.charset) + + if self.mix_stderr: + sys.stderr = sys.stdout + + sys.stdin = input + + def visible_input(prompt=None): + sys.stdout.write(prompt or "") + val = input.readline().rstrip("\r\n") + sys.stdout.write("{}\n".format(val)) + sys.stdout.flush() + return val + + def hidden_input(prompt=None): + sys.stdout.write("{}\n".format(prompt or "")) + sys.stdout.flush() + return input.readline().rstrip("\r\n") + + def _getchar(echo): + char = sys.stdin.read(1) + if echo: + sys.stdout.write(char) + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi(stream=None, color=None): + if color is None: + return not default_color + return not color + + old_visible_prompt_func = termui.visible_prompt_func + old_hidden_prompt_func = termui.hidden_prompt_func + old__getchar_func = termui._getchar + old_should_strip_ansi = utils.should_strip_ansi + termui.visible_prompt_func = visible_input + termui.hidden_prompt_func = hidden_input + termui._getchar = _getchar + utils.should_strip_ansi = should_strip_ansi + + old_env = {} + try: + for key, value in iteritems(env): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, not self.mix_stderr and bytes_error) + finally: + for key, value in iteritems(old_env): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + termui.visible_prompt_func = old_visible_prompt_func + termui.hidden_prompt_func = old_hidden_prompt_func + termui._getchar = old__getchar_func + utils.should_strip_ansi = old_should_strip_ansi + formatting.FORCED_WIDTH = old_forced_width + + def invoke( + self, + cli, + args=None, + input=None, + env=None, + catch_exceptions=True, + color=False, + **extra + ): + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. + + This returns a :class:`Result` object. + + .. versionadded:: 3.0 + The ``catch_exceptions`` parameter was added. + + .. versionchanged:: 3.0 + The result object now has an `exc_info` attribute with the + traceback if available. + + .. versionadded:: 4.0 + The ``color`` parameter was added. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. 
+ :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + exception = None + exit_code = 0 + + if isinstance(args, string_types): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + exit_code = e.code + if exit_code is None: + exit_code = 0 + + if exit_code != 0: + exception = e + + if not isinstance(exit_code, int): + sys.stdout.write(str(exit_code)) + sys.stdout.write("\n") + exit_code = 1 + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + if self.mix_stderr: + stderr = None + else: + stderr = outstreams[1].getvalue() + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, + ) + + @contextlib.contextmanager + def isolated_filesystem(self): + """A context manager that creates a temporary folder and changes + the current working directory to it for isolated filesystem tests. + """ + cwd = os.getcwd() + t = tempfile.mkdtemp() + os.chdir(t) + try: + yield t + finally: + os.chdir(cwd) + try: + shutil.rmtree(t) + except (OSError, IOError): # noqa: B014 + pass diff --git a/minor_project/lib/python3.6/site-packages/click/types.py b/minor_project/lib/python3.6/site-packages/click/types.py new file mode 100644 index 0000000..505c39f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/types.py @@ -0,0 +1,762 @@ +import os +import stat +from datetime import datetime + +from ._compat import _get_argv_encoding +from ._compat import filename_to_ui +from ._compat import get_filesystem_encoding +from ._compat import get_streerror +from ._compat import open_stream +from ._compat import PY2 +from ._compat import text_type +from .exceptions import BadParameter +from .utils import LazyFile +from .utils import safecall + + +class ParamType(object): + """Helper for converting values through types. The following is + necessary for a valid type: + + * it needs a name + * it needs to pass through None unchanged + * it needs to convert from a string + * it needs to convert its result type through unchanged + (eg: needs to be idempotent) + * it needs to be able to deal with param and context being `None`. + This can be the case when the object is used with prompt + inputs. + """ + + is_composite = False + + #: the descriptive name of this type + name = None + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). 
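+    #:
+    #: A small sketch of how the splitter is applied (the values are
+    #: illustrative)::
+    #:
+    #:     import click
+    #:
+    #:     click.STRING.split_envvar_value("eggs  spam")   # -> ["eggs", "spam"]
+    #:     click.File().split_envvar_value("a.txt:b.txt")  # -> ["a.txt", "b.txt"] on Unix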
+ envvar_list_splitter = None + + def __call__(self, value, param=None, ctx=None): + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param): + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param): + """Optionally might return extra information about a missing + parameter. + + .. versionadded:: 2.0 + """ + + def convert(self, value, param, ctx): + """Converts the value. This is not invoked for values that are + `None` (the missing value). + """ + return value + + def split_envvar_value(self, rv): + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail(self, message, param=None, ctx=None): + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self): + raise NotImplementedError() + + +class FuncParamType(ParamType): + def __init__(self, func): + self.name = func.__name__ + self.func = func + + def convert(self, value, param, ctx): + try: + return self.func(value) + except ValueError: + try: + value = text_type(value) + except UnicodeError: + value = str(value).decode("utf-8", "replace") + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = "text" + + def convert(self, value, param, ctx): + return value + + def __repr__(self): + return "UNPROCESSED" + + +class StringParamType(ParamType): + name = "text" + + def convert(self, value, param, ctx): + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = get_filesystem_encoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode("utf-8", "replace") + else: + value = value.decode("utf-8", "replace") + return value + return value + + def __repr__(self): + return "STRING" + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. Other iterables + (like generators) may lead to surprising results. + + The resulting value will always be one of the originally passed choices + regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` + being specified. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. 
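+
+    A minimal usage sketch (``paint`` is a hypothetical command used only
+    for illustration)::
+
+        import click
+
+        @click.command()
+        @click.option(
+            "--color",
+            type=click.Choice(["red", "green", "blue"], case_sensitive=False),
+        )
+        def paint(color):
+            click.echo("Painting in {}".format(color))
+
+        # "--color RED" is accepted and `color` arrives as "red", because the
+        # originally declared choice is always returned.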
+ """ + + name = "choice" + + def __init__(self, choices, case_sensitive=True): + self.choices = choices + self.case_sensitive = case_sensitive + + def get_metavar(self, param): + return "[{}]".format("|".join(self.choices)) + + def get_missing_message(self, param): + return "Choose from:\n\t{}.".format(",\n\t".join(self.choices)) + + def convert(self, value, param, ctx): + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = {choice: choice for choice in self.choices} + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = { + ctx.token_normalize_func(normed_choice): original + for normed_choice, original in normed_choices.items() + } + + if not self.case_sensitive: + if PY2: + lower = str.lower + else: + lower = str.casefold + + normed_value = lower(normed_value) + normed_choices = { + lower(normed_choice): original + for normed_choice, original in normed_choices.items() + } + + if normed_value in normed_choices: + return normed_choices[normed_value] + + self.fail( + "invalid choice: {}. (choose from {})".format( + value, ", ".join(self.choices) + ), + param, + ctx, + ) + + def __repr__(self): + return "Choice('{}')".format(list(self.choices)) + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. + """ + + name = "datetime" + + def __init__(self, formats=None): + self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"] + + def get_metavar(self, param): + return "[{}]".format("|".join(self.formats)) + + def _try_to_convert_date(self, value, format): + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert(self, value, param, ctx): + # Exact match + for format in self.formats: + dtime = self._try_to_convert_date(value, format) + if dtime: + return dtime + + self.fail( + "invalid datetime format: {}. (choose from {})".format( + value, ", ".join(self.formats) + ) + ) + + def __repr__(self): + return "DateTime" + + +class IntParamType(ParamType): + name = "integer" + + def convert(self, value, param, ctx): + try: + return int(value) + except ValueError: + self.fail("{} is not a valid integer".format(value), param, ctx) + + def __repr__(self): + return "INT" + + +class IntRange(IntParamType): + """A parameter that works similar to :data:`click.INT` but restricts + the value to fit into a range. The default behavior is to fail if the + value falls outside the range, but it can also be silently clamped + between the two edges. + + See :ref:`ranges` for an example. 
+ """ + + name = "integer range" + + def __init__(self, min=None, max=None, clamp=False): + self.min = min + self.max = max + self.clamp = clamp + + def convert(self, value, param, ctx): + rv = IntParamType.convert(self, value, param, ctx) + if self.clamp: + if self.min is not None and rv < self.min: + return self.min + if self.max is not None and rv > self.max: + return self.max + if ( + self.min is not None + and rv < self.min + or self.max is not None + and rv > self.max + ): + if self.min is None: + self.fail( + "{} is bigger than the maximum valid value {}.".format( + rv, self.max + ), + param, + ctx, + ) + elif self.max is None: + self.fail( + "{} is smaller than the minimum valid value {}.".format( + rv, self.min + ), + param, + ctx, + ) + else: + self.fail( + "{} is not in the valid range of {} to {}.".format( + rv, self.min, self.max + ), + param, + ctx, + ) + return rv + + def __repr__(self): + return "IntRange({}, {})".format(self.min, self.max) + + +class FloatParamType(ParamType): + name = "float" + + def convert(self, value, param, ctx): + try: + return float(value) + except ValueError: + self.fail( + "{} is not a valid floating point value".format(value), param, ctx + ) + + def __repr__(self): + return "FLOAT" + + +class FloatRange(FloatParamType): + """A parameter that works similar to :data:`click.FLOAT` but restricts + the value to fit into a range. The default behavior is to fail if the + value falls outside the range, but it can also be silently clamped + between the two edges. + + See :ref:`ranges` for an example. + """ + + name = "float range" + + def __init__(self, min=None, max=None, clamp=False): + self.min = min + self.max = max + self.clamp = clamp + + def convert(self, value, param, ctx): + rv = FloatParamType.convert(self, value, param, ctx) + if self.clamp: + if self.min is not None and rv < self.min: + return self.min + if self.max is not None and rv > self.max: + return self.max + if ( + self.min is not None + and rv < self.min + or self.max is not None + and rv > self.max + ): + if self.min is None: + self.fail( + "{} is bigger than the maximum valid value {}.".format( + rv, self.max + ), + param, + ctx, + ) + elif self.max is None: + self.fail( + "{} is smaller than the minimum valid value {}.".format( + rv, self.min + ), + param, + ctx, + ) + else: + self.fail( + "{} is not in the valid range of {} to {}.".format( + rv, self.min, self.max + ), + param, + ctx, + ) + return rv + + def __repr__(self): + return "FloatRange({}, {})".format(self.min, self.max) + + +class BoolParamType(ParamType): + name = "boolean" + + def convert(self, value, param, ctx): + if isinstance(value, bool): + return bool(value) + value = value.lower() + if value in ("true", "t", "1", "yes", "y"): + return True + elif value in ("false", "f", "0", "no", "n"): + return False + self.fail("{} is not a valid boolean".format(value), param, ctx) + + def __repr__(self): + return "BOOL" + + +class UUIDParameterType(ParamType): + name = "uuid" + + def convert(self, value, param, ctx): + import uuid + + try: + if PY2 and isinstance(value, text_type): + value = value.encode("ascii") + return uuid.UUID(value) + except ValueError: + self.fail("{} is not a valid UUID value".format(value), param, ctx) + + def __repr__(self): + return "UUID" + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. 
The special value ``-`` + indicates stdin or stdout depending on the mode. + + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Starting with Click 2.0, files can also be opened atomically in which + case all writes go into a separate file in the same folder and upon + completion the file will be moved over to the original location. This + is useful if a file regularly read by other users is modified. + + See :ref:`file-args` for more information. + """ + + name = "filename" + envvar_list_splitter = os.path.pathsep + + def __init__( + self, mode="r", encoding=None, errors="strict", lazy=None, atomic=False + ): + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def resolve_lazy_flag(self, value): + if self.lazy is not None: + return self.lazy + if value == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert(self, value, param, ctx): + try: + if hasattr(value, "read") or hasattr(value, "write"): + return value + + lazy = self.resolve_lazy_flag(value) + + if lazy: + f = LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + if ctx is not None: + ctx.call_on_close(f.close_intelligently) + return f + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + return f + except (IOError, OSError) as e: # noqa: B014 + self.fail( + "Could not open file: {}: {}".format( + filename_to_ui(value), get_streerror(e) + ), + param, + ctx, + ) + + +class Path(ParamType): + """The path type is similar to the :class:`File` type but it performs + different checks. First of all, instead of returning an open file + handle it returns just the filename. Secondly, it can perform various + basic checks about what the file or directory should be. + + .. versionchanged:: 6.0 + `allow_dash` was added. + + :param exists: if set to true, the file or directory needs to exist for + this value to be valid. If this is not required and a + file does indeed not exist, then all further checks are + silently skipped. + :param file_okay: controls if a file is a possible value. + :param dir_okay: controls if a directory is a possible value. + :param writable: if true, a writable check is performed. + :param readable: if true, a readable check is performed. + :param resolve_path: if this is true, then the path is fully resolved + before the value is passed onwards. This means + that it's absolute and symlinks are resolved. 
It + will not expand a tilde-prefix, as this is + supposed to be done by the shell only. + :param allow_dash: If this is set to `True`, a single dash to indicate + standard streams is permitted. + :param path_type: optionally a string type that should be used to + represent the path. The default is `None` which + means the return value will be either bytes or + unicode depending on what makes most sense given the + input data Click deals with. + """ + + envvar_list_splitter = os.path.pathsep + + def __init__( + self, + exists=False, + file_okay=True, + dir_okay=True, + writable=False, + readable=True, + resolve_path=False, + allow_dash=False, + path_type=None, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.writable = writable + self.readable = readable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name = "file" + self.path_type = "File" + elif self.dir_okay and not self.file_okay: + self.name = "directory" + self.path_type = "Directory" + else: + self.name = "path" + self.path_type = "Path" + + def coerce_path_result(self, rv): + if self.type is not None and not isinstance(rv, self.type): + if self.type is text_type: + rv = rv.decode(get_filesystem_encoding()) + else: + rv = rv.encode(get_filesystem_encoding()) + return rv + + def convert(self, value, param, ctx): + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + rv = os.path.realpath(rv) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + "{} '{}' does not exist.".format( + self.path_type, filename_to_ui(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + "{} '{}' is a file.".format(self.path_type, filename_to_ui(value)), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + "{} '{}' is a directory.".format( + self.path_type, filename_to_ui(value) + ), + param, + ctx, + ) + if self.writable and not os.access(value, os.W_OK): + self.fail( + "{} '{}' is not writable.".format( + self.path_type, filename_to_ui(value) + ), + param, + ctx, + ) + if self.readable and not os.access(value, os.R_OK): + self.fail( + "{} '{}' is not readable.".format( + self.path_type, filename_to_ui(value) + ), + param, + ctx, + ) + + return self.coerce_path_result(rv) + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. + """ + + def __init__(self, types): + self.types = [convert_type(ty) for ty in types] + + @property + def name(self): + return "<{}>".format(" ".join(ty.name for ty in self.types)) + + @property + def arity(self): + return len(self.types) + + def convert(self, value, param, ctx): + if len(value) != len(self.types): + raise TypeError( + "It would appear that nargs is set to conflict with the" + " composite type arity." 
+ ) + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty, default=None): + """Converts a callable or python type into the most appropriate + param type. + """ + guessed_type = False + if ty is None and default is not None: + if isinstance(default, tuple): + ty = tuple(map(type, default)) + else: + ty = type(default) + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + if isinstance(ty, ParamType): + return ty + if ty is text_type or ty is str or ty is None: + return STRING + if ty is int: + return INT + # Booleans are only okay if not guessed. This is done because for + # flags the default value is actually a bit of a lie in that it + # indicates which of the flags is the one we want. See get_default() + # for more information. + if ty is bool and not guessed_type: + return BOOL + if ty is float: + return FLOAT + if guessed_type: + return STRING + + # Catch a common mistake + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + "Attempted to use an uninstantiated parameter type ({}).".format(ty) + ) + except TypeError: + pass + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but internally +#: no string conversion takes place. This is necessary to achieve the +#: same bytes/unicode behavior on Python 2/3 in situations where you want +#: to not convert argument types. This is usually useful when working +#: with file paths as they can appear in bytes and unicode. +#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. 
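+#:
+#: A minimal usage sketch (the option name is illustrative)::
+#:
+#:     import click
+#:
+#:     @click.command()
+#:     @click.option("--token", type=click.UUID)
+#:     def show(token):
+#:         click.echo(token.hex)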
+UUID = UUIDParameterType() diff --git a/minor_project/lib/python3.6/site-packages/click/utils.py b/minor_project/lib/python3.6/site-packages/click/utils.py new file mode 100644 index 0000000..79265e7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/click/utils.py @@ -0,0 +1,455 @@ +import os +import sys + +from ._compat import _default_text_stderr +from ._compat import _default_text_stdout +from ._compat import auto_wrap_for_ansi +from ._compat import binary_streams +from ._compat import filename_to_ui +from ._compat import get_filesystem_encoding +from ._compat import get_streerror +from ._compat import is_bytes +from ._compat import open_stream +from ._compat import PY2 +from ._compat import should_strip_ansi +from ._compat import string_types +from ._compat import strip_ansi +from ._compat import text_streams +from ._compat import text_type +from ._compat import WIN +from .globals import resolve_color_default + +if not PY2: + from ._compat import _find_binary_writer +elif WIN: + from ._winconsole import _get_windows_argv + from ._winconsole import _hash_py_argv + from ._winconsole import _initial_argv_hash + +echo_native_types = string_types + (bytes, bytearray) + + +def _posixify(name): + return "-".join(name.split()).lower() + + +def safecall(func): + """Wraps a function so that it swallows exceptions.""" + + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except Exception: + pass + + return wrapper + + +def make_str(value): + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(get_filesystem_encoding()) + except UnicodeError: + return value.decode("utf-8", "replace") + return text_type(value) + + +def make_default_short_help(help, max_length=45): + """Return a condensed version of help string.""" + words = help.split() + total_length = 0 + result = [] + done = False + + for word in words: + if word[-1:] == ".": + done = True + new_length = 1 + len(word) if result else len(word) + if total_length + new_length > max_length: + result.append("...") + done = True + else: + if result: + result.append(" ") + result.append(word) + if done: + break + total_length += new_length + + return "".join(result) + + +class LazyFile(object): + """A lazy file works like a regular file but it does not fully open + the file but it does perform some basic checks early to see if the + filename parameter does make sense. This is useful for safely opening + files for writing. + """ + + def __init__( + self, filename, mode="r", encoding=None, errors="strict", atomic=False + ): + self.name = filename + self.mode = mode + self.encoding = encoding + self.errors = errors + self.atomic = atomic + + if filename == "-": + self._f, self.should_close = open_stream(filename, mode, encoding, errors) + else: + if "r" in mode: + # Open and close the file in case we're opening it for + # reading so that we can catch at least some errors in + # some cases early. + open(filename, mode).close() + self._f = None + self.should_close = True + + def __getattr__(self, name): + return getattr(self.open(), name) + + def __repr__(self): + if self._f is not None: + return repr(self._f) + return "".format(self.name, self.mode) + + def open(self): + """Opens the file if it's not yet open. This call might fail with + a :exc:`FileError`. Not handling this error will produce an error + that Click shows. 
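+
+        A minimal usage sketch (``report.txt`` is an arbitrary example
+        path)::
+
+            from click.utils import LazyFile
+
+            lf = LazyFile("report.txt", mode="w")
+            lf.write("done\n")          # first I/O triggers the real open()
+            lf.close_intelligently()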
+ """ + if self._f is not None: + return self._f + try: + rv, self.should_close = open_stream( + self.name, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + except (IOError, OSError) as e: # noqa: E402 + from .exceptions import FileError + + raise FileError(self.name, hint=get_streerror(e)) + self._f = rv + return rv + + def close(self): + """Closes the underlying file, no matter what.""" + if self._f is not None: + self._f.close() + + def close_intelligently(self): + """This function only closes the file if it was opened by the lazy + file wrapper. For instance this will never close stdin. + """ + if self.should_close: + self.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + self.close_intelligently() + + def __iter__(self): + self.open() + return iter(self._f) + + +class KeepOpenFile(object): + def __init__(self, file): + self._file = file + + def __getattr__(self, name): + return getattr(self._file, name) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + pass + + def __repr__(self): + return repr(self._file) + + def __iter__(self): + return iter(self._file) + + +def echo(message=None, file=None, nl=True, err=False, color=None): + """Prints a message plus a newline to the given file or stdout. On + first sight, this looks like the print function, but it has improved + support for handling Unicode and binary data that does not fail no + matter how badly configured the system is. + + Primarily it means that you can print binary data as well as Unicode + data on both 2.x and 3.x to the given file in the most appropriate way + possible. This is a very carefree function in that it will try its + best to not fail. As of Click 6.0 this includes support for unicode + output on the Windows console. + + In addition to that, if `colorama`_ is installed, the echo function will + also support clever handling of ANSI codes. Essentially it will then + do the following: + + - add transparent handling of ANSI color codes on Windows. + - hide ANSI codes automatically if the destination file is not a + terminal. + + .. _colorama: https://pypi.org/project/colorama/ + + .. versionchanged:: 6.0 + As of Click 6.0 the echo function will properly support unicode + output on the windows console. Not that click does not modify + the interpreter in any way which means that `sys.stdout` or the + print statement or function will still not provide unicode support. + + .. versionchanged:: 2.0 + Starting with version 2.0 of Click, the echo function will work + with colorama if it's installed. + + .. versionadded:: 3.0 + The `err` parameter was added. + + .. versionchanged:: 4.0 + Added the `color` flag. + + :param message: the message to print + :param file: the file to write to (defaults to ``stdout``) + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``. This is faster and easier than calling + :func:`get_text_stderr` yourself. + :param nl: if set to `True` (the default) a newline is printed afterwards. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # Convert non bytes/text into the native string type. 
+ if message is not None and not isinstance(message, echo_native_types): + message = text_type(message) + + if nl: + message = message or u"" + if isinstance(message, text_type): + message += u"\n" + else: + message += b"\n" + + # If there is a message, and we're in Python 3, and the value looks + # like bytes, we manually need to find the binary stream and write the + # message in there. This is done separately so that most stream + # types will work as you would expect. Eg: you can write to StringIO + # for other cases. + if message and not PY2 and is_bytes(message): + binary_file = _find_binary_writer(file) + if binary_file is not None: + file.flush() + binary_file.write(message) + binary_file.flush() + return + + # ANSI-style support. If there is no message or we are dealing with + # bytes nothing is happening. If we are connected to a file we want + # to strip colors. If we are on windows we either wrap the stream + # to strip the color or we use the colorama support to translate the + # ansi codes to API calls. + if message and not is_bytes(message): + color = resolve_color_default(color) + if should_strip_ansi(file, color): + message = strip_ansi(message) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file) + elif not color: + message = strip_ansi(message) + + if message: + file.write(message) + file.flush() + + +def get_binary_stream(name): + """Returns a system stream for byte processing. This essentially + returns the stream from the sys module with the given name but it + solves some compatibility issues between different Python versions. + Primarily this function is necessary for getting binary streams on + Python 3. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError("Unknown standard stream '{}'".format(name)) + return opener() + + +def get_text_stream(name, encoding=None, errors="strict"): + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts on Python 3 + for already correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. + """ + opener = text_streams.get(name) + if opener is None: + raise TypeError("Unknown standard stream '{}'".format(name)) + return opener(encoding, errors) + + +def open_file( + filename, mode="r", encoding=None, errors="strict", lazy=False, atomic=False +): + """This is similar to how the :class:`File` works but for manual + usage. Files are opened non lazy by default. This can open regular + files as well as stdin/stdout if ``'-'`` is passed. + + If stdin/stdout is returned the stream is wrapped so that the context + manager will not close the stream accidentally. This makes it possible + to always use the function like this without having to worry to + accidentally close a standard stream:: + + with open_file(filename) as f: + ... + + .. versionadded:: 3.0 + + :param filename: the name of the file to open (or ``'-'`` for stdin/stdout). + :param mode: the mode in which to open the file. + :param encoding: the encoding to use. + :param errors: the error handling for this file. + :param lazy: can be flipped to true to open the file lazily. 
+ :param atomic: in atomic mode writes go into a temporary file and it's + moved on close. + """ + if lazy: + return LazyFile(filename, mode, encoding, errors, atomic=atomic) + f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) + if not should_close: + f = KeepOpenFile(f) + return f + + +def get_os_args(): + """This returns the argument part of sys.argv in the most appropriate + form for processing. What this means is that this return value is in + a format that works for Click to process but does not necessarily + correspond well to what's actually standard for the interpreter. + + On most environments the return value is ``sys.argv[:1]`` unchanged. + However if you are on Windows and running Python 2 the return value + will actually be a list of unicode strings instead because the + default behavior on that platform otherwise will not be able to + carry all possible values that sys.argv can have. + + .. versionadded:: 6.0 + """ + # We can only extract the unicode argv if sys.argv has not been + # changed since the startup of the application. + if PY2 and WIN and _initial_argv_hash == _hash_py_argv(): + return _get_windows_argv() + return sys.argv[1:] + + +def format_filename(filename, shorten=False): + """Formats a filename for user display. The main purpose of this + function is to ensure that the filename can be displayed at all. This + will decode the filename to unicode if necessary in a way that it will + not fail. Optionally, it can shorten the filename to not include the + full path to the filename. + + :param filename: formats a filename for UI display. This will also convert + the filename into unicode without failing. + :param shorten: this optionally shortens the filename to strip of the + path that leads up to it. + """ + if shorten: + filename = os.path.basename(filename) + return filename_to_ui(filename) + + +def get_app_dir(app_name, roaming=True, force_posix=False): + r"""Returns the config folder for the application. The default behavior + is to return whatever is most appropriate for the operating system. + + To give you an idea, for an app called ``"Foo Bar"``, something like + the following folders could be returned: + + Mac OS X: + ``~/Library/Application Support/Foo Bar`` + Mac OS X (POSIX): + ``~/.foo-bar`` + Unix: + ``~/.config/foo-bar`` + Unix (POSIX): + ``~/.foo-bar`` + Win XP (roaming): + ``C:\Documents and Settings\\Local Settings\Application Data\Foo Bar`` + Win XP (not roaming): + ``C:\Documents and Settings\\Application Data\Foo Bar`` + Win 7 (roaming): + ``C:\Users\\AppData\Roaming\Foo Bar`` + Win 7 (not roaming): + ``C:\Users\\AppData\Local\Foo Bar`` + + .. versionadded:: 2.0 + + :param app_name: the application name. This should be properly capitalized + and can contain whitespace. + :param roaming: controls if the folder should be roaming or not on Windows. + Has no affect otherwise. + :param force_posix: if this is set to `True` then on any POSIX system the + folder will be stored in the home folder with a leading + dot instead of the XDG config home or darwin's + application support folder. 
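+
+    A minimal usage sketch (``"Foo Bar"`` and the config file name are
+    illustrative)::
+
+        import os
+        import click
+
+        cfg_dir = click.get_app_dir("Foo Bar")
+        cfg_path = os.path.join(cfg_dir, "config.ini")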
+ """ + if WIN: + key = "APPDATA" if roaming else "LOCALAPPDATA" + folder = os.environ.get(key) + if folder is None: + folder = os.path.expanduser("~") + return os.path.join(folder, app_name) + if force_posix: + return os.path.join(os.path.expanduser("~/.{}".format(_posixify(app_name)))) + if sys.platform == "darwin": + return os.path.join( + os.path.expanduser("~/Library/Application Support"), app_name + ) + return os.path.join( + os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), + _posixify(app_name), + ) + + +class PacifyFlushWrapper(object): + """This wrapper is used to catch and suppress BrokenPipeErrors resulting + from ``.flush()`` being called on broken pipe during the shutdown/final-GC + of the Python interpreter. Notably ``.flush()`` is always called on + ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any + other cleanup code, and the case where the underlying file is not a broken + pipe, all calls and attributes are proxied. + """ + + def __init__(self, wrapped): + self.wrapped = wrapped + + def flush(self): + try: + self.wrapped.flush() + except IOError as e: + import errno + + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr): + return getattr(self.wrapped, attr) diff --git a/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/DESCRIPTION.rst b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..e118723 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,3 @@ +UNKNOWN + + diff --git a/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/INSTALLER b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/METADATA b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/METADATA new file mode 100644 index 0000000..b232cee --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/METADATA @@ -0,0 +1,25 @@ +Metadata-Version: 2.0 +Name: cycler +Version: 0.10.0 +Summary: Composable style cycles +Home-page: http://github.com/matplotlib/cycler +Author: Thomas A Caswell +Author-email: matplotlib-users@python.org +License: BSD +Keywords: cycle kwargs +Platform: Cross platform (Linux +Platform: Mac OSX +Platform: Windows) +Classifier: Development Status :: 4 - Beta +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Requires-Dist: six + +UNKNOWN + + diff --git a/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/RECORD b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/RECORD new file mode 100644 index 0000000..44cc61e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/RECORD @@ -0,0 +1,9 @@ +__pycache__/cycler.cpython-36.pyc,, +cycler-0.10.0.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10 +cycler-0.10.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 
+cycler-0.10.0.dist-info/METADATA,sha256=aWX1pyo7D2hSDNZ2Q6Zl7DxhUQdpyu1O5uNABnvz000,722 +cycler-0.10.0.dist-info/RECORD,, +cycler-0.10.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +cycler-0.10.0.dist-info/metadata.json,sha256=CCBpg-KQU-VRL1unJcHPWKQeQbB84G0j7-BeCj7YUbU,875 +cycler-0.10.0.dist-info/top_level.txt,sha256=D8BVVDdAAelLb2FOEz7lDpc6-AL21ylKPrMhtG6yzyE,7 +cycler.py,sha256=ed3G39unvVEBrBZVDwnE0FFroRNsOLkbJ_TwIT5CjCU,15959 diff --git a/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/WHEEL b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/metadata.json b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/metadata.json new file mode 100644 index 0000000..6082129 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5"], "extensions": {"python.details": {"contacts": [{"email": "matplotlib-users@python.org", "name": "Thomas A Caswell", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://github.com/matplotlib/cycler"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "keywords": ["cycle", "kwargs"], "license": "BSD", "metadata_version": "2.0", "name": "cycler", "platform": "Cross platform (Linux", "run_requires": [{"requires": ["six"]}], "summary": "Composable style cycles", "version": "0.10.0"} \ No newline at end of file diff --git a/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/top_level.txt b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/top_level.txt new file mode 100644 index 0000000..2254644 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/cycler-0.10.0.dist-info/top_level.txt @@ -0,0 +1 @@ +cycler diff --git a/minor_project/lib/python3.6/site-packages/cycler.py b/minor_project/lib/python3.6/site-packages/cycler.py new file mode 100644 index 0000000..3c3eb2d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/cycler.py @@ -0,0 +1,558 @@ +""" +Cycler +====== + +Cycling through combinations of values, producing dictionaries. 
+ +You can add cyclers:: + + from cycler import cycler + cc = (cycler(color=list('rgb')) + + cycler(linestyle=['-', '--', '-.'])) + for d in cc: + print(d) + +Results in:: + + {'color': 'r', 'linestyle': '-'} + {'color': 'g', 'linestyle': '--'} + {'color': 'b', 'linestyle': '-.'} + + +You can multiply cyclers:: + + from cycler import cycler + cc = (cycler(color=list('rgb')) * + cycler(linestyle=['-', '--', '-.'])) + for d in cc: + print(d) + +Results in:: + + {'color': 'r', 'linestyle': '-'} + {'color': 'r', 'linestyle': '--'} + {'color': 'r', 'linestyle': '-.'} + {'color': 'g', 'linestyle': '-'} + {'color': 'g', 'linestyle': '--'} + {'color': 'g', 'linestyle': '-.'} + {'color': 'b', 'linestyle': '-'} + {'color': 'b', 'linestyle': '--'} + {'color': 'b', 'linestyle': '-.'} +""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import six +from itertools import product, cycle +from six.moves import zip, reduce +from operator import mul, add +import copy + +__version__ = '0.10.0' + + +def _process_keys(left, right): + """ + Helper function to compose cycler keys + + Parameters + ---------- + left, right : iterable of dictionaries or None + The cyclers to be composed + Returns + ------- + keys : set + The keys in the composition of the two cyclers + """ + l_peek = next(iter(left)) if left is not None else {} + r_peek = next(iter(right)) if right is not None else {} + l_key = set(l_peek.keys()) + r_key = set(r_peek.keys()) + if l_key & r_key: + raise ValueError("Can not compose overlapping cycles") + return l_key | r_key + + +class Cycler(object): + """ + Composable cycles + + This class has compositions methods: + + ``+`` + for 'inner' products (zip) + + ``+=`` + in-place ``+`` + + ``*`` + for outer products (itertools.product) and integer multiplication + + ``*=`` + in-place ``*`` + + and supports basic slicing via ``[]`` + + Parameters + ---------- + left : Cycler or None + The 'left' cycler + + right : Cycler or None + The 'right' cycler + + op : func or None + Function which composes the 'left' and 'right' cyclers. + + """ + def __call__(self): + return cycle(self) + + def __init__(self, left, right=None, op=None): + """Semi-private init + + Do not use this directly, use `cycler` function instead. + """ + if isinstance(left, Cycler): + self._left = Cycler(left._left, left._right, left._op) + elif left is not None: + # Need to copy the dictionary or else that will be a residual + # mutable that could lead to strange errors + self._left = [copy.copy(v) for v in left] + else: + self._left = None + + if isinstance(right, Cycler): + self._right = Cycler(right._left, right._right, right._op) + elif right is not None: + # Need to copy the dictionary or else that will be a residual + # mutable that could lead to strange errors + self._right = [copy.copy(v) for v in right] + else: + self._right = None + + self._keys = _process_keys(self._left, self._right) + self._op = op + + @property + def keys(self): + """ + The keys this Cycler knows about + """ + return set(self._keys) + + def change_key(self, old, new): + """ + Change a key in this cycler to a new name. + Modification is performed in-place. + + Does nothing if the old key is the same as the new key. + Raises a ValueError if the new key is already a key. + Raises a KeyError if the old key isn't a key. 
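+
+        Examples
+        --------
+        A minimal sketch of renaming a key in place:
+
+        >>> c = cycler('color', ['r', 'g', 'b'])
+        >>> c.change_key('color', 'edgecolor')
+        >>> c
+        cycler('edgecolor', ['r', 'g', 'b'])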
+ + """ + if old == new: + return + if new in self._keys: + raise ValueError("Can't replace %s with %s, %s is already a key" % + (old, new, new)) + if old not in self._keys: + raise KeyError("Can't replace %s with %s, %s is not a key" % + (old, new, old)) + + self._keys.remove(old) + self._keys.add(new) + + if self._right is not None and old in self._right.keys: + self._right.change_key(old, new) + + # self._left should always be non-None + # if self._keys is non-empty. + elif isinstance(self._left, Cycler): + self._left.change_key(old, new) + else: + # It should be completely safe at this point to + # assume that the old key can be found in each + # iteration. + self._left = [{new: entry[old]} for entry in self._left] + + def _compose(self): + """ + Compose the 'left' and 'right' components of this cycle + with the proper operation (zip or product as of now) + """ + for a, b in self._op(self._left, self._right): + out = dict() + out.update(a) + out.update(b) + yield out + + @classmethod + def _from_iter(cls, label, itr): + """ + Class method to create 'base' Cycler objects + that do not have a 'right' or 'op' and for which + the 'left' object is not another Cycler. + + Parameters + ---------- + label : str + The property key. + + itr : iterable + Finite length iterable of the property values. + + Returns + ------- + cycler : Cycler + New 'base' `Cycler` + """ + ret = cls(None) + ret._left = list({label: v} for v in itr) + ret._keys = set([label]) + return ret + + def __getitem__(self, key): + # TODO : maybe add numpy style fancy slicing + if isinstance(key, slice): + trans = self.by_key() + return reduce(add, (_cycler(k, v[key]) + for k, v in six.iteritems(trans))) + else: + raise ValueError("Can only use slices with Cycler.__getitem__") + + def __iter__(self): + if self._right is None: + return iter(dict(l) for l in self._left) + + return self._compose() + + def __add__(self, other): + """ + Pair-wise combine two equal length cycles (zip) + + Parameters + ---------- + other : Cycler + The second Cycler + """ + if len(self) != len(other): + raise ValueError("Can only add equal length cycles, " + "not {0} and {1}".format(len(self), len(other))) + return Cycler(self, other, zip) + + def __mul__(self, other): + """ + Outer product of two cycles (`itertools.product`) or integer + multiplication. 
+ + Parameters + ---------- + other : Cycler or int + The second Cycler or integer + """ + if isinstance(other, Cycler): + return Cycler(self, other, product) + elif isinstance(other, int): + trans = self.by_key() + return reduce(add, (_cycler(k, v*other) + for k, v in six.iteritems(trans))) + else: + return NotImplemented + + def __rmul__(self, other): + return self * other + + def __len__(self): + op_dict = {zip: min, product: mul} + if self._right is None: + return len(self._left) + l_len = len(self._left) + r_len = len(self._right) + return op_dict[self._op](l_len, r_len) + + def __iadd__(self, other): + """ + In-place pair-wise combine two equal length cycles (zip) + + Parameters + ---------- + other : Cycler + The second Cycler + """ + if not isinstance(other, Cycler): + raise TypeError("Cannot += with a non-Cycler object") + # True shallow copy of self is fine since this is in-place + old_self = copy.copy(self) + self._keys = _process_keys(old_self, other) + self._left = old_self + self._op = zip + self._right = Cycler(other._left, other._right, other._op) + return self + + def __imul__(self, other): + """ + In-place outer product of two cycles (`itertools.product`) + + Parameters + ---------- + other : Cycler + The second Cycler + """ + if not isinstance(other, Cycler): + raise TypeError("Cannot *= with a non-Cycler object") + # True shallow copy of self is fine since this is in-place + old_self = copy.copy(self) + self._keys = _process_keys(old_self, other) + self._left = old_self + self._op = product + self._right = Cycler(other._left, other._right, other._op) + return self + + def __eq__(self, other): + """ + Check equality + """ + if len(self) != len(other): + return False + if self.keys ^ other.keys: + return False + + return all(a == b for a, b in zip(self, other)) + + def __repr__(self): + op_map = {zip: '+', product: '*'} + if self._right is None: + lab = self.keys.pop() + itr = list(v[lab] for v in self) + return "cycler({lab!r}, {itr!r})".format(lab=lab, itr=itr) + else: + op = op_map.get(self._op, '?') + msg = "({left!r} {op} {right!r})" + return msg.format(left=self._left, op=op, right=self._right) + + def _repr_html_(self): + # an table showing the value of each key through a full cycle + output = "" + sorted_keys = sorted(self.keys, key=repr) + for key in sorted_keys: + output += "".format(key=key) + for d in iter(self): + output += "" + for k in sorted_keys: + output += "".format(val=d[k]) + output += "" + output += "
" + return output + + def by_key(self): + """Values by key + + This returns the transposed values of the cycler. Iterating + over a `Cycler` yields dicts with a single value for each key, + this method returns a `dict` of `list` which are the values + for the given key. + + The returned value can be used to create an equivalent `Cycler` + using only `+`. + + Returns + ------- + transpose : dict + dict of lists of the values for each key. + """ + + # TODO : sort out if this is a bottle neck, if there is a better way + # and if we care. + + keys = self.keys + # change this to dict comprehension when drop 2.6 + out = dict((k, list()) for k in keys) + + for d in self: + for k in keys: + out[k].append(d[k]) + return out + + # for back compatibility + _transpose = by_key + + def simplify(self): + """Simplify the Cycler + + Returned as a composition using only sums (no multiplications) + + Returns + ------- + simple : Cycler + An equivalent cycler using only summation""" + # TODO: sort out if it is worth the effort to make sure this is + # balanced. Currently it is is + # (((a + b) + c) + d) vs + # ((a + b) + (c + d)) + # I would believe that there is some performance implications + + trans = self.by_key() + return reduce(add, (_cycler(k, v) for k, v in six.iteritems(trans))) + + def concat(self, other): + """Concatenate this cycler and an other. + + The keys must match exactly. + + This returns a single Cycler which is equivalent to + `itertools.chain(self, other)` + + Examples + -------- + + >>> num = cycler('a', range(3)) + >>> let = cycler('a', 'abc') + >>> num.concat(let) + cycler('a', [0, 1, 2, 'a', 'b', 'c']) + + Parameters + ---------- + other : `Cycler` + The `Cycler` to concatenate to this one. + + Returns + ------- + ret : `Cycler` + The concatenated `Cycler` + """ + return concat(self, other) + + +def concat(left, right): + """Concatenate two cyclers. + + The keys must match exactly. + + This returns a single Cycler which is equivalent to + `itertools.chain(left, right)` + + Examples + -------- + + >>> num = cycler('a', range(3)) + >>> let = cycler('a', 'abc') + >>> num.concat(let) + cycler('a', [0, 1, 2, 'a', 'b', 'c']) + + Parameters + ---------- + left, right : `Cycler` + The two `Cycler` instances to concatenate + + Returns + ------- + ret : `Cycler` + The concatenated `Cycler` + """ + if left.keys != right.keys: + msg = '\n\t'.join(["Keys do not match:", + "Intersection: {both!r}", + "Disjoint: {just_one!r}"]).format( + both=left.keys & right.keys, + just_one=left.keys ^ right.keys) + + raise ValueError(msg) + + _l = left.by_key() + _r = right.by_key() + return reduce(add, (_cycler(k, _l[k] + _r[k]) for k in left.keys)) + + +def cycler(*args, **kwargs): + """ + Create a new `Cycler` object from a single positional argument, + a pair of positional arguments, or the combination of keyword arguments. + + cycler(arg) + cycler(label1=itr1[, label2=iter2[, ...]]) + cycler(label, itr) + + Form 1 simply copies a given `Cycler` object. + + Form 2 composes a `Cycler` as an inner product of the + pairs of keyword arguments. In other words, all of the + iterables are cycled simultaneously, as if through zip(). + + Form 3 creates a `Cycler` from a label and an iterable. + This is useful for when the label cannot be a keyword argument + (e.g., an integer or a name that has a space in it). + + Parameters + ---------- + arg : Cycler + Copy constructor for Cycler (does a shallow copy of iterables). + + label : name + The property key. 
In the 2-arg form of the function, + the label can be any hashable object. In the keyword argument + form of the function, it must be a valid python identifier. + + itr : iterable + Finite length iterable of the property values. + Can be a single-property `Cycler` that would + be like a key change, but as a shallow copy. + + Returns + ------- + cycler : Cycler + New `Cycler` for the given property + + """ + if args and kwargs: + raise TypeError("cyl() can only accept positional OR keyword " + "arguments -- not both.") + + if len(args) == 1: + if not isinstance(args[0], Cycler): + raise TypeError("If only one positional argument given, it must " + " be a Cycler instance.") + return Cycler(args[0]) + elif len(args) == 2: + return _cycler(*args) + elif len(args) > 2: + raise TypeError("Only a single Cycler can be accepted as the lone " + "positional argument. Use keyword arguments instead.") + + if kwargs: + return reduce(add, (_cycler(k, v) for k, v in six.iteritems(kwargs))) + + raise TypeError("Must have at least a positional OR keyword arguments") + + +def _cycler(label, itr): + """ + Create a new `Cycler` object from a property name and + iterable of values. + + Parameters + ---------- + label : hashable + The property key. + + itr : iterable + Finite length iterable of the property values. + + Returns + ------- + cycler : Cycler + New `Cycler` for the given property + """ + if isinstance(itr, Cycler): + keys = itr.keys + if len(keys) != 1: + msg = "Can not create Cycler from a multi-property Cycler" + raise ValueError(msg) + + lab = keys.pop() + # Doesn't need to be a new list because + # _from_iter() will be creating that new list anyway. + itr = (v[lab] for v in itr) + + return Cycler._from_iter(label, itr) diff --git a/minor_project/lib/python3.6/site-packages/dateutil/__init__.py b/minor_project/lib/python3.6/site-packages/dateutil/__init__.py new file mode 100644 index 0000000..0defb82 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +try: + from ._version import version as __version__ +except ImportError: + __version__ = 'unknown' + +__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', + 'utils', 'zoneinfo'] diff --git a/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..7b83b46 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/_common.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/_common.cpython-36.pyc new file mode 100644 index 0000000..d180ed2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/_common.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/_version.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/_version.cpython-36.pyc new file mode 100644 index 0000000..ea87196 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/_version.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/easter.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/easter.cpython-36.pyc new file mode 100644 index 0000000..9f16159 Binary files 
/dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/easter.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/relativedelta.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/relativedelta.cpython-36.pyc new file mode 100644 index 0000000..e2e8ddc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/relativedelta.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/rrule.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/rrule.cpython-36.pyc new file mode 100644 index 0000000..cdaab73 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/rrule.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/tzwin.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/tzwin.cpython-36.pyc new file mode 100644 index 0000000..6091d5a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/tzwin.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/utils.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/utils.cpython-36.pyc new file mode 100644 index 0000000..62f1f3f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/__pycache__/utils.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/_common.py b/minor_project/lib/python3.6/site-packages/dateutil/_common.py new file mode 100644 index 0000000..4eb2659 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/_common.py @@ -0,0 +1,43 @@ +""" +Common code used in multiple modules. +""" + + +class weekday(object): + __slots__ = ["weekday", "n"] + + def __init__(self, weekday, n=None): + self.weekday = weekday + self.n = n + + def __call__(self, n): + if n == self.n: + return self + else: + return self.__class__(self.weekday, n) + + def __eq__(self, other): + try: + if self.weekday != other.weekday or self.n != other.n: + return False + except AttributeError: + return False + return True + + def __hash__(self): + return hash(( + self.weekday, + self.n, + )) + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] + if not self.n: + return s + else: + return "%s(%+d)" % (s, self.n) + +# vim:ts=4:sw=4:et diff --git a/minor_project/lib/python3.6/site-packages/dateutil/_version.py b/minor_project/lib/python3.6/site-packages/dateutil/_version.py new file mode 100644 index 0000000..eac1209 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/_version.py @@ -0,0 +1,4 @@ +# coding: utf-8 +# file generated by setuptools_scm +# don't change, don't track in version control +version = '2.8.1' diff --git a/minor_project/lib/python3.6/site-packages/dateutil/easter.py b/minor_project/lib/python3.6/site-packages/dateutil/easter.py new file mode 100644 index 0000000..53b7c78 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/easter.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic easter computing method for any given year, using +Western, Orthodox or Julian algorithms. 
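For illustration, a short usage sketch of the easter() function that this module defines below; the dates shown follow from the algorithm as written:

    >>> from dateutil.easter import easter, EASTER_ORTHODOX
    >>> easter(2024)                          # Western method (the default)
    datetime.date(2024, 3, 31)
    >>> easter(2024, method=EASTER_ORTHODOX)  # Orthodox method
    datetime.date(2024, 5, 5)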
+""" + +import datetime + +__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] + +EASTER_JULIAN = 1 +EASTER_ORTHODOX = 2 +EASTER_WESTERN = 3 + + +def easter(year, method=EASTER_WESTERN): + """ + This method was ported from the work done by GM Arts, + on top of the algorithm by Claus Tondering, which was + based in part on the algorithm of Ouding (1940), as + quoted in "Explanatory Supplement to the Astronomical + Almanac", P. Kenneth Seidelmann, editor. + + This algorithm implements three different easter + calculation methods: + + 1 - Original calculation in Julian calendar, valid in + dates after 326 AD + 2 - Original method, with date converted to Gregorian + calendar, valid in years 1583 to 4099 + 3 - Revised method, in Gregorian calendar, valid in + years 1583 to 4099 as well + + These methods are represented by the constants: + + * ``EASTER_JULIAN = 1`` + * ``EASTER_ORTHODOX = 2`` + * ``EASTER_WESTERN = 3`` + + The default method is method 3. + + More about the algorithm may be found at: + + `GM Arts: Easter Algorithms `_ + + and + + `The Calendar FAQ: Easter `_ + + """ + + if not (1 <= method <= 3): + raise ValueError("invalid method") + + # g - Golden year - 1 + # c - Century + # h - (23 - Epact) mod 30 + # i - Number of days from March 21 to Paschal Full Moon + # j - Weekday for PFM (0=Sunday, etc) + # p - Number of days from March 21 to Sunday on or before PFM + # (-6 to 28 methods 1 & 3, to 56 for method 2) + # e - Extra days to add for method 2 (converting Julian + # date to Gregorian date) + + y = year + g = y % 19 + e = 0 + if method < 3: + # Old method + i = (19*g + 15) % 30 + j = (y + y//4 + i) % 7 + if method == 2: + # Extra dates to convert Julian to Gregorian date + e = 10 + if y > 1600: + e = e + y//100 - 16 - (y//100 - 16)//4 + else: + # New method + c = y//100 + h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 + i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) + j = (y + y//4 + i + 2 - c + c//4) % 7 + + # p can be from -6 to 56 corresponding to dates 22 March to 23 May + # (later dates apply to method 2, although 23 May never actually occurs) + p = i - j + e + d = 1 + (p + 27 + (p + 6)//40) % 31 + m = 3 + (p + 26)//30 + return datetime.date(int(y), int(m), int(d)) diff --git a/minor_project/lib/python3.6/site-packages/dateutil/parser/__init__.py b/minor_project/lib/python3.6/site-packages/dateutil/parser/__init__.py new file mode 100644 index 0000000..d174b0e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/parser/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +from ._parser import parse, parser, parserinfo, ParserError +from ._parser import DEFAULTPARSER, DEFAULTTZPARSER +from ._parser import UnknownTimezoneWarning + +from ._parser import __doc__ + +from .isoparser import isoparser, isoparse + +__all__ = ['parse', 'parser', 'parserinfo', + 'isoparse', 'isoparser', + 'ParserError', + 'UnknownTimezoneWarning'] + + +### +# Deprecate portions of the private interface so that downstream code that +# is improperly relying on it is given *some* notice. 
+ + +def __deprecated_private_func(f): + from functools import wraps + import warnings + + msg = ('{name} is a private function and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=f.__name__) + + @wraps(f) + def deprecated_func(*args, **kwargs): + warnings.warn(msg, DeprecationWarning) + return f(*args, **kwargs) + + return deprecated_func + +def __deprecate_private_class(c): + import warnings + + msg = ('{name} is a private class and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=c.__name__) + + class private_class(c): + __doc__ = c.__doc__ + + def __init__(self, *args, **kwargs): + warnings.warn(msg, DeprecationWarning) + super(private_class, self).__init__(*args, **kwargs) + + private_class.__name__ = c.__name__ + + return private_class + + +from ._parser import _timelex, _resultbase +from ._parser import _tzparser, _parsetz + +_timelex = __deprecate_private_class(_timelex) +_tzparser = __deprecate_private_class(_tzparser) +_resultbase = __deprecate_private_class(_resultbase) +_parsetz = __deprecated_private_func(_parsetz) diff --git a/minor_project/lib/python3.6/site-packages/dateutil/parser/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/parser/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..cf01e4c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/parser/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/parser/__pycache__/_parser.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/parser/__pycache__/_parser.cpython-36.pyc new file mode 100644 index 0000000..17f3394 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/parser/__pycache__/_parser.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/parser/__pycache__/isoparser.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/parser/__pycache__/isoparser.cpython-36.pyc new file mode 100644 index 0000000..9a1271b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/parser/__pycache__/isoparser.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/parser/_parser.py b/minor_project/lib/python3.6/site-packages/dateutil/parser/_parser.py new file mode 100644 index 0000000..458aa6a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/parser/_parser.py @@ -0,0 +1,1609 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic date/time string parser which is able to parse +most known formats to represent a date and/or time. + +This module attempts to be forgiving with regards to unlikely input formats, +returning a datetime object even for dates which are ambiguous. If an element +of a date/time stamp is omitted, the following rules are applied: + +- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour + on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is + specified. +- If a time zone is omitted, a timezone-naive datetime is returned. + +If any other elements are missing, they are taken from the +:class:`datetime.datetime` object passed to the parameter ``default``. If this +results in a day number exceeding the valid number of days per month, the +value falls back to the end of the month. 
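A minimal sketch of the default-filling rule described above; the date values and the `default` argument are illustrative:

    >>> from datetime import datetime
    >>> from dateutil.parser import parse
    >>> # The day comes from `default`, but the 31st does not exist in
    >>> # February, so it falls back to the end of the month.
    >>> parse("February", default=datetime(2021, 1, 31))
    datetime.datetime(2021, 2, 28, 0, 0)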
+ +Additional resources about date/time string formats can be found below: + +- `A summary of the international standard date and time notation + `_ +- `W3C Date and Time Formats `_ +- `Time Formats (Planetary Rings Node) `_ +- `CPAN ParseDate module + `_ +- `Java SimpleDateFormat Class + `_ +""" +from __future__ import unicode_literals + +import datetime +import re +import string +import time +import warnings + +from calendar import monthrange +from io import StringIO + +import six +from six import integer_types, text_type + +from decimal import Decimal + +from warnings import warn + +from .. import relativedelta +from .. import tz + +__all__ = ["parse", "parserinfo", "ParserError"] + + +# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth +# making public and/or figuring out if there is something we can +# take off their plate. +class _timelex(object): + # Fractional seconds are sometimes split by a comma + _split_decimal = re.compile("([.,])") + + def __init__(self, instream): + if six.PY2: + # In Python 2, we can't duck type properly because unicode has + # a 'decode' function, and we'd be double-decoding + if isinstance(instream, (bytes, bytearray)): + instream = instream.decode() + else: + if getattr(instream, 'decode', None) is not None: + instream = instream.decode() + + if isinstance(instream, text_type): + instream = StringIO(instream) + elif getattr(instream, 'read', None) is None: + raise TypeError('Parser must be a string or character stream, not ' + '{itype}'.format(itype=instream.__class__.__name__)) + + self.instream = instream + self.charstack = [] + self.tokenstack = [] + self.eof = False + + def get_token(self): + """ + This function breaks the time string into lexical units (tokens), which + can be parsed by the parser. Lexical units are demarcated by changes in + the character set, so any continuous string of letters is considered + one unit, any continuous string of numbers is considered one unit. + + The main complication arises from the fact that dots ('.') can be used + both as separators (e.g. "Sep.20.2009") or decimal points (e.g. + "4:30:21.447"). As such, it is necessary to read the full context of + any dot-separated strings before breaking it into tokens; as such, this + function maintains a "token stack", for when the ambiguous context + demands that multiple tokens be parsed at once. + """ + if self.tokenstack: + return self.tokenstack.pop(0) + + seenletters = False + token = None + state = None + + while not self.eof: + # We only realize that we've reached the end of a token when we + # find a character that's not part of the current token - since + # that character may be part of the next token, it's stored in the + # charstack. + if self.charstack: + nextchar = self.charstack.pop(0) + else: + nextchar = self.instream.read(1) + while nextchar == '\x00': + nextchar = self.instream.read(1) + + if not nextchar: + self.eof = True + break + elif not state: + # First character of the token - determines if we're starting + # to parse a word, a number or something else. + token = nextchar + if self.isword(nextchar): + state = 'a' + elif self.isnum(nextchar): + state = '0' + elif self.isspace(nextchar): + token = ' ' + break # emit token + else: + break # emit token + elif state == 'a': + # If we've already started reading a word, we keep reading + # letters until we find something that's not part of a word. + seenletters = True + if self.isword(nextchar): + token += nextchar + elif nextchar == '.': + token += nextchar + state = 'a.' 
+ else: + self.charstack.append(nextchar) + break # emit token + elif state == '0': + # If we've already started reading a number, we keep reading + # numbers until we find something that doesn't fit. + if self.isnum(nextchar): + token += nextchar + elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == 'a.': + # If we've seen some letters and a dot separator, continue + # parsing, and the tokens will be broken up later. + seenletters = True + if nextchar == '.' or self.isword(nextchar): + token += nextchar + elif self.isnum(nextchar) and token[-1] == '.': + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0.': + # If we've seen at least one dot separator, keep going, we'll + # break up the tokens later. + if nextchar == '.' or self.isnum(nextchar): + token += nextchar + elif self.isword(nextchar) and token[-1] == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + + if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or + token[-1] in '.,')): + l = self._split_decimal.split(token) + token = l[0] + for tok in l[1:]: + if tok: + self.tokenstack.append(tok) + + if state == '0.' and token.count('.') == 0: + token = token.replace(',', '.') + + return token + + def __iter__(self): + return self + + def __next__(self): + token = self.get_token() + if token is None: + raise StopIteration + + return token + + def next(self): + return self.__next__() # Python 2.x support + + @classmethod + def split(cls, s): + return list(cls(s)) + + @classmethod + def isword(cls, nextchar): + """ Whether or not the next character is part of a word """ + return nextchar.isalpha() + + @classmethod + def isnum(cls, nextchar): + """ Whether the next character is part of a number """ + return nextchar.isdigit() + + @classmethod + def isspace(cls, nextchar): + """ Whether the next character is whitespace """ + return nextchar.isspace() + + +class _resultbase(object): + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def _repr(self, classname): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (classname, ", ".join(l)) + + def __len__(self): + return (sum(getattr(self, attr) is not None + for attr in self.__slots__)) + + def __repr__(self): + return self._repr(self.__class__.__name__) + + +class parserinfo(object): + """ + Class which handles what inputs are accepted. Subclass this to customize + the language and acceptable values for each parameter. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. Default is ``False``. + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + Default is ``False``. 
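A small sketch of how dayfirst and yearfirst disambiguate a 3-integer date; the same options are accepted as keyword arguments by parse(), which is what is shown here:

    >>> from dateutil.parser import parse
    >>> parse("01/05/09")                  # default: month first
    datetime.datetime(2009, 1, 5, 0, 0)
    >>> parse("01/05/09", dayfirst=True)   # day first
    datetime.datetime(2009, 5, 1, 0, 0)
    >>> parse("01/05/09", yearfirst=True)  # year first
    datetime.datetime(2001, 5, 9, 0, 0)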
+ """ + + # m from a.m/p.m, t from ISO T separator + JUMP = [" ", ".", ",", ";", "-", "/", "'", + "at", "on", "and", "ad", "m", "t", "of", + "st", "nd", "rd", "th"] + + WEEKDAYS = [("Mon", "Monday"), + ("Tue", "Tuesday"), # TODO: "Tues" + ("Wed", "Wednesday"), + ("Thu", "Thursday"), # TODO: "Thurs" + ("Fri", "Friday"), + ("Sat", "Saturday"), + ("Sun", "Sunday")] + MONTHS = [("Jan", "January"), + ("Feb", "February"), # TODO: "Febr" + ("Mar", "March"), + ("Apr", "April"), + ("May", "May"), + ("Jun", "June"), + ("Jul", "July"), + ("Aug", "August"), + ("Sep", "Sept", "September"), + ("Oct", "October"), + ("Nov", "November"), + ("Dec", "December")] + HMS = [("h", "hour", "hours"), + ("m", "minute", "minutes"), + ("s", "second", "seconds")] + AMPM = [("am", "a"), + ("pm", "p")] + UTCZONE = ["UTC", "GMT", "Z", "z"] + PERTAIN = ["of"] + TZOFFSET = {} + # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate", + # "Anno Domini", "Year of Our Lord"] + + def __init__(self, dayfirst=False, yearfirst=False): + self._jump = self._convert(self.JUMP) + self._weekdays = self._convert(self.WEEKDAYS) + self._months = self._convert(self.MONTHS) + self._hms = self._convert(self.HMS) + self._ampm = self._convert(self.AMPM) + self._utczone = self._convert(self.UTCZONE) + self._pertain = self._convert(self.PERTAIN) + + self.dayfirst = dayfirst + self.yearfirst = yearfirst + + self._year = time.localtime().tm_year + self._century = self._year // 100 * 100 + + def _convert(self, lst): + dct = {} + for i, v in enumerate(lst): + if isinstance(v, tuple): + for v in v: + dct[v.lower()] = i + else: + dct[v.lower()] = i + return dct + + def jump(self, name): + return name.lower() in self._jump + + def weekday(self, name): + try: + return self._weekdays[name.lower()] + except KeyError: + pass + return None + + def month(self, name): + try: + return self._months[name.lower()] + 1 + except KeyError: + pass + return None + + def hms(self, name): + try: + return self._hms[name.lower()] + except KeyError: + return None + + def ampm(self, name): + try: + return self._ampm[name.lower()] + except KeyError: + return None + + def pertain(self, name): + return name.lower() in self._pertain + + def utczone(self, name): + return name.lower() in self._utczone + + def tzoffset(self, name): + if name in self._utczone: + return 0 + + return self.TZOFFSET.get(name) + + def convertyear(self, year, century_specified=False): + """ + Converts two-digit years to year within [-50, 49] + range of self._year (current local time) + """ + + # Function contract is that the year is always positive + assert year >= 0 + + if year < 100 and not century_specified: + # assume current century to start + year += self._century + + if year >= self._year + 50: # if too far in future + year -= 100 + elif year < self._year - 50: # if too far in past + year += 100 + + return year + + def validate(self, res): + # move to info + if res.year is not None: + res.year = self.convertyear(res.year, res.century_specified) + + if ((res.tzoffset == 0 and not res.tzname) or + (res.tzname == 'Z' or res.tzname == 'z')): + res.tzname = "UTC" + res.tzoffset = 0 + elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): + res.tzoffset = 0 + return True + + +class _ymd(list): + def __init__(self, *args, **kwargs): + super(self.__class__, self).__init__(*args, **kwargs) + self.century_specified = False + self.dstridx = None + self.mstridx = None + self.ystridx = None + + @property + def has_year(self): + return self.ystridx is not None + + @property + def has_month(self): + 
return self.mstridx is not None + + @property + def has_day(self): + return self.dstridx is not None + + def could_be_day(self, value): + if self.has_day: + return False + elif not self.has_month: + return 1 <= value <= 31 + elif not self.has_year: + # Be permissive, assume leap year + month = self[self.mstridx] + return 1 <= value <= monthrange(2000, month)[1] + else: + month = self[self.mstridx] + year = self[self.ystridx] + return 1 <= value <= monthrange(year, month)[1] + + def append(self, val, label=None): + if hasattr(val, '__len__'): + if val.isdigit() and len(val) > 2: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + elif val > 100: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + + super(self.__class__, self).append(int(val)) + + if label == 'M': + if self.has_month: + raise ValueError('Month is already set') + self.mstridx = len(self) - 1 + elif label == 'D': + if self.has_day: + raise ValueError('Day is already set') + self.dstridx = len(self) - 1 + elif label == 'Y': + if self.has_year: + raise ValueError('Year is already set') + self.ystridx = len(self) - 1 + + def _resolve_from_stridxs(self, strids): + """ + Try to resolve the identities of year/month/day elements using + ystridx, mstridx, and dstridx, if enough of these are specified. + """ + if len(self) == 3 and len(strids) == 2: + # we can back out the remaining stridx value + missing = [x for x in range(3) if x not in strids.values()] + key = [x for x in ['y', 'm', 'd'] if x not in strids] + assert len(missing) == len(key) == 1 + key = key[0] + val = missing[0] + strids[key] = val + + assert len(self) == len(strids) # otherwise this should not be called + out = {key: self[strids[key]] for key in strids} + return (out.get('y'), out.get('m'), out.get('d')) + + def resolve_ymd(self, yearfirst, dayfirst): + len_ymd = len(self) + year, month, day = (None, None, None) + + strids = (('y', self.ystridx), + ('m', self.mstridx), + ('d', self.dstridx)) + + strids = {key: val for key, val in strids if val is not None} + if (len(self) == len(strids) > 0 or + (len(self) == 3 and len(strids) == 2)): + return self._resolve_from_stridxs(strids) + + mstridx = self.mstridx + + if len_ymd > 3: + raise ValueError("More than three YMD values") + elif len_ymd == 1 or (mstridx is not None and len_ymd == 2): + # One member, or two members with a month string + if mstridx is not None: + month = self[mstridx] + # since mstridx is 0 or 1, self[mstridx-1] always + # looks up the other element + other = self[mstridx - 1] + else: + other = self[0] + + if len_ymd > 1 or mstridx is None: + if other > 31: + year = other + else: + day = other + + elif len_ymd == 2: + # Two members with numbers + if self[0] > 31: + # 99-01 + year, month = self + elif self[1] > 31: + # 01-99 + month, year = self + elif dayfirst and self[1] <= 12: + # 13-01 + day, month = self + else: + # 01-13 + month, day = self + + elif len_ymd == 3: + # Three members + if mstridx == 0: + if self[1] > 31: + # Apr-2003-25 + month, year, day = self + else: + month, day, year = self + elif mstridx == 1: + if self[0] > 31 or (yearfirst and self[2] <= 31): + # 99-Jan-01 + year, month, day = self + else: + # 01-Jan-01 + # Give precedence to day-first, since + # two-digit years is usually hand-written. + day, month, year = self + + elif mstridx == 2: + # WTF!? 
+ if self[1] > 31: + # 01-99-Jan + day, year, month = self + else: + # 99-01-Jan + year, day, month = self + + else: + if (self[0] > 31 or + self.ystridx == 0 or + (yearfirst and self[1] <= 12 and self[2] <= 31)): + # 99-01-01 + if dayfirst and self[2] <= 12: + year, day, month = self + else: + year, month, day = self + elif self[0] > 12 or (dayfirst and self[1] <= 12): + # 13-01-01 + day, month, year = self + else: + # 01-13-01 + month, day, year = self + + return year, month, day + + +class parser(object): + def __init__(self, info=None): + self.info = info or parserinfo() + + def parse(self, timestr, default=None, + ignoretz=False, tzinfos=None, **kwargs): + """ + Parse the date/time string into a :class:`datetime.datetime` object. + + :param timestr: + Any date/time string using the supported formats. + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a + naive :class:`datetime.datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param \\*\\*kwargs: + Keyword arguments as passed to ``_parse()``. + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ParserError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises TypeError: + Raised for non-string or character stream input. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
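A brief sketch of the ignoretz option described above; the input string is illustrative:

    >>> from dateutil.parser import parse
    >>> # The BRST zone name is parsed but discarded; a naive datetime results.
    >>> parse("2012-01-19 17:21:00 BRST", ignoretz=True)
    datetime.datetime(2012, 1, 19, 17, 21)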
+ """ + + if default is None: + default = datetime.datetime.now().replace(hour=0, minute=0, + second=0, microsecond=0) + + res, skipped_tokens = self._parse(timestr, **kwargs) + + if res is None: + raise ParserError("Unknown string format: %s", timestr) + + if len(res) == 0: + raise ParserError("String does not contain a date: %s", timestr) + + try: + ret = self._build_naive(res, default) + except ValueError as e: + six.raise_from(ParserError(e.args[0] + ": %s", timestr), e) + + if not ignoretz: + ret = self._build_tzaware(ret, res, tzinfos) + + if kwargs.get('fuzzy_with_tokens', False): + return ret, skipped_tokens + else: + return ret + + class _result(_resultbase): + __slots__ = ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond", + "tzname", "tzoffset", "ampm","any_unused_tokens"] + + def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, + fuzzy_with_tokens=False): + """ + Private method which performs the heavy lifting of parsing, called from + ``parse()``, which passes on its ``kwargs`` to this function. + + :param timestr: + The string to parse. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. If set to ``None``, this value is retrieved from the + current :class:`parserinfo` object (which itself defaults to + ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + If this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. 
doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + """ + if fuzzy_with_tokens: + fuzzy = True + + info = self.info + + if dayfirst is None: + dayfirst = info.dayfirst + + if yearfirst is None: + yearfirst = info.yearfirst + + res = self._result() + l = _timelex.split(timestr) # Splits the timestr into tokens + + skipped_idxs = [] + + # year/month/day list + ymd = _ymd() + + len_l = len(l) + i = 0 + try: + while i < len_l: + + # Check if it's a number + value_repr = l[i] + try: + value = float(value_repr) + except ValueError: + value = None + + if value is not None: + # Numeric token + i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy) + + # Check weekday + elif info.weekday(l[i]) is not None: + value = info.weekday(l[i]) + res.weekday = value + + # Check month name + elif info.month(l[i]) is not None: + value = info.month(l[i]) + ymd.append(value, 'M') + + if i + 1 < len_l: + if l[i + 1] in ('-', '/'): + # Jan-01[-99] + sep = l[i + 1] + ymd.append(l[i + 2]) + + if i + 3 < len_l and l[i + 3] == sep: + # Jan-01-99 + ymd.append(l[i + 4]) + i += 2 + + i += 2 + + elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and + info.pertain(l[i + 2])): + # Jan of 01 + # In this case, 01 is clearly year + if l[i + 4].isdigit(): + # Convert it here to become unambiguous + value = int(l[i + 4]) + year = str(info.convertyear(value)) + ymd.append(year, 'Y') + else: + # Wrong guess + pass + # TODO: not hit in tests + i += 4 + + # Check am/pm + elif info.ampm(l[i]) is not None: + value = info.ampm(l[i]) + val_is_ampm = self._ampm_valid(res.hour, res.ampm, fuzzy) + + if val_is_ampm: + res.hour = self._adjust_ampm(res.hour, value) + res.ampm = value + + elif fuzzy: + skipped_idxs.append(i) + + # Check for a timezone name + elif self._could_be_tzname(res.hour, res.tzname, res.tzoffset, l[i]): + res.tzname = l[i] + res.tzoffset = info.tzoffset(res.tzname) + + # Check for something like GMT+3, or BRST+3. Notice + # that it doesn't mean "I am 3 hours after GMT", but + # "my time +3 is GMT". If found, we reverse the + # logic so that timezone parsing code will get it + # right. + if i + 1 < len_l and l[i + 1] in ('+', '-'): + l[i + 1] = ('+', '-')[l[i + 1] == '+'] + res.tzoffset = None + if info.utczone(res.tzname): + # With something like GMT+3, the timezone + # is *not* GMT. + res.tzname = None + + # Check for a numbered timezone + elif res.hour is not None and l[i] in ('+', '-'): + signal = (-1, 1)[l[i] == '+'] + len_li = len(l[i + 1]) + + # TODO: check that l[i + 1] is integer? + if len_li == 4: + # -0300 + hour_offset = int(l[i + 1][:2]) + min_offset = int(l[i + 1][2:]) + elif i + 2 < len_l and l[i + 2] == ':': + # -03:00 + hour_offset = int(l[i + 1]) + min_offset = int(l[i + 3]) # TODO: Check that l[i+3] is minute-like? 
+ i += 2 + elif len_li <= 2: + # -[0]3 + hour_offset = int(l[i + 1][:2]) + min_offset = 0 + else: + raise ValueError(timestr) + + res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60) + + # Look for a timezone name between parenthesis + if (i + 5 < len_l and + info.jump(l[i + 2]) and l[i + 3] == '(' and + l[i + 5] == ')' and + 3 <= len(l[i + 4]) and + self._could_be_tzname(res.hour, res.tzname, + None, l[i + 4])): + # -0300 (BRST) + res.tzname = l[i + 4] + i += 4 + + i += 1 + + # Check jumps + elif not (info.jump(l[i]) or fuzzy): + raise ValueError(timestr) + + else: + skipped_idxs.append(i) + i += 1 + + # Process year/month/day + year, month, day = ymd.resolve_ymd(yearfirst, dayfirst) + + res.century_specified = ymd.century_specified + res.year = year + res.month = month + res.day = day + + except (IndexError, ValueError): + return None, None + + if not info.validate(res): + return None, None + + if fuzzy_with_tokens: + skipped_tokens = self._recombine_skipped(l, skipped_idxs) + return res, tuple(skipped_tokens) + else: + return res, None + + def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy): + # Token is a number + value_repr = tokens[idx] + try: + value = self._to_decimal(value_repr) + except Exception as e: + six.raise_from(ValueError('Unknown numeric token'), e) + + len_li = len(value_repr) + + len_l = len(tokens) + + if (len(ymd) == 3 and len_li in (2, 4) and + res.hour is None and + (idx + 1 >= len_l or + (tokens[idx + 1] != ':' and + info.hms(tokens[idx + 1]) is None))): + # 19990101T23[59] + s = tokens[idx] + res.hour = int(s[:2]) + + if len_li == 4: + res.minute = int(s[2:]) + + elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6): + # YYMMDD or HHMMSS[.ss] + s = tokens[idx] + + if not ymd and '.' not in tokens[idx]: + ymd.append(s[:2]) + ymd.append(s[2:4]) + ymd.append(s[4:]) + else: + # 19990101T235959[.59] + + # TODO: Check if res attributes already set. + res.hour = int(s[:2]) + res.minute = int(s[2:4]) + res.second, res.microsecond = self._parsems(s[4:]) + + elif len_li in (8, 12, 14): + # YYYYMMDD + s = tokens[idx] + ymd.append(s[:4], 'Y') + ymd.append(s[4:6]) + ymd.append(s[6:8]) + + if len_li > 8: + res.hour = int(s[8:10]) + res.minute = int(s[10:12]) + + if len_li > 12: + res.second = int(s[12:]) + + elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None: + # HH[ ]h or MM[ ]m or SS[.ss][ ]s + hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True) + (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx) + if hms is not None: + # TODO: checking that hour/minute/second are not + # already set? + self._assign_hms(res, value_repr, hms) + + elif idx + 2 < len_l and tokens[idx + 1] == ':': + # HH:MM[:SS[.ss]] + res.hour = int(value) + value = self._to_decimal(tokens[idx + 2]) # TODO: try/except for this? 
+ (res.minute, res.second) = self._parse_min_sec(value) + + if idx + 4 < len_l and tokens[idx + 3] == ':': + res.second, res.microsecond = self._parsems(tokens[idx + 4]) + + idx += 2 + + idx += 2 + + elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'): + sep = tokens[idx + 1] + ymd.append(value_repr) + + if idx + 2 < len_l and not info.jump(tokens[idx + 2]): + if tokens[idx + 2].isdigit(): + # 01-01[-01] + ymd.append(tokens[idx + 2]) + else: + # 01-Jan[-01] + value = info.month(tokens[idx + 2]) + + if value is not None: + ymd.append(value, 'M') + else: + raise ValueError() + + if idx + 3 < len_l and tokens[idx + 3] == sep: + # We have three members + value = info.month(tokens[idx + 4]) + + if value is not None: + ymd.append(value, 'M') + else: + ymd.append(tokens[idx + 4]) + idx += 2 + + idx += 1 + idx += 1 + + elif idx + 1 >= len_l or info.jump(tokens[idx + 1]): + if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None: + # 12 am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2])) + idx += 1 + else: + # Year, month or day + ymd.append(value) + idx += 1 + + elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24): + # 12am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1])) + idx += 1 + + elif ymd.could_be_day(value): + ymd.append(value) + + elif not fuzzy: + raise ValueError() + + return idx + + def _find_hms_idx(self, idx, tokens, info, allow_jump): + len_l = len(tokens) + + if idx+1 < len_l and info.hms(tokens[idx+1]) is not None: + # There is an "h", "m", or "s" label following this token. We take + # assign the upcoming label to the current token. + # e.g. the "12" in 12h" + hms_idx = idx + 1 + + elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and + info.hms(tokens[idx+2]) is not None): + # There is a space and then an "h", "m", or "s" label. + # e.g. the "12" in "12 h" + hms_idx = idx + 2 + + elif idx > 0 and info.hms(tokens[idx-1]) is not None: + # There is a "h", "m", or "s" preceding this token. Since neither + # of the previous cases was hit, there is no label following this + # token, so we use the previous label. + # e.g. the "04" in "12h04" + hms_idx = idx-1 + + elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and + info.hms(tokens[idx-2]) is not None): + # If we are looking at the final token, we allow for a + # backward-looking check to skip over a space. + # TODO: Are we sure this is the right condition here? + hms_idx = idx - 2 + + else: + hms_idx = None + + return hms_idx + + def _assign_hms(self, res, value_repr, hms): + # See GH issue #427, fixing float rounding + value = self._to_decimal(value_repr) + + if hms == 0: + # Hour + res.hour = int(value) + if value % 1: + res.minute = int(60*(value % 1)) + + elif hms == 1: + (res.minute, res.second) = self._parse_min_sec(value) + + elif hms == 2: + (res.second, res.microsecond) = self._parsems(value_repr) + + def _could_be_tzname(self, hour, tzname, tzoffset, token): + return (hour is not None and + tzname is None and + tzoffset is None and + len(token) <= 5 and + (all(x in string.ascii_uppercase for x in token) + or token in self.info.UTCZONE)) + + def _ampm_valid(self, hour, ampm, fuzzy): + """ + For fuzzy parsing, 'a' or 'am' (both valid English words) + may erroneously trigger the AM/PM flag. Deal with that + here. + """ + val_is_ampm = True + + # If there's already an AM/PM flag, this one isn't one. 
+ if fuzzy and ampm is not None: + val_is_ampm = False + + # If AM/PM is found and hour is not, raise a ValueError + if hour is None: + if fuzzy: + val_is_ampm = False + else: + raise ValueError('No hour specified with AM or PM flag.') + elif not 0 <= hour <= 12: + # If AM/PM is found, it's a 12 hour clock, so raise + # an error for invalid range + if fuzzy: + val_is_ampm = False + else: + raise ValueError('Invalid hour specified for 12-hour clock.') + + return val_is_ampm + + def _adjust_ampm(self, hour, ampm): + if hour < 12 and ampm == 1: + hour += 12 + elif hour == 12 and ampm == 0: + hour = 0 + return hour + + def _parse_min_sec(self, value): + # TODO: Every usage of this function sets res.second to the return + # value. Are there any cases where second will be returned as None and + # we *don't* want to set res.second = None? + minute = int(value) + second = None + + sec_remainder = value % 1 + if sec_remainder: + second = int(60 * sec_remainder) + return (minute, second) + + def _parse_hms(self, idx, tokens, info, hms_idx): + # TODO: Is this going to admit a lot of false-positives for when we + # just happen to have digits and "h", "m" or "s" characters in non-date + # text? I guess hex hashes won't have that problem, but there's plenty + # of random junk out there. + if hms_idx is None: + hms = None + new_idx = idx + elif hms_idx > idx: + hms = info.hms(tokens[hms_idx]) + new_idx = hms_idx + else: + # Looking backwards, increment one. + hms = info.hms(tokens[hms_idx]) + 1 + new_idx = idx + + return (new_idx, hms) + + # ------------------------------------------------------------------ + # Handling for individual tokens. These are kept as methods instead + # of functions for the sake of customizability via subclassing. + + def _parsems(self, value): + """Parse a I[.F] seconds value into (seconds, microseconds).""" + if "." not in value: + return int(value), 0 + else: + i, f = value.split(".") + return int(i), int(f.ljust(6, "0")[:6]) + + def _to_decimal(self, val): + try: + decimal_value = Decimal(val) + # See GH 662, edge case, infinite value should not be converted + # via `_to_decimal` + if not decimal_value.is_finite(): + raise ValueError("Converted decimal value is infinite or NaN") + except Exception as e: + msg = "Could not convert %s to decimal" % val + six.raise_from(ValueError(msg), e) + else: + return decimal_value + + # ------------------------------------------------------------------ + # Post-Parsing construction of datetime output. These are kept as + # methods instead of functions for the sake of customizability via + # subclassing. 
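Both parserinfo and the parser hooks below are meant to be customized by subclassing. A minimal sketch; the FiscalParserInfo name and the extra JUMP word are hypothetical, not part of dateutil:

    >>> from dateutil import parser
    >>> class FiscalParserInfo(parser.parserinfo):
    ...     # Hypothetical subclass: also skip the word "fiscal" while parsing.
    ...     JUMP = parser.parserinfo.JUMP + ["fiscal"]
    ...
    >>> parser.parse("fiscal 2019-07-01", parserinfo=FiscalParserInfo())
    datetime.datetime(2019, 7, 1, 0, 0)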
+ + def _build_tzinfo(self, tzinfos, tzname, tzoffset): + if callable(tzinfos): + tzdata = tzinfos(tzname, tzoffset) + else: + tzdata = tzinfos.get(tzname) + # handle case where tzinfo is paased an options that returns None + # eg tzinfos = {'BRST' : None} + if isinstance(tzdata, datetime.tzinfo) or tzdata is None: + tzinfo = tzdata + elif isinstance(tzdata, text_type): + tzinfo = tz.tzstr(tzdata) + elif isinstance(tzdata, integer_types): + tzinfo = tz.tzoffset(tzname, tzdata) + else: + raise TypeError("Offset must be tzinfo subclass, tz string, " + "or int offset.") + return tzinfo + + def _build_tzaware(self, naive, res, tzinfos): + if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)): + tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset) + aware = naive.replace(tzinfo=tzinfo) + aware = self._assign_tzname(aware, res.tzname) + + elif res.tzname and res.tzname in time.tzname: + aware = naive.replace(tzinfo=tz.tzlocal()) + + # Handle ambiguous local datetime + aware = self._assign_tzname(aware, res.tzname) + + # This is mostly relevant for winter GMT zones parsed in the UK + if (aware.tzname() != res.tzname and + res.tzname in self.info.UTCZONE): + aware = aware.replace(tzinfo=tz.UTC) + + elif res.tzoffset == 0: + aware = naive.replace(tzinfo=tz.UTC) + + elif res.tzoffset: + aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) + + elif not res.tzname and not res.tzoffset: + # i.e. no timezone information was found. + aware = naive + + elif res.tzname: + # tz-like string was parsed but we don't know what to do + # with it + warnings.warn("tzname {tzname} identified but not understood. " + "Pass `tzinfos` argument in order to correctly " + "return a timezone-aware datetime. In a future " + "version, this will raise an " + "exception.".format(tzname=res.tzname), + category=UnknownTimezoneWarning) + aware = naive + + return aware + + def _build_naive(self, res, default): + repl = {} + for attr in ("year", "month", "day", "hour", + "minute", "second", "microsecond"): + value = getattr(res, attr) + if value is not None: + repl[attr] = value + + if 'day' not in repl: + # If the default day exceeds the last day of the month, fall back + # to the end of the month. + cyear = default.year if res.year is None else res.year + cmonth = default.month if res.month is None else res.month + cday = default.day if res.day is None else res.day + + if cday > monthrange(cyear, cmonth)[1]: + repl['day'] = monthrange(cyear, cmonth)[1] + + naive = default.replace(**repl) + + if res.weekday is not None and not res.day: + naive = naive + relativedelta.relativedelta(weekday=res.weekday) + + return naive + + def _assign_tzname(self, dt, tzname): + if dt.tzname() != tzname: + new_dt = tz.enfold(dt, fold=1) + if new_dt.tzname() == tzname: + return new_dt + + return dt + + def _recombine_skipped(self, tokens, skipped_idxs): + """ + >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"] + >>> skipped_idxs = [0, 1, 2, 5] + >>> _recombine_skipped(tokens, skipped_idxs) + ["foo bar", "baz"] + """ + skipped_tokens = [] + for i, idx in enumerate(sorted(skipped_idxs)): + if i > 0 and idx - 1 == skipped_idxs[i - 1]: + skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx] + else: + skipped_tokens.append(tokens[idx]) + + return skipped_tokens + + +DEFAULTPARSER = parser() + + +def parse(timestr, parserinfo=None, **kwargs): + """ + + Parse a string in one of the supported formats, using the + ``parserinfo`` parameters. + + :param timestr: + A string containing a date/time stamp. 
+ + :param parserinfo: + A :class:`parserinfo` object containing parameters for the parser. + If ``None``, the default arguments to the :class:`parserinfo` + constructor are used. + + The ``**kwargs`` parameter takes the following keyword arguments: + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM and + YMD. If set to ``None``, this value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken to + be the year, otherwise the last number is taken to be the year. If + this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ValueError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
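A short sketch of the failure mode documented above; the input string is illustrative, and ParserError (defined later in this module) subclasses ValueError:

    >>> from dateutil.parser import parse, ParserError
    >>> try:
    ...     parse("not a timestamp")
    ... except ParserError as e:
    ...     print(e)
    ...
    Unknown string format: not a timestamp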
+ """ + if parserinfo: + return parser(parserinfo).parse(timestr, **kwargs) + else: + return DEFAULTPARSER.parse(timestr, **kwargs) + + +class _tzparser(object): + + class _result(_resultbase): + + __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", + "start", "end"] + + class _attr(_resultbase): + __slots__ = ["month", "week", "weekday", + "yday", "jyday", "day", "time"] + + def __repr__(self): + return self._repr("") + + def __init__(self): + _resultbase.__init__(self) + self.start = self._attr() + self.end = self._attr() + + def parse(self, tzstr): + res = self._result() + l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x] + used_idxs = list() + try: + + len_l = len(l) + + i = 0 + while i < len_l: + # BRST+3[BRDT[+2]] + j = i + while j < len_l and not [x for x in l[j] + if x in "0123456789:,-+"]: + j += 1 + if j != i: + if not res.stdabbr: + offattr = "stdoffset" + res.stdabbr = "".join(l[i:j]) + else: + offattr = "dstoffset" + res.dstabbr = "".join(l[i:j]) + + for ii in range(j): + used_idxs.append(ii) + i = j + if (i < len_l and (l[i] in ('+', '-') or l[i][0] in + "0123456789")): + if l[i] in ('+', '-'): + # Yes, that's right. See the TZ variable + # documentation. + signal = (1, -1)[l[i] == '+'] + used_idxs.append(i) + i += 1 + else: + signal = -1 + len_li = len(l[i]) + if len_li == 4: + # -0300 + setattr(res, offattr, (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) * signal) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + setattr(res, offattr, + (int(l[i]) * 3600 + + int(l[i + 2]) * 60) * signal) + used_idxs.append(i) + i += 2 + elif len_li <= 2: + # -[0]3 + setattr(res, offattr, + int(l[i][:2]) * 3600 * signal) + else: + return None + used_idxs.append(i) + i += 1 + if res.dstabbr: + break + else: + break + + + if i < len_l: + for j in range(i, len_l): + if l[j] == ';': + l[j] = ',' + + assert l[i] == ',' + + i += 1 + + if i >= len_l: + pass + elif (8 <= l.count(',') <= 9 and + not [y for x in l[i:] if x != ',' + for y in x if y not in "0123456789+-"]): + # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] + for x in (res.start, res.end): + x.month = int(l[i]) + used_idxs.append(i) + i += 2 + if l[i] == '-': + value = int(l[i + 1]) * -1 + used_idxs.append(i) + i += 1 + else: + value = int(l[i]) + used_idxs.append(i) + i += 2 + if value: + x.week = value + x.weekday = (int(l[i]) - 1) % 7 + else: + x.day = int(l[i]) + used_idxs.append(i) + i += 2 + x.time = int(l[i]) + used_idxs.append(i) + i += 2 + if i < len_l: + if l[i] in ('-', '+'): + signal = (-1, 1)[l[i] == "+"] + used_idxs.append(i) + i += 1 + else: + signal = 1 + used_idxs.append(i) + res.dstoffset = (res.stdoffset + int(l[i]) * signal) + + # This was a made-up format that is not in normal use + warn(('Parsed time zone "%s"' % tzstr) + + 'is in a non-standard dateutil-specific format, which ' + + 'is now deprecated; support for parsing this format ' + + 'will be removed in future versions. 
It is recommended ' + + 'that you switch to a standard format like the GNU ' + + 'TZ variable format.', tz.DeprecatedTzFormatWarning) + elif (l.count(',') == 2 and l[i:].count('/') <= 2 and + not [y for x in l[i:] if x not in (',', '/', 'J', 'M', + '.', '-', ':') + for y in x if y not in "0123456789"]): + for x in (res.start, res.end): + if l[i] == 'J': + # non-leap year day (1 based) + used_idxs.append(i) + i += 1 + x.jyday = int(l[i]) + elif l[i] == 'M': + # month[-.]week[-.]weekday + used_idxs.append(i) + i += 1 + x.month = int(l[i]) + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.week = int(l[i]) + if x.week == 5: + x.week = -1 + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.weekday = (int(l[i]) - 1) % 7 + else: + # year day (zero based) + x.yday = int(l[i]) + 1 + + used_idxs.append(i) + i += 1 + + if i < len_l and l[i] == '/': + used_idxs.append(i) + i += 1 + # start time + len_li = len(l[i]) + if len_li == 4: + # -0300 + x.time = (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60 + used_idxs.append(i) + i += 2 + if i + 1 < len_l and l[i + 1] == ':': + used_idxs.append(i) + i += 2 + x.time += int(l[i]) + elif len_li <= 2: + # -[0]3 + x.time = (int(l[i][:2]) * 3600) + else: + return None + used_idxs.append(i) + i += 1 + + assert i == len_l or l[i] == ',' + + i += 1 + + assert i >= len_l + + except (IndexError, ValueError, AssertionError): + return None + + unused_idxs = set(range(len_l)).difference(used_idxs) + res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"}) + return res + + +DEFAULTTZPARSER = _tzparser() + + +def _parsetz(tzstr): + return DEFAULTTZPARSER.parse(tzstr) + + +class ParserError(ValueError): + """Error class for representing failure to parse a datetime string.""" + def __str__(self): + try: + return self.args[0] % self.args[1:] + except (TypeError, IndexError): + return super(ParserError, self).__str__() + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, str(self)) + + +class UnknownTimezoneWarning(RuntimeWarning): + """Raised when the parser finds a timezone it cannot parse into a tzinfo""" +# vim:ts=4:sw=4:et diff --git a/minor_project/lib/python3.6/site-packages/dateutil/parser/isoparser.py b/minor_project/lib/python3.6/site-packages/dateutil/parser/isoparser.py new file mode 100644 index 0000000..48f86a3 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/parser/isoparser.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +""" +This module offers a parser for ISO-8601 strings + +It is intended to support all valid date, time and datetime formats per the +ISO-8601 specification. 
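For illustration, a small usage sketch of the isoparse() entry point this module provides; the example values are illustrative:

    >>> from dateutil.parser import isoparse
    >>> isoparse("2019-07-01T14:30:00")
    datetime.datetime(2019, 7, 1, 14, 30)
    >>> isoparse("2019-07-01T14:30:00+02:00")
    datetime.datetime(2019, 7, 1, 14, 30, tzinfo=tzoffset(None, 7200))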
+ +..versionadded:: 2.7.0 +""" +from datetime import datetime, timedelta, time, date +import calendar +from dateutil import tz + +from functools import wraps + +import re +import six + +__all__ = ["isoparse", "isoparser"] + + +def _takes_ascii(f): + @wraps(f) + def func(self, str_in, *args, **kwargs): + # If it's a stream, read the whole thing + str_in = getattr(str_in, 'read', lambda: str_in)() + + # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII + if isinstance(str_in, six.text_type): + # ASCII is the same in UTF-8 + try: + str_in = str_in.encode('ascii') + except UnicodeEncodeError as e: + msg = 'ISO-8601 strings should contain only ASCII characters' + six.raise_from(ValueError(msg), e) + + return f(self, str_in, *args, **kwargs) + + return func + + +class isoparser(object): + def __init__(self, sep=None): + """ + :param sep: + A single character that separates date and time portions. If + ``None``, the parser will accept any single character. + For strict ISO-8601 adherence, pass ``'T'``. + """ + if sep is not None: + if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'): + raise ValueError('Separator must be a single, non-numeric ' + + 'ASCII character') + + sep = sep.encode('ascii') + + self._sep = sep + + @_takes_ascii + def isoparse(self, dt_str): + """ + Parse an ISO-8601 datetime string into a :class:`datetime.datetime`. + + An ISO-8601 datetime string consists of a date portion, followed + optionally by a time portion - the date and time portions are separated + by a single character separator, which is ``T`` in the official + standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be + combined with a time portion. + + Supported date formats are: + + Common: + + - ``YYYY`` + - ``YYYY-MM`` or ``YYYYMM`` + - ``YYYY-MM-DD`` or ``YYYYMMDD`` + + Uncommon: + + - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0) + - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day + + The ISO week and day numbering follows the same logic as + :func:`datetime.date.isocalendar`. + + Supported time formats are: + + - ``hh`` + - ``hh:mm`` or ``hhmm`` + - ``hh:mm:ss`` or ``hhmmss`` + - ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits) + + Midnight is a special case for `hh`, as the standard supports both + 00:00 and 24:00 as a representation. The decimal separator can be + either a dot or a comma. + + + .. caution:: + + Support for fractional components other than seconds is part of the + ISO-8601 standard, but is not currently implemented in this parser. + + Supported time zone offset formats are: + + - `Z` (UTC) + - `±HH:MM` + - `±HHMM` + - `±HH` + + Offsets will be represented as :class:`dateutil.tz.tzoffset` objects, + with the exception of UTC, which will be represented as + :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such + as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`. + + :param dt_str: + A string or stream containing only an ISO-8601 datetime string + + :return: + Returns a :class:`datetime.datetime` representing the string. + Unspecified components default to their lowest value. + + .. warning:: + + As of version 2.7.0, the strictness of the parser should not be + considered a stable part of the contract. Any valid ISO-8601 string + that parses correctly with the default settings will continue to + parse correctly in future versions, but invalid strings that + currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not + guaranteed to continue failing in future versions if they encode + a valid date. + + .. 
versionadded:: 2.7.0 + """ + components, pos = self._parse_isodate(dt_str) + + if len(dt_str) > pos: + if self._sep is None or dt_str[pos:pos + 1] == self._sep: + components += self._parse_isotime(dt_str[pos + 1:]) + else: + raise ValueError('String contains unknown ISO components') + + if len(components) > 3 and components[3] == 24: + components[3] = 0 + return datetime(*components) + timedelta(days=1) + + return datetime(*components) + + @_takes_ascii + def parse_isodate(self, datestr): + """ + Parse the date portion of an ISO string. + + :param datestr: + The string portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.date` object + """ + components, pos = self._parse_isodate(datestr) + if pos < len(datestr): + raise ValueError('String contains unknown ISO ' + + 'components: {}'.format(datestr)) + return date(*components) + + @_takes_ascii + def parse_isotime(self, timestr): + """ + Parse the time portion of an ISO string. + + :param timestr: + The time portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.time` object + """ + components = self._parse_isotime(timestr) + if components[0] == 24: + components[0] = 0 + return time(*components) + + @_takes_ascii + def parse_tzstr(self, tzstr, zero_as_utc=True): + """ + Parse a valid ISO time zone string. + + See :func:`isoparser.isoparse` for details on supported formats. + + :param tzstr: + A string representing an ISO time zone offset + + :param zero_as_utc: + Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones + + :return: + Returns :class:`dateutil.tz.tzoffset` for offsets and + :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is + specified) offsets equivalent to UTC. + """ + return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc) + + # Constants + _DATE_SEP = b'-' + _TIME_SEP = b':' + _FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)') + + def _parse_isodate(self, dt_str): + try: + return self._parse_isodate_common(dt_str) + except ValueError: + return self._parse_isodate_uncommon(dt_str) + + def _parse_isodate_common(self, dt_str): + len_str = len(dt_str) + components = [1, 1, 1] + + if len_str < 4: + raise ValueError('ISO string too short') + + # Year + components[0] = int(dt_str[0:4]) + pos = 4 + if pos >= len_str: + return components, pos + + has_sep = dt_str[pos:pos + 1] == self._DATE_SEP + if has_sep: + pos += 1 + + # Month + if len_str - pos < 2: + raise ValueError('Invalid common month') + + components[1] = int(dt_str[pos:pos + 2]) + pos += 2 + + if pos >= len_str: + if has_sep: + return components, pos + else: + raise ValueError('Invalid ISO format') + + if has_sep: + if dt_str[pos:pos + 1] != self._DATE_SEP: + raise ValueError('Invalid separator in ISO string') + pos += 1 + + # Day + if len_str - pos < 2: + raise ValueError('Invalid common day') + components[2] = int(dt_str[pos:pos + 2]) + return components, pos + 2 + + def _parse_isodate_uncommon(self, dt_str): + if len(dt_str) < 4: + raise ValueError('ISO string too short') + + # All ISO formats start with the year + year = int(dt_str[0:4]) + + has_sep = dt_str[4:5] == self._DATE_SEP + + pos = 4 + has_sep # Skip '-' if it's there + if dt_str[pos:pos + 1] == b'W': + # YYYY-?Www-?D? 
+ pos += 1 + weekno = int(dt_str[pos:pos + 2]) + pos += 2 + + dayno = 1 + if len(dt_str) > pos: + if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep: + raise ValueError('Inconsistent use of dash separator') + + pos += has_sep + + dayno = int(dt_str[pos:pos + 1]) + pos += 1 + + base_date = self._calculate_weekdate(year, weekno, dayno) + else: + # YYYYDDD or YYYY-DDD + if len(dt_str) - pos < 3: + raise ValueError('Invalid ordinal day') + + ordinal_day = int(dt_str[pos:pos + 3]) + pos += 3 + + if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)): + raise ValueError('Invalid ordinal day' + + ' {} for year {}'.format(ordinal_day, year)) + + base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1) + + components = [base_date.year, base_date.month, base_date.day] + return components, pos + + def _calculate_weekdate(self, year, week, day): + """ + Calculate the day of corresponding to the ISO year-week-day calendar. + + This function is effectively the inverse of + :func:`datetime.date.isocalendar`. + + :param year: + The year in the ISO calendar + + :param week: + The week in the ISO calendar - range is [1, 53] + + :param day: + The day in the ISO calendar - range is [1 (MON), 7 (SUN)] + + :return: + Returns a :class:`datetime.date` + """ + if not 0 < week < 54: + raise ValueError('Invalid week: {}'.format(week)) + + if not 0 < day < 8: # Range is 1-7 + raise ValueError('Invalid weekday: {}'.format(day)) + + # Get week 1 for the specific year: + jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it + week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1) + + # Now add the specific number of weeks and days to get what we want + week_offset = (week - 1) * 7 + (day - 1) + return week_1 + timedelta(days=week_offset) + + def _parse_isotime(self, timestr): + len_str = len(timestr) + components = [0, 0, 0, 0, None] + pos = 0 + comp = -1 + + if len(timestr) < 2: + raise ValueError('ISO time too short') + + has_sep = len_str >= 3 and timestr[2:3] == self._TIME_SEP + + while pos < len_str and comp < 5: + comp += 1 + + if timestr[pos:pos + 1] in b'-+Zz': + # Detect time zone boundary + components[-1] = self._parse_tzstr(timestr[pos:]) + pos = len_str + break + + if comp < 3: + # Hour, minute, second + components[comp] = int(timestr[pos:pos + 2]) + pos += 2 + if (has_sep and pos < len_str and + timestr[pos:pos + 1] == self._TIME_SEP): + pos += 1 + + if comp == 3: + # Fraction of a second + frac = self._FRACTION_REGEX.match(timestr[pos:]) + if not frac: + continue + + us_str = frac.group(1)[:6] # Truncate to microseconds + components[comp] = int(us_str) * 10**(6 - len(us_str)) + pos += len(frac.group()) + + if pos < len_str: + raise ValueError('Unused components in ISO string') + + if components[0] == 24: + # Standard supports 00:00 and 24:00 as representations of midnight + if any(component != 0 for component in components[1:4]): + raise ValueError('Hour may only be 24 at 24:00:00.000') + + return components + + def _parse_tzstr(self, tzstr, zero_as_utc=True): + if tzstr == b'Z' or tzstr == b'z': + return tz.UTC + + if len(tzstr) not in {3, 5, 6}: + raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters') + + if tzstr[0:1] == b'-': + mult = -1 + elif tzstr[0:1] == b'+': + mult = 1 + else: + raise ValueError('Time zone offset requires sign') + + hours = int(tzstr[1:3]) + if len(tzstr) == 3: + minutes = 0 + else: + minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):]) + + if zero_as_utc and hours == 0 and minutes == 0: + return tz.UTC + else: + if 
minutes > 59: + raise ValueError('Invalid minutes in time zone offset') + + if hours > 23: + raise ValueError('Invalid hours in time zone offset') + + return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60) + + +DEFAULT_ISOPARSER = isoparser() +isoparse = DEFAULT_ISOPARSER.isoparse diff --git a/minor_project/lib/python3.6/site-packages/dateutil/relativedelta.py b/minor_project/lib/python3.6/site-packages/dateutil/relativedelta.py new file mode 100644 index 0000000..a9e85f7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/relativedelta.py @@ -0,0 +1,599 @@ +# -*- coding: utf-8 -*- +import datetime +import calendar + +import operator +from math import copysign + +from six import integer_types +from warnings import warn + +from ._common import weekday + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + +__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + + +class relativedelta(object): + """ + The relativedelta type is designed to be applied to an existing datetime and + can replace specific components of that datetime, or represents an interval + of time. + + It is based on the specification of the excellent work done by M.-A. Lemburg + in his + `mx.DateTime `_ extension. + However, notice that this type does *NOT* implement the same algorithm as + his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. + + There are two different ways to build a relativedelta instance. The + first one is passing it two date/datetime classes:: + + relativedelta(datetime1, datetime2) + + The second one is passing it any number of the following keyword arguments:: + + relativedelta(arg1=x,arg2=y,arg3=z...) + + year, month, day, hour, minute, second, microsecond: + Absolute information (argument is singular); adding or subtracting a + relativedelta with absolute information does not perform an arithmetic + operation, but rather REPLACES the corresponding value in the + original datetime with the value(s) in relativedelta. + + years, months, weeks, days, hours, minutes, seconds, microseconds: + Relative information, may be negative (argument is plural); adding + or subtracting a relativedelta with relative information performs + the corresponding arithmetic operation on the original datetime value + with the information in the relativedelta. + + weekday: + One of the weekday instances (MO, TU, etc) available in the + relativedelta module. These instances may receive a parameter N, + specifying the Nth weekday, which could be positive or negative + (like MO(+1) or MO(-2)). Not specifying it is the same as specifying + +1. You can also use an integer, where 0=MO. This argument is always + relative e.g. if the calculated date is already Monday, using MO(1) + or MO(-1) won't change the day. To effectively make it absolute, use + it in combination with the day argument (e.g. day=1, MO(1) for first + Monday of the month). + + leapdays: + Will add given days to the date found, if year is a leap + year, and the date found is post 28 of february. + + yearday, nlyearday: + Set the yearday or the non-leap year day (jump leap days). + These are converted to day/month/leapdays information. + + There are relative and absolute forms of the keyword + arguments. The plural is relative, and the singular is + absolute. For each argument in the order below, the absolute form + is applied first (by setting each attribute to that value) and + then the relative form (by adding the value to the attribute). 
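As a quick illustration of the singular/plural distinction described above (the dates below are arbitrary): a relative ``months=1`` shifts the month, while an absolute ``month=1`` replaces it.

    from datetime import datetime
    from dateutil.relativedelta import relativedelta

    base = datetime(2003, 9, 17)
    base + relativedelta(months=1)   # relative: shifts forward -> datetime(2003, 10, 17, 0, 0)
    base + relativedelta(month=1)    # absolute: replaces the month -> datetime(2003, 1, 17, 0, 0)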
+ + The order of attributes considered when this relativedelta is + added to a datetime is: + + 1. Year + 2. Month + 3. Day + 4. Hours + 5. Minutes + 6. Seconds + 7. Microseconds + + Finally, weekday is applied, using the rule described above. + + For example + + >>> from datetime import datetime + >>> from dateutil.relativedelta import relativedelta, MO + >>> dt = datetime(2018, 4, 9, 13, 37, 0) + >>> delta = relativedelta(hours=25, day=1, weekday=MO(1)) + >>> dt + delta + datetime.datetime(2018, 4, 2, 14, 37) + + First, the day is set to 1 (the first of the month), then 25 hours + are added, to get to the 2nd day and 14th hour, finally the + weekday is applied, but since the 2nd is already a Monday there is + no effect. + + """ + + def __init__(self, dt1=None, dt2=None, + years=0, months=0, days=0, leapdays=0, weeks=0, + hours=0, minutes=0, seconds=0, microseconds=0, + year=None, month=None, day=None, weekday=None, + yearday=None, nlyearday=None, + hour=None, minute=None, second=None, microsecond=None): + + if dt1 and dt2: + # datetime is a subclass of date. So both must be date + if not (isinstance(dt1, datetime.date) and + isinstance(dt2, datetime.date)): + raise TypeError("relativedelta only diffs datetime/date") + + # We allow two dates, or two datetimes, so we coerce them to be + # of the same type + if (isinstance(dt1, datetime.datetime) != + isinstance(dt2, datetime.datetime)): + if not isinstance(dt1, datetime.datetime): + dt1 = datetime.datetime.fromordinal(dt1.toordinal()) + elif not isinstance(dt2, datetime.datetime): + dt2 = datetime.datetime.fromordinal(dt2.toordinal()) + + self.years = 0 + self.months = 0 + self.days = 0 + self.leapdays = 0 + self.hours = 0 + self.minutes = 0 + self.seconds = 0 + self.microseconds = 0 + self.year = None + self.month = None + self.day = None + self.weekday = None + self.hour = None + self.minute = None + self.second = None + self.microsecond = None + self._has_time = 0 + + # Get year / month delta between the two + months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) + self._set_months(months) + + # Remove the year/month delta so the timedelta is just well-defined + # time units (seconds, days and microseconds) + dtm = self.__radd__(dt2) + + # If we've overshot our target, make an adjustment + if dt1 < dt2: + compare = operator.gt + increment = 1 + else: + compare = operator.lt + increment = -1 + + while compare(dt1, dtm): + months += increment + self._set_months(months) + dtm = self.__radd__(dt2) + + # Get the timedelta between the "months-adjusted" date and dt1 + delta = dt1 - dtm + self.seconds = delta.seconds + delta.days * 86400 + self.microseconds = delta.microseconds + else: + # Check for non-integer values in integer-only quantities + if any(x is not None and x != int(x) for x in (years, months)): + raise ValueError("Non-integer years and months are " + "ambiguous and not currently supported.") + + # Relative information + self.years = int(years) + self.months = int(months) + self.days = days + weeks * 7 + self.leapdays = leapdays + self.hours = hours + self.minutes = minutes + self.seconds = seconds + self.microseconds = microseconds + + # Absolute information + self.year = year + self.month = month + self.day = day + self.hour = hour + self.minute = minute + self.second = second + self.microsecond = microsecond + + if any(x is not None and int(x) != x + for x in (year, month, day, hour, + minute, second, microsecond)): + # For now we'll deprecate floats - later it'll be an error. 
+ warn("Non-integer value passed as absolute information. " + + "This is not a well-defined condition and will raise " + + "errors in future versions.", DeprecationWarning) + + if isinstance(weekday, integer_types): + self.weekday = weekdays[weekday] + else: + self.weekday = weekday + + yday = 0 + if nlyearday: + yday = nlyearday + elif yearday: + yday = yearday + if yearday > 59: + self.leapdays = -1 + if yday: + ydayidx = [31, 59, 90, 120, 151, 181, 212, + 243, 273, 304, 334, 366] + for idx, ydays in enumerate(ydayidx): + if yday <= ydays: + self.month = idx+1 + if idx == 0: + self.day = yday + else: + self.day = yday-ydayidx[idx-1] + break + else: + raise ValueError("invalid year day (%d)" % yday) + + self._fix() + + def _fix(self): + if abs(self.microseconds) > 999999: + s = _sign(self.microseconds) + div, mod = divmod(self.microseconds * s, 1000000) + self.microseconds = mod * s + self.seconds += div * s + if abs(self.seconds) > 59: + s = _sign(self.seconds) + div, mod = divmod(self.seconds * s, 60) + self.seconds = mod * s + self.minutes += div * s + if abs(self.minutes) > 59: + s = _sign(self.minutes) + div, mod = divmod(self.minutes * s, 60) + self.minutes = mod * s + self.hours += div * s + if abs(self.hours) > 23: + s = _sign(self.hours) + div, mod = divmod(self.hours * s, 24) + self.hours = mod * s + self.days += div * s + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years += div * s + if (self.hours or self.minutes or self.seconds or self.microseconds + or self.hour is not None or self.minute is not None or + self.second is not None or self.microsecond is not None): + self._has_time = 1 + else: + self._has_time = 0 + + @property + def weeks(self): + return int(self.days / 7.0) + + @weeks.setter + def weeks(self, value): + self.days = self.days - (self.weeks * 7) + value * 7 + + def _set_months(self, months): + self.months = months + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years = div * s + else: + self.years = 0 + + def normalized(self): + """ + Return a version of this object represented entirely using integer + values for the relative attributes. + + >>> relativedelta(days=1.5, hours=2).normalized() + relativedelta(days=+1, hours=+14) + + :return: + Returns a :class:`dateutil.relativedelta.relativedelta` object. 
+ """ + # Cascade remainders down (rounding each to roughly nearest microsecond) + days = int(self.days) + + hours_f = round(self.hours + 24 * (self.days - days), 11) + hours = int(hours_f) + + minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) + minutes = int(minutes_f) + + seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) + seconds = int(seconds_f) + + microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) + + # Constructor carries overflow back up with call to _fix() + return self.__class__(years=self.years, months=self.months, + days=days, hours=hours, minutes=minutes, + seconds=seconds, microseconds=microseconds, + leapdays=self.leapdays, year=self.year, + month=self.month, day=self.day, + weekday=self.weekday, hour=self.hour, + minute=self.minute, second=self.second, + microsecond=self.microsecond) + + def __add__(self, other): + if isinstance(other, relativedelta): + return self.__class__(years=other.years + self.years, + months=other.months + self.months, + days=other.days + self.days, + hours=other.hours + self.hours, + minutes=other.minutes + self.minutes, + seconds=other.seconds + self.seconds, + microseconds=(other.microseconds + + self.microseconds), + leapdays=other.leapdays or self.leapdays, + year=(other.year if other.year is not None + else self.year), + month=(other.month if other.month is not None + else self.month), + day=(other.day if other.day is not None + else self.day), + weekday=(other.weekday if other.weekday is not None + else self.weekday), + hour=(other.hour if other.hour is not None + else self.hour), + minute=(other.minute if other.minute is not None + else self.minute), + second=(other.second if other.second is not None + else self.second), + microsecond=(other.microsecond if other.microsecond + is not None else + self.microsecond)) + if isinstance(other, datetime.timedelta): + return self.__class__(years=self.years, + months=self.months, + days=self.days + other.days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds + other.seconds, + microseconds=self.microseconds + other.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + if not isinstance(other, datetime.date): + return NotImplemented + elif self._has_time and not isinstance(other, datetime.datetime): + other = datetime.datetime.fromordinal(other.toordinal()) + year = (self.year or other.year)+self.years + month = self.month or other.month + if self.months: + assert 1 <= abs(self.months) <= 12 + month += self.months + if month > 12: + year += 1 + month -= 12 + elif month < 1: + year -= 1 + month += 12 + day = min(calendar.monthrange(year, month)[1], + self.day or other.day) + repl = {"year": year, "month": month, "day": day} + for attr in ["hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + repl[attr] = value + days = self.days + if self.leapdays and month > 2 and calendar.isleap(year): + days += self.leapdays + ret = (other.replace(**repl) + + datetime.timedelta(days=days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds, + microseconds=self.microseconds)) + if self.weekday: + weekday, nth = self.weekday.weekday, self.weekday.n or 1 + jumpdays = (abs(nth) - 1) * 7 + if nth > 0: + jumpdays += (7 - ret.weekday() + weekday) % 7 + else: + jumpdays += (ret.weekday() - weekday) % 7 + jumpdays *= -1 + ret += 
datetime.timedelta(days=jumpdays) + return ret + + def __radd__(self, other): + return self.__add__(other) + + def __rsub__(self, other): + return self.__neg__().__radd__(other) + + def __sub__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented # In case the other object defines __rsub__ + return self.__class__(years=self.years - other.years, + months=self.months - other.months, + days=self.days - other.days, + hours=self.hours - other.hours, + minutes=self.minutes - other.minutes, + seconds=self.seconds - other.seconds, + microseconds=self.microseconds - other.microseconds, + leapdays=self.leapdays or other.leapdays, + year=(self.year if self.year is not None + else other.year), + month=(self.month if self.month is not None else + other.month), + day=(self.day if self.day is not None else + other.day), + weekday=(self.weekday if self.weekday is not None else + other.weekday), + hour=(self.hour if self.hour is not None else + other.hour), + minute=(self.minute if self.minute is not None else + other.minute), + second=(self.second if self.second is not None else + other.second), + microsecond=(self.microsecond if self.microsecond + is not None else + other.microsecond)) + + def __abs__(self): + return self.__class__(years=abs(self.years), + months=abs(self.months), + days=abs(self.days), + hours=abs(self.hours), + minutes=abs(self.minutes), + seconds=abs(self.seconds), + microseconds=abs(self.microseconds), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __neg__(self): + return self.__class__(years=-self.years, + months=-self.months, + days=-self.days, + hours=-self.hours, + minutes=-self.minutes, + seconds=-self.seconds, + microseconds=-self.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __bool__(self): + return not (not self.years and + not self.months and + not self.days and + not self.hours and + not self.minutes and + not self.seconds and + not self.microseconds and + not self.leapdays and + self.year is None and + self.month is None and + self.day is None and + self.weekday is None and + self.hour is None and + self.minute is None and + self.second is None and + self.microsecond is None) + # Compatibility with Python 2.x + __nonzero__ = __bool__ + + def __mul__(self, other): + try: + f = float(other) + except TypeError: + return NotImplemented + + return self.__class__(years=int(self.years * f), + months=int(self.months * f), + days=int(self.days * f), + hours=int(self.hours * f), + minutes=int(self.minutes * f), + seconds=int(self.seconds * f), + microseconds=int(self.microseconds * f), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + __rmul__ = __mul__ + + def __eq__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented + if self.weekday or other.weekday: + if not self.weekday or not other.weekday: + return False + if self.weekday.weekday != other.weekday.weekday: + return False + n1, n2 = self.weekday.n, other.weekday.n + if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): + return False + return (self.years == other.years and + 
self.months == other.months and + self.days == other.days and + self.hours == other.hours and + self.minutes == other.minutes and + self.seconds == other.seconds and + self.microseconds == other.microseconds and + self.leapdays == other.leapdays and + self.year == other.year and + self.month == other.month and + self.day == other.day and + self.hour == other.hour and + self.minute == other.minute and + self.second == other.second and + self.microsecond == other.microsecond) + + def __hash__(self): + return hash(( + self.weekday, + self.years, + self.months, + self.days, + self.hours, + self.minutes, + self.seconds, + self.microseconds, + self.leapdays, + self.year, + self.month, + self.day, + self.hour, + self.minute, + self.second, + self.microsecond, + )) + + def __ne__(self, other): + return not self.__eq__(other) + + def __div__(self, other): + try: + reciprocal = 1 / float(other) + except TypeError: + return NotImplemented + + return self.__mul__(reciprocal) + + __truediv__ = __div__ + + def __repr__(self): + l = [] + for attr in ["years", "months", "days", "leapdays", + "hours", "minutes", "seconds", "microseconds"]: + value = getattr(self, attr) + if value: + l.append("{attr}={value:+g}".format(attr=attr, value=value)) + for attr in ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + l.append("{attr}={value}".format(attr=attr, value=repr(value))) + return "{classname}({attrs})".format(classname=self.__class__.__name__, + attrs=", ".join(l)) + + +def _sign(x): + return int(copysign(1, x)) + +# vim:ts=4:sw=4:et diff --git a/minor_project/lib/python3.6/site-packages/dateutil/rrule.py b/minor_project/lib/python3.6/site-packages/dateutil/rrule.py new file mode 100644 index 0000000..6bf0ea9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/rrule.py @@ -0,0 +1,1735 @@ +# -*- coding: utf-8 -*- +""" +The rrule module offers a small, complete, and very fast, implementation of +the recurrence rules documented in the +`iCalendar RFC `_, +including support for caching of results. +""" +import itertools +import datetime +import calendar +import re +import sys + +try: + from math import gcd +except ImportError: + from fractions import gcd + +from six import advance_iterator, integer_types +from six.moves import _thread, range +import heapq + +from ._common import weekday as weekdaybase + +# For warning about deprecation of until and count +from warnings import warn + +__all__ = ["rrule", "rruleset", "rrulestr", + "YEARLY", "MONTHLY", "WEEKLY", "DAILY", + "HOURLY", "MINUTELY", "SECONDLY", + "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + +# Every mask is 7 days longer to handle cross-year weekly periods. 
+M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + + [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) +M365MASK = list(M366MASK) +M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) +MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +MDAY365MASK = list(MDAY366MASK) +M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) +NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +NMDAY365MASK = list(NMDAY366MASK) +M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) +M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) +WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 +del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] +MDAY365MASK = tuple(MDAY365MASK) +M365MASK = tuple(M365MASK) + +FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] + +(YEARLY, + MONTHLY, + WEEKLY, + DAILY, + HOURLY, + MINUTELY, + SECONDLY) = list(range(7)) + +# Imported on demand. +easter = None +parser = None + + +class weekday(weekdaybase): + """ + This version of weekday does not allow n = 0. + """ + def __init__(self, wkday, n=None): + if n == 0: + raise ValueError("Can't create weekday with n==0") + + super(weekday, self).__init__(wkday, n) + + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + + +def _invalidates_cache(f): + """ + Decorator for rruleset methods which may invalidate the + cached length. + """ + def inner_func(self, *args, **kwargs): + rv = f(self, *args, **kwargs) + self._invalidate_cache() + return rv + + return inner_func + + +class rrulebase(object): + def __init__(self, cache=False): + if cache: + self._cache = [] + self._cache_lock = _thread.allocate_lock() + self._invalidate_cache() + else: + self._cache = None + self._cache_complete = False + self._len = None + + def __iter__(self): + if self._cache_complete: + return iter(self._cache) + elif self._cache is None: + return self._iter() + else: + return self._iter_cached() + + def _invalidate_cache(self): + if self._cache is not None: + self._cache = [] + self._cache_complete = False + self._cache_gen = self._iter() + + if self._cache_lock.locked(): + self._cache_lock.release() + + self._len = None + + def _iter_cached(self): + i = 0 + gen = self._cache_gen + cache = self._cache + acquire = self._cache_lock.acquire + release = self._cache_lock.release + while gen: + if i == len(cache): + acquire() + if self._cache_complete: + break + try: + for j in range(10): + cache.append(advance_iterator(gen)) + except StopIteration: + self._cache_gen = gen = None + self._cache_complete = True + break + release() + yield cache[i] + i += 1 + while i < self._len: + yield cache[i] + i += 1 + + def __getitem__(self, item): + if self._cache_complete: + return self._cache[item] + elif isinstance(item, slice): + if item.step and item.step < 0: + return list(iter(self))[item] + else: + return list(itertools.islice(self, + item.start or 0, + item.stop or sys.maxsize, + item.step or 1)) + elif item >= 0: + gen = iter(self) + try: + for i in range(item+1): + res = advance_iterator(gen) + except StopIteration: + raise IndexError + return res + else: + return list(iter(self))[item] + + def __contains__(self, item): + if self._cache_complete: + return item in self._cache + else: + for i in self: + if i == item: + return True + elif i > item: + return False + return False + + # __len__() introduces a large performance penalty. 
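A brief usage sketch of the iteration helpers defined above (start date and counts are arbitrary); with ``cache=True`` repeated queries reuse occurrences that have already been generated.

    from datetime import datetime
    from dateutil.rrule import rrule, DAILY

    rule = rrule(DAILY, dtstart=datetime(2018, 1, 1), count=10, cache=True)
    rule[0]                        # indexing walks the iterator -> datetime(2018, 1, 1, 0, 0)
    rule[2:4]                      # slicing is supported as well
    datetime(2018, 1, 3) in rule   # membership test -> True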
+ def count(self): + """ Returns the number of recurrences in this set. It will have go + trough the whole recurrence, if this hasn't been done before. """ + if self._len is None: + for x in self: + pass + return self._len + + def before(self, dt, inc=False): + """ Returns the last recurrence before the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + last = None + if inc: + for i in gen: + if i > dt: + break + last = i + else: + for i in gen: + if i >= dt: + break + last = i + return last + + def after(self, dt, inc=False): + """ Returns the first recurrence after the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + if inc: + for i in gen: + if i >= dt: + return i + else: + for i in gen: + if i > dt: + return i + return None + + def xafter(self, dt, count=None, inc=False): + """ + Generator which yields up to `count` recurrences after the given + datetime instance, equivalent to `after`. + + :param dt: + The datetime at which to start generating recurrences. + + :param count: + The maximum number of recurrences to generate. If `None` (default), + dates are generated until the recurrence rule is exhausted. + + :param inc: + If `dt` is an instance of the rule and `inc` is `True`, it is + included in the output. + + :yields: Yields a sequence of `datetime` objects. + """ + + if self._cache_complete: + gen = self._cache + else: + gen = self + + # Select the comparison function + if inc: + comp = lambda dc, dtc: dc >= dtc + else: + comp = lambda dc, dtc: dc > dtc + + # Generate dates + n = 0 + for d in gen: + if comp(d, dt): + if count is not None: + n += 1 + if n > count: + break + + yield d + + def between(self, after, before, inc=False, count=1): + """ Returns all the occurrences of the rrule between after and before. + The inc keyword defines what happens if after and/or before are + themselves occurrences. With inc=True, they will be included in the + list, if they are found in the recurrence set. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + started = False + l = [] + if inc: + for i in gen: + if i > before: + break + elif not started: + if i >= after: + started = True + l.append(i) + else: + l.append(i) + else: + for i in gen: + if i >= before: + break + elif not started: + if i > after: + started = True + l.append(i) + else: + l.append(i) + return l + + +class rrule(rrulebase): + """ + That's the base of the rrule operation. It accepts all the keywords + defined in the RFC as its constructor parameters (except byday, + which was renamed to byweekday) and more. The constructor prototype is:: + + rrule(freq) + + Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, + or SECONDLY. + + .. note:: + Per RFC section 3.3.10, recurrence instances falling on invalid dates + and times are ignored rather than coerced: + + Recurrence rules may generate recurrence instances with an invalid + date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM + on a day where the local time is moved forward by an hour at 1:00 + AM). Such recurrence instances MUST be ignored and MUST NOT be + counted as part of the recurrence set. 
+ + This can lead to possibly surprising behavior when, for example, the + start date occurs at the end of the month: + + >>> from dateutil.rrule import rrule, MONTHLY + >>> from datetime import datetime + >>> start_date = datetime(2014, 12, 31) + >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) + ... # doctest: +NORMALIZE_WHITESPACE + [datetime.datetime(2014, 12, 31, 0, 0), + datetime.datetime(2015, 1, 31, 0, 0), + datetime.datetime(2015, 3, 31, 0, 0), + datetime.datetime(2015, 5, 31, 0, 0)] + + Additionally, it supports the following keyword arguments: + + :param dtstart: + The recurrence start. Besides being the base for the recurrence, + missing parameters in the final recurrence instances will also be + extracted from this date. If not given, datetime.now() will be used + instead. + :param interval: + The interval between each freq iteration. For example, when using + YEARLY, an interval of 2 means once every two years, but with HOURLY, + it means once every two hours. The default interval is 1. + :param wkst: + The week start day. Must be one of the MO, TU, WE constants, or an + integer, specifying the first day of the week. This will affect + recurrences based on weekly periods. The default week start is got + from calendar.firstweekday(), and may be modified by + calendar.setfirstweekday(). + :param count: + If given, this determines how many occurrences will be generated. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param until: + If given, this must be a datetime instance specifying the upper-bound + limit of the recurrence. The last recurrence in the rule is the greatest + datetime that is less than or equal to the value specified in the + ``until`` parameter. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param bysetpos: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each given integer will specify an occurrence + number, corresponding to the nth occurrence of the rule inside the + frequency period. For example, a bysetpos of -1 if combined with a + MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will + result in the last work day of every month. + :param bymonth: + If given, it must be either an integer, or a sequence of integers, + meaning the months to apply the recurrence to. + :param bymonthday: + If given, it must be either an integer, or a sequence of integers, + meaning the month days to apply the recurrence to. + :param byyearday: + If given, it must be either an integer, or a sequence of integers, + meaning the year days to apply the recurrence to. + :param byeaster: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each integer will define an offset from the + Easter Sunday. Passing the offset 0 to byeaster will yield the Easter + Sunday itself. This is an extension to the RFC specification. + :param byweekno: + If given, it must be either an integer, or a sequence of integers, + meaning the week numbers to apply the recurrence to. 
Week numbers + have the meaning described in ISO8601, that is, the first week of + the year is that containing at least four days of the new year. + :param byweekday: + If given, it must be either an integer (0 == MO), a sequence of + integers, one of the weekday constants (MO, TU, etc), or a sequence + of these constants. When given, these variables will define the + weekdays where the recurrence will be applied. It's also possible to + use an argument n for the weekday instances, which will mean the nth + occurrence of this weekday in the period. For example, with MONTHLY, + or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the + first friday of the month where the recurrence happens. Notice that in + the RFC documentation, this is specified as BYDAY, but was renamed to + avoid the ambiguity of that keyword. + :param byhour: + If given, it must be either an integer, or a sequence of integers, + meaning the hours to apply the recurrence to. + :param byminute: + If given, it must be either an integer, or a sequence of integers, + meaning the minutes to apply the recurrence to. + :param bysecond: + If given, it must be either an integer, or a sequence of integers, + meaning the seconds to apply the recurrence to. + :param cache: + If given, it must be a boolean value specifying to enable or disable + caching of results. If you will use the same rrule instance multiple + times, enabling caching will improve the performance considerably. + """ + def __init__(self, freq, dtstart=None, + interval=1, wkst=None, count=None, until=None, bysetpos=None, + bymonth=None, bymonthday=None, byyearday=None, byeaster=None, + byweekno=None, byweekday=None, + byhour=None, byminute=None, bysecond=None, + cache=False): + super(rrule, self).__init__(cache) + global easter + if not dtstart: + if until and until.tzinfo: + dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0) + else: + dtstart = datetime.datetime.now().replace(microsecond=0) + elif not isinstance(dtstart, datetime.datetime): + dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) + else: + dtstart = dtstart.replace(microsecond=0) + self._dtstart = dtstart + self._tzinfo = dtstart.tzinfo + self._freq = freq + self._interval = interval + self._count = count + + # Cache the original byxxx rules, if they are provided, as the _byxxx + # attributes do not necessarily map to the inputs, and this can be + # a problem in generating the strings. Only store things if they've + # been supplied (the string retrieval will just use .get()) + self._original_rule = {} + + if until and not isinstance(until, datetime.datetime): + until = datetime.datetime.fromordinal(until.toordinal()) + self._until = until + + if self._dtstart and self._until: + if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None): + # According to RFC5545 Section 3.3.10: + # https://tools.ietf.org/html/rfc5545#section-3.3.10 + # + # > If the "DTSTART" property is specified as a date with UTC + # > time or a date with local time and time zone reference, + # > then the UNTIL rule part MUST be specified as a date with + # > UTC time. + raise ValueError( + 'RRULE UNTIL values must be specified in UTC when DTSTART ' + 'is timezone-aware' + ) + + if count is not None and until: + warn("Using both 'count' and 'until' is inconsistent with RFC 5545" + " and has been deprecated in dateutil. 
Future versions will " + "raise an error.", DeprecationWarning) + + if wkst is None: + self._wkst = calendar.firstweekday() + elif isinstance(wkst, integer_types): + self._wkst = wkst + else: + self._wkst = wkst.weekday + + if bysetpos is None: + self._bysetpos = None + elif isinstance(bysetpos, integer_types): + if bysetpos == 0 or not (-366 <= bysetpos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + self._bysetpos = (bysetpos,) + else: + self._bysetpos = tuple(bysetpos) + for pos in self._bysetpos: + if pos == 0 or not (-366 <= pos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + + if self._bysetpos: + self._original_rule['bysetpos'] = self._bysetpos + + if (byweekno is None and byyearday is None and bymonthday is None and + byweekday is None and byeaster is None): + if freq == YEARLY: + if bymonth is None: + bymonth = dtstart.month + self._original_rule['bymonth'] = None + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == MONTHLY: + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == WEEKLY: + byweekday = dtstart.weekday() + self._original_rule['byweekday'] = None + + # bymonth + if bymonth is None: + self._bymonth = None + else: + if isinstance(bymonth, integer_types): + bymonth = (bymonth,) + + self._bymonth = tuple(sorted(set(bymonth))) + + if 'bymonth' not in self._original_rule: + self._original_rule['bymonth'] = self._bymonth + + # byyearday + if byyearday is None: + self._byyearday = None + else: + if isinstance(byyearday, integer_types): + byyearday = (byyearday,) + + self._byyearday = tuple(sorted(set(byyearday))) + self._original_rule['byyearday'] = self._byyearday + + # byeaster + if byeaster is not None: + if not easter: + from dateutil import easter + if isinstance(byeaster, integer_types): + self._byeaster = (byeaster,) + else: + self._byeaster = tuple(sorted(byeaster)) + + self._original_rule['byeaster'] = self._byeaster + else: + self._byeaster = None + + # bymonthday + if bymonthday is None: + self._bymonthday = () + self._bynmonthday = () + else: + if isinstance(bymonthday, integer_types): + bymonthday = (bymonthday,) + + bymonthday = set(bymonthday) # Ensure it's unique + + self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) + self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) + + # Storing positive numbers first, then negative numbers + if 'bymonthday' not in self._original_rule: + self._original_rule['bymonthday'] = tuple( + itertools.chain(self._bymonthday, self._bynmonthday)) + + # byweekno + if byweekno is None: + self._byweekno = None + else: + if isinstance(byweekno, integer_types): + byweekno = (byweekno,) + + self._byweekno = tuple(sorted(set(byweekno))) + + self._original_rule['byweekno'] = self._byweekno + + # byweekday / bynweekday + if byweekday is None: + self._byweekday = None + self._bynweekday = None + else: + # If it's one of the valid non-sequence types, convert to a + # single-element sequence before the iterator that builds the + # byweekday set. 
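The ``bysetpos`` handling above is what enables the "last working day of the month" pattern mentioned in the class docstring; a minimal sketch, with an arbitrary ``dtstart``:

    from datetime import datetime
    from dateutil.rrule import rrule, MONTHLY, MO, TU, WE, TH, FR

    last_workdays = rrule(MONTHLY, count=3, bysetpos=-1,
                          byweekday=(MO, TU, WE, TH, FR),
                          dtstart=datetime(2018, 1, 1))
    list(last_workdays)
    # expected: 2018-01-31, 2018-02-28, 2018-03-30 (each the month's last weekday)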
+ if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): + byweekday = (byweekday,) + + self._byweekday = set() + self._bynweekday = set() + for wday in byweekday: + if isinstance(wday, integer_types): + self._byweekday.add(wday) + elif not wday.n or freq > MONTHLY: + self._byweekday.add(wday.weekday) + else: + self._bynweekday.add((wday.weekday, wday.n)) + + if not self._byweekday: + self._byweekday = None + elif not self._bynweekday: + self._bynweekday = None + + if self._byweekday is not None: + self._byweekday = tuple(sorted(self._byweekday)) + orig_byweekday = [weekday(x) for x in self._byweekday] + else: + orig_byweekday = () + + if self._bynweekday is not None: + self._bynweekday = tuple(sorted(self._bynweekday)) + orig_bynweekday = [weekday(*x) for x in self._bynweekday] + else: + orig_bynweekday = () + + if 'byweekday' not in self._original_rule: + self._original_rule['byweekday'] = tuple(itertools.chain( + orig_byweekday, orig_bynweekday)) + + # byhour + if byhour is None: + if freq < HOURLY: + self._byhour = {dtstart.hour} + else: + self._byhour = None + else: + if isinstance(byhour, integer_types): + byhour = (byhour,) + + if freq == HOURLY: + self._byhour = self.__construct_byset(start=dtstart.hour, + byxxx=byhour, + base=24) + else: + self._byhour = set(byhour) + + self._byhour = tuple(sorted(self._byhour)) + self._original_rule['byhour'] = self._byhour + + # byminute + if byminute is None: + if freq < MINUTELY: + self._byminute = {dtstart.minute} + else: + self._byminute = None + else: + if isinstance(byminute, integer_types): + byminute = (byminute,) + + if freq == MINUTELY: + self._byminute = self.__construct_byset(start=dtstart.minute, + byxxx=byminute, + base=60) + else: + self._byminute = set(byminute) + + self._byminute = tuple(sorted(self._byminute)) + self._original_rule['byminute'] = self._byminute + + # bysecond + if bysecond is None: + if freq < SECONDLY: + self._bysecond = ((dtstart.second,)) + else: + self._bysecond = None + else: + if isinstance(bysecond, integer_types): + bysecond = (bysecond,) + + self._bysecond = set(bysecond) + + if freq == SECONDLY: + self._bysecond = self.__construct_byset(start=dtstart.second, + byxxx=bysecond, + base=60) + else: + self._bysecond = set(bysecond) + + self._bysecond = tuple(sorted(self._bysecond)) + self._original_rule['bysecond'] = self._bysecond + + if self._freq >= HOURLY: + self._timeset = None + else: + self._timeset = [] + for hour in self._byhour: + for minute in self._byminute: + for second in self._bysecond: + self._timeset.append( + datetime.time(hour, minute, second, + tzinfo=self._tzinfo)) + self._timeset.sort() + self._timeset = tuple(self._timeset) + + def __str__(self): + """ + Output a string that would generate this RRULE if passed to rrulestr. + This is mostly compatible with RFC5545, except for the + dateutil-specific extension BYEASTER. 
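A short sketch of the string form produced by this method (values illustrative); per the docstring above, the result can be passed back through ``rrulestr``:

    from datetime import datetime
    from dateutil.rrule import rrule, WEEKLY, MO

    rule = rrule(WEEKLY, byweekday=MO, count=4, dtstart=datetime(2018, 1, 1))
    print(str(rule))
    # should print something like:
    # DTSTART:20180101T000000
    # RRULE:FREQ=WEEKLY;COUNT=4;BYDAY=MO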
+ """ + + output = [] + h, m, s = [None] * 3 + if self._dtstart: + output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) + h, m, s = self._dtstart.timetuple()[3:6] + + parts = ['FREQ=' + FREQNAMES[self._freq]] + if self._interval != 1: + parts.append('INTERVAL=' + str(self._interval)) + + if self._wkst: + parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) + + if self._count is not None: + parts.append('COUNT=' + str(self._count)) + + if self._until: + parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) + + if self._original_rule.get('byweekday') is not None: + # The str() method on weekday objects doesn't generate + # RFC5545-compliant strings, so we should modify that. + original_rule = dict(self._original_rule) + wday_strings = [] + for wday in original_rule['byweekday']: + if wday.n: + wday_strings.append('{n:+d}{wday}'.format( + n=wday.n, + wday=repr(wday)[0:2])) + else: + wday_strings.append(repr(wday)) + + original_rule['byweekday'] = wday_strings + else: + original_rule = self._original_rule + + partfmt = '{name}={vals}' + for name, key in [('BYSETPOS', 'bysetpos'), + ('BYMONTH', 'bymonth'), + ('BYMONTHDAY', 'bymonthday'), + ('BYYEARDAY', 'byyearday'), + ('BYWEEKNO', 'byweekno'), + ('BYDAY', 'byweekday'), + ('BYHOUR', 'byhour'), + ('BYMINUTE', 'byminute'), + ('BYSECOND', 'bysecond'), + ('BYEASTER', 'byeaster')]: + value = original_rule.get(key) + if value: + parts.append(partfmt.format(name=name, vals=(','.join(str(v) + for v in value)))) + + output.append('RRULE:' + ';'.join(parts)) + return '\n'.join(output) + + def replace(self, **kwargs): + """Return new rrule with same attributes except for those attributes given new + values by whichever keyword arguments are specified.""" + new_kwargs = {"interval": self._interval, + "count": self._count, + "dtstart": self._dtstart, + "freq": self._freq, + "until": self._until, + "wkst": self._wkst, + "cache": False if self._cache is None else True } + new_kwargs.update(self._original_rule) + new_kwargs.update(kwargs) + return rrule(**new_kwargs) + + def _iter(self): + year, month, day, hour, minute, second, weekday, yearday, _ = \ + self._dtstart.timetuple() + + # Some local variables to speed things up a bit + freq = self._freq + interval = self._interval + wkst = self._wkst + until = self._until + bymonth = self._bymonth + byweekno = self._byweekno + byyearday = self._byyearday + byweekday = self._byweekday + byeaster = self._byeaster + bymonthday = self._bymonthday + bynmonthday = self._bynmonthday + bysetpos = self._bysetpos + byhour = self._byhour + byminute = self._byminute + bysecond = self._bysecond + + ii = _iterinfo(self) + ii.rebuild(year, month) + + getdayset = {YEARLY: ii.ydayset, + MONTHLY: ii.mdayset, + WEEKLY: ii.wdayset, + DAILY: ii.ddayset, + HOURLY: ii.ddayset, + MINUTELY: ii.ddayset, + SECONDLY: ii.ddayset}[freq] + + if freq < HOURLY: + timeset = self._timeset + else: + gettimeset = {HOURLY: ii.htimeset, + MINUTELY: ii.mtimeset, + SECONDLY: ii.stimeset}[freq] + if ((freq >= HOURLY and + self._byhour and hour not in self._byhour) or + (freq >= MINUTELY and + self._byminute and minute not in self._byminute) or + (freq >= SECONDLY and + self._bysecond and second not in self._bysecond)): + timeset = () + else: + timeset = gettimeset(hour, minute, second) + + total = 0 + count = self._count + while True: + # Get dayset with the right frequency + dayset, start, end = getdayset(year, month, day) + + # Do the "hard" work ;-) + filtered = False + for i in dayset[start:end]: + if ((bymonth and ii.mmask[i] not in 
bymonth) or + (byweekno and not ii.wnomask[i]) or + (byweekday and ii.wdaymask[i] not in byweekday) or + (ii.nwdaymask and not ii.nwdaymask[i]) or + (byeaster and not ii.eastermask[i]) or + ((bymonthday or bynmonthday) and + ii.mdaymask[i] not in bymonthday and + ii.nmdaymask[i] not in bynmonthday) or + (byyearday and + ((i < ii.yearlen and i+1 not in byyearday and + -ii.yearlen+i not in byyearday) or + (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and + -ii.nextyearlen+i-ii.yearlen not in byyearday)))): + dayset[i] = None + filtered = True + + # Output results + if bysetpos and timeset: + poslist = [] + for pos in bysetpos: + if pos < 0: + daypos, timepos = divmod(pos, len(timeset)) + else: + daypos, timepos = divmod(pos-1, len(timeset)) + try: + i = [x for x in dayset[start:end] + if x is not None][daypos] + time = timeset[timepos] + except IndexError: + pass + else: + date = datetime.date.fromordinal(ii.yearordinal+i) + res = datetime.datetime.combine(date, time) + if res not in poslist: + poslist.append(res) + poslist.sort() + for res in poslist: + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + total += 1 + yield res + else: + for i in dayset[start:end]: + if i is not None: + date = datetime.date.fromordinal(ii.yearordinal + i) + for time in timeset: + res = datetime.datetime.combine(date, time) + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + + total += 1 + yield res + + # Handle frequency and interval + fixday = False + if freq == YEARLY: + year += interval + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == MONTHLY: + month += interval + if month > 12: + div, mod = divmod(month, 12) + month = mod + year += div + if month == 0: + month = 12 + year -= 1 + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == WEEKLY: + if wkst > weekday: + day += -(weekday+1+(6-wkst))+self._interval*7 + else: + day += -(weekday-wkst)+self._interval*7 + weekday = wkst + fixday = True + elif freq == DAILY: + day += interval + fixday = True + elif freq == HOURLY: + if filtered: + # Jump to one iteration before next day + hour += ((23-hour)//interval)*interval + + if byhour: + ndays, hour = self.__mod_distance(value=hour, + byxxx=self._byhour, + base=24) + else: + ndays, hour = divmod(hour+interval, 24) + + if ndays: + day += ndays + fixday = True + + timeset = gettimeset(hour, minute, second) + elif freq == MINUTELY: + if filtered: + # Jump to one iteration before next day + minute += ((1439-(hour*60+minute))//interval)*interval + + valid = False + rep_rate = (24*60) + for j in range(rep_rate // gcd(interval, rep_rate)): + if byminute: + nhours, minute = \ + self.__mod_distance(value=minute, + byxxx=self._byminute, + base=60) + else: + nhours, minute = divmod(minute+interval, 60) + + div, hour = divmod(hour+nhours, 24) + if div: + day += div + fixday = True + filtered = False + + if not byhour or hour in byhour: + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval and ' + + 'byhour resulting in empty rule.') + + timeset = gettimeset(hour, minute, second) + elif freq == SECONDLY: + if filtered: + # Jump to one iteration before next day + second += (((86399 - (hour * 3600 + minute * 60 + second)) + // interval) * interval) + + 
rep_rate = (24 * 3600) + valid = False + for j in range(0, rep_rate // gcd(interval, rep_rate)): + if bysecond: + nminutes, second = \ + self.__mod_distance(value=second, + byxxx=self._bysecond, + base=60) + else: + nminutes, second = divmod(second+interval, 60) + + div, minute = divmod(minute+nminutes, 60) + if div: + hour += div + div, hour = divmod(hour, 24) + if div: + day += div + fixday = True + + if ((not byhour or hour in byhour) and + (not byminute or minute in byminute) and + (not bysecond or second in bysecond)): + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval, ' + + 'byhour and byminute resulting in empty' + + ' rule.') + + timeset = gettimeset(hour, minute, second) + + if fixday and day > 28: + daysinmonth = calendar.monthrange(year, month)[1] + if day > daysinmonth: + while day > daysinmonth: + day -= daysinmonth + month += 1 + if month == 13: + month = 1 + year += 1 + if year > datetime.MAXYEAR: + self._len = total + return + daysinmonth = calendar.monthrange(year, month)[1] + ii.rebuild(year, month) + + def __construct_byset(self, start, byxxx, base): + """ + If a `BYXXX` sequence is passed to the constructor at the same level as + `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some + specifications which cannot be reached given some starting conditions. + + This occurs whenever the interval is not coprime with the base of a + given unit and the difference between the starting position and the + ending position is not coprime with the greatest common denominator + between the interval and the base. For example, with a FREQ of hourly + starting at 17:00 and an interval of 4, the only valid values for + BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not + coprime. + + :param start: + Specifies the starting position. + :param byxxx: + An iterable containing the list of allowed values. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + This does not preserve the type of the iterable, returning a set, since + the values should be unique and the order is irrelevant, this will + speed up later lookups. + + In the event of an empty set, raises a :exception:`ValueError`, as this + results in an empty rrule. + """ + + cset = set() + + # Support a single byxxx value. + if isinstance(byxxx, integer_types): + byxxx = (byxxx, ) + + for num in byxxx: + i_gcd = gcd(self._interval, base) + # Use divmod rather than % because we need to wrap negative nums. + if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: + cset.add(num) + + if len(cset) == 0: + raise ValueError("Invalid rrule byxxx generates an empty set.") + + return cset + + def __mod_distance(self, value, byxxx, base): + """ + Calculates the next value in a sequence where the `FREQ` parameter is + specified along with a `BYXXX` parameter at the same "level" + (e.g. `HOURLY` specified with `BYHOUR`). + + :param value: + The old value of the component. + :param byxxx: + The `BYXXX` set, which should have been generated by + `rrule._construct_byset`, or something else which checks that a + valid rule is present. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + If a valid value is not found after `base` iterations (the maximum + number before the sequence would start to repeat), this raises a + :exception:`ValueError`, as no valid values were found. 
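A small standalone check of the reachability rule described for ``__construct_byset`` above (variable names are illustrative): starting at hour 17 with an interval of 4, only hours congruent to 17 modulo gcd(4, 24) are reachable.

    from math import gcd

    start, interval, base = 17, 4, 24
    reachable = {h for h in range(base) if (h - start) % gcd(interval, base) == 0}
    # reachable == {1, 5, 9, 13, 17, 21}, matching the BYHOUR example in the docstring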
+ + This returns a tuple of `divmod(n*interval, base)`, where `n` is the + smallest number of `interval` repetitions until the next specified + value in `byxxx` is found. + """ + accumulator = 0 + for ii in range(1, base + 1): + # Using divmod() over % to account for negative intervals + div, value = divmod(value + self._interval, base) + accumulator += div + if value in byxxx: + return (accumulator, value) + + +class _iterinfo(object): + __slots__ = ["rrule", "lastyear", "lastmonth", + "yearlen", "nextyearlen", "yearordinal", "yearweekday", + "mmask", "mrange", "mdaymask", "nmdaymask", + "wdaymask", "wnomask", "nwdaymask", "eastermask"] + + def __init__(self, rrule): + for attr in self.__slots__: + setattr(self, attr, None) + self.rrule = rrule + + def rebuild(self, year, month): + # Every mask is 7 days longer to handle cross-year weekly periods. + rr = self.rrule + if year != self.lastyear: + self.yearlen = 365 + calendar.isleap(year) + self.nextyearlen = 365 + calendar.isleap(year + 1) + firstyday = datetime.date(year, 1, 1) + self.yearordinal = firstyday.toordinal() + self.yearweekday = firstyday.weekday() + + wday = datetime.date(year, 1, 1).weekday() + if self.yearlen == 365: + self.mmask = M365MASK + self.mdaymask = MDAY365MASK + self.nmdaymask = NMDAY365MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M365RANGE + else: + self.mmask = M366MASK + self.mdaymask = MDAY366MASK + self.nmdaymask = NMDAY366MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M366RANGE + + if not rr._byweekno: + self.wnomask = None + else: + self.wnomask = [0]*(self.yearlen+7) + # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) + no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 + if no1wkst >= 4: + no1wkst = 0 + # Number of days in the year, plus the days we got + # from last year. + wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 + else: + # Number of days in the year, minus the days we + # left in last year. + wyearlen = self.yearlen-no1wkst + div, mod = divmod(wyearlen, 7) + numweeks = div+mod//4 + for n in rr._byweekno: + if n < 0: + n += numweeks+1 + if not (0 < n <= numweeks): + continue + if n > 1: + i = no1wkst+(n-1)*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + else: + i = no1wkst + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if 1 in rr._byweekno: + # Check week number 1 of next year as well + # TODO: Check -numweeks for next year. + i = no1wkst+numweeks*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + if i < self.yearlen: + # If week starts in next year, we + # don't care about it. + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if no1wkst: + # Check last week number of last year as + # well. If no1wkst is 0, either the year + # started on week start, or week number 1 + # got days from last year, so there are no + # days from last year's last week number in + # this year. 
+ if -1 not in rr._byweekno: + lyearweekday = datetime.date(year-1, 1, 1).weekday() + lno1wkst = (7-lyearweekday+rr._wkst) % 7 + lyearlen = 365+calendar.isleap(year-1) + if lno1wkst >= 4: + lno1wkst = 0 + lnumweeks = 52+(lyearlen + + (lyearweekday-rr._wkst) % 7) % 7//4 + else: + lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 + else: + lnumweeks = -1 + if lnumweeks in rr._byweekno: + for i in range(no1wkst): + self.wnomask[i] = 1 + + if (rr._bynweekday and (month != self.lastmonth or + year != self.lastyear)): + ranges = [] + if rr._freq == YEARLY: + if rr._bymonth: + for month in rr._bymonth: + ranges.append(self.mrange[month-1:month+1]) + else: + ranges = [(0, self.yearlen)] + elif rr._freq == MONTHLY: + ranges = [self.mrange[month-1:month+1]] + if ranges: + # Weekly frequency won't get here, so we may not + # care about cross-year weekly periods. + self.nwdaymask = [0]*self.yearlen + for first, last in ranges: + last -= 1 + for wday, n in rr._bynweekday: + if n < 0: + i = last+(n+1)*7 + i -= (self.wdaymask[i]-wday) % 7 + else: + i = first+(n-1)*7 + i += (7-self.wdaymask[i]+wday) % 7 + if first <= i <= last: + self.nwdaymask[i] = 1 + + if rr._byeaster: + self.eastermask = [0]*(self.yearlen+7) + eyday = easter.easter(year).toordinal()-self.yearordinal + for offset in rr._byeaster: + self.eastermask[eyday+offset] = 1 + + self.lastyear = year + self.lastmonth = month + + def ydayset(self, year, month, day): + return list(range(self.yearlen)), 0, self.yearlen + + def mdayset(self, year, month, day): + dset = [None]*self.yearlen + start, end = self.mrange[month-1:month+1] + for i in range(start, end): + dset[i] = i + return dset, start, end + + def wdayset(self, year, month, day): + # We need to handle cross-year weeks here. + dset = [None]*(self.yearlen+7) + i = datetime.date(year, month, day).toordinal()-self.yearordinal + start = i + for j in range(7): + dset[i] = i + i += 1 + # if (not (0 <= i < self.yearlen) or + # self.wdaymask[i] == self.rrule._wkst): + # This will cross the year boundary, if necessary. + if self.wdaymask[i] == self.rrule._wkst: + break + return dset, start, i + + def ddayset(self, year, month, day): + dset = [None] * self.yearlen + i = datetime.date(year, month, day).toordinal() - self.yearordinal + dset[i] = i + return dset, i, i + 1 + + def htimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for minute in rr._byminute: + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, + tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def mtimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def stimeset(self, hour, minute, second): + return (datetime.time(hour, minute, second, + tzinfo=self.rrule._tzinfo),) + + +class rruleset(rrulebase): + """ The rruleset type allows more complex recurrence setups, mixing + multiple rules, dates, exclusion rules, and exclusion dates. The type + constructor takes the following keyword arguments: + + :param cache: If True, caching of results will be enabled, improving + performance of multiple queries considerably. 
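+
+    A minimal sketch of combining an inclusion rule with an exclusion date
+    (the dates below are purely illustrative):
+
+    .. doctest::
+
+        >>> from datetime import datetime
+        >>> from dateutil.rrule import rrule, rruleset, DAILY
+        >>> rset = rruleset()
+        >>> rset.rrule(rrule(DAILY, count=4, dtstart=datetime(2020, 1, 1)))
+        >>> rset.exdate(datetime(2020, 1, 3))
+        >>> [dt.day for dt in rset]
+        [1, 2, 4]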
""" + + class _genitem(object): + def __init__(self, genlist, gen): + try: + self.dt = advance_iterator(gen) + genlist.append(self) + except StopIteration: + pass + self.genlist = genlist + self.gen = gen + + def __next__(self): + try: + self.dt = advance_iterator(self.gen) + except StopIteration: + if self.genlist[0] is self: + heapq.heappop(self.genlist) + else: + self.genlist.remove(self) + heapq.heapify(self.genlist) + + next = __next__ + + def __lt__(self, other): + return self.dt < other.dt + + def __gt__(self, other): + return self.dt > other.dt + + def __eq__(self, other): + return self.dt == other.dt + + def __ne__(self, other): + return self.dt != other.dt + + def __init__(self, cache=False): + super(rruleset, self).__init__(cache) + self._rrule = [] + self._rdate = [] + self._exrule = [] + self._exdate = [] + + @_invalidates_cache + def rrule(self, rrule): + """ Include the given :py:class:`rrule` instance in the recurrence set + generation. """ + self._rrule.append(rrule) + + @_invalidates_cache + def rdate(self, rdate): + """ Include the given :py:class:`datetime` instance in the recurrence + set generation. """ + self._rdate.append(rdate) + + @_invalidates_cache + def exrule(self, exrule): + """ Include the given rrule instance in the recurrence set exclusion + list. Dates which are part of the given recurrence rules will not + be generated, even if some inclusive rrule or rdate matches them. + """ + self._exrule.append(exrule) + + @_invalidates_cache + def exdate(self, exdate): + """ Include the given datetime instance in the recurrence set + exclusion list. Dates included that way will not be generated, + even if some inclusive rrule or rdate matches them. """ + self._exdate.append(exdate) + + def _iter(self): + rlist = [] + self._rdate.sort() + self._genitem(rlist, iter(self._rdate)) + for gen in [iter(x) for x in self._rrule]: + self._genitem(rlist, gen) + exlist = [] + self._exdate.sort() + self._genitem(exlist, iter(self._exdate)) + for gen in [iter(x) for x in self._exrule]: + self._genitem(exlist, gen) + lastdt = None + total = 0 + heapq.heapify(rlist) + heapq.heapify(exlist) + while rlist: + ritem = rlist[0] + if not lastdt or lastdt != ritem.dt: + while exlist and exlist[0] < ritem: + exitem = exlist[0] + advance_iterator(exitem) + if exlist and exlist[0] is exitem: + heapq.heapreplace(exlist, exitem) + if not exlist or ritem != exlist[0]: + total += 1 + yield ritem.dt + lastdt = ritem.dt + advance_iterator(ritem) + if rlist and rlist[0] is ritem: + heapq.heapreplace(rlist, ritem) + self._len = total + + + + +class _rrulestr(object): + """ Parses a string representation of a recurrence rule or set of + recurrence rules. + + :param s: + Required, a string defining one or more recurrence rules. + + :param dtstart: + If given, used as the default recurrence start if not specified in the + rule string. + + :param cache: + If set ``True`` caching of results will be enabled, improving + performance of multiple queries considerably. + + :param unfold: + If set ``True`` indicates that a rule string is split over more + than one line and should be joined before processing. + + :param forceset: + If set ``True`` forces a :class:`dateutil.rrule.rruleset` to + be returned. + + :param compatible: + If set ``True`` forces ``unfold`` and ``forceset`` to be ``True``. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime.datetime` object is returned. 
+ + :param tzids: + If given, a callable or mapping used to retrieve a + :class:`datetime.tzinfo` from a string representation. + Defaults to :func:`dateutil.tz.gettz`. + + :param tzinfos: + Additional time zone names / aliases which may be present in a string + representation. See :func:`dateutil.parser.parse` for more + information. + + :return: + Returns a :class:`dateutil.rrule.rruleset` or + :class:`dateutil.rrule.rrule` + """ + + _freq_map = {"YEARLY": YEARLY, + "MONTHLY": MONTHLY, + "WEEKLY": WEEKLY, + "DAILY": DAILY, + "HOURLY": HOURLY, + "MINUTELY": MINUTELY, + "SECONDLY": SECONDLY} + + _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, + "FR": 4, "SA": 5, "SU": 6} + + def _handle_int(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = int(value) + + def _handle_int_list(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = [int(x) for x in value.split(',')] + + _handle_INTERVAL = _handle_int + _handle_COUNT = _handle_int + _handle_BYSETPOS = _handle_int_list + _handle_BYMONTH = _handle_int_list + _handle_BYMONTHDAY = _handle_int_list + _handle_BYYEARDAY = _handle_int_list + _handle_BYEASTER = _handle_int_list + _handle_BYWEEKNO = _handle_int_list + _handle_BYHOUR = _handle_int_list + _handle_BYMINUTE = _handle_int_list + _handle_BYSECOND = _handle_int_list + + def _handle_FREQ(self, rrkwargs, name, value, **kwargs): + rrkwargs["freq"] = self._freq_map[value] + + def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): + global parser + if not parser: + from dateutil import parser + try: + rrkwargs["until"] = parser.parse(value, + ignoretz=kwargs.get("ignoretz"), + tzinfos=kwargs.get("tzinfos")) + except ValueError: + raise ValueError("invalid until date") + + def _handle_WKST(self, rrkwargs, name, value, **kwargs): + rrkwargs["wkst"] = self._weekday_map[value] + + def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): + """ + Two ways to specify this: +1MO or MO(+1) + """ + l = [] + for wday in value.split(','): + if '(' in wday: + # If it's of the form TH(+1), etc. + splt = wday.split('(') + w = splt[0] + n = int(splt[1][:-1]) + elif len(wday): + # If it's of the form +1MO + for i in range(len(wday)): + if wday[i] not in '+-0123456789': + break + n = wday[:i] or None + w = wday[i:] + if n: + n = int(n) + else: + raise ValueError("Invalid (empty) BYDAY specification.") + + l.append(weekdays[self._weekday_map[w]](n)) + rrkwargs["byweekday"] = l + + _handle_BYDAY = _handle_BYWEEKDAY + + def _parse_rfc_rrule(self, line, + dtstart=None, + cache=False, + ignoretz=False, + tzinfos=None): + if line.find(':') != -1: + name, value = line.split(':') + if name != "RRULE": + raise ValueError("unknown parameter name") + else: + value = line + rrkwargs = {} + for pair in value.split(';'): + name, value = pair.split('=') + name = name.upper() + value = value.upper() + try: + getattr(self, "_handle_"+name)(rrkwargs, name, value, + ignoretz=ignoretz, + tzinfos=tzinfos) + except AttributeError: + raise ValueError("unknown parameter '%s'" % name) + except (KeyError, ValueError): + raise ValueError("invalid '%s': %s" % (name, value)) + return rrule(dtstart=dtstart, cache=cache, **rrkwargs) + + def _parse_date_value(self, date_value, parms, rule_tzids, + ignoretz, tzids, tzinfos): + global parser + if not parser: + from dateutil import parser + + datevals = [] + value_found = False + TZID = None + + for parm in parms: + if parm.startswith("TZID="): + try: + tzkey = rule_tzids[parm.split('TZID=')[-1]] + except KeyError: + continue + if tzids is None: + from . 
import tz + tzlookup = tz.gettz + elif callable(tzids): + tzlookup = tzids + else: + tzlookup = getattr(tzids, 'get', None) + if tzlookup is None: + msg = ('tzids must be a callable, mapping, or None, ' + 'not %s' % tzids) + raise ValueError(msg) + + TZID = tzlookup(tzkey) + continue + + # RFC 5445 3.8.2.4: The VALUE parameter is optional, but may be found + # only once. + if parm not in {"VALUE=DATE-TIME", "VALUE=DATE"}: + raise ValueError("unsupported parm: " + parm) + else: + if value_found: + msg = ("Duplicate value parameter found in: " + parm) + raise ValueError(msg) + value_found = True + + for datestr in date_value.split(','): + date = parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos) + if TZID is not None: + if date.tzinfo is None: + date = date.replace(tzinfo=TZID) + else: + raise ValueError('DTSTART/EXDATE specifies multiple timezone') + datevals.append(date) + + return datevals + + def _parse_rfc(self, s, + dtstart=None, + cache=False, + unfold=False, + forceset=False, + compatible=False, + ignoretz=False, + tzids=None, + tzinfos=None): + global parser + if compatible: + forceset = True + unfold = True + + TZID_NAMES = dict(map( + lambda x: (x.upper(), x), + re.findall('TZID=(?P[^:]+):', s) + )) + s = s.upper() + if not s.strip(): + raise ValueError("empty string") + if unfold: + lines = s.splitlines() + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + else: + lines = s.split() + if (not forceset and len(lines) == 1 and (s.find(':') == -1 or + s.startswith('RRULE:'))): + return self._parse_rfc_rrule(lines[0], cache=cache, + dtstart=dtstart, ignoretz=ignoretz, + tzinfos=tzinfos) + else: + rrulevals = [] + rdatevals = [] + exrulevals = [] + exdatevals = [] + for line in lines: + if not line: + continue + if line.find(':') == -1: + name = "RRULE" + value = line + else: + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0] + parms = parms[1:] + if name == "RRULE": + for parm in parms: + raise ValueError("unsupported RRULE parm: "+parm) + rrulevals.append(value) + elif name == "RDATE": + for parm in parms: + if parm != "VALUE=DATE-TIME": + raise ValueError("unsupported RDATE parm: "+parm) + rdatevals.append(value) + elif name == "EXRULE": + for parm in parms: + raise ValueError("unsupported EXRULE parm: "+parm) + exrulevals.append(value) + elif name == "EXDATE": + exdatevals.extend( + self._parse_date_value(value, parms, + TZID_NAMES, ignoretz, + tzids, tzinfos) + ) + elif name == "DTSTART": + dtvals = self._parse_date_value(value, parms, TZID_NAMES, + ignoretz, tzids, tzinfos) + if len(dtvals) != 1: + raise ValueError("Multiple DTSTART values specified:" + + value) + dtstart = dtvals[0] + else: + raise ValueError("unsupported property: "+name) + if (forceset or len(rrulevals) > 1 or rdatevals + or exrulevals or exdatevals): + if not parser and (rdatevals or exdatevals): + from dateutil import parser + rset = rruleset(cache=cache) + for value in rrulevals: + rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in rdatevals: + for datestr in value.split(','): + rset.rdate(parser.parse(datestr, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exrulevals: + rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exdatevals: + rset.exdate(value) + if 
compatible and dtstart: + rset.rdate(dtstart) + return rset + else: + return self._parse_rfc_rrule(rrulevals[0], + dtstart=dtstart, + cache=cache, + ignoretz=ignoretz, + tzinfos=tzinfos) + + def __call__(self, s, **kwargs): + return self._parse_rfc(s, **kwargs) + + +rrulestr = _rrulestr() + +# vim:ts=4:sw=4:et diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tz/__init__.py b/minor_project/lib/python3.6/site-packages/dateutil/tz/__init__.py new file mode 100644 index 0000000..af1352c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/tz/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +from .tz import * +from .tz import __doc__ + +__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", + "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", + "enfold", "datetime_ambiguous", "datetime_exists", + "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"] + + +class DeprecatedTzFormatWarning(Warning): + """Warning raised when time zones are parsed from deprecated formats.""" diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..6c6c031 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/_common.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/_common.cpython-36.pyc new file mode 100644 index 0000000..7f05d77 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/_common.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/_factories.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/_factories.cpython-36.pyc new file mode 100644 index 0000000..7e35bf0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/_factories.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/tz.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/tz.cpython-36.pyc new file mode 100644 index 0000000..645681c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/tz.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/win.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/win.cpython-36.pyc new file mode 100644 index 0000000..3618846 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/tz/__pycache__/win.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tz/_common.py b/minor_project/lib/python3.6/site-packages/dateutil/tz/_common.py new file mode 100644 index 0000000..e6ac118 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/tz/_common.py @@ -0,0 +1,419 @@ +from six import PY2 + +from functools import wraps + +from datetime import datetime, timedelta, tzinfo + + +ZERO = timedelta(0) + +__all__ = ['tzname_in_python2', 'enfold'] + + +def tzname_in_python2(namefunc): + """Change unicode output into bytestrings in Python 2 + + tzname() API changed in Python 3. 
It used to return bytes, but was changed + to unicode strings + """ + if PY2: + @wraps(namefunc) + def adjust_encoding(*args, **kwargs): + name = namefunc(*args, **kwargs) + if name is not None: + name = name.encode() + + return name + + return adjust_encoding + else: + return namefunc + + +# The following is adapted from Alexander Belopolsky's tz library +# https://github.com/abalkin/tz +if hasattr(datetime, 'fold'): + # This is the pre-python 3.6 fold situation + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + return dt.replace(fold=fold) + +else: + class _DatetimeWithFold(datetime): + """ + This is a class designed to provide a PEP 495-compliant interface for + Python versions before 3.6. It is used only for dates in a fold, so + the ``fold`` attribute is fixed at ``1``. + + .. versionadded:: 2.6.0 + """ + __slots__ = () + + def replace(self, *args, **kwargs): + """ + Return a datetime with the same attributes, except for those + attributes given new values by whichever keyword arguments are + specified. Note that tzinfo=None can be specified to create a naive + datetime from an aware datetime with no conversion of date and time + data. + + This is reimplemented in ``_DatetimeWithFold`` because pypy3 will + return a ``datetime.datetime`` even if ``fold`` is unchanged. + """ + argnames = ( + 'year', 'month', 'day', 'hour', 'minute', 'second', + 'microsecond', 'tzinfo' + ) + + for arg, argname in zip(args, argnames): + if argname in kwargs: + raise TypeError('Duplicate argument: {}'.format(argname)) + + kwargs[argname] = arg + + for argname in argnames: + if argname not in kwargs: + kwargs[argname] = getattr(self, argname) + + dt_class = self.__class__ if kwargs.get('fold', 1) else datetime + + return dt_class(**kwargs) + + @property + def fold(self): + return 1 + + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + if getattr(dt, 'fold', 0) == fold: + return dt + + args = dt.timetuple()[:6] + args += (dt.microsecond, dt.tzinfo) + + if fold: + return _DatetimeWithFold(*args) + else: + return datetime(*args) + + +def _validate_fromutc_inputs(f): + """ + The CPython version of ``fromutc`` checks that the input is a ``datetime`` + object and that ``self`` is attached as its ``tzinfo``. 
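+
+    A short sketch of the check in action, using the ``tzutc`` singleton from
+    :mod:`dateutil.tz` (the argument is an arbitrary non-datetime value):
+
+    .. doctest::
+
+        >>> from dateutil import tz
+        >>> tz.UTC.fromutc("2020-01-01")
+        Traceback (most recent call last):
+        ...
+        TypeError: fromutc() requires a datetime argument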
+ """ + @wraps(f) + def fromutc(self, dt): + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + return f(self, dt) + + return fromutc + + +class _tzinfo(tzinfo): + """ + Base class for all ``dateutil`` ``tzinfo`` objects. + """ + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + + dt = dt.replace(tzinfo=self) + + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) + + return same_dt and not same_offset + + def _fold_status(self, dt_utc, dt_wall): + """ + Determine the fold status of a "wall" datetime, given a representation + of the same datetime as a (naive) UTC datetime. This is calculated based + on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all + datetimes, and that this offset is the actual number of hours separating + ``dt_utc`` and ``dt_wall``. + + :param dt_utc: + Representation of the datetime as UTC + + :param dt_wall: + Representation of the datetime as "wall time". This parameter must + either have a `fold` attribute or have a fold-naive + :class:`datetime.tzinfo` attached, otherwise the calculation may + fail. + """ + if self.is_ambiguous(dt_wall): + delta_wall = dt_wall - dt_utc + _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) + else: + _fold = 0 + + return _fold + + def _fold(self, dt): + return getattr(dt, 'fold', 0) + + def _fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). + + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + + # Re-implement the algorithm from Python's datetime.py + dtoff = dt.utcoffset() + if dtoff is None: + raise ValueError("fromutc() requires a non-None utcoffset() " + "result") + + # The original datetime.py code assumes that `dst()` defaults to + # zero during ambiguous times. PEP 495 inverts this presumption, so + # for pre-PEP 495 versions of python, we need to tweak the algorithm. + dtdst = dt.dst() + if dtdst is None: + raise ValueError("fromutc() requires a non-None dst() result") + delta = dtoff - dtdst + + dt += delta + # Set fold=1 so we can default to being in the fold for + # ambiguous dates. + dtdst = enfold(dt, fold=1).dst() + if dtdst is None: + raise ValueError("fromutc(): dt.dst gave inconsistent " + "results; cannot convert") + return dt + dtdst + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). 
+ + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + dt_wall = self._fromutc(dt) + + # Calculate the fold status given the two datetimes. + _fold = self._fold_status(dt, dt_wall) + + # Set the default fold value for ambiguous dates + return enfold(dt_wall, fold=_fold) + + +class tzrangebase(_tzinfo): + """ + This is an abstract base class for time zones represented by an annual + transition into and out of DST. Child classes should implement the following + methods: + + * ``__init__(self, *args, **kwargs)`` + * ``transitions(self, year)`` - this is expected to return a tuple of + datetimes representing the DST on and off transitions in standard + time. + + A fully initialized ``tzrangebase`` subclass should also provide the + following attributes: + * ``hasdst``: Boolean whether or not the zone uses DST. + * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects + representing the respective UTC offsets. + * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short + abbreviations in DST and STD, respectively. + * ``_hasdst``: Whether or not the zone has DST. + + .. versionadded:: 2.6.0 + """ + def __init__(self): + raise NotImplementedError('tzrangebase is an abstract base class') + + def utcoffset(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_base_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + if self._isdst(dt): + return self._dst_abbr + else: + return self._std_abbr + + def fromutc(self, dt): + """ Given a datetime in UTC, return local time """ + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # Get transitions - if there are none, fixed offset + transitions = self.transitions(dt.year) + if transitions is None: + return dt + self.utcoffset(dt) + + # Get the transition times in UTC + dston, dstoff = transitions + + dston -= self._std_offset + dstoff -= self._std_offset + + utc_transitions = (dston, dstoff) + dt_utc = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt_utc, utc_transitions) + + if isdst: + dt_wall = dt + self._dst_offset + else: + dt_wall = dt + self._std_offset + + _fold = int(not isdst and self.is_ambiguous(dt_wall)) + + return enfold(dt_wall, fold=_fold) + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. 
versionadded:: 2.6.0 + """ + if not self.hasdst: + return False + + start, end = self.transitions(dt.year) + + dt = dt.replace(tzinfo=None) + return (end <= dt < end + self._dst_base_offset) + + def _isdst(self, dt): + if not self.hasdst: + return False + elif dt is None: + return None + + transitions = self.transitions(dt.year) + + if transitions is None: + return False + + dt = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt, transitions) + + # Handle ambiguous dates + if not isdst and self.is_ambiguous(dt): + return not self._fold(dt) + else: + return isdst + + def _naive_isdst(self, dt, transitions): + dston, dstoff = transitions + + dt = dt.replace(tzinfo=None) + + if dston < dstoff: + isdst = dston <= dt < dstoff + else: + isdst = not dstoff <= dt < dston + + return isdst + + @property + def _dst_base_offset(self): + return self._dst_offset - self._std_offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(...)" % self.__class__.__name__ + + __reduce__ = object.__reduce__ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tz/_factories.py b/minor_project/lib/python3.6/site-packages/dateutil/tz/_factories.py new file mode 100644 index 0000000..f8a6589 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/tz/_factories.py @@ -0,0 +1,80 @@ +from datetime import timedelta +import weakref +from collections import OrderedDict + +from six.moves import _thread + + +class _TzSingleton(type): + def __init__(cls, *args, **kwargs): + cls.__instance = None + super(_TzSingleton, cls).__init__(*args, **kwargs) + + def __call__(cls): + if cls.__instance is None: + cls.__instance = super(_TzSingleton, cls).__call__() + return cls.__instance + + +class _TzFactory(type): + def instance(cls, *args, **kwargs): + """Alternate constructor that returns a fresh instance""" + return type.__call__(cls, *args, **kwargs) + + +class _TzOffsetFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls._cache_lock = _thread.allocate_lock() + + def __call__(cls, name, offset): + if isinstance(offset, timedelta): + key = (name, offset.total_seconds()) + else: + key = (name, offset) + + instance = cls.__instances.get(key, None) + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(name, offset)) + + # This lock may not be necessary in Python 3. See GH issue #901 + with cls._cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + + +class _TzStrFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls.__cache_lock = _thread.allocate_lock() + + def __call__(cls, s, posix_offset=False): + key = (s, posix_offset) + instance = cls.__instances.get(key, None) + + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(s, posix_offset)) + + # This lock may not be necessary in Python 3. 
See GH issue #901 + with cls.__cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tz/tz.py b/minor_project/lib/python3.6/site-packages/dateutil/tz/tz.py new file mode 100644 index 0000000..af81e88 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/tz/tz.py @@ -0,0 +1,1849 @@ +# -*- coding: utf-8 -*- +""" +This module offers timezone implementations subclassing the abstract +:py:class:`datetime.tzinfo` type. There are classes to handle tzfile format +files (usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, +etc), TZ environment string (in all known formats), given ranges (with help +from relative deltas), local machine timezone, fixed offset timezone, and UTC +timezone. +""" +import datetime +import struct +import time +import sys +import os +import bisect +import weakref +from collections import OrderedDict + +import six +from six import string_types +from six.moves import _thread +from ._common import tzname_in_python2, _tzinfo +from ._common import tzrangebase, enfold +from ._common import _validate_fromutc_inputs + +from ._factories import _TzSingleton, _TzOffsetFactory +from ._factories import _TzStrFactory +try: + from .win import tzwin, tzwinlocal +except ImportError: + tzwin = tzwinlocal = None + +# For warning about rounding tzinfo +from warnings import warn + +ZERO = datetime.timedelta(0) +EPOCH = datetime.datetime.utcfromtimestamp(0) +EPOCHORDINAL = EPOCH.toordinal() + + +@six.add_metaclass(_TzSingleton) +class tzutc(datetime.tzinfo): + """ + This is a tzinfo object that represents the UTC time zone. + + **Examples:** + + .. doctest:: + + >>> from datetime import * + >>> from dateutil.tz import * + + >>> datetime.now() + datetime.datetime(2003, 9, 27, 9, 40, 1, 521290) + + >>> datetime.now(tzutc()) + datetime.datetime(2003, 9, 27, 12, 40, 12, 156379, tzinfo=tzutc()) + + >>> datetime.now(tzutc()).tzname() + 'UTC' + + .. versionchanged:: 2.7.0 + ``tzutc()`` is now a singleton, so the result of ``tzutc()`` will + always return the same object. + + .. doctest:: + + >>> from dateutil.tz import tzutc, UTC + >>> tzutc() is tzutc() + True + >>> tzutc() is UTC + True + """ + def utcoffset(self, dt): + return ZERO + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return "UTC" + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Fast track version of fromutc() returns the original ``dt`` object for + any valid :py:class:`datetime.datetime` object. + """ + return dt + + def __eq__(self, other): + if not isinstance(other, (tzutc, tzoffset)): + return NotImplemented + + return (isinstance(other, tzutc) or + (isinstance(other, tzoffset) and other._offset == ZERO)) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +#: Convenience constant providing a :class:`tzutc()` instance +#: +#: .. 
versionadded:: 2.7.0 +UTC = tzutc() + + +@six.add_metaclass(_TzOffsetFactory) +class tzoffset(datetime.tzinfo): + """ + A simple class for representing a fixed offset from UTC. + + :param name: + The timezone name, to be returned when ``tzname()`` is called. + :param offset: + The time zone offset in seconds, or (since version 2.6.0, represented + as a :py:class:`datetime.timedelta` object). + """ + def __init__(self, name, offset): + self._name = name + + try: + # Allow a timedelta + offset = offset.total_seconds() + except (TypeError, AttributeError): + pass + + self._offset = datetime.timedelta(seconds=_get_supported_offset(offset)) + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._name + + @_validate_fromutc_inputs + def fromutc(self, dt): + return dt + self._offset + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + def __eq__(self, other): + if not isinstance(other, tzoffset): + return NotImplemented + + return self._offset == other._offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s, %s)" % (self.__class__.__name__, + repr(self._name), + int(self._offset.total_seconds())) + + __reduce__ = object.__reduce__ + + +class tzlocal(_tzinfo): + """ + A :class:`tzinfo` subclass built around the ``time`` timezone functions. + """ + def __init__(self): + super(tzlocal, self).__init__() + + self._std_offset = datetime.timedelta(seconds=-time.timezone) + if time.daylight: + self._dst_offset = datetime.timedelta(seconds=-time.altzone) + else: + self._dst_offset = self._std_offset + + self._dst_saved = self._dst_offset - self._std_offset + self._hasdst = bool(self._dst_saved) + self._tznames = tuple(time.tzname) + + def utcoffset(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset - self._std_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._tznames[self._isdst(dt)] + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + naive_dst = self._naive_is_dst(dt) + return (not naive_dst and + (naive_dst != self._naive_is_dst(dt - self._dst_saved))) + + def _naive_is_dst(self, dt): + timestamp = _datetime_to_timestamp(dt) + return time.localtime(timestamp + time.timezone).tm_isdst + + def _isdst(self, dt, fold_naive=True): + # We can't use mktime here. It is unstable when deciding if + # the hour near to a change is DST or not. 
+ # + # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, + # dt.minute, dt.second, dt.weekday(), 0, -1)) + # return time.localtime(timestamp).tm_isdst + # + # The code above yields the following result: + # + # >>> import tz, datetime + # >>> t = tz.tzlocal() + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # + # Here is a more stable implementation: + # + if not self._hasdst: + return False + + # Check for ambiguous times: + dstval = self._naive_is_dst(dt) + fold = getattr(dt, 'fold', None) + + if self.is_ambiguous(dt): + if fold is not None: + return not self._fold(dt) + else: + return True + + return dstval + + def __eq__(self, other): + if isinstance(other, tzlocal): + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset) + elif isinstance(other, tzutc): + return (not self._hasdst and + self._tznames[0] in {'UTC', 'GMT'} and + self._std_offset == ZERO) + elif isinstance(other, tzoffset): + return (not self._hasdst and + self._tznames[0] == other._name and + self._std_offset == other._offset) + else: + return NotImplemented + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +class _ttinfo(object): + __slots__ = ["offset", "delta", "isdst", "abbr", + "isstd", "isgmt", "dstoffset"] + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def __repr__(self): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) + + def __eq__(self, other): + if not isinstance(other, _ttinfo): + return NotImplemented + + return (self.offset == other.offset and + self.delta == other.delta and + self.isdst == other.isdst and + self.abbr == other.abbr and + self.isstd == other.isstd and + self.isgmt == other.isgmt and + self.dstoffset == other.dstoffset) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __getstate__(self): + state = {} + for name in self.__slots__: + state[name] = getattr(self, name, None) + return state + + def __setstate__(self, state): + for name in self.__slots__: + if name in state: + setattr(self, name, state[name]) + + +class _tzfile(object): + """ + Lightweight class for holding the relevant transition and time zone + information read from binary tzfiles. + """ + attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list', + 'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first'] + + def __init__(self, **kwargs): + for attr in self.attrs: + setattr(self, attr, kwargs.get(attr, None)) + + +class tzfile(_tzinfo): + """ + This is a ``tzinfo`` subclass that allows one to use the ``tzfile(5)`` + format timezone files to extract current and historical zone information. + + :param fileobj: + This can be an opened file stream or a file name that the time zone + information can be read from. + + :param filename: + This is an optional parameter specifying the source of the time zone + information in the event that ``fileobj`` is a file object. 
If omitted + and ``fileobj`` is a file stream, this parameter will be set either to + ``fileobj``'s ``name`` attribute or to ``repr(fileobj)``. + + See `Sources for Time Zone and Daylight Saving Time Data + `_ for more information. + Time zone files can be compiled from the `IANA Time Zone database files + `_ with the `zic time zone compiler + `_ + + .. note:: + + Only construct a ``tzfile`` directly if you have a specific timezone + file on disk that you want to read into a Python ``tzinfo`` object. + If you want to get a ``tzfile`` representing a specific IANA zone, + (e.g. ``'America/New_York'``), you should call + :func:`dateutil.tz.gettz` with the zone identifier. + + + **Examples:** + + Using the US Eastern time zone as an example, we can see that a ``tzfile`` + provides time zone information for the standard Daylight Saving offsets: + + .. testsetup:: tzfile + + from dateutil.tz import gettz + from datetime import datetime + + .. doctest:: tzfile + + >>> NYC = gettz('America/New_York') + >>> NYC + tzfile('/usr/share/zoneinfo/America/New_York') + + >>> print(datetime(2016, 1, 3, tzinfo=NYC)) # EST + 2016-01-03 00:00:00-05:00 + + >>> print(datetime(2016, 7, 7, tzinfo=NYC)) # EDT + 2016-07-07 00:00:00-04:00 + + + The ``tzfile`` structure contains a fully history of the time zone, + so historical dates will also have the right offsets. For example, before + the adoption of the UTC standards, New York used local solar mean time: + + .. doctest:: tzfile + + >>> print(datetime(1901, 4, 12, tzinfo=NYC)) # LMT + 1901-04-12 00:00:00-04:56 + + And during World War II, New York was on "Eastern War Time", which was a + state of permanent daylight saving time: + + .. doctest:: tzfile + + >>> print(datetime(1944, 2, 7, tzinfo=NYC)) # EWT + 1944-02-07 00:00:00-04:00 + + """ + + def __init__(self, fileobj, filename=None): + super(tzfile, self).__init__() + + file_opened_here = False + if isinstance(fileobj, string_types): + self._filename = fileobj + fileobj = open(fileobj, 'rb') + file_opened_here = True + elif filename is not None: + self._filename = filename + elif hasattr(fileobj, "name"): + self._filename = fileobj.name + else: + self._filename = repr(fileobj) + + if fileobj is not None: + if not file_opened_here: + fileobj = _nullcontext(fileobj) + + with fileobj as file_stream: + tzobj = self._read_tzfile(file_stream) + + self._set_tzdata(tzobj) + + def _set_tzdata(self, tzobj): + """ Set the time zone data of this object from a _tzfile object """ + # Copy the relevant attributes over as private attributes + for attr in _tzfile.attrs: + setattr(self, '_' + attr, getattr(tzobj, attr)) + + def _read_tzfile(self, fileobj): + out = _tzfile() + + # From tzfile(5): + # + # The time zone information files used by tzset(3) + # begin with the magic characters "TZif" to identify + # them as time zone information files, followed by + # sixteen bytes reserved for future use, followed by + # six four-byte values of type long, written in a + # ``standard'' byte order (the high-order byte + # of the value is written first). + if fileobj.read(4).decode() != "TZif": + raise ValueError("magic not found") + + fileobj.read(16) + + ( + # The number of UTC/local indicators stored in the file. + ttisgmtcnt, + + # The number of standard/wall indicators stored in the file. + ttisstdcnt, + + # The number of leap seconds for which data is + # stored in the file. + leapcnt, + + # The number of "transition times" for which data + # is stored in the file. 
+ timecnt, + + # The number of "local time types" for which data + # is stored in the file (must not be zero). + typecnt, + + # The number of characters of "time zone + # abbreviation strings" stored in the file. + charcnt, + + ) = struct.unpack(">6l", fileobj.read(24)) + + # The above header is followed by tzh_timecnt four-byte + # values of type long, sorted in ascending order. + # These values are written in ``standard'' byte order. + # Each is used as a transition time (as returned by + # time(2)) at which the rules for computing local time + # change. + + if timecnt: + out.trans_list_utc = list(struct.unpack(">%dl" % timecnt, + fileobj.read(timecnt*4))) + else: + out.trans_list_utc = [] + + # Next come tzh_timecnt one-byte values of type unsigned + # char; each one tells which of the different types of + # ``local time'' types described in the file is associated + # with the same-indexed transition time. These values + # serve as indices into an array of ttinfo structures that + # appears next in the file. + + if timecnt: + out.trans_idx = struct.unpack(">%dB" % timecnt, + fileobj.read(timecnt)) + else: + out.trans_idx = [] + + # Each ttinfo structure is written as a four-byte value + # for tt_gmtoff of type long, in a standard byte + # order, followed by a one-byte value for tt_isdst + # and a one-byte value for tt_abbrind. In each + # structure, tt_gmtoff gives the number of + # seconds to be added to UTC, tt_isdst tells whether + # tm_isdst should be set by localtime(3), and + # tt_abbrind serves as an index into the array of + # time zone abbreviation characters that follow the + # ttinfo structure(s) in the file. + + ttinfo = [] + + for i in range(typecnt): + ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) + + abbr = fileobj.read(charcnt).decode() + + # Then there are tzh_leapcnt pairs of four-byte + # values, written in standard byte order; the + # first value of each pair gives the time (as + # returned by time(2)) at which a leap second + # occurs; the second gives the total number of + # leap seconds to be applied after the given time. + # The pairs of values are sorted in ascending order + # by time. + + # Not used, for now (but seek for correct file position) + if leapcnt: + fileobj.seek(leapcnt * 8, os.SEEK_CUR) + + # Then there are tzh_ttisstdcnt standard/wall + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as standard + # time or wall clock time, and are used when + # a time zone file is used in handling POSIX-style + # time zone environment variables. + + if ttisstdcnt: + isstd = struct.unpack(">%db" % ttisstdcnt, + fileobj.read(ttisstdcnt)) + + # Finally, there are tzh_ttisgmtcnt UTC/local + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as UTC or + # local time, and are used when a time zone file + # is used in handling POSIX-style time zone envi- + # ronment variables. 
+ + if ttisgmtcnt: + isgmt = struct.unpack(">%db" % ttisgmtcnt, + fileobj.read(ttisgmtcnt)) + + # Build ttinfo list + out.ttinfo_list = [] + for i in range(typecnt): + gmtoff, isdst, abbrind = ttinfo[i] + gmtoff = _get_supported_offset(gmtoff) + tti = _ttinfo() + tti.offset = gmtoff + tti.dstoffset = datetime.timedelta(0) + tti.delta = datetime.timedelta(seconds=gmtoff) + tti.isdst = isdst + tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] + tti.isstd = (ttisstdcnt > i and isstd[i] != 0) + tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) + out.ttinfo_list.append(tti) + + # Replace ttinfo indexes for ttinfo objects. + out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx] + + # Set standard, dst, and before ttinfos. before will be + # used when a given time is before any transitions, + # and will be set to the first non-dst ttinfo, or to + # the first dst, if all of them are dst. + out.ttinfo_std = None + out.ttinfo_dst = None + out.ttinfo_before = None + if out.ttinfo_list: + if not out.trans_list_utc: + out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0] + else: + for i in range(timecnt-1, -1, -1): + tti = out.trans_idx[i] + if not out.ttinfo_std and not tti.isdst: + out.ttinfo_std = tti + elif not out.ttinfo_dst and tti.isdst: + out.ttinfo_dst = tti + + if out.ttinfo_std and out.ttinfo_dst: + break + else: + if out.ttinfo_dst and not out.ttinfo_std: + out.ttinfo_std = out.ttinfo_dst + + for tti in out.ttinfo_list: + if not tti.isdst: + out.ttinfo_before = tti + break + else: + out.ttinfo_before = out.ttinfo_list[0] + + # Now fix transition times to become relative to wall time. + # + # I'm not sure about this. In my tests, the tz source file + # is setup to wall time, and in the binary file isstd and + # isgmt are off, so it should be in wall time. OTOH, it's + # always in gmt time. Let me know if you have comments + # about this. + lastdst = None + lastoffset = None + lastdstoffset = None + lastbaseoffset = None + out.trans_list = [] + + for i, tti in enumerate(out.trans_idx): + offset = tti.offset + dstoffset = 0 + + if lastdst is not None: + if tti.isdst: + if not lastdst: + dstoffset = offset - lastoffset + + if not dstoffset and lastdstoffset: + dstoffset = lastdstoffset + + tti.dstoffset = datetime.timedelta(seconds=dstoffset) + lastdstoffset = dstoffset + + # If a time zone changes its base offset during a DST transition, + # then you need to adjust by the previous base offset to get the + # transition time in local time. Otherwise you use the current + # base offset. Ideally, I would have some mathematical proof of + # why this is true, but I haven't really thought about it enough. + baseoffset = offset - dstoffset + adjustment = baseoffset + if (lastbaseoffset is not None and baseoffset != lastbaseoffset + and tti.isdst != lastdst): + # The base DST has changed + adjustment = lastbaseoffset + + lastdst = tti.isdst + lastoffset = offset + lastbaseoffset = baseoffset + + out.trans_list.append(out.trans_list_utc[i] + adjustment) + + out.trans_idx = tuple(out.trans_idx) + out.trans_list = tuple(out.trans_list) + out.trans_list_utc = tuple(out.trans_list_utc) + + return out + + def _find_last_transition(self, dt, in_utc=False): + # If there's no list, there are no transitions to find + if not self._trans_list: + return None + + timestamp = _datetime_to_timestamp(dt) + + # Find where the timestamp fits in the transition list - if the + # timestamp is a transition time, it's part of the "after" period. 
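+        #
+        # A small illustration with made-up timestamps: if trans_list is
+        # [100, 200, 300] and timestamp is 200, bisect_right() returns 2,
+        # so idx - 1 == 1 selects the period that starts at 200 itself.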
+ trans_list = self._trans_list_utc if in_utc else self._trans_list + idx = bisect.bisect_right(trans_list, timestamp) + + # We want to know when the previous transition was, so subtract off 1 + return idx - 1 + + def _get_ttinfo(self, idx): + # For no list or after the last transition, default to _ttinfo_std + if idx is None or (idx + 1) >= len(self._trans_list): + return self._ttinfo_std + + # If there is a list and the time is before it, return _ttinfo_before + if idx < 0: + return self._ttinfo_before + + return self._trans_idx[idx] + + def _find_ttinfo(self, dt): + idx = self._resolve_ambiguous_time(dt) + + return self._get_ttinfo(idx) + + def fromutc(self, dt): + """ + The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. + + :param dt: + A :py:class:`datetime.datetime` object. + + :raises TypeError: + Raised if ``dt`` is not a :py:class:`datetime.datetime` object. + + :raises ValueError: + Raised if this is called with a ``dt`` which does not have this + ``tzinfo`` attached. + + :return: + Returns a :py:class:`datetime.datetime` object representing the + wall time in ``self``'s time zone. + """ + # These isinstance checks are in datetime.tzinfo, so we'll preserve + # them, even if we don't care about duck typing. + if not isinstance(dt, datetime.datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # First treat UTC as wall time and get the transition we're in. + idx = self._find_last_transition(dt, in_utc=True) + tti = self._get_ttinfo(idx) + + dt_out = dt + datetime.timedelta(seconds=tti.offset) + + fold = self.is_ambiguous(dt_out, idx=idx) + + return enfold(dt_out, fold=int(fold)) + + def is_ambiguous(self, dt, idx=None): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + if idx is None: + idx = self._find_last_transition(dt) + + # Calculate the difference in offsets from current to previous + timestamp = _datetime_to_timestamp(dt) + tti = self._get_ttinfo(idx) + + if idx is None or idx <= 0: + return False + + od = self._get_ttinfo(idx - 1).offset - tti.offset + tt = self._trans_list[idx] # Transition time + + return timestamp < tt + od + + def _resolve_ambiguous_time(self, dt): + idx = self._find_last_transition(dt) + + # If we have no transitions, return the index + _fold = self._fold(dt) + if idx is None or idx == 0: + return idx + + # If it's ambiguous and we're in a fold, shift to a different index. + idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) + + return idx - idx_offset + + def utcoffset(self, dt): + if dt is None: + return None + + if not self._ttinfo_std: + return ZERO + + return self._find_ttinfo(dt).delta + + def dst(self, dt): + if dt is None: + return None + + if not self._ttinfo_dst: + return ZERO + + tti = self._find_ttinfo(dt) + + if not tti.isdst: + return ZERO + + # The documentation says that utcoffset()-dst() must + # be constant for every dt. 
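+        #
+        # Illustration with made-up offsets: for a zone at UTC-5 in standard
+        # time and UTC-4 during DST, tti.dstoffset is one hour, so
+        # utcoffset() - dst() stays at a constant -5 hours year-round.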
+ return tti.dstoffset + + @tzname_in_python2 + def tzname(self, dt): + if not self._ttinfo_std or dt is None: + return None + return self._find_ttinfo(dt).abbr + + def __eq__(self, other): + if not isinstance(other, tzfile): + return NotImplemented + return (self._trans_list == other._trans_list and + self._trans_idx == other._trans_idx and + self._ttinfo_list == other._ttinfo_list) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._filename)) + + def __reduce__(self): + return self.__reduce_ex__(None) + + def __reduce_ex__(self, protocol): + return (self.__class__, (None, self._filename), self.__dict__) + + +class tzrange(tzrangebase): + """ + The ``tzrange`` object is a time zone specified by a set of offsets and + abbreviations, equivalent to the way the ``TZ`` variable can be specified + in POSIX-like systems, but using Python delta objects to specify DST + start, end and offsets. + + :param stdabbr: + The abbreviation for standard time (e.g. ``'EST'``). + + :param stdoffset: + An integer or :class:`datetime.timedelta` object or equivalent + specifying the base offset from UTC. + + If unspecified, +00:00 is used. + + :param dstabbr: + The abbreviation for DST / "Summer" time (e.g. ``'EDT'``). + + If specified, with no other DST information, DST is assumed to occur + and the default behavior or ``dstoffset``, ``start`` and ``end`` is + used. If unspecified and no other DST information is specified, it + is assumed that this zone has no DST. + + If this is unspecified and other DST information is *is* specified, + DST occurs in the zone but the time zone abbreviation is left + unchanged. + + :param dstoffset: + A an integer or :class:`datetime.timedelta` object or equivalent + specifying the UTC offset during DST. If unspecified and any other DST + information is specified, it is assumed to be the STD offset +1 hour. + + :param start: + A :class:`relativedelta.relativedelta` object or equivalent specifying + the time and time of year that daylight savings time starts. To + specify, for example, that DST starts at 2AM on the 2nd Sunday in + March, pass: + + ``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))`` + + If unspecified and any other DST information is specified, the default + value is 2 AM on the first Sunday in April. + + :param end: + A :class:`relativedelta.relativedelta` object or equivalent + representing the time and time of year that daylight savings time + ends, with the same specification method as in ``start``. One note is + that this should point to the first time in the *standard* zone, so if + a transition occurs at 2AM in the DST zone and the clocks are set back + 1 hour to 1AM, set the ``hours`` parameter to +1. + + + **Examples:** + + .. testsetup:: tzrange + + from dateutil.tz import tzrange, tzstr + + .. doctest:: tzrange + + >>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT") + True + + >>> from dateutil.relativedelta import * + >>> range1 = tzrange("EST", -18000, "EDT") + >>> range2 = tzrange("EST", -18000, "EDT", -14400, + ... relativedelta(hours=+2, month=4, day=1, + ... weekday=SU(+1)), + ... relativedelta(hours=+1, month=10, day=31, + ... 
weekday=SU(-1))) + >>> tzstr('EST5EDT') == range1 == range2 + True + + """ + def __init__(self, stdabbr, stdoffset=None, + dstabbr=None, dstoffset=None, + start=None, end=None): + + global relativedelta + from dateutil import relativedelta + + self._std_abbr = stdabbr + self._dst_abbr = dstabbr + + try: + stdoffset = stdoffset.total_seconds() + except (TypeError, AttributeError): + pass + + try: + dstoffset = dstoffset.total_seconds() + except (TypeError, AttributeError): + pass + + if stdoffset is not None: + self._std_offset = datetime.timedelta(seconds=stdoffset) + else: + self._std_offset = ZERO + + if dstoffset is not None: + self._dst_offset = datetime.timedelta(seconds=dstoffset) + elif dstabbr and stdoffset is not None: + self._dst_offset = self._std_offset + datetime.timedelta(hours=+1) + else: + self._dst_offset = ZERO + + if dstabbr and start is None: + self._start_delta = relativedelta.relativedelta( + hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) + else: + self._start_delta = start + + if dstabbr and end is None: + self._end_delta = relativedelta.relativedelta( + hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) + else: + self._end_delta = end + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = bool(self._start_delta) + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. + + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. + """ + if not self.hasdst: + return None + + base_year = datetime.datetime(year, 1, 1) + + start = base_year + self._start_delta + end = base_year + self._end_delta + + return (start, end) + + def __eq__(self, other): + if not isinstance(other, tzrange): + return NotImplemented + + return (self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr and + self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._start_delta == other._start_delta and + self._end_delta == other._end_delta) + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +@six.add_metaclass(_TzStrFactory) +class tzstr(tzrange): + """ + ``tzstr`` objects are time zone objects specified by a time-zone string as + it would be passed to a ``TZ`` variable on POSIX-style systems (see + the `GNU C Library: TZ Variable`_ for more details). + + There is one notable exception, which is that POSIX-style time zones use an + inverted offset format, so normally ``GMT+3`` would be parsed as an offset + 3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an + offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX + behavior, pass a ``True`` value to ``posix_offset``. + + The :class:`tzrange` object provides the same functionality, but is + specified using :class:`relativedelta.relativedelta` objects. rather than + strings. + + :param s: + A time zone string in ``TZ`` variable format. This can be a + :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x: + :class:`unicode`) or a stream emitting unicode characters + (e.g. :class:`StringIO`). + + :param posix_offset: + Optional. 
If set to ``True``, interpret strings such as ``GMT+3`` or + ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the + POSIX standard. + + .. caution:: + + Prior to version 2.7.0, this function also supported time zones + in the format: + + * ``EST5EDT,4,0,6,7200,10,0,26,7200,3600`` + * ``EST5EDT,4,1,0,7200,10,-1,0,7200,3600`` + + This format is non-standard and has been deprecated; this function + will raise a :class:`DeprecatedTZFormatWarning` until + support is removed in a future version. + + .. _`GNU C Library: TZ Variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + """ + def __init__(self, s, posix_offset=False): + global parser + from dateutil.parser import _parser as parser + + self._s = s + + res = parser._parsetz(s) + if res is None or res.any_unused_tokens: + raise ValueError("unknown string format") + + # Here we break the compatibility with the TZ variable handling. + # GMT-3 actually *means* the timezone -3. + if res.stdabbr in ("GMT", "UTC") and not posix_offset: + res.stdoffset *= -1 + + # We must initialize it first, since _delta() needs + # _std_offset and _dst_offset set. Use False in start/end + # to avoid building it two times. + tzrange.__init__(self, res.stdabbr, res.stdoffset, + res.dstabbr, res.dstoffset, + start=False, end=False) + + if not res.dstabbr: + self._start_delta = None + self._end_delta = None + else: + self._start_delta = self._delta(res.start) + if self._start_delta: + self._end_delta = self._delta(res.end, isend=1) + + self.hasdst = bool(self._start_delta) + + def _delta(self, x, isend=0): + from dateutil import relativedelta + kwargs = {} + if x.month is not None: + kwargs["month"] = x.month + if x.weekday is not None: + kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week) + if x.week > 0: + kwargs["day"] = 1 + else: + kwargs["day"] = 31 + elif x.day: + kwargs["day"] = x.day + elif x.yday is not None: + kwargs["yearday"] = x.yday + elif x.jyday is not None: + kwargs["nlyearday"] = x.jyday + if not kwargs: + # Default is to start on first sunday of april, and end + # on last sunday of october. + if not isend: + kwargs["month"] = 4 + kwargs["day"] = 1 + kwargs["weekday"] = relativedelta.SU(+1) + else: + kwargs["month"] = 10 + kwargs["day"] = 31 + kwargs["weekday"] = relativedelta.SU(-1) + if x.time is not None: + kwargs["seconds"] = x.time + else: + # Default is 2AM. + kwargs["seconds"] = 7200 + if isend: + # Convert to standard time, to follow the documented way + # of working with the extra hour. See the documentation + # of the tzinfo class. 
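+            # For example, with EST5EDT the DST base offset is one hour
+            # (3600 seconds), so the documented 02:00 DST-side end
+            # transition (7200 seconds) becomes 7200 - 3600 = 3600 seconds,
+            # i.e. 01:00 expressed on the standard-time side.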
+ delta = self._dst_offset - self._std_offset + kwargs["seconds"] -= delta.seconds + delta.days * 86400 + return relativedelta.relativedelta(**kwargs) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +class _tzicalvtzcomp(object): + def __init__(self, tzoffsetfrom, tzoffsetto, isdst, + tzname=None, rrule=None): + self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom) + self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto) + self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom + self.isdst = isdst + self.tzname = tzname + self.rrule = rrule + + +class _tzicalvtz(_tzinfo): + def __init__(self, tzid, comps=[]): + super(_tzicalvtz, self).__init__() + + self._tzid = tzid + self._comps = comps + self._cachedate = [] + self._cachecomp = [] + self._cache_lock = _thread.allocate_lock() + + def _find_comp(self, dt): + if len(self._comps) == 1: + return self._comps[0] + + dt = dt.replace(tzinfo=None) + + try: + with self._cache_lock: + return self._cachecomp[self._cachedate.index( + (dt, self._fold(dt)))] + except ValueError: + pass + + lastcompdt = None + lastcomp = None + + for comp in self._comps: + compdt = self._find_compdt(comp, dt) + + if compdt and (not lastcompdt or lastcompdt < compdt): + lastcompdt = compdt + lastcomp = comp + + if not lastcomp: + # RFC says nothing about what to do when a given + # time is before the first onset date. We'll look for the + # first standard component, or the first component, if + # none is found. + for comp in self._comps: + if not comp.isdst: + lastcomp = comp + break + else: + lastcomp = comp[0] + + with self._cache_lock: + self._cachedate.insert(0, (dt, self._fold(dt))) + self._cachecomp.insert(0, lastcomp) + + if len(self._cachedate) > 10: + self._cachedate.pop() + self._cachecomp.pop() + + return lastcomp + + def _find_compdt(self, comp, dt): + if comp.tzoffsetdiff < ZERO and self._fold(dt): + dt -= comp.tzoffsetdiff + + compdt = comp.rrule.before(dt, inc=True) + + return compdt + + def utcoffset(self, dt): + if dt is None: + return None + + return self._find_comp(dt).tzoffsetto + + def dst(self, dt): + comp = self._find_comp(dt) + if comp.isdst: + return comp.tzoffsetdiff + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._find_comp(dt).tzname + + def __repr__(self): + return "" % repr(self._tzid) + + __reduce__ = object.__reduce__ + + +class tzical(object): + """ + This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure + as set out in `RFC 5545`_ Section 4.6.5 into one or more `tzinfo` objects. + + :param `fileobj`: + A file or stream in iCalendar format, which should be UTF-8 encoded + with CRLF endings. + + .. _`RFC 5545`: https://tools.ietf.org/html/rfc5545 + """ + def __init__(self, fileobj): + global rrule + from dateutil import rrule + + if isinstance(fileobj, string_types): + self._s = fileobj + # ical should be encoded in UTF-8 with CRLF + fileobj = open(fileobj, 'r') + else: + self._s = getattr(fileobj, 'name', repr(fileobj)) + fileobj = _nullcontext(fileobj) + + self._vtz = {} + + with fileobj as fobj: + self._parse_rfc(fobj.read()) + + def keys(self): + """ + Retrieves the available time zones as a list. + """ + return list(self._vtz.keys()) + + def get(self, tzid=None): + """ + Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``. + + :param tzid: + If there is exactly one time zone available, omitting ``tzid`` + or passing :py:const:`None` value returns it. 
Otherwise a valid + key (which can be retrieved from :func:`keys`) is required. + + :raises ValueError: + Raised if ``tzid`` is not specified but there are either more + or fewer than 1 zone defined. + + :returns: + Returns either a :py:class:`datetime.tzinfo` object representing + the relevant time zone or :py:const:`None` if the ``tzid`` was + not found. + """ + if tzid is None: + if len(self._vtz) == 0: + raise ValueError("no timezones defined") + elif len(self._vtz) > 1: + raise ValueError("more than one timezone available") + tzid = next(iter(self._vtz)) + + return self._vtz.get(tzid) + + def _parse_offset(self, s): + s = s.strip() + if not s: + raise ValueError("empty offset") + if s[0] in ('+', '-'): + signal = (-1, +1)[s[0] == '+'] + s = s[1:] + else: + signal = +1 + if len(s) == 4: + return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal + elif len(s) == 6: + return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal + else: + raise ValueError("invalid offset: " + s) + + def _parse_rfc(self, s): + lines = s.splitlines() + if not lines: + raise ValueError("empty string") + + # Unfold + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + + tzid = None + comps = [] + invtz = False + comptype = None + for line in lines: + if not line: + continue + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0].upper() + parms = parms[1:] + if invtz: + if name == "BEGIN": + if value in ("STANDARD", "DAYLIGHT"): + # Process component + pass + else: + raise ValueError("unknown component: "+value) + comptype = value + founddtstart = False + tzoffsetfrom = None + tzoffsetto = None + rrulelines = [] + tzname = None + elif name == "END": + if value == "VTIMEZONE": + if comptype: + raise ValueError("component not closed: "+comptype) + if not tzid: + raise ValueError("mandatory TZID not found") + if not comps: + raise ValueError( + "at least one component is needed") + # Process vtimezone + self._vtz[tzid] = _tzicalvtz(tzid, comps) + invtz = False + elif value == comptype: + if not founddtstart: + raise ValueError("mandatory DTSTART not found") + if tzoffsetfrom is None: + raise ValueError( + "mandatory TZOFFSETFROM not found") + if tzoffsetto is None: + raise ValueError( + "mandatory TZOFFSETFROM not found") + # Process component + rr = None + if rrulelines: + rr = rrule.rrulestr("\n".join(rrulelines), + compatible=True, + ignoretz=True, + cache=True) + comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto, + (comptype == "DAYLIGHT"), + tzname, rr) + comps.append(comp) + comptype = None + else: + raise ValueError("invalid component end: "+value) + elif comptype: + if name == "DTSTART": + # DTSTART in VTIMEZONE takes a subset of valid RRULE + # values under RFC 5545. 
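+                        # For reference, a DAYLIGHT component along the lines
+                        # of the RFC 5545 examples (shown purely as an
+                        # illustration) looks like:
+                        #
+                        #     DTSTART:20070311T020000
+                        #     RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=2SU
+                        #     TZOFFSETFROM:-0500
+                        #     TZOFFSETTO:-0400
+                        #     TZNAME:EDT
+                        #
+                        # Only a bare DTSTART, or one carrying VALUE=DATE-TIME,
+                        # is accepted; any other DTSTART parameter is rejected
+                        # below.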
+ for parm in parms: + if parm != 'VALUE=DATE-TIME': + msg = ('Unsupported DTSTART param in ' + + 'VTIMEZONE: ' + parm) + raise ValueError(msg) + rrulelines.append(line) + founddtstart = True + elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"): + rrulelines.append(line) + elif name == "TZOFFSETFROM": + if parms: + raise ValueError( + "unsupported %s parm: %s " % (name, parms[0])) + tzoffsetfrom = self._parse_offset(value) + elif name == "TZOFFSETTO": + if parms: + raise ValueError( + "unsupported TZOFFSETTO parm: "+parms[0]) + tzoffsetto = self._parse_offset(value) + elif name == "TZNAME": + if parms: + raise ValueError( + "unsupported TZNAME parm: "+parms[0]) + tzname = value + elif name == "COMMENT": + pass + else: + raise ValueError("unsupported property: "+name) + else: + if name == "TZID": + if parms: + raise ValueError( + "unsupported TZID parm: "+parms[0]) + tzid = value + elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): + pass + else: + raise ValueError("unsupported property: "+name) + elif name == "BEGIN" and value == "VTIMEZONE": + tzid = None + comps = [] + invtz = True + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +if sys.platform != "win32": + TZFILES = ["/etc/localtime", "localtime"] + TZPATHS = ["/usr/share/zoneinfo", + "/usr/lib/zoneinfo", + "/usr/share/lib/zoneinfo", + "/etc/zoneinfo"] +else: + TZFILES = [] + TZPATHS = [] + + +def __get_gettz(): + tzlocal_classes = (tzlocal,) + if tzwinlocal is not None: + tzlocal_classes += (tzwinlocal,) + + class GettzFunc(object): + """ + Retrieve a time zone object from a string representation + + This function is intended to retrieve the :py:class:`tzinfo` subclass + that best represents the time zone that would be used if a POSIX + `TZ variable`_ were set to the same value. + + If no argument or an empty string is passed to ``gettz``, local time + is returned: + + .. code-block:: python3 + + >>> gettz() + tzfile('/etc/localtime') + + This function is also the preferred way to map IANA tz database keys + to :class:`tzfile` objects: + + .. code-block:: python3 + + >>> gettz('Pacific/Kiritimati') + tzfile('/usr/share/zoneinfo/Pacific/Kiritimati') + + On Windows, the standard is extended to include the Windows-specific + zone names provided by the operating system: + + .. code-block:: python3 + + >>> gettz('Egypt Standard Time') + tzwin('Egypt Standard Time') + + Passing a GNU ``TZ`` style string time zone specification returns a + :class:`tzstr` object: + + .. code-block:: python3 + + >>> gettz('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + + :param name: + A time zone name (IANA, or, on Windows, Windows keys), location of + a ``tzfile(5)`` zoneinfo file or ``TZ`` variable style time zone + specifier. An empty string, no argument or ``None`` is interpreted + as local time. + + :return: + Returns an instance of one of ``dateutil``'s :py:class:`tzinfo` + subclasses. + + .. versionchanged:: 2.7.0 + + After version 2.7.0, any two calls to ``gettz`` using the same + input strings will return the same object: + + .. code-block:: python3 + + >>> tz.gettz('America/Chicago') is tz.gettz('America/Chicago') + True + + In addition to improving performance, this ensures that + `"same zone" semantics`_ are used for datetimes in the same zone. + + + .. _`TZ variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + + .. 
_`"same zone" semantics`: + https://blog.ganssle.io/articles/2018/02/aware-datetime-arithmetic.html + """ + def __init__(self): + + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache_size = 8 + self.__strong_cache = OrderedDict() + self._cache_lock = _thread.allocate_lock() + + def __call__(self, name=None): + with self._cache_lock: + rv = self.__instances.get(name, None) + + if rv is None: + rv = self.nocache(name=name) + if not (name is None + or isinstance(rv, tzlocal_classes) + or rv is None): + # tzlocal is slightly more complicated than the other + # time zone providers because it depends on environment + # at construction time, so don't cache that. + # + # We also cannot store weak references to None, so we + # will also not store that. + self.__instances[name] = rv + else: + # No need for strong caching, return immediately + return rv + + self.__strong_cache[name] = self.__strong_cache.pop(name, rv) + + if len(self.__strong_cache) > self.__strong_cache_size: + self.__strong_cache.popitem(last=False) + + return rv + + def set_cache_size(self, size): + with self._cache_lock: + self.__strong_cache_size = size + while len(self.__strong_cache) > size: + self.__strong_cache.popitem(last=False) + + def cache_clear(self): + with self._cache_lock: + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache.clear() + + @staticmethod + def nocache(name=None): + """A non-cached version of gettz""" + tz = None + if not name: + try: + name = os.environ["TZ"] + except KeyError: + pass + if name is None or name == ":": + for filepath in TZFILES: + if not os.path.isabs(filepath): + filename = filepath + for path in TZPATHS: + filepath = os.path.join(path, filename) + if os.path.isfile(filepath): + break + else: + continue + if os.path.isfile(filepath): + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = tzlocal() + else: + try: + if name.startswith(":"): + name = name[1:] + except TypeError as e: + if isinstance(name, bytes): + new_msg = "gettz argument should be str, not bytes" + six.raise_from(TypeError(new_msg), e) + else: + raise + if os.path.isabs(name): + if os.path.isfile(name): + tz = tzfile(name) + else: + tz = None + else: + for path in TZPATHS: + filepath = os.path.join(path, name) + if not os.path.isfile(filepath): + filepath = filepath.replace(' ', '_') + if not os.path.isfile(filepath): + continue + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = None + if tzwin is not None: + try: + tz = tzwin(name) + except (WindowsError, UnicodeEncodeError): + # UnicodeEncodeError is for Python 2.7 compat + tz = None + + if not tz: + from dateutil.zoneinfo import get_zonefile_instance + tz = get_zonefile_instance().get(name) + + if not tz: + for c in name: + # name is not a tzstr unless it has at least + # one offset. For short values of "name", an + # explicit for loop seems to be the fastest way + # To determine if a string contains a digit + if c in "0123456789": + try: + tz = tzstr(name) + except ValueError: + pass + break + else: + if name in ("GMT", "UTC"): + tz = UTC + elif name in time.tzname: + tz = tzlocal() + return tz + + return GettzFunc() + + +gettz = __get_gettz() +del __get_gettz + + +def datetime_exists(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + would fall in a gap. + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) 
+ + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" exists in + ``tz``. + + .. versionadded:: 2.7.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + tz = dt.tzinfo + + dt = dt.replace(tzinfo=None) + + # This is essentially a test of whether or not the datetime can survive + # a round trip to UTC. + dt_rt = dt.replace(tzinfo=tz).astimezone(UTC).astimezone(tz) + dt_rt = dt_rt.replace(tzinfo=None) + + return dt == dt_rt + + +def datetime_ambiguous(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + is ambiguous (i.e if there are two times differentiated only by their DST + status). + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) + + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" is ambiguous in + ``tz``. + + .. versionadded:: 2.6.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + + tz = dt.tzinfo + + # If a time zone defines its own "is_ambiguous" function, we'll use that. + is_ambiguous_fn = getattr(tz, 'is_ambiguous', None) + if is_ambiguous_fn is not None: + try: + return tz.is_ambiguous(dt) + except Exception: + pass + + # If it doesn't come out and tell us it's ambiguous, we'll just check if + # the fold attribute has any effect on this particular date and time. + dt = dt.replace(tzinfo=tz) + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dst = wall_0.dst() == wall_1.dst() + + return not (same_offset and same_dst) + + +def resolve_imaginary(dt): + """ + Given a datetime that may be imaginary, return an existing datetime. + + This function assumes that an imaginary datetime represents what the + wall time would be in a zone had the offset transition not occurred, so + it will always fall forward by the transition's change in offset. + + .. doctest:: + + >>> from dateutil import tz + >>> from datetime import datetime + >>> NYC = tz.gettz('America/New_York') + >>> print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC))) + 2017-03-12 03:30:00-04:00 + + >>> KIR = tz.gettz('Pacific/Kiritimati') + >>> print(tz.resolve_imaginary(datetime(1995, 1, 1, 12, 30, tzinfo=KIR))) + 1995-01-02 12:30:00+14:00 + + As a note, :func:`datetime.astimezone` is guaranteed to produce a valid, + existing datetime, so a round-trip to and from UTC is sufficient to get + an extant datetime, however, this generally "falls back" to an earlier time + rather than falling forward to the STD side (though no guarantees are made + about this behavior). + + :param dt: + A :class:`datetime.datetime` which may or may not exist. + + :return: + Returns an existing :class:`datetime.datetime`. If ``dt`` was not + imaginary, the datetime returned is guaranteed to be the same object + passed to the function. + + .. 
versionadded:: 2.7.0 + """ + if dt.tzinfo is not None and not datetime_exists(dt): + + curr_offset = (dt + datetime.timedelta(hours=24)).utcoffset() + old_offset = (dt - datetime.timedelta(hours=24)).utcoffset() + + dt += curr_offset - old_offset + + return dt + + +def _datetime_to_timestamp(dt): + """ + Convert a :class:`datetime.datetime` object to an epoch timestamp in + seconds since January 1, 1970, ignoring the time zone. + """ + return (dt.replace(tzinfo=None) - EPOCH).total_seconds() + + +if sys.version_info >= (3, 6): + def _get_supported_offset(second_offset): + return second_offset +else: + def _get_supported_offset(second_offset): + # For python pre-3.6, round to full-minutes if that's not the case. + # Python's datetime doesn't accept sub-minute timezones. Check + # http://python.org/sf/1447945 or https://bugs.python.org/issue5288 + # for some information. + old_offset = second_offset + calculated_offset = 60 * ((second_offset + 30) // 60) + return calculated_offset + + +try: + # Python 3.7 feature + from contextlib import nullcontext as _nullcontext +except ImportError: + class _nullcontext(object): + """ + Class for wrapping contexts so that they are passed through in a + with statement. + """ + def __init__(self, context): + self.context = context + + def __enter__(self): + return self.context + + def __exit__(*args, **kwargs): + pass + +# vim:ts=4:sw=4:et diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tz/win.py b/minor_project/lib/python3.6/site-packages/dateutil/tz/win.py new file mode 100644 index 0000000..cde07ba --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/tz/win.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +""" +This module provides an interface to the native time zone data on Windows, +including :py:class:`datetime.tzinfo` implementations. + +Attempting to import this module on a non-Windows platform will raise an +:py:obj:`ImportError`. +""" +# This code was originally contributed by Jeffrey Harris. +import datetime +import struct + +from six.moves import winreg +from six import text_type + +try: + import ctypes + from ctypes import wintypes +except ValueError: + # ValueError is raised on non-Windows systems for some horrible reason. + raise ImportError("Running tzwin on non-Windows system") + +from ._common import tzrangebase + +__all__ = ["tzwin", "tzwinlocal", "tzres"] + +ONEWEEK = datetime.timedelta(7) + +TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" +TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" +TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + + +def _settzkeyname(): + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + winreg.OpenKey(handle, TZKEYNAMENT).Close() + TZKEYNAME = TZKEYNAMENT + except WindowsError: + TZKEYNAME = TZKEYNAME9X + handle.Close() + return TZKEYNAME + + +TZKEYNAME = _settzkeyname() + + +class tzres(object): + """ + Class for accessing ``tzres.dll``, which contains timezone name related + resources. + + .. 
versionadded:: 2.5.0 + """ + p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char + + def __init__(self, tzres_loc='tzres.dll'): + # Load the user32 DLL so we can load strings from tzres + user32 = ctypes.WinDLL('user32') + + # Specify the LoadStringW function + user32.LoadStringW.argtypes = (wintypes.HINSTANCE, + wintypes.UINT, + wintypes.LPWSTR, + ctypes.c_int) + + self.LoadStringW = user32.LoadStringW + self._tzres = ctypes.WinDLL(tzres_loc) + self.tzres_loc = tzres_loc + + def load_name(self, offset): + """ + Load a timezone name from a DLL offset (integer). + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.load_name(112)) + 'Eastern Standard Time' + + :param offset: + A positive integer value referring to a string from the tzres dll. + + .. note:: + + Offsets found in the registry are generally of the form + ``@tzres.dll,-114``. The offset in this case is 114, not -114. + + """ + resource = self.p_wchar() + lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR) + nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0) + return resource[:nchar] + + def name_from_string(self, tzname_str): + """ + Parse strings as returned from the Windows registry into the time zone + name as defined in the registry. + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.name_from_string('@tzres.dll,-251')) + 'Dateline Daylight Time' + >>> print(tzr.name_from_string('Eastern Standard Time')) + 'Eastern Standard Time' + + :param tzname_str: + A timezone name string as returned from a Windows registry key. + + :return: + Returns the localized timezone string from tzres.dll if the string + is of the form `@tzres.dll,-offset`, else returns the input string. + """ + if not tzname_str.startswith('@'): + return tzname_str + + name_splt = tzname_str.split(',-') + try: + offset = int(name_splt[1]) + except: + raise ValueError("Malformed timezone string.") + + return self.load_name(offset) + + +class tzwinbase(tzrangebase): + """tzinfo class based on win32's timezones available in the registry.""" + def __init__(self): + raise NotImplementedError('tzwinbase is an abstract base class') + + def __eq__(self, other): + # Compare on all relevant dimensions, including name. + if not isinstance(other, tzwinbase): + return NotImplemented + + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._stddayofweek == other._stddayofweek and + self._dstdayofweek == other._dstdayofweek and + self._stdweeknumber == other._stdweeknumber and + self._dstweeknumber == other._dstweeknumber and + self._stdhour == other._stdhour and + self._dsthour == other._dsthour and + self._stdminute == other._stdminute and + self._dstminute == other._dstminute and + self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr) + + @staticmethod + def list(): + """Return a list of all time zones known to the system.""" + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZKEYNAME) as tzkey: + result = [winreg.EnumKey(tzkey, i) + for i in range(winreg.QueryInfoKey(tzkey)[0])] + return result + + def display(self): + """ + Return the display name of the time zone. + """ + return self._display + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. 
+ + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. + """ + + if not self.hasdst: + return None + + dston = picknthweekday(year, self._dstmonth, self._dstdayofweek, + self._dsthour, self._dstminute, + self._dstweeknumber) + + dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek, + self._stdhour, self._stdminute, + self._stdweeknumber) + + # Ambiguous dates default to the STD side + dstoff -= self._dst_base_offset + + return dston, dstoff + + def _get_hasdst(self): + return self._dstmonth != 0 + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +class tzwin(tzwinbase): + """ + Time zone object created from the zone info in the Windows registry + + These are similar to :py:class:`dateutil.tz.tzrange` objects in that + the time zone data is provided in the format of a single offset rule + for either 0 or 2 time zone transitions per year. + + :param: name + The name of a Windows time zone key, e.g. "Eastern Standard Time". + The full list of keys can be retrieved with :func:`tzwin.list`. + """ + + def __init__(self, name): + self._name = name + + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + keydict = valuestodict(tzkey) + + self._std_abbr = keydict["Std"] + self._dst_abbr = keydict["Dlt"] + + self._display = keydict["Display"] + + # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm + tup = struct.unpack("=3l16h", keydict["TZI"]) + stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 + dstoffset = stdoffset-tup[2] # + DaylightBias * -1 + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs + # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx + (self._stdmonth, + self._stddayofweek, # Sunday = 0 + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[4:9] + + (self._dstmonth, + self._dstdayofweek, # Sunday = 0 + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[12:17] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwin(%s)" % repr(self._name) + + def __reduce__(self): + return (self.__class__, (self._name,)) + + +class tzwinlocal(tzwinbase): + """ + Class representing the local time zone information in the Windows registry + + While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time` + module) to retrieve time zone information, ``tzwinlocal`` retrieves the + rules directly from the Windows registry and creates an object like + :class:`dateutil.tz.tzwin`. + + Because Windows does not have an equivalent of :func:`time.tzset`, on + Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the + time zone settings *at the time that the process was started*, meaning + changes to the machine's time zone settings during the run of a program + on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`. + Because ``tzwinlocal`` reads the registry directly, it is unaffected by + this issue. 
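+
+    A rough usage sketch (the zone name shown by ``str()`` depends entirely
+    on the machine's current Windows time zone setting; ``'Eastern Standard
+    Time'`` is only an example):
+
+        >>> from dateutil.tz.win import tzwinlocal
+        >>> local = tzwinlocal()
+        >>> repr(local)
+        'tzwinlocal()'
+        >>> str(local)
+        "tzwinlocal('Eastern Standard Time')"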
+ """ + def __init__(self): + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey: + keydict = valuestodict(tzlocalkey) + + self._std_abbr = keydict["StandardName"] + self._dst_abbr = keydict["DaylightName"] + + try: + tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME, + sn=self._std_abbr) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + _keydict = valuestodict(tzkey) + self._display = _keydict["Display"] + except OSError: + self._display = None + + stdoffset = -keydict["Bias"]-keydict["StandardBias"] + dstoffset = stdoffset-keydict["DaylightBias"] + + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # For reasons unclear, in this particular key, the day of week has been + # moved to the END of the SYSTEMTIME structure. + tup = struct.unpack("=8h", keydict["StandardStart"]) + + (self._stdmonth, + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[1:5] + + self._stddayofweek = tup[7] + + tup = struct.unpack("=8h", keydict["DaylightStart"]) + + (self._dstmonth, + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[1:5] + + self._dstdayofweek = tup[7] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwinlocal()" + + def __str__(self): + # str will return the standard name, not the daylight name. + return "tzwinlocal(%s)" % repr(self._std_abbr) + + def __reduce__(self): + return (self.__class__, ()) + + +def picknthweekday(year, month, dayofweek, hour, minute, whichweek): + """ dayofweek == 0 means Sunday, whichweek 5 means last instance """ + first = datetime.datetime(year, month, 1, hour, minute) + + # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6), + # Because 7 % 7 = 0 + weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1) + wd = weekdayone + ((whichweek - 1) * ONEWEEK) + if (wd.month != month): + wd -= ONEWEEK + + return wd + + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dout = {} + size = winreg.QueryInfoKey(key)[1] + tz_res = None + + for i in range(size): + key_name, value, dtype = winreg.EnumValue(key, i) + if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN: + # If it's a DWORD (32-bit integer), it's stored as unsigned - convert + # that to a proper signed integer + if value & (1 << 31): + value = value - (1 << 32) + elif dtype == winreg.REG_SZ: + # If it's a reference to the tzres DLL, load the actual string + if value.startswith('@tzres'): + tz_res = tz_res or tzres() + value = tz_res.name_from_string(value) + + value = value.rstrip('\x00') # Remove trailing nulls + + dout[key_name] = value + + return dout diff --git a/minor_project/lib/python3.6/site-packages/dateutil/tzwin.py b/minor_project/lib/python3.6/site-packages/dateutil/tzwin.py new file mode 100644 index 0000000..cebc673 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/tzwin.py @@ -0,0 +1,2 @@ +# tzwin has moved to dateutil.tz.win +from .tz.win import * diff --git a/minor_project/lib/python3.6/site-packages/dateutil/utils.py b/minor_project/lib/python3.6/site-packages/dateutil/utils.py new file mode 100644 index 0000000..44d9c99 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/utils.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +This module offers general convenience and utility 
functions for dealing with +datetimes. + +.. versionadded:: 2.7.0 +""" +from __future__ import unicode_literals + +from datetime import datetime, time + + +def today(tzinfo=None): + """ + Returns a :py:class:`datetime` representing the current day at midnight + + :param tzinfo: + The time zone to attach (also used to determine the current day). + + :return: + A :py:class:`datetime.datetime` object representing the current day + at midnight. + """ + + dt = datetime.now(tzinfo) + return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) + + +def default_tzinfo(dt, tzinfo): + """ + Sets the ``tzinfo`` parameter on naive datetimes only + + This is useful for example when you are provided a datetime that may have + either an implicit or explicit time zone, such as when parsing a time zone + string. + + .. doctest:: + + >>> from dateutil.tz import tzoffset + >>> from dateutil.parser import parse + >>> from dateutil.utils import default_tzinfo + >>> dflt_tz = tzoffset("EST", -18000) + >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) + 2014-01-01 12:30:00+00:00 + >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) + 2014-01-01 12:30:00-05:00 + + :param dt: + The datetime on which to replace the time zone + + :param tzinfo: + The :py:class:`datetime.tzinfo` subclass instance to assign to + ``dt`` if (and only if) it is naive. + + :return: + Returns an aware :py:class:`datetime.datetime`. + """ + if dt.tzinfo is not None: + return dt + else: + return dt.replace(tzinfo=tzinfo) + + +def within_delta(dt1, dt2, delta): + """ + Useful for comparing two datetimes that may a negilible difference + to be considered equal. + """ + delta = abs(delta) + difference = dt1 - dt2 + return -delta <= difference <= delta diff --git a/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/__init__.py b/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/__init__.py new file mode 100644 index 0000000..34f11ad --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/__init__.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +import warnings +import json + +from tarfile import TarFile +from pkgutil import get_data +from io import BytesIO + +from dateutil.tz import tzfile as _tzfile + +__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] + +ZONEFILENAME = "dateutil-zoneinfo.tar.gz" +METADATA_FN = 'METADATA' + + +class tzfile(_tzfile): + def __reduce__(self): + return (gettz, (self._filename,)) + + +def getzoneinfofile_stream(): + try: + return BytesIO(get_data(__name__, ZONEFILENAME)) + except IOError as e: # TODO switch to FileNotFoundError? + warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) + return None + + +class ZoneInfoFile(object): + def __init__(self, zonefile_stream=None): + if zonefile_stream is not None: + with TarFile.open(fileobj=zonefile_stream) as tf: + self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name) + for zf in tf.getmembers() + if zf.isfile() and zf.name != METADATA_FN} + # deal with links: They'll point to their parent object. 
Less + # waste of memory + links = {zl.name: self.zones[zl.linkname] + for zl in tf.getmembers() if + zl.islnk() or zl.issym()} + self.zones.update(links) + try: + metadata_json = tf.extractfile(tf.getmember(METADATA_FN)) + metadata_str = metadata_json.read().decode('UTF-8') + self.metadata = json.loads(metadata_str) + except KeyError: + # no metadata in tar file + self.metadata = None + else: + self.zones = {} + self.metadata = None + + def get(self, name, default=None): + """ + Wrapper for :func:`ZoneInfoFile.zones.get`. This is a convenience method + for retrieving zones from the zone dictionary. + + :param name: + The name of the zone to retrieve. (Generally IANA zone names) + + :param default: + The value to return in the event of a missing key. + + .. versionadded:: 2.6.0 + + """ + return self.zones.get(name, default) + + +# The current API has gettz as a module function, although in fact it taps into +# a stateful class. So as a workaround for now, without changing the API, we +# will create a new "global" class instance the first time a user requests a +# timezone. Ugly, but adheres to the api. +# +# TODO: Remove after deprecation period. +_CLASS_ZONE_INSTANCE = [] + + +def get_zonefile_instance(new_instance=False): + """ + This is a convenience function which provides a :class:`ZoneInfoFile` + instance using the data provided by the ``dateutil`` package. By default, it + caches a single instance of the ZoneInfoFile object and returns that. + + :param new_instance: + If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and + used as the cached instance for the next call. Otherwise, new instances + are created only as necessary. + + :return: + Returns a :class:`ZoneInfoFile` object. + + .. versionadded:: 2.6 + """ + if new_instance: + zif = None + else: + zif = getattr(get_zonefile_instance, '_cached_instance', None) + + if zif is None: + zif = ZoneInfoFile(getzoneinfofile_stream()) + + get_zonefile_instance._cached_instance = zif + + return zif + + +def gettz(name): + """ + This retrieves a time zone from the local zoneinfo tarball that is packaged + with dateutil. + + :param name: + An IANA-style time zone name, as found in the zoneinfo file. + + :return: + Returns a :class:`dateutil.tz.tzfile` time zone object. + + .. warning:: + It is generally inadvisable to use this function, and it is only + provided for API compatibility with earlier versions. This is *not* + equivalent to ``dateutil.tz.gettz()``, which selects an appropriate + time zone based on the inputs, favoring system zoneinfo. This is ONLY + for accessing the dateutil-specific zoneinfo (which may be out of + date compared to the system zoneinfo). + + .. deprecated:: 2.6 + If you need to use a specific zoneinfofile over the system zoneinfo, + instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call + :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead. + + Use :func:`get_zonefile_instance` to retrieve an instance of the + dateutil-provided zoneinfo. + """ + warnings.warn("zoneinfo.gettz() will be removed in future versions, " + "to use the dateutil-provided zoneinfo files, instantiate a " + "ZoneInfoFile object and use ZoneInfoFile.zones.get() " + "instead. 
See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].zones.get(name) + + +def gettz_db_metadata(): + """ Get the zonefile metadata + + See `zonefile_metadata`_ + + :returns: + A dictionary with the database metadata + + .. deprecated:: 2.6 + See deprecation warning in :func:`zoneinfo.gettz`. To get metadata, + query the attribute ``zoneinfo.ZoneInfoFile.metadata``. + """ + warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future " + "versions, to use the dateutil-provided zoneinfo files, " + "ZoneInfoFile object and query the 'metadata' attribute " + "instead. See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].metadata diff --git a/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..9cb2abc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-36.pyc new file mode 100644 index 0000000..9ce9320 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz new file mode 100644 index 0000000..89e8351 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz differ diff --git a/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py b/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py new file mode 100644 index 0000000..78f0d1a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py @@ -0,0 +1,53 @@ +import logging +import os +import tempfile +import shutil +import json +from subprocess import check_call +from tarfile import TarFile + +from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME + + +def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None): + """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* + + filename is the timezone tarball from ``ftp.iana.org/tz``. 
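+
+    A sketch of one possible invocation (the tarball name, zone groups and
+    metadata shown here are purely illustrative):
+
+        rebuild("tzdata-latest.tar.gz",
+                zonegroups=["africa", "europe", "northamerica"],
+                metadata={"tzversion": "latest"})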
+ + """ + tmpdir = tempfile.mkdtemp() + zonedir = os.path.join(tmpdir, "zoneinfo") + moduledir = os.path.dirname(__file__) + try: + with TarFile.open(filename) as tf: + for name in zonegroups: + tf.extract(name, tmpdir) + filepaths = [os.path.join(tmpdir, n) for n in zonegroups] + try: + check_call(["zic", "-d", zonedir] + filepaths) + except OSError as e: + _print_on_nosuchfile(e) + raise + # write metadata file + with open(os.path.join(zonedir, METADATA_FN), 'w') as f: + json.dump(metadata, f, indent=4, sort_keys=True) + target = os.path.join(moduledir, ZONEFILENAME) + with TarFile.open(target, "w:%s" % format) as tf: + for entry in os.listdir(zonedir): + entrypath = os.path.join(zonedir, entry) + tf.add(entrypath, entry) + finally: + shutil.rmtree(tmpdir) + + +def _print_on_nosuchfile(e): + """Print helpful troubleshooting message + + e is an exception raised by subprocess.check_call() + + """ + if e.errno == 2: + logging.error( + "Could not find zic. Perhaps you need to install " + "libc-bin or some other package that provides it, " + "or it's not in your PATH?") diff --git a/minor_project/lib/python3.6/site-packages/easy_install.py b/minor_project/lib/python3.6/site-packages/easy_install.py new file mode 100644 index 0000000..d87e984 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git a/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/INSTALLER b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/LICENSE.txt b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/LICENSE.txt new file mode 100644 index 0000000..4c904db --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/LICENSE.txt @@ -0,0 +1,19 @@ +Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/METADATA b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/METADATA new file mode 100644 index 0000000..d34ff2b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/METADATA @@ -0,0 +1,109 @@ +Metadata-Version: 2.1 +Name: future +Version: 0.18.2 +Summary: Clean single-source support for Python 3 and 2 +Home-page: https://python-future.org +Author: Ed Schofield +Author-email: ed@pythoncharmers.com +License: MIT +Keywords: future past python3 migration futurize backport six 2to3 modernize pasteurize 3to2 +Platform: UNKNOWN +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: License :: OSI Approved +Classifier: License :: OSI Approved :: MIT License +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.* + + +future: Easy, safe support for Python 2/3 compatibility +======================================================= + +``future`` is the missing compatibility layer between Python 2 and Python +3. It allows you to use a single, clean Python 3.x-compatible codebase to +support both Python 2 and Python 3 with minimal overhead. + +It is designed to be used as follows:: + + from __future__ import (absolute_import, division, + print_function, unicode_literals) + from builtins import ( + bytes, dict, int, list, object, range, str, + ascii, chr, hex, input, next, oct, open, + pow, round, super, + filter, map, zip) + +followed by predominantly standard, idiomatic Python 3 code that then runs +similarly on Python 2.6/2.7 and Python 3.3+. + +The imports have no effect on Python 3. On Python 2, they shadow the +corresponding builtins, which normally have different semantics on Python 3 +versus 2, to provide their Python 3 semantics. + + +Standard library reorganization +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``future`` supports the standard library reorganization (PEP 3108) through the +following Py3 interfaces: + + >>> # Top-level packages with Py3 names provided on Py2: + >>> import html.parser + >>> import queue + >>> import tkinter.dialog + >>> import xmlrpc.client + >>> # etc. + + >>> # Aliases provided for extensions to existing Py2 module names: + >>> from future.standard_library import install_aliases + >>> install_aliases() + + >>> from collections import Counter, OrderedDict # backported to Py2.6 + >>> from collections import UserDict, UserList, UserString + >>> import urllib.request + >>> from itertools import filterfalse, zip_longest + >>> from subprocess import getoutput, getstatusoutput + + +Automatic conversion +-------------------- + +An included script called `futurize +`_ aids in converting +code (from either Python 2 or Python 3) to code compatible with both +platforms. It is similar to ``python-modernize`` but goes further in +providing Python 3 compatibility through the use of the backported types +and builtin functions in ``future``. 
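+
+A hypothetical two-stage run (flag names are assumed from the ``futurize``
+documentation; ``mymodule.py`` is just a placeholder)::
+
+    $ futurize --stage1 -w mymodule.py   # conservative, Py2/Py3-safe fixes
+    $ futurize --stage2 -w mymodule.py   # Py3-style code backed by future's wrappers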
+ + +Documentation +------------- + +See: http://python-future.org + + +Credits +------- + +:Author: Ed Schofield, Jordan M. Adler, et al +:Sponsor: Python Charmers Pty Ltd, Australia, and Python Charmers Pte + Ltd, Singapore. http://pythoncharmers.com +:Others: See docs/credits.rst or http://python-future.org/credits.html + + +Licensing +--------- +Copyright 2013-2019 Python Charmers Pty Ltd, Australia. +The software is distributed under an MIT licence. See LICENSE.txt. + + + diff --git a/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/RECORD b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/RECORD new file mode 100644 index 0000000..2d628d6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/RECORD @@ -0,0 +1,415 @@ +../../../bin/futurize,sha256=q2VZawlFl6nUK81ffaR1e22aQ560YiviLAWinGq2SDE,346 +../../../bin/pasteurize,sha256=XcdIRg5raNQIgdXcRnE2B3xofI83mrinQj08iEky5Ic,348 +future-0.18.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +future-0.18.2.dist-info/LICENSE.txt,sha256=kW5WE5LUhHG5wjQ39W4mUvMgyzsRnOqhYu30EBb3Rrk,1083 +future-0.18.2.dist-info/METADATA,sha256=fkY-mhLBh40f490kVFZ3hkvu2OVGdLIp5x-oJpqF91k,3703 +future-0.18.2.dist-info/RECORD,, +future-0.18.2.dist-info/WHEEL,sha256=g4nMs7d-Xl9-xC9XovUrsDHGXt-FT0E17Yqo92DEfvY,92 +future-0.18.2.dist-info/entry_points.txt,sha256=-ATQtLUC2gkzrCYqc1Twac093xrI164NuMwsRALJWnM,89 +future-0.18.2.dist-info/top_level.txt,sha256=DT0C3az2gb-uJaj-fs0h4WwHYlJVDp0EvLdud1y5Zyw,38 +future/__init__.py,sha256=TsDq1XoGk6Jfach_rEhwAi07zR5OKYZ6hhUlG5Bj6Ag,2991 +future/__pycache__/__init__.cpython-36.pyc,, +future/backports/__init__.py,sha256=5QXvQ_jc5Xx6p4dSaHnZXPZazBEunKDKhbUjxZ0XD1I,530 +future/backports/__pycache__/__init__.cpython-36.pyc,, +future/backports/__pycache__/_markupbase.cpython-36.pyc,, +future/backports/__pycache__/datetime.cpython-36.pyc,, +future/backports/__pycache__/misc.cpython-36.pyc,, +future/backports/__pycache__/socket.cpython-36.pyc,, +future/backports/__pycache__/socketserver.cpython-36.pyc,, +future/backports/__pycache__/total_ordering.cpython-36.pyc,, +future/backports/_markupbase.py,sha256=MDPTCykLq4J7Aea3PvYotATEE0CG4R_SjlxfJaLXTJM,16215 +future/backports/datetime.py,sha256=I214Vu0cRY8mi8J5aIcsAyQJnWmOKXeLV-QTWSn7VQU,75552 +future/backports/email/__init__.py,sha256=eH3AJr3FkuBy_D6yS1V2K76Q2CQ93y2zmAMWmn8FbHI,2269 +future/backports/email/__pycache__/__init__.cpython-36.pyc,, +future/backports/email/__pycache__/_encoded_words.cpython-36.pyc,, +future/backports/email/__pycache__/_header_value_parser.cpython-36.pyc,, +future/backports/email/__pycache__/_parseaddr.cpython-36.pyc,, +future/backports/email/__pycache__/_policybase.cpython-36.pyc,, +future/backports/email/__pycache__/base64mime.cpython-36.pyc,, +future/backports/email/__pycache__/charset.cpython-36.pyc,, +future/backports/email/__pycache__/encoders.cpython-36.pyc,, +future/backports/email/__pycache__/errors.cpython-36.pyc,, +future/backports/email/__pycache__/feedparser.cpython-36.pyc,, +future/backports/email/__pycache__/generator.cpython-36.pyc,, +future/backports/email/__pycache__/header.cpython-36.pyc,, +future/backports/email/__pycache__/headerregistry.cpython-36.pyc,, +future/backports/email/__pycache__/iterators.cpython-36.pyc,, +future/backports/email/__pycache__/message.cpython-36.pyc,, +future/backports/email/__pycache__/parser.cpython-36.pyc,, +future/backports/email/__pycache__/policy.cpython-36.pyc,, +future/backports/email/__pycache__/quoprimime.cpython-36.pyc,, 
+future/backports/email/__pycache__/utils.cpython-36.pyc,, +future/backports/email/_encoded_words.py,sha256=m1vTRfxAQdg4VyWO7PF-1ih1mmq97V-BPyHHkuEwSME,8443 +future/backports/email/_header_value_parser.py,sha256=cj_1ce1voLn8H98r9cKqiSLgfFSxCv3_UL3sSvjqgjk,104692 +future/backports/email/_parseaddr.py,sha256=KewEnos0YDM-SYX503z7E1MmVbG5VRaKjxjcl0Ipjbs,17389 +future/backports/email/_policybase.py,sha256=2lJD9xouiz4uHvWGQ6j1nwlwWVQGwwzpy5JZoeQqhUc,14647 +future/backports/email/base64mime.py,sha256=sey6iJA9pHIOdFgoV1p7QAwYVjt8CEkDhITt304-nyI,3729 +future/backports/email/charset.py,sha256=CfE4iV2zAq6MQC0CHXHLnwTNW71zmhNITbzOcfxE4vY,17439 +future/backports/email/encoders.py,sha256=Nn4Pcx1rOdRgoSIzB6T5RWHl5zxClbf32wgE6D0tUt8,2800 +future/backports/email/errors.py,sha256=tRX8PP5g7mk2bAxL1jTCYrbfhD2gPZFNrh4_GJRM8OQ,3680 +future/backports/email/feedparser.py,sha256=bvmhb4cdY-ipextPK2K2sDgMsNvTspmuQfYyCxc4zSc,22736 +future/backports/email/generator.py,sha256=lpaLhZHneguvZ2QgRu7Figkjb7zmY28AGhj9iZTdI7s,19520 +future/backports/email/header.py,sha256=uBHbNKO-yx5I9KBflernJpyy3fX4gImCB1QE7ICApLs,24448 +future/backports/email/headerregistry.py,sha256=ZPbvLKXD0NMLSU4jXlVHfGyGcLMrFm-GQVURu_XHj88,20637 +future/backports/email/iterators.py,sha256=kMRYFGy3SVVpo7HG7JJr2ZAlOoaX6CVPzKYwDSvLfV0,2348 +future/backports/email/message.py,sha256=I6WW5cZDza7uwLOGJSvsDhGZC9K_Q570Lk2gt_vDUXM,35237 +future/backports/email/mime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +future/backports/email/mime/__pycache__/__init__.cpython-36.pyc,, +future/backports/email/mime/__pycache__/application.cpython-36.pyc,, +future/backports/email/mime/__pycache__/audio.cpython-36.pyc,, +future/backports/email/mime/__pycache__/base.cpython-36.pyc,, +future/backports/email/mime/__pycache__/image.cpython-36.pyc,, +future/backports/email/mime/__pycache__/message.cpython-36.pyc,, +future/backports/email/mime/__pycache__/multipart.cpython-36.pyc,, +future/backports/email/mime/__pycache__/nonmultipart.cpython-36.pyc,, +future/backports/email/mime/__pycache__/text.cpython-36.pyc,, +future/backports/email/mime/application.py,sha256=m-5a4mSxu2E32XAImnp9x9eMVX5Vme2iNgn2dMMNyss,1401 +future/backports/email/mime/audio.py,sha256=2ognalFRadcsUYQYMUZbjv5i1xJbFhQN643doMuI7M4,2815 +future/backports/email/mime/base.py,sha256=wV3ClQyMsOqmkXSXbk_wd_zPoPTvBx8kAIzq3rdM4lE,875 +future/backports/email/mime/image.py,sha256=DpQk1sB-IMmO43AF4uadsXyf_y5TdEzJLfyhqR48bIw,1907 +future/backports/email/mime/message.py,sha256=pFsMhXW07aRjsLq1peO847PApWFAl28-Z2Z7BP1Dn74,1429 +future/backports/email/mime/multipart.py,sha256=j4Lf_sJmuwTbfgdQ6R35_t1_ha2DynJBJDvpjwbNObE,1699 +future/backports/email/mime/nonmultipart.py,sha256=Ciba1Z8d2yLDDpxgDJuk3Bb-TqcpE9HCd8KfbW5vgl4,832 +future/backports/email/mime/text.py,sha256=zV98BjoR4S_nX8c47x43LnsnifeGhIfNGwSAh575bs0,1552 +future/backports/email/parser.py,sha256=-115SC3DHZ6lLijWFTxuOnE-GiM2BOYaUSz-QpmvYSo,5312 +future/backports/email/policy.py,sha256=gpcbhVRXuCohkK6MUqopTs1lv4E4-ZVUO6OVncoGEJE,8823 +future/backports/email/quoprimime.py,sha256=w93W5XgdFpyGaDqDBJrnXF_v_npH5r20WuAxmrAzyQg,10923 +future/backports/email/utils.py,sha256=vpfN0E8UjNbNw-2NFBQGCo4TNgrghMsqzpEYW5C_fBs,14270 +future/backports/html/__init__.py,sha256=FKwqFtWMCoGNkhU97OPnR1fZSh6etAKfN1FU1KvXcV8,924 +future/backports/html/__pycache__/__init__.cpython-36.pyc,, +future/backports/html/__pycache__/entities.cpython-36.pyc,, +future/backports/html/__pycache__/parser.cpython-36.pyc,, 
+future/backports/html/entities.py,sha256=kzoRnQyGk_3DgoucHLhL5QL1pglK9nvmxhPIGZFDTnc,75428 +future/backports/html/parser.py,sha256=G2tUObvbHSotNt06JLY-BP1swaZNfDYFd_ENWDjPmRg,19770 +future/backports/http/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +future/backports/http/__pycache__/__init__.cpython-36.pyc,, +future/backports/http/__pycache__/client.cpython-36.pyc,, +future/backports/http/__pycache__/cookiejar.cpython-36.pyc,, +future/backports/http/__pycache__/cookies.cpython-36.pyc,, +future/backports/http/__pycache__/server.cpython-36.pyc,, +future/backports/http/client.py,sha256=76EbhEZOtvdHFcU-jrjivoff13oQ9IMbdkZEdf5kQzQ,47602 +future/backports/http/cookiejar.py,sha256=_Vy4BPT-h0ZT0R_utGQAFXzuOAdmU9KedGFffyX9wN4,76559 +future/backports/http/cookies.py,sha256=DsyDUGDEbCXAA9Jq6suswSc76uSZqUu39adDDNj8XGw,21581 +future/backports/http/server.py,sha256=1CaMxgzHf9lYhmTJyE7topgjRIlIn9cnjgw8YEvwJV4,45523 +future/backports/misc.py,sha256=AkbED6BdHKnYCmIAontT4zHKTqdPPfJfn35HIs6LDrg,32682 +future/backports/socket.py,sha256=DH1V6IjKPpJ0tln8bYvxvQ7qnvZG-UoQtMA5yVleHiU,15663 +future/backports/socketserver.py,sha256=Twvyk5FqVnOeiNcbVsyMDPTF1mNlkKfyofG7tKxTdD8,24286 +future/backports/test/__init__.py,sha256=9dXxIZnkI095YfHC-XIaVF6d31GjeY1Ag8TEzcFgepM,264 +future/backports/test/__pycache__/__init__.cpython-36.pyc,, +future/backports/test/__pycache__/pystone.cpython-36.pyc,, +future/backports/test/__pycache__/ssl_servers.cpython-36.pyc,, +future/backports/test/__pycache__/support.cpython-36.pyc,, +future/backports/test/badcert.pem,sha256=JioQeRZkHH8hGsWJjAF3U1zQvcWqhyzG6IOEJpTY9SE,1928 +future/backports/test/badkey.pem,sha256=gaBK9px_gG7DmrLKxfD6f6i-toAmARBTVfs-YGFRQF0,2162 +future/backports/test/dh512.pem,sha256=dUTsjtLbK-femrorUrTGF8qvLjhTiT_n4Uo5V6u__Gs,402 +future/backports/test/https_svn_python_org_root.pem,sha256=wOB3Onnc62Iu9kEFd8GcHhd_suucYjpJNA3jyfHeJWA,2569 +future/backports/test/keycert.passwd.pem,sha256=ZBfnVLpbBtAOf_2gCdiQ-yrBHmRsNzSf8VC3UpQZIjg,1830 +future/backports/test/keycert.pem,sha256=xPXi5idPcQVbrhgxBqF2TNGm6sSZ2aLVVEt6DWzplL8,1783 +future/backports/test/keycert2.pem,sha256=DB46FEAYv8BWwQJ-5RzC696FxPN7CON-Qsi-R4poJgc,1795 +future/backports/test/nokia.pem,sha256=s00x0uPDSaa5DHJ_CwzlVhg3OVdJ47f4zgqQdd0SAfQ,1923 +future/backports/test/nullbytecert.pem,sha256=NFRYWhmP_qT3jGfVjR6-iaC-EQdhIFjiXtTLN5ZPKnE,5435 +future/backports/test/nullcert.pem,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +future/backports/test/pystone.py,sha256=fvyoJ_tVovTNaxbJmdJMwr9F6SngY-U4ibULnd_wUqA,7427 +future/backports/test/sha256.pem,sha256=3wB-GQqEc7jq-PYwYAQaPbtTvvr7stk_DVmZxFgehfA,8344 +future/backports/test/ssl_cert.pem,sha256=M607jJNeIeHG9BlTf_jaQkPJI4nOxSJPn-zmEAaW43M,867 +future/backports/test/ssl_key.passwd.pem,sha256=I_WH4sBw9Vs9Z-BvmuXY0aw8tx8avv6rm5UL4S_pP00,963 +future/backports/test/ssl_key.pem,sha256=VKGU-R3UYaZpVTXl7chWl4vEYEDeob69SfvRTQ8aq_4,916 +future/backports/test/ssl_servers.py,sha256=-pd7HMZljuZfFRAbCAiAP_2G04orITJFj-S9ddr6o84,7209 +future/backports/test/support.py,sha256=zJrb-pz-Wu2dZwnNodg1v3w96zVq7ORuN-hOGOHbdA8,70881 +future/backports/total_ordering.py,sha256=O3M57_IisQ-zW5hW20uxkfk4fTGsr0EF2tAKx3BksQo,1929 +future/backports/urllib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +future/backports/urllib/__pycache__/__init__.cpython-36.pyc,, +future/backports/urllib/__pycache__/error.cpython-36.pyc,, +future/backports/urllib/__pycache__/parse.cpython-36.pyc,, +future/backports/urllib/__pycache__/request.cpython-36.pyc,, 
+future/backports/urllib/__pycache__/response.cpython-36.pyc,, +future/backports/urllib/__pycache__/robotparser.cpython-36.pyc,, +future/backports/urllib/error.py,sha256=ktikuK9ag4lS4f8Z0k5p1F11qF40N2AiOtjbXiF97ew,2715 +future/backports/urllib/parse.py,sha256=67avrYqV1UK7i_22goRUrvJ8SffzjRdTja9wzq_ynXY,35792 +future/backports/urllib/request.py,sha256=aR9ZMzfhV1C2Qk3wFsGvkwxqtdPTdsJVGRt5DUCwgJ8,96276 +future/backports/urllib/response.py,sha256=ooQyswwbb-9N6IVi1Kwjss1aR-Kvm8ZNezoyVEonp8c,3180 +future/backports/urllib/robotparser.py,sha256=pnAGTbKhdbCq_9yMZp7m8hj5q_NJpyQX6oQIZuYcnkw,6865 +future/backports/xmlrpc/__init__.py,sha256=h61ciVTdVvu8oEUXv4dHf_Tc5XUXDH3RKB1-8fQhSsg,38 +future/backports/xmlrpc/__pycache__/__init__.cpython-36.pyc,, +future/backports/xmlrpc/__pycache__/client.cpython-36.pyc,, +future/backports/xmlrpc/__pycache__/server.cpython-36.pyc,, +future/backports/xmlrpc/client.py,sha256=6a6Pvx_RVC9gIHDkFOVdREeGaZckOOiWd7T6GyzU3qU,48133 +future/backports/xmlrpc/server.py,sha256=W_RW5hgYbNV2LGbnvngzm7akacRdK-XFY-Cy2HL-qsY,37285 +future/builtins/__init__.py,sha256=jSdOucWfCsfkfTR8Jd4-Ls-YQpJ0AnzUomBxgwuoxNs,1687 +future/builtins/__pycache__/__init__.cpython-36.pyc,, +future/builtins/__pycache__/disabled.cpython-36.pyc,, +future/builtins/__pycache__/iterators.cpython-36.pyc,, +future/builtins/__pycache__/misc.cpython-36.pyc,, +future/builtins/__pycache__/new_min_max.cpython-36.pyc,, +future/builtins/__pycache__/newnext.cpython-36.pyc,, +future/builtins/__pycache__/newround.cpython-36.pyc,, +future/builtins/__pycache__/newsuper.cpython-36.pyc,, +future/builtins/disabled.py,sha256=Ysq74bsmwntpq7dzkwTAD7IHKrkXy66vJlPshVwgVBI,2109 +future/builtins/iterators.py,sha256=l1Zawm2x82oqOuGGtCZRE76Ej98sMlHQwu9fZLK5RrA,1396 +future/builtins/misc.py,sha256=hctlKKWUyN0Eoodxg4ySQHEqARTukOLR4L5K5c6PW9k,4550 +future/builtins/new_min_max.py,sha256=7qQ4iiG4GDgRzjPzzzmg9pdby35Mtt6xNOOsyqHnIGY,1757 +future/builtins/newnext.py,sha256=oxXB8baXqJv29YG40aCS9UXk9zObyoOjya8BJ7NdBJM,2009 +future/builtins/newround.py,sha256=l2EXPAFU3fAsZigJxUH6x66B7jhNaB076-L5FR617R8,3181 +future/builtins/newsuper.py,sha256=LmiUQ_f6NXDIz6v6sDPkoTWl-2Zccy7PpZfQKYtscac,4146 +future/moves/__init__.py,sha256=MsAW69Xp_fqUo4xODufcKM6AZf-ozHaz44WPZdsDFJA,220 +future/moves/__pycache__/__init__.cpython-36.pyc,, +future/moves/__pycache__/_dummy_thread.cpython-36.pyc,, +future/moves/__pycache__/_markupbase.cpython-36.pyc,, +future/moves/__pycache__/_thread.cpython-36.pyc,, +future/moves/__pycache__/builtins.cpython-36.pyc,, +future/moves/__pycache__/collections.cpython-36.pyc,, +future/moves/__pycache__/configparser.cpython-36.pyc,, +future/moves/__pycache__/copyreg.cpython-36.pyc,, +future/moves/__pycache__/itertools.cpython-36.pyc,, +future/moves/__pycache__/pickle.cpython-36.pyc,, +future/moves/__pycache__/queue.cpython-36.pyc,, +future/moves/__pycache__/reprlib.cpython-36.pyc,, +future/moves/__pycache__/socketserver.cpython-36.pyc,, +future/moves/__pycache__/subprocess.cpython-36.pyc,, +future/moves/__pycache__/sys.cpython-36.pyc,, +future/moves/__pycache__/winreg.cpython-36.pyc,, +future/moves/_dummy_thread.py,sha256=c8ZRUd8ffvyvGKGGgve5NKc8VdtAWquu8-4FnO2EdvA,175 +future/moves/_markupbase.py,sha256=W9wh_Gu3jDAMIhVBV1ZnCkJwYLHRk_v_su_HLALBkZQ,171 +future/moves/_thread.py,sha256=rwY7L4BZMFPlrp_i6T2Un4_iKYwnrXJ-yV6FJZN8YDo,163 +future/moves/builtins.py,sha256=4sjjKiylecJeL9da_RaBZjdymX2jtMs84oA9lCqb4Ug,281 +future/moves/collections.py,sha256=OKQ-TfUgms_2bnZRn4hrclLDoiN2i-HSWcjs3BC2iY8,417 
+future/moves/configparser.py,sha256=TNy226uCbljjU-DjAVo7j7Effbj5zxXvDh0SdXehbzk,146 +future/moves/copyreg.py,sha256=Y3UjLXIMSOxZggXtvZucE9yv4tkKZtVan45z8eix4sU,438 +future/moves/dbm/__init__.py,sha256=_VkvQHC2UcIgZFPRroiX_P0Fs7HNqS_69flR0-oq2B8,488 +future/moves/dbm/__pycache__/__init__.cpython-36.pyc,, +future/moves/dbm/__pycache__/dumb.cpython-36.pyc,, +future/moves/dbm/__pycache__/gnu.cpython-36.pyc,, +future/moves/dbm/__pycache__/ndbm.cpython-36.pyc,, +future/moves/dbm/dumb.py,sha256=HKdjjtO3EyP9EKi1Hgxh_eUU6yCQ0fBX9NN3n-zb8JE,166 +future/moves/dbm/gnu.py,sha256=XoCSEpZ2QaOgo2h1m80GW7NUgj_b93BKtbcuwgtnaKo,162 +future/moves/dbm/ndbm.py,sha256=OFnreyo_1YHDBl5YUm9gCzKlN1MHgWbfSQAZVls2jaM,162 +future/moves/html/__init__.py,sha256=BSUFSHxXf2kGvHozlnrB1nn6bPE6p4PpN3DwA_Z5geo,1016 +future/moves/html/__pycache__/__init__.cpython-36.pyc,, +future/moves/html/__pycache__/entities.cpython-36.pyc,, +future/moves/html/__pycache__/parser.cpython-36.pyc,, +future/moves/html/entities.py,sha256=lVvchdjK_RzRj759eg4RMvGWHfgBbj0tKGOoZ8dbRyY,177 +future/moves/html/parser.py,sha256=V2XpHLKLCxQum3N9xlO3IUccAD7BIykZMqdEcWET3vY,167 +future/moves/http/__init__.py,sha256=Mx1v_Tcks4udHCtDM8q2xnYUiQ01gD7EpPyeQwsP3-Q,71 +future/moves/http/__pycache__/__init__.cpython-36.pyc,, +future/moves/http/__pycache__/client.cpython-36.pyc,, +future/moves/http/__pycache__/cookiejar.cpython-36.pyc,, +future/moves/http/__pycache__/cookies.cpython-36.pyc,, +future/moves/http/__pycache__/server.cpython-36.pyc,, +future/moves/http/client.py,sha256=hqEBq7GDXZidd1AscKnSyjSoMcuj8rERqGTmD7VheDQ,165 +future/moves/http/cookiejar.py,sha256=Frr9ZZCg-145ymy0VGpiPJhvBEpJtVqRBYPaKhgT1Z4,173 +future/moves/http/cookies.py,sha256=PPrHa1_oDbu3D_BhJGc6PvMgY1KoxyYq1jqeJwEcMvE,233 +future/moves/http/server.py,sha256=8YQlSCShjAsB5rr5foVvZgp3IzwYFvTmGZCHhBSDtaI,606 +future/moves/itertools.py,sha256=PVxFHRlBQl9ElS0cuGFPcUtj53eHX7Z1DmggzGfgQ6c,158 +future/moves/pickle.py,sha256=r8j9skzfE8ZCeHyh_OB-WucOkRTIHN7zpRM7l7V3qS4,229 +future/moves/queue.py,sha256=uxvLCChF-zxWWgrY1a_wxt8rp2jILdwO4PrnkBW6VTE,160 +future/moves/reprlib.py,sha256=Nt5sUgMQ3jeVIukqSHOvB0UIsl6Y5t-mmT_13mpZmiY,161 +future/moves/socketserver.py,sha256=v8ZLurDxHOgsubYm1iefjlpnnJQcx2VuRUGt9FCJB9k,174 +future/moves/subprocess.py,sha256=oqRSMfFZkxM4MXkt3oD5N6eBwmmJ6rQ9KPhvSQKT_hM,251 +future/moves/sys.py,sha256=HOMRX4Loim75FMbWawd3oEwuGNJR-ClMREEFkVpBsRs,132 +future/moves/test/__init__.py,sha256=yB9F-fDQpzu1v8cBoKgIrL2ScUNqjlkqEztYrGVCQ-0,110 +future/moves/test/__pycache__/__init__.cpython-36.pyc,, +future/moves/test/__pycache__/support.cpython-36.pyc,, +future/moves/test/support.py,sha256=6zGgTTXcERyBJIQ04-X-sAe781tVgLVHp3HzmQPy52g,259 +future/moves/tkinter/__init__.py,sha256=jV9vDx3wRl0bsoclU8oSe-5SqHQ3YpCbStmqtXnq1p4,620 +future/moves/tkinter/__pycache__/__init__.cpython-36.pyc,, +future/moves/tkinter/__pycache__/colorchooser.cpython-36.pyc,, +future/moves/tkinter/__pycache__/commondialog.cpython-36.pyc,, +future/moves/tkinter/__pycache__/constants.cpython-36.pyc,, +future/moves/tkinter/__pycache__/dialog.cpython-36.pyc,, +future/moves/tkinter/__pycache__/dnd.cpython-36.pyc,, +future/moves/tkinter/__pycache__/filedialog.cpython-36.pyc,, +future/moves/tkinter/__pycache__/font.cpython-36.pyc,, +future/moves/tkinter/__pycache__/messagebox.cpython-36.pyc,, +future/moves/tkinter/__pycache__/scrolledtext.cpython-36.pyc,, +future/moves/tkinter/__pycache__/simpledialog.cpython-36.pyc,, +future/moves/tkinter/__pycache__/tix.cpython-36.pyc,, +future/moves/tkinter/__pycache__/ttk.cpython-36.pyc,, 
+future/moves/tkinter/colorchooser.py,sha256=kprlmpRtvDbW5Gq43H1mi2KmNJ2kuzLQOba0a5EwDkU,333 +future/moves/tkinter/commondialog.py,sha256=mdUbq1IZqOGaSA7_8R367IukDCsMfzXiVHrTQQpp7Z0,333 +future/moves/tkinter/constants.py,sha256=0qRUrZLRPdVxueABL9KTzzEWEsk6xM1rOjxK6OHxXtA,324 +future/moves/tkinter/dialog.py,sha256=ksp-zvs-_A90P9RNHS8S27f1k8f48zG2Bel2jwZV5y0,311 +future/moves/tkinter/dnd.py,sha256=C_Ah0Urnyf2XKE5u-oP6mWi16RzMSXgMA1uhBSAwKY8,306 +future/moves/tkinter/filedialog.py,sha256=RSJFDGOP2AJ4T0ZscJ2hyF9ssOWp9t_S_DtnOmT-WZ8,323 +future/moves/tkinter/font.py,sha256=TXarflhJRxqepaRNSDw6JFUVGz5P1T1C4_uF9VRqj3w,309 +future/moves/tkinter/messagebox.py,sha256=WJt4t83kLmr_UnpCWFuLoyazZr3wAUOEl6ADn3osoEA,327 +future/moves/tkinter/scrolledtext.py,sha256=DRzN8aBAlDBUo1B2KDHzdpRSzXBfH4rOOz0iuHXbQcg,329 +future/moves/tkinter/simpledialog.py,sha256=6MhuVhZCJV4XfPpPSUWKlDLLGEi0Y2ZlGQ9TbsmJFL0,329 +future/moves/tkinter/tix.py,sha256=aNeOfbWSGmcN69UmEGf4tJ-QIxLT6SU5ynzm1iWgepA,302 +future/moves/tkinter/ttk.py,sha256=rRrJpDjcP2gjQNukECu4F026P-CkW-3Ca2tN6Oia-Fw,302 +future/moves/urllib/__init__.py,sha256=yB9F-fDQpzu1v8cBoKgIrL2ScUNqjlkqEztYrGVCQ-0,110 +future/moves/urllib/__pycache__/__init__.cpython-36.pyc,, +future/moves/urllib/__pycache__/error.cpython-36.pyc,, +future/moves/urllib/__pycache__/parse.cpython-36.pyc,, +future/moves/urllib/__pycache__/request.cpython-36.pyc,, +future/moves/urllib/__pycache__/response.cpython-36.pyc,, +future/moves/urllib/__pycache__/robotparser.cpython-36.pyc,, +future/moves/urllib/error.py,sha256=gfrKzv-6W5OjzNIfjvJaQkxABRLym2KwjfKFXSdDB60,479 +future/moves/urllib/parse.py,sha256=xLLUMIIB5MreCdYzRZ5zIRWrhTRCoMO8RZEH4WPFQDY,1045 +future/moves/urllib/request.py,sha256=ttIzq60PwjRyrLQUGdAtfYvs4fziVwvcLe2Kw-hvE0g,3496 +future/moves/urllib/response.py,sha256=ZEZML0FpbB--GIeBFPvSzbtlVJ6EsR4tCI4qB7D8sFQ,342 +future/moves/urllib/robotparser.py,sha256=j24p6dMNzUpGZtT3BQxwRoE-F88iWmBpKgu0tRV61FQ,179 +future/moves/winreg.py,sha256=2zNAG59QI7vFlCj7kqDh0JrAYTpexOnI55PEAIjYhqo,163 +future/moves/xmlrpc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +future/moves/xmlrpc/__pycache__/__init__.cpython-36.pyc,, +future/moves/xmlrpc/__pycache__/client.cpython-36.pyc,, +future/moves/xmlrpc/__pycache__/server.cpython-36.pyc,, +future/moves/xmlrpc/client.py,sha256=2PfnL5IbKVwdKP7C8B1OUviEtuBObwoH4pAPfvHIvQc,143 +future/moves/xmlrpc/server.py,sha256=ESDXdpUgTKyeFmCDSnJmBp8zONjJklsRJOvy4OtaALc,143 +future/standard_library/__init__.py,sha256=7paz9IsD5qv_tvk5Rre3YrlA2_2aS1FJfI7UlrzAtWY,27743 +future/standard_library/__pycache__/__init__.cpython-36.pyc,, +future/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +future/tests/__pycache__/__init__.cpython-36.pyc,, +future/tests/__pycache__/base.cpython-36.pyc,, +future/tests/base.py,sha256=7LTAKHJgUxOwmffD1kgcErVt2VouKcldPnq4iruqk_k,19956 +future/types/__init__.py,sha256=5fBxWqf_OTQ8jZ7k2TS34rFH14togeR488F4zBHIQ-s,6831 +future/types/__pycache__/__init__.cpython-36.pyc,, +future/types/__pycache__/newbytes.cpython-36.pyc,, +future/types/__pycache__/newdict.cpython-36.pyc,, +future/types/__pycache__/newint.cpython-36.pyc,, +future/types/__pycache__/newlist.cpython-36.pyc,, +future/types/__pycache__/newmemoryview.cpython-36.pyc,, +future/types/__pycache__/newobject.cpython-36.pyc,, +future/types/__pycache__/newopen.cpython-36.pyc,, +future/types/__pycache__/newrange.cpython-36.pyc,, +future/types/__pycache__/newstr.cpython-36.pyc,, +future/types/newbytes.py,sha256=D_kNDD9sbNJir2cUxxePiAuw2OW5irxVnu55uHmuK9E,16303 
+future/types/newdict.py,sha256=2N7P44cWmWtiDHvlK5ir15mW492gg6uP2n65d5bsDy4,3100 +future/types/newint.py,sha256=hJiv9qUDrjl1xkfzNFNLzafsRMPoFcRFceoivUzVIek,13286 +future/types/newlist.py,sha256=-H5-fXodd-UQgTFnZBJdwE68CrgIL_jViYdv4w7q2rU,2284 +future/types/newmemoryview.py,sha256=LnARgiKqQ2zLwwDZ3owu1atoonPQkOneWMfxJCwB4_o,712 +future/types/newobject.py,sha256=AX_n8GwlDR2IY-xIwZCvu0Olj_Ca2aS57nlTihnFr-I,3358 +future/types/newopen.py,sha256=lcRNHWZ1UjEn_0_XKis1ZA5U6l-4c-CHlC0WX1sY4NI,810 +future/types/newrange.py,sha256=7sgJaRaC4WIUtZ40K-c1d5QWruyaCWGgTVFadKo8qYA,5294 +future/types/newstr.py,sha256=e0brkurI0IK--4ToQEO4Cz1FECZav4CyUGMKxlrcmK4,15758 +future/utils/__init__.py,sha256=wsvXsKx-DXZichQ10Rdml-CWMqS79RNNynmdvfISpCU,21828 +future/utils/__pycache__/__init__.cpython-36.pyc,, +future/utils/__pycache__/surrogateescape.cpython-36.pyc,, +future/utils/surrogateescape.py,sha256=7u4V4XlW83P5YSAJS2f92YUF8vsWthsiTnmAshOJL_M,6097 +libfuturize/__init__.py,sha256=CZA_KgvTQOPAY1_MrlJeQ6eMh2Eei4_KIv4JuyAkpfw,31 +libfuturize/__pycache__/__init__.cpython-36.pyc,, +libfuturize/__pycache__/fixer_util.cpython-36.pyc,, +libfuturize/__pycache__/main.cpython-36.pyc,, +libfuturize/fixer_util.py,sha256=Zhms5G97l40pyG1krQM2lCp-TxnocBdJkB2AbkAFnKY,17494 +libfuturize/fixes/__init__.py,sha256=5KEpUnjVsFCCsr_-zrikvJbLf9zslEJnFTH_5pBc33I,5236 +libfuturize/fixes/__pycache__/__init__.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_UserDict.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_absolute_import.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_add__future__imports_except_unicode_literals.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_basestring.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_bytes.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_cmp.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_division.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_division_safe.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_execfile.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_future_builtins.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_future_standard_library.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_future_standard_library_urllib.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_input.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_metaclass.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_next_call.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_object.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_oldstr_wrap.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_order___future__imports.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_print.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_print_with_import.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_raise.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_remove_old__future__imports.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_unicode_keep_u.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_unicode_literals_import.cpython-36.pyc,, +libfuturize/fixes/__pycache__/fix_xrange_with_import.cpython-36.pyc,, +libfuturize/fixes/fix_UserDict.py,sha256=jL4jXnGaUQTkG8RKfGXbU_HVTkB3MWZMQwUkqMAjB6I,3840 +libfuturize/fixes/fix_absolute_import.py,sha256=vkrF2FyQR5lSz2WmdqywzkEJVTC0eq4gh_REWBKHh7w,3140 +libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py,sha256=Fr219VAzR8KWXc2_bfiqLl10EgxAWjL6cI3Mowt--VU,662 +libfuturize/fixes/fix_basestring.py,sha256=bHkKuMzhr5FMXwjXlMOjsod4S3rQkVdbzhoWV4-tl3Y,394 
+libfuturize/fixes/fix_bytes.py,sha256=AhzOJes6EnPwgzboDjvURANbWKqciG6ZGaYW07PYQK8,685 +libfuturize/fixes/fix_cmp.py,sha256=Blq_Z0IGkYiKS83QzZ5wUgpJyZfQiZoEsWJ1VPyXgFY,701 +libfuturize/fixes/fix_division.py,sha256=gnrAi7stquiVUyi_De1H8q--43iQaSUX0CjnOmQ6O2w,228 +libfuturize/fixes/fix_division_safe.py,sha256=Y_HUfQJAxRClXkcfqWP5SFCsRYZOsLUsNjLXlGOA3cQ,3292 +libfuturize/fixes/fix_execfile.py,sha256=I5AcJ6vPZ7i70TChaq9inxqnZ4C04-yJyfAItGa8E3c,921 +libfuturize/fixes/fix_future_builtins.py,sha256=QBCRpD9XA7tbtfP4wmOF2DXquB4lq-eupkQj-QAxp0s,2027 +libfuturize/fixes/fix_future_standard_library.py,sha256=FVtflFt38efHe_SEX6k3m6IYAtKWjA4rAPZrlCv6yA0,733 +libfuturize/fixes/fix_future_standard_library_urllib.py,sha256=Rf81XcAXA-vwNvrhskf5sLExbR--Wkr5fiUcMYGAKzs,1001 +libfuturize/fixes/fix_input.py,sha256=bhaPNtMrZNbjWIDQCR7Iue5BxBj4rf0RJQ9_jiwvb-s,687 +libfuturize/fixes/fix_metaclass.py,sha256=GLB76wbuyUVciDgW9bgNNOBEnLeS_AR-fKABcPBZk6M,9568 +libfuturize/fixes/fix_next_call.py,sha256=01STG86Av9o5QcpQDJ6UbPhvxt9kKrkatiPeddXRgvA,3158 +libfuturize/fixes/fix_object.py,sha256=qalFIjn0VTWXG5sGOOoCvO65omjX5_9d40SUpwUjBdw,407 +libfuturize/fixes/fix_oldstr_wrap.py,sha256=UCR6Q2l-pVqJSrRTnQAWMlaqBoX7oX1VpG_w6Q0XcyY,1214 +libfuturize/fixes/fix_order___future__imports.py,sha256=ACUCw5NEGWvj6XA9rNj8BYha3ktxLvkM5Ssh5cyV644,829 +libfuturize/fixes/fix_print.py,sha256=92s1w2t9SynA3Y1_85-lexSBbgEWJM6lBrhCxVacfDc,3384 +libfuturize/fixes/fix_print_with_import.py,sha256=hVWn70Q1DPMUiHMyEqgUx-6sM1AylLj78v9pMc4LFw8,735 +libfuturize/fixes/fix_raise.py,sha256=mEXpM9sS6tenMmxayfqM-Kp9gUvaztTY61vFaqyMUuo,3884 +libfuturize/fixes/fix_remove_old__future__imports.py,sha256=j4EC1KEVgXhuQAqhYHnAruUjW6uczPjV_fTCSOLMuAw,851 +libfuturize/fixes/fix_unicode_keep_u.py,sha256=M8fcFxHeFnWVOKoQRpkMsnpd9qmUFubI2oFhO4ZPk7A,779 +libfuturize/fixes/fix_unicode_literals_import.py,sha256=wq-hb-9Yx3Az4ol-ylXZJPEDZ81EaPZeIy5VvpA0CEY,367 +libfuturize/fixes/fix_xrange_with_import.py,sha256=f074qStjMz3OtLjt1bKKZSxQnRbbb7HzEbqHt9wgqdw,479 +libfuturize/main.py,sha256=feICmcv0dzWhutvwz0unnIVxusbSlQZFDaxObkHebs8,13733 +libpasteurize/__init__.py,sha256=CZA_KgvTQOPAY1_MrlJeQ6eMh2Eei4_KIv4JuyAkpfw,31 +libpasteurize/__pycache__/__init__.cpython-36.pyc,, +libpasteurize/__pycache__/main.cpython-36.pyc,, +libpasteurize/fixes/__init__.py,sha256=ccdv-2MGjQMbq8XuEZBndHmbzGRrZnabksjXZLUv044,3719 +libpasteurize/fixes/__pycache__/__init__.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/feature_base.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_add_all__future__imports.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_add_all_future_builtins.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_add_future_standard_library_import.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_annotations.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_division.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_features.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_fullargspec.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_future_builtins.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_getcwd.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_imports.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_imports2.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_kwargs.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_memoryview.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_metaclass.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_newstyle.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_next.cpython-36.pyc,, 
+libpasteurize/fixes/__pycache__/fix_printfunction.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_raise.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_raise_.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_throw.cpython-36.pyc,, +libpasteurize/fixes/__pycache__/fix_unpacking.cpython-36.pyc,, +libpasteurize/fixes/feature_base.py,sha256=v7yLjBDBUPeNUc-YHGGlIsJDOQzFAM4Vo0RN5F1JHVU,1723 +libpasteurize/fixes/fix_add_all__future__imports.py,sha256=mHet1LgbHn9GfgCYGNZXKo-rseDWreAvUcAjZwdgeTE,676 +libpasteurize/fixes/fix_add_all_future_builtins.py,sha256=scfkY-Sz5j0yDtLYls2ENOcqEMPVxeDm9gFYYPINPB8,1269 +libpasteurize/fixes/fix_add_future_standard_library_import.py,sha256=thTRbkBzy_SJjZ0bJteTp0sBTx8Wr69xFakH4styf7Y,663 +libpasteurize/fixes/fix_annotations.py,sha256=VT_AorKY9AYWYZUZ17_CeUrJlEA7VGkwVLDQlwD1Bxo,1581 +libpasteurize/fixes/fix_division.py,sha256=_TD_c5KniAYqEm11O7NJF0v2WEhYSNkRGcKG_94ZOas,904 +libpasteurize/fixes/fix_features.py,sha256=NZn0n34_MYZpLNwyP1Tf51hOiN58Rg7A8tA9pK1S8-c,2675 +libpasteurize/fixes/fix_fullargspec.py,sha256=VlZuIU6QNrClmRuvC4mtLICL3yMCi-RcGCnS9fD4b-Q,438 +libpasteurize/fixes/fix_future_builtins.py,sha256=SlCK9I9u05m19Lr1wxlJxF8toZ5yu0yXBeDLxUN9_fw,1450 +libpasteurize/fixes/fix_getcwd.py,sha256=uebvTvFboLqsROFCwdnzoP6ThziM0skz9TDXHoJcFsQ,873 +libpasteurize/fixes/fix_imports.py,sha256=U4lIs_5Xp1qqM8mN72ieDkkIdiyALZFyCZsRC8ZmXlM,4944 +libpasteurize/fixes/fix_imports2.py,sha256=bs2V5Yv0v_8xLx-lNj9kNEAK2dLYXUXkZ2hxECg01CU,8580 +libpasteurize/fixes/fix_kwargs.py,sha256=NB_Ap8YJk-9ncoJRbOiPY_VMIigFgVB8m8AuY29DDhE,5991 +libpasteurize/fixes/fix_memoryview.py,sha256=Fwayx_ezpr22tbJ0-QrKdJ-FZTpU-m7y78l1h_N4xxc,551 +libpasteurize/fixes/fix_metaclass.py,sha256=IcE2KjaDG8jUR3FYXECzOC_cr2pr5r95W1NTbMrK8Wc,3260 +libpasteurize/fixes/fix_newstyle.py,sha256=78sazKOHm9DUoMyW4VdvQpMXZhicbXzorVPRhBpSUrM,888 +libpasteurize/fixes/fix_next.py,sha256=VHqcyORRNVqKJ51jJ1OkhwxHuXRgp8qaldyqcMvA4J0,1233 +libpasteurize/fixes/fix_printfunction.py,sha256=NDIfqVmUJBG3H9E6nrnN0cWZK8ch9pL4F-nMexdsa38,401 +libpasteurize/fixes/fix_raise.py,sha256=zQ_AcMsGmCbtKMgrxZGcHLYNscw6tqXFvHQxgqtNbU8,1099 +libpasteurize/fixes/fix_raise_.py,sha256=9STp633frUfYASjYzqhwxx_MXePNmMhfJClowRj8FLY,1225 +libpasteurize/fixes/fix_throw.py,sha256=_ZREVre-WttUvk4sWjrqUNqm9Q1uFaATECN0_-PXKbk,835 +libpasteurize/fixes/fix_unpacking.py,sha256=eMqRe44Nfq8lo0YFL9oKW75dGARmBSmklj4BCS_q1Lo,5946 +libpasteurize/main.py,sha256=dVHYTQQeJonuOFDNrenJZl-rKHgOQKRMPP1OqnJogWQ,8186 +past/__init__.py,sha256=wIiXaAvXl3svDi-fzuy6HDD0VsuCVr4cnqnCr8XINGI,2918 +past/__pycache__/__init__.cpython-36.pyc,, +past/builtins/__init__.py,sha256=7j_4OsUlN6q2eKr14do7mRQ1GwXRoXAMUR0A1fJpAls,1805 +past/builtins/__pycache__/__init__.cpython-36.pyc,, +past/builtins/__pycache__/misc.cpython-36.pyc,, +past/builtins/__pycache__/noniterators.cpython-36.pyc,, +past/builtins/misc.py,sha256=nw62HVSxuAgT-Q2lD3lmgRB9zmFXopS14dZHEv5xpDQ,2627 +past/builtins/noniterators.py,sha256=LtdELnd7KyYdXg7GkW25cgkEPUC0ggZ5AYMtDe9N95I,9370 +past/translation/__init__.py,sha256=j2e6mLeK74KEICqH6P_-tpKqSNZoMwip2toThhSmKpU,17646 +past/translation/__pycache__/__init__.cpython-36.pyc,, +past/types/__init__.py,sha256=RyJlgqg9uJ8oF-kJT9QlfhfdmhiMh3fShmtvd2CQycY,879 +past/types/__pycache__/__init__.cpython-36.pyc,, +past/types/__pycache__/basestring.cpython-36.pyc,, +past/types/__pycache__/olddict.cpython-36.pyc,, +past/types/__pycache__/oldstr.cpython-36.pyc,, +past/types/basestring.py,sha256=qrImcr24wvdDCMvF9x0Tyx8S1lCt6GIwRvzuAmvg_Tg,728 
+past/types/olddict.py,sha256=0YtffZ55VY6AyQ_rwu4DZ4vcRsp6dz-dQzczeyN8hLk,2721 +past/types/oldstr.py,sha256=J2sJPC5jWEdpqXPcFwJFNDKn51TKhi86PsLFmJtQr-M,4332 +past/utils/__init__.py,sha256=e8l1sOfdiDJ3dkckBWLNWvC1ahC5BX5haHC2TGdNgA8,2633 +past/utils/__pycache__/__init__.cpython-36.pyc,, diff --git a/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/WHEEL b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/WHEEL new file mode 100644 index 0000000..b552003 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/entry_points.txt b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/entry_points.txt new file mode 100644 index 0000000..45d1a88 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/entry_points.txt @@ -0,0 +1,4 @@ +[console_scripts] +futurize = libfuturize.main:main +pasteurize = libpasteurize.main:main + diff --git a/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/top_level.txt b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/top_level.txt new file mode 100644 index 0000000..58f5843 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future-0.18.2.dist-info/top_level.txt @@ -0,0 +1,4 @@ +future +libfuturize +libpasteurize +past diff --git a/minor_project/lib/python3.6/site-packages/future/__init__.py b/minor_project/lib/python3.6/site-packages/future/__init__.py new file mode 100644 index 0000000..ad419d6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/__init__.py @@ -0,0 +1,93 @@ +""" +future: Easy, safe support for Python 2/3 compatibility +======================================================= + +``future`` is the missing compatibility layer between Python 2 and Python +3. It allows you to use a single, clean Python 3.x-compatible codebase to +support both Python 2 and Python 3 with minimal overhead. + +It is designed to be used as follows:: + + from __future__ import (absolute_import, division, + print_function, unicode_literals) + from builtins import ( + bytes, dict, int, list, object, range, str, + ascii, chr, hex, input, next, oct, open, + pow, round, super, + filter, map, zip) + +followed by predominantly standard, idiomatic Python 3 code that then runs +similarly on Python 2.6/2.7 and Python 3.3+. + +The imports have no effect on Python 3. On Python 2, they shadow the +corresponding builtins, which normally have different semantics on Python 3 +versus 2, to provide their Python 3 semantics. + + +Standard library reorganization +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``future`` supports the standard library reorganization (PEP 3108) through the +following Py3 interfaces: + + >>> # Top-level packages with Py3 names provided on Py2: + >>> import html.parser + >>> import queue + >>> import tkinter.dialog + >>> import xmlrpc.client + >>> # etc. 
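    >>> # For instance (an illustrative sketch, not taken from the upstream
    >>> # docstring), the Py3-named modules are fully usable, not merely
    >>> # importable, under both majors:
    >>> import queue
    >>> q = queue.Queue()
    >>> q.put('item')
    >>> q.get()
    'item'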
+ + >>> # Aliases provided for extensions to existing Py2 module names: + >>> from future.standard_library import install_aliases + >>> install_aliases() + + >>> from collections import Counter, OrderedDict # backported to Py2.6 + >>> from collections import UserDict, UserList, UserString + >>> import urllib.request + >>> from itertools import filterfalse, zip_longest + >>> from subprocess import getoutput, getstatusoutput + + +Automatic conversion +-------------------- + +An included script called `futurize +`_ aids in converting +code (from either Python 2 or Python 3) to code compatible with both +platforms. It is similar to ``python-modernize`` but goes further in +providing Python 3 compatibility through the use of the backported types +and builtin functions in ``future``. + + +Documentation +------------- + +See: http://python-future.org + + +Credits +------- + +:Author: Ed Schofield, Jordan M. Adler, et al +:Sponsor: Python Charmers Pty Ltd, Australia, and Python Charmers Pte + Ltd, Singapore. http://pythoncharmers.com +:Others: See docs/credits.rst or http://python-future.org/credits.html + + +Licensing +--------- +Copyright 2013-2019 Python Charmers Pty Ltd, Australia. +The software is distributed under an MIT licence. See LICENSE.txt. + +""" + +__title__ = 'future' +__author__ = 'Ed Schofield' +__license__ = 'MIT' +__copyright__ = 'Copyright 2013-2019 Python Charmers Pty Ltd' +__ver_major__ = 0 +__ver_minor__ = 18 +__ver_patch__ = 2 +__ver_sub__ = '' +__version__ = "%d.%d.%d%s" % (__ver_major__, __ver_minor__, + __ver_patch__, __ver_sub__) diff --git a/minor_project/lib/python3.6/site-packages/future/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..d5c09b8 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/__init__.py b/minor_project/lib/python3.6/site-packages/future/backports/__init__.py new file mode 100644 index 0000000..c71e065 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/__init__.py @@ -0,0 +1,26 @@ +""" +future.backports package +""" + +from __future__ import absolute_import + +import sys + +__future_module__ = True +from future.standard_library import import_top_level_modules + + +if sys.version_info[0] >= 3: + import_top_level_modules() + + +from .misc import (ceil, + OrderedDict, + Counter, + ChainMap, + check_output, + count, + recursive_repr, + _count_elements, + cmp_to_key + ) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..0651ac2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/_markupbase.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/_markupbase.cpython-36.pyc new file mode 100644 index 0000000..eed35eb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/_markupbase.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/datetime.cpython-36.pyc 
b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/datetime.cpython-36.pyc new file mode 100644 index 0000000..7393b35 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/datetime.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/misc.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/misc.cpython-36.pyc new file mode 100644 index 0000000..328c22f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/misc.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/socket.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/socket.cpython-36.pyc new file mode 100644 index 0000000..9bff3cb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/socket.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/socketserver.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/socketserver.cpython-36.pyc new file mode 100644 index 0000000..e5bd268 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/socketserver.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/total_ordering.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/total_ordering.cpython-36.pyc new file mode 100644 index 0000000..c279698 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/__pycache__/total_ordering.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/_markupbase.py b/minor_project/lib/python3.6/site-packages/future/backports/_markupbase.py new file mode 100644 index 0000000..d51bfc7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/_markupbase.py @@ -0,0 +1,422 @@ +"""Shared support for scanning document type declarations in HTML and XHTML. + +Backported for python-future from Python 3.3. Reason: ParserBase is an +old-style class in the Python 2.7 source of markupbase.py, which I suspect +might be the cause of sporadic unit-test failures on travis-ci.org with +test_htmlparser.py. 
The test failures look like this: + + ====================================================================== + +ERROR: test_attr_entity_replacement (future.tests.test_htmlparser.AttributesStrictTestCase) + +---------------------------------------------------------------------- + +Traceback (most recent call last): + File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 661, in test_attr_entity_replacement + [("starttag", "a", [("b", "&><\"'")])]) + File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 93, in _run_check + collector = self.get_collector() + File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 617, in get_collector + return EventCollector(strict=True) + File "/home/travis/build/edschofield/python-future/future/tests/test_htmlparser.py", line 27, in __init__ + html.parser.HTMLParser.__init__(self, *args, **kw) + File "/home/travis/build/edschofield/python-future/future/backports/html/parser.py", line 135, in __init__ + self.reset() + File "/home/travis/build/edschofield/python-future/future/backports/html/parser.py", line 143, in reset + _markupbase.ParserBase.reset(self) + +TypeError: unbound method reset() must be called with ParserBase instance as first argument (got EventCollector instance instead) + +This module is used as a foundation for the html.parser module. It has no +documented public API and should not be used directly. + +""" + +import re + +_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match +_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match +_commentclose = re.compile(r'--\s*>') +_markedsectionclose = re.compile(r']\s*]\s*>') + +# An analysis of the MS-Word extensions is available at +# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf + +_msmarkedsectionclose = re.compile(r']\s*>') + +del re + + +class ParserBase(object): + """Parser base class which provides some common support methods used + by the SGML/HTML and XHTML parsers.""" + + def __init__(self): + if self.__class__ is ParserBase: + raise RuntimeError( + "_markupbase.ParserBase must be subclassed") + + def error(self, message): + raise NotImplementedError( + "subclasses of ParserBase must override error()") + + def reset(self): + self.lineno = 1 + self.offset = 0 + + def getpos(self): + """Return current line number and offset.""" + return self.lineno, self.offset + + # Internal -- update line number and offset. This should be + # called for each piece of data exactly once, in order -- in other + # words the concatenation of all the input strings to this + # function should be exactly the entire input. + def updatepos(self, i, j): + if i >= j: + return j + rawdata = self.rawdata + nlines = rawdata.count("\n", i, j) + if nlines: + self.lineno = self.lineno + nlines + pos = rawdata.rindex("\n", i, j) # Should not fail + self.offset = j-(pos+1) + else: + self.offset = self.offset + j-i + return j + + _decl_otherchars = '' + + # Internal -- parse declaration (for use by subclasses). + def parse_declaration(self, i): + # This is some sort of declaration; in "HTML as + # deployed," this should only be the document type + # declaration (""). 
+ # ISO 8879:1986, however, has more complex + # declaration syntax for elements in , including: + # --comment-- + # [marked section] + # name in the following list: ENTITY, DOCTYPE, ELEMENT, + # ATTLIST, NOTATION, SHORTREF, USEMAP, + # LINKTYPE, LINK, IDLINK, USELINK, SYSTEM + rawdata = self.rawdata + j = i + 2 + assert rawdata[i:j] == "": + # the empty comment + return j + 1 + if rawdata[j:j+1] in ("-", ""): + # Start of comment followed by buffer boundary, + # or just a buffer boundary. + return -1 + # A simple, practical version could look like: ((name|stringlit) S*) + '>' + n = len(rawdata) + if rawdata[j:j+2] == '--': #comment + # Locate --.*-- as the body of the comment + return self.parse_comment(i) + elif rawdata[j] == '[': #marked section + # Locate [statusWord [...arbitrary SGML...]] as the body of the marked section + # Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA + # Note that this is extended by Microsoft Office "Save as Web" function + # to include [if...] and [endif]. + return self.parse_marked_section(i) + else: #all other declaration elements + decltype, j = self._scan_name(j, i) + if j < 0: + return j + if decltype == "doctype": + self._decl_otherchars = '' + while j < n: + c = rawdata[j] + if c == ">": + # end of declaration syntax + data = rawdata[i+2:j] + if decltype == "doctype": + self.handle_decl(data) + else: + # According to the HTML5 specs sections "8.2.4.44 Bogus + # comment state" and "8.2.4.45 Markup declaration open + # state", a comment token should be emitted. + # Calling unknown_decl provides more flexibility though. + self.unknown_decl(data) + return j + 1 + if c in "\"'": + m = _declstringlit_match(rawdata, j) + if not m: + return -1 # incomplete + j = m.end() + elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ": + name, j = self._scan_name(j, i) + elif c in self._decl_otherchars: + j = j + 1 + elif c == "[": + # this could be handled in a separate doctype parser + if decltype == "doctype": + j = self._parse_doctype_subset(j + 1, i) + elif decltype in set(["attlist", "linktype", "link", "element"]): + # must tolerate []'d groups in a content model in an element declaration + # also in data attribute specifications of attlist declaration + # also link type declaration subsets in linktype declarations + # also link attribute specification lists in link declarations + self.error("unsupported '[' char in %s declaration" % decltype) + else: + self.error("unexpected '[' char in declaration") + else: + self.error( + "unexpected %r char in declaration" % rawdata[j]) + if j < 0: + return j + return -1 # incomplete + + # Internal -- parse a marked section + # Override this to handle MS-word extension syntax content + def parse_marked_section(self, i, report=1): + rawdata= self.rawdata + assert rawdata[i:i+3] == ' ending + match= _markedsectionclose.search(rawdata, i+3) + elif sectName in set(["if", "else", "endif"]): + # look for MS Office ]> ending + match= _msmarkedsectionclose.search(rawdata, i+3) + else: + self.error('unknown status keyword %r in marked section' % rawdata[i+3:j]) + if not match: + return -1 + if report: + j = match.start(0) + self.unknown_decl(rawdata[i+3: j]) + return match.end(0) + + # Internal -- parse comment, return length or -1 if not terminated + def parse_comment(self, i, report=1): + rawdata = self.rawdata + if rawdata[i:i+4] != ' delimiter transport-padding + # --> CRLF body-part + for body_part in msgtexts: + # delimiter transport-padding CRLF + self.write(self._NL + '--' + boundary + self._NL) + # 
body-part + self._fp.write(body_part) + # close-delimiter transport-padding + self.write(self._NL + '--' + boundary + '--') + if msg.epilogue is not None: + self.write(self._NL) + if self._mangle_from_: + epilogue = fcre.sub('>From ', msg.epilogue) + else: + epilogue = msg.epilogue + self._write_lines(epilogue) + + def _handle_multipart_signed(self, msg): + # The contents of signed parts has to stay unmodified in order to keep + # the signature intact per RFC1847 2.1, so we disable header wrapping. + # RDM: This isn't enough to completely preserve the part, but it helps. + p = self.policy + self.policy = p.clone(max_line_length=0) + try: + self._handle_multipart(msg) + finally: + self.policy = p + + def _handle_message_delivery_status(self, msg): + # We can't just write the headers directly to self's file object + # because this will leave an extra newline between the last header + # block and the boundary. Sigh. + blocks = [] + for part in msg.get_payload(): + s = self._new_buffer() + g = self.clone(s) + g.flatten(part, unixfrom=False, linesep=self._NL) + text = s.getvalue() + lines = text.split(self._encoded_NL) + # Strip off the unnecessary trailing empty line + if lines and lines[-1] == self._encoded_EMPTY: + blocks.append(self._encoded_NL.join(lines[:-1])) + else: + blocks.append(text) + # Now join all the blocks with an empty line. This has the lovely + # effect of separating each block with an empty line, but not adding + # an extra one after the last one. + self._fp.write(self._encoded_NL.join(blocks)) + + def _handle_message(self, msg): + s = self._new_buffer() + g = self.clone(s) + # The payload of a message/rfc822 part should be a multipart sequence + # of length 1. The zeroth element of the list should be the Message + # object for the subpart. Extract that object, stringify it, and + # write it out. + # Except, it turns out, when it's a string instead, which happens when + # and only when HeaderParser is used on a message of mime type + # message/rfc822. Such messages are generated by, for example, + # Groupwise when forwarding unadorned messages. (Issue 7970.) So + # in that case we just emit the string body. + payload = msg._payload + if isinstance(payload, list): + g.flatten(msg.get_payload(0), unixfrom=False, linesep=self._NL) + payload = s.getvalue() + else: + payload = self._encode(payload) + self._fp.write(payload) + + # This used to be a module level function; we use a classmethod for this + # and _compile_re so we can continue to provide the module level function + # for backward compatibility by doing + # _make_boudary = Generator._make_boundary + # at the end of the module. It *is* internal, so we could drop that... + @classmethod + def _make_boundary(cls, text=None): + # Craft a random boundary. If text is given, ensure that the chosen + # boundary doesn't appear in the text. + token = random.randrange(sys.maxsize) + boundary = ('=' * 15) + (_fmt % token) + '==' + if text is None: + return boundary + b = boundary + counter = 0 + while True: + cre = cls._compile_re('^--' + re.escape(b) + '(--)?$', re.MULTILINE) + if not cre.search(text): + break + b = boundary + '.' + str(counter) + counter += 1 + return b + + @classmethod + def _compile_re(cls, s, flags): + return re.compile(s, flags) + +class BytesGenerator(Generator): + """Generates a bytes version of a Message object tree. + + Functionally identical to the base Generator except that the output is + bytes and not string. 
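    A minimal usage sketch (our own example, using the backported Message
    class listed earlier in this diff; the exact serialized bytes depend on
    the policy in effect):

        from io import BytesIO
        from future.backports.email.message import Message

        msg = Message()
        msg['Subject'] = 'hi'
        msg.set_payload('body')

        buf = BytesIO()
        BytesGenerator(buf).flatten(msg)
        raw = buf.getvalue()   # transport-ready bytes: headers, blank line, body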
When surrogates were used in the input to encode + bytes, these are decoded back to bytes for output. If the policy has + cte_type set to 7bit, then the message is transformed such that the + non-ASCII bytes are properly content transfer encoded, using the charset + unknown-8bit. + + The outfp object must accept bytes in its write method. + """ + + # Bytes versions of this constant for use in manipulating data from + # the BytesIO buffer. + _encoded_EMPTY = b'' + + def write(self, s): + self._fp.write(str(s).encode('ascii', 'surrogateescape')) + + def _new_buffer(self): + return BytesIO() + + def _encode(self, s): + return s.encode('ascii') + + def _write_headers(self, msg): + # This is almost the same as the string version, except for handling + # strings with 8bit bytes. + for h, v in msg.raw_items(): + self._fp.write(self.policy.fold_binary(h, v)) + # A blank line always separates headers from body + self.write(self._NL) + + def _handle_text(self, msg): + # If the string has surrogates the original source was bytes, so + # just write it back out. + if msg._payload is None: + return + if _has_surrogates(msg._payload) and not self.policy.cte_type=='7bit': + if self._mangle_from_: + msg._payload = fcre.sub(">From ", msg._payload) + self._write_lines(msg._payload) + else: + super(BytesGenerator,self)._handle_text(msg) + + # Default body handler + _writeBody = _handle_text + + @classmethod + def _compile_re(cls, s, flags): + return re.compile(s.encode('ascii'), flags) + + +_FMT = '[Non-text (%(type)s) part of message omitted, filename %(filename)s]' + +class DecodedGenerator(Generator): + """Generates a text representation of a message. + + Like the Generator base class, except that non-text parts are substituted + with a format string representing the part. + """ + def __init__(self, outfp, mangle_from_=True, maxheaderlen=78, fmt=None): + """Like Generator.__init__() except that an additional optional + argument is allowed. + + Walks through all subparts of a message. If the subpart is of main + type `text', then it prints the decoded payload of the subpart. + + Otherwise, fmt is a format string that is used instead of the message + payload. 
fmt is expanded with the following keywords (in + %(keyword)s format): + + type : Full MIME type of the non-text part + maintype : Main MIME type of the non-text part + subtype : Sub-MIME type of the non-text part + filename : Filename of the non-text part + description: Description associated with the non-text part + encoding : Content transfer encoding of the non-text part + + The default value for fmt is None, meaning + + [Non-text (%(type)s) part of message omitted, filename %(filename)s] + """ + Generator.__init__(self, outfp, mangle_from_, maxheaderlen) + if fmt is None: + self._fmt = _FMT + else: + self._fmt = fmt + + def _dispatch(self, msg): + for part in msg.walk(): + maintype = part.get_content_maintype() + if maintype == 'text': + print(part.get_payload(decode=False), file=self) + elif maintype == 'multipart': + # Just skip this + pass + else: + print(self._fmt % { + 'type' : part.get_content_type(), + 'maintype' : part.get_content_maintype(), + 'subtype' : part.get_content_subtype(), + 'filename' : part.get_filename('[no filename]'), + 'description': part.get('Content-Description', + '[no description]'), + 'encoding' : part.get('Content-Transfer-Encoding', + '[no encoding]'), + }, file=self) + + +# Helper used by Generator._make_boundary +_width = len(repr(sys.maxsize-1)) +_fmt = '%%0%dd' % _width + +# Backward compatibility +_make_boundary = Generator._make_boundary diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/header.py b/minor_project/lib/python3.6/site-packages/future/backports/email/header.py new file mode 100644 index 0000000..63bf038 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/header.py @@ -0,0 +1,581 @@ +# Copyright (C) 2002-2007 Python Software Foundation +# Author: Ben Gertzfield, Barry Warsaw +# Contact: email-sig@python.org + +"""Header encoding and decoding functionality.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import +from future.builtins import bytes, range, str, super, zip + +__all__ = [ + 'Header', + 'decode_header', + 'make_header', + ] + +import re +import binascii + +from future.backports import email +from future.backports.email import base64mime +from future.backports.email.errors import HeaderParseError +import future.backports.email.charset as _charset + +# Helpers +from future.backports.email.quoprimime import _max_append, header_decode + +Charset = _charset.Charset + +NL = '\n' +SPACE = ' ' +BSPACE = b' ' +SPACE8 = ' ' * 8 +EMPTYSTRING = '' +MAXLINELEN = 78 +FWS = ' \t' + +USASCII = Charset('us-ascii') +UTF8 = Charset('utf-8') + +# Match encoded-word strings in the form =?charset?q?Hello_World?= +ecre = re.compile(r''' + =\? # literal =? + (?P[^?]*?) # non-greedy up to the next ? is the charset + \? # literal ? + (?P[qb]) # either a "q" or a "b", case insensitive + \? # literal ? + (?P.*?) # non-greedy up to the next ?= is the encoded string + \?= # literal ?= + ''', re.VERBOSE | re.IGNORECASE | re.MULTILINE) + +# Field name regexp, including trailing colon, but not separating whitespace, +# according to RFC 2822. Character range is from tilde to exclamation mark. +# For use with .match() +fcre = re.compile(r'[\041-\176]+:$') + +# Find a header embedded in a putative header value. Used to check for +# header injection attack. +_embeded_header = re.compile(r'\n[^ \t]+:') + + +def decode_header(header): + """Decode a message header value without converting charset. 
+ + Returns a list of (string, charset) pairs containing each of the decoded + parts of the header. Charset is None for non-encoded parts of the header, + otherwise a lower-case string containing the name of the character set + specified in the encoded string. + + header may be a string that may or may not contain RFC2047 encoded words, + or it may be a Header object. + + An email.errors.HeaderParseError may be raised when certain decoding error + occurs (e.g. a base64 decoding exception). + """ + # If it is a Header object, we can just return the encoded chunks. + if hasattr(header, '_chunks'): + return [(_charset._encode(string, str(charset)), str(charset)) + for string, charset in header._chunks] + # If no encoding, just return the header with no charset. + if not ecre.search(header): + return [(header, None)] + # First step is to parse all the encoded parts into triplets of the form + # (encoded_string, encoding, charset). For unencoded strings, the last + # two parts will be None. + words = [] + for line in header.splitlines(): + parts = ecre.split(line) + first = True + while parts: + unencoded = parts.pop(0) + if first: + unencoded = unencoded.lstrip() + first = False + if unencoded: + words.append((unencoded, None, None)) + if parts: + charset = parts.pop(0).lower() + encoding = parts.pop(0).lower() + encoded = parts.pop(0) + words.append((encoded, encoding, charset)) + # Now loop over words and remove words that consist of whitespace + # between two encoded strings. + import sys + droplist = [] + for n, w in enumerate(words): + if n>1 and w[1] and words[n-2][1] and words[n-1][0].isspace(): + droplist.append(n-1) + for d in reversed(droplist): + del words[d] + + # The next step is to decode each encoded word by applying the reverse + # base64 or quopri transformation. decoded_words is now a list of the + # form (decoded_word, charset). + decoded_words = [] + for encoded_string, encoding, charset in words: + if encoding is None: + # This is an unencoded word. + decoded_words.append((encoded_string, charset)) + elif encoding == 'q': + word = header_decode(encoded_string) + decoded_words.append((word, charset)) + elif encoding == 'b': + paderr = len(encoded_string) % 4 # Postel's law: add missing padding + if paderr: + encoded_string += '==='[:4 - paderr] + try: + word = base64mime.decode(encoded_string) + except binascii.Error: + raise HeaderParseError('Base64 decoding error') + else: + decoded_words.append((word, charset)) + else: + raise AssertionError('Unexpected encoding: ' + encoding) + # Now convert all words to bytes and collapse consecutive runs of + # similarly encoded words. + collapsed = [] + last_word = last_charset = None + for word, charset in decoded_words: + if isinstance(word, str): + word = bytes(word, 'raw-unicode-escape') + if last_word is None: + last_word = word + last_charset = charset + elif charset != last_charset: + collapsed.append((last_word, last_charset)) + last_word = word + last_charset = charset + elif last_charset is None: + last_word += BSPACE + word + else: + last_word += word + collapsed.append((last_word, last_charset)) + return collapsed + + +def make_header(decoded_seq, maxlinelen=None, header_name=None, + continuation_ws=' '): + """Create a Header from a sequence of pairs as returned by decode_header() + + decode_header() takes a header value string and returns a sequence of + pairs of the format (decoded_string, charset) where charset is the string + name of the character set. 
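    For example, a hedged sketch of the decode/re-encode round trip (the byte
    values shown are approximate):

        decode_header('=?utf-8?q?caf=C3=A9?= menu')
        # -> roughly [(b'caf\xc3\xa9', 'utf-8'), (b' menu', None)]
        str(make_header(decode_header('=?utf-8?q?caf=C3=A9?= menu')))
        # -> 'café menu'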
+ + This function takes one of those sequence of pairs and returns a Header + instance. Optional maxlinelen, header_name, and continuation_ws are as in + the Header constructor. + """ + h = Header(maxlinelen=maxlinelen, header_name=header_name, + continuation_ws=continuation_ws) + for s, charset in decoded_seq: + # None means us-ascii but we can simply pass it on to h.append() + if charset is not None and not isinstance(charset, Charset): + charset = Charset(charset) + h.append(s, charset) + return h + + +class Header(object): + def __init__(self, s=None, charset=None, + maxlinelen=None, header_name=None, + continuation_ws=' ', errors='strict'): + """Create a MIME-compliant header that can contain many character sets. + + Optional s is the initial header value. If None, the initial header + value is not set. You can later append to the header with .append() + method calls. s may be a byte string or a Unicode string, but see the + .append() documentation for semantics. + + Optional charset serves two purposes: it has the same meaning as the + charset argument to the .append() method. It also sets the default + character set for all subsequent .append() calls that omit the charset + argument. If charset is not provided in the constructor, the us-ascii + charset is used both as s's initial charset and as the default for + subsequent .append() calls. + + The maximum line length can be specified explicitly via maxlinelen. For + splitting the first line to a shorter value (to account for the field + header which isn't included in s, e.g. `Subject') pass in the name of + the field in header_name. The default maxlinelen is 78 as recommended + by RFC 2822. + + continuation_ws must be RFC 2822 compliant folding whitespace (usually + either a space or a hard tab) which will be prepended to continuation + lines. + + errors is passed through to the .append() call. + """ + if charset is None: + charset = USASCII + elif not isinstance(charset, Charset): + charset = Charset(charset) + self._charset = charset + self._continuation_ws = continuation_ws + self._chunks = [] + if s is not None: + self.append(s, charset, errors) + if maxlinelen is None: + maxlinelen = MAXLINELEN + self._maxlinelen = maxlinelen + if header_name is None: + self._headerlen = 0 + else: + # Take the separating colon and space into account. + self._headerlen = len(header_name) + 2 + + def __str__(self): + """Return the string value of the header.""" + self._normalize() + uchunks = [] + lastcs = None + lastspace = None + for string, charset in self._chunks: + # We must preserve spaces between encoded and non-encoded word + # boundaries, which means for us we need to add a space when we go + # from a charset to None/us-ascii, or from None/us-ascii to a + # charset. Only do this for the second and subsequent chunks. 
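            # (Illustrative sketch of the effect: chunks such as
            #  [('Hello', utf-8), ('world', us-ascii)] render as 'Hello world',
            #  with a space inserted at the charset transition, whereas
            #  [('Hello ', us-ascii), ('world', utf-8)] gets no extra space
            #  because the us-ascii chunk already ends with one.)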
+ # Don't add a space if the None/us-ascii string already has + # a space (trailing or leading depending on transition) + nextcs = charset + if nextcs == _charset.UNKNOWN8BIT: + original_bytes = string.encode('ascii', 'surrogateescape') + string = original_bytes.decode('ascii', 'replace') + if uchunks: + hasspace = string and self._nonctext(string[0]) + if lastcs not in (None, 'us-ascii'): + if nextcs in (None, 'us-ascii') and not hasspace: + uchunks.append(SPACE) + nextcs = None + elif nextcs not in (None, 'us-ascii') and not lastspace: + uchunks.append(SPACE) + lastspace = string and self._nonctext(string[-1]) + lastcs = nextcs + uchunks.append(string) + return EMPTYSTRING.join(uchunks) + + # Rich comparison operators for equality only. BAW: does it make sense to + # have or explicitly disable <, <=, >, >= operators? + def __eq__(self, other): + # other may be a Header or a string. Both are fine so coerce + # ourselves to a unicode (of the unencoded header value), swap the + # args and do another comparison. + return other == str(self) + + def __ne__(self, other): + return not self == other + + def append(self, s, charset=None, errors='strict'): + """Append a string to the MIME header. + + Optional charset, if given, should be a Charset instance or the name + of a character set (which will be converted to a Charset instance). A + value of None (the default) means that the charset given in the + constructor is used. + + s may be a byte string or a Unicode string. If it is a byte string + (i.e. isinstance(s, str) is false), then charset is the encoding of + that byte string, and a UnicodeError will be raised if the string + cannot be decoded with that charset. If s is a Unicode string, then + charset is a hint specifying the character set of the characters in + the string. In either case, when producing an RFC 2822 compliant + header using RFC 2047 rules, the string will be encoded using the + output codec of the charset. If the string cannot be encoded to the + output codec, a UnicodeError will be raised. + + Optional `errors' is passed as the errors argument to the decode + call if s is a byte string. + """ + if charset is None: + charset = self._charset + elif not isinstance(charset, Charset): + charset = Charset(charset) + if not isinstance(s, str): + input_charset = charset.input_codec or 'us-ascii' + if input_charset == _charset.UNKNOWN8BIT: + s = s.decode('us-ascii', 'surrogateescape') + else: + s = s.decode(input_charset, errors) + # Ensure that the bytes we're storing can be decoded to the output + # character set, otherwise an early error is raised. + output_charset = charset.output_codec or 'us-ascii' + if output_charset != _charset.UNKNOWN8BIT: + try: + s.encode(output_charset, errors) + except UnicodeEncodeError: + if output_charset!='us-ascii': + raise + charset = UTF8 + self._chunks.append((s, charset)) + + def _nonctext(self, s): + """True if string s is not a ctext character of RFC822. + """ + return s.isspace() or s in ('(', ')', '\\') + + def encode(self, splitchars=';, \t', maxlinelen=None, linesep='\n'): + r"""Encode a message header into an RFC-compliant format. + + There are many issues involved in converting a given string for use in + an email header. Only certain character sets are readable in most + email clients, and as header strings can only contain a subset of + 7-bit ASCII, care must be taken to properly convert and encode (with + Base64 or quoted-printable) header strings. 
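        For instance (an illustrative sketch; the transfer encoding actually
        chosen depends on the charset):

            Header('Grüße', 'utf-8').encode()
            # -> roughly '=?utf-8?b?R3LDvMOfZQ==?='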
In addition, there is a + 75-character length limit on any given encoded header field, so + line-wrapping must be performed, even with double-byte character sets. + + Optional maxlinelen specifies the maximum length of each generated + line, exclusive of the linesep string. Individual lines may be longer + than maxlinelen if a folding point cannot be found. The first line + will be shorter by the length of the header name plus ": " if a header + name was specified at Header construction time. The default value for + maxlinelen is determined at header construction time. + + Optional splitchars is a string containing characters which should be + given extra weight by the splitting algorithm during normal header + wrapping. This is in very rough support of RFC 2822's `higher level + syntactic breaks': split points preceded by a splitchar are preferred + during line splitting, with the characters preferred in the order in + which they appear in the string. Space and tab may be included in the + string to indicate whether preference should be given to one over the + other as a split point when other split chars do not appear in the line + being split. Splitchars does not affect RFC 2047 encoded lines. + + Optional linesep is a string to be used to separate the lines of + the value. The default value is the most useful for typical + Python applications, but it can be set to \r\n to produce RFC-compliant + line separators when needed. + """ + self._normalize() + if maxlinelen is None: + maxlinelen = self._maxlinelen + # A maxlinelen of 0 means don't wrap. For all practical purposes, + # choosing a huge number here accomplishes that and makes the + # _ValueFormatter algorithm much simpler. + if maxlinelen == 0: + maxlinelen = 1000000 + formatter = _ValueFormatter(self._headerlen, maxlinelen, + self._continuation_ws, splitchars) + lastcs = None + hasspace = lastspace = None + for string, charset in self._chunks: + if hasspace is not None: + hasspace = string and self._nonctext(string[0]) + import sys + if lastcs not in (None, 'us-ascii'): + if not hasspace or charset not in (None, 'us-ascii'): + formatter.add_transition() + elif charset not in (None, 'us-ascii') and not lastspace: + formatter.add_transition() + lastspace = string and self._nonctext(string[-1]) + lastcs = charset + hasspace = False + lines = string.splitlines() + if lines: + formatter.feed('', lines[0], charset) + else: + formatter.feed('', '', charset) + for line in lines[1:]: + formatter.newline() + if charset.header_encoding is not None: + formatter.feed(self._continuation_ws, ' ' + line.lstrip(), + charset) + else: + sline = line.lstrip() + fws = line[:len(line)-len(sline)] + formatter.feed(fws, sline, charset) + if len(lines) > 1: + formatter.newline() + if self._chunks: + formatter.add_transition() + value = formatter._str(linesep) + if _embeded_header.search(value): + raise HeaderParseError("header value appears to contain " + "an embedded header: {!r}".format(value)) + return value + + def _normalize(self): + # Step 1: Normalize the chunks so that all runs of identical charsets + # get collapsed into a single unicode string. 
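        # (Illustrative sketch: chunks like
        #  [('Hello', utf-8), ('World', utf-8), ('!', us-ascii)]
        #  collapse to [('Hello World', utf-8), ('!', us-ascii)].)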
+ chunks = [] + last_charset = None + last_chunk = [] + for string, charset in self._chunks: + if charset == last_charset: + last_chunk.append(string) + else: + if last_charset is not None: + chunks.append((SPACE.join(last_chunk), last_charset)) + last_chunk = [string] + last_charset = charset + if last_chunk: + chunks.append((SPACE.join(last_chunk), last_charset)) + self._chunks = chunks + + +class _ValueFormatter(object): + def __init__(self, headerlen, maxlen, continuation_ws, splitchars): + self._maxlen = maxlen + self._continuation_ws = continuation_ws + self._continuation_ws_len = len(continuation_ws) + self._splitchars = splitchars + self._lines = [] + self._current_line = _Accumulator(headerlen) + + def _str(self, linesep): + self.newline() + return linesep.join(self._lines) + + def __str__(self): + return self._str(NL) + + def newline(self): + end_of_line = self._current_line.pop() + if end_of_line != (' ', ''): + self._current_line.push(*end_of_line) + if len(self._current_line) > 0: + if self._current_line.is_onlyws(): + self._lines[-1] += str(self._current_line) + else: + self._lines.append(str(self._current_line)) + self._current_line.reset() + + def add_transition(self): + self._current_line.push(' ', '') + + def feed(self, fws, string, charset): + # If the charset has no header encoding (i.e. it is an ASCII encoding) + # then we must split the header at the "highest level syntactic break" + # possible. Note that we don't have a lot of smarts about field + # syntax; we just try to break on semi-colons, then commas, then + # whitespace. Eventually, this should be pluggable. + if charset.header_encoding is None: + self._ascii_split(fws, string, self._splitchars) + return + # Otherwise, we're doing either a Base64 or a quoted-printable + # encoding which means we don't need to split the line on syntactic + # breaks. We can basically just find enough characters to fit on the + # current line, minus the RFC 2047 chrome. What makes this trickier + # though is that we have to split at octet boundaries, not character + # boundaries but it's only safe to split at character boundaries so at + # best we can only get close. + encoded_lines = charset.header_encode_lines(string, self._maxlengths()) + # The first element extends the current line, but if it's None then + # nothing more fit on the current line so start a new line. + try: + first_line = encoded_lines.pop(0) + except IndexError: + # There are no encoded lines, so we're done. + return + if first_line is not None: + self._append_chunk(fws, first_line) + try: + last_line = encoded_lines.pop() + except IndexError: + # There was only one line. + return + self.newline() + self._current_line.push(self._continuation_ws, last_line) + # Everything else are full lines in themselves. + for line in encoded_lines: + self._lines.append(self._continuation_ws + line) + + def _maxlengths(self): + # The first line's length. + yield self._maxlen - len(self._current_line) + while True: + yield self._maxlen - self._continuation_ws_len + + def _ascii_split(self, fws, string, splitchars): + # The RFC 2822 header folding algorithm is simple in principle but + # complex in practice. Lines may be folded any place where "folding + # white space" appears by inserting a linesep character in front of the + # FWS. The complication is that not all spaces or tabs qualify as FWS, + # and we are also supposed to prefer to break at "higher level + # syntactic breaks". 
We can't do either of these without intimate + # knowledge of the structure of structured headers, which we don't have + # here. So the best we can do here is prefer to break at the specified + # splitchars, and hope that we don't choose any spaces or tabs that + # aren't legal FWS. (This is at least better than the old algorithm, + # where we would sometimes *introduce* FWS after a splitchar, or the + # algorithm before that, where we would turn all white space runs into + # single spaces or tabs.) + parts = re.split("(["+FWS+"]+)", fws+string) + if parts[0]: + parts[:0] = [''] + else: + parts.pop(0) + for fws, part in zip(*[iter(parts)]*2): + self._append_chunk(fws, part) + + def _append_chunk(self, fws, string): + self._current_line.push(fws, string) + if len(self._current_line) > self._maxlen: + # Find the best split point, working backward from the end. + # There might be none, on a long first line. + for ch in self._splitchars: + for i in range(self._current_line.part_count()-1, 0, -1): + if ch.isspace(): + fws = self._current_line[i][0] + if fws and fws[0]==ch: + break + prevpart = self._current_line[i-1][1] + if prevpart and prevpart[-1]==ch: + break + else: + continue + break + else: + fws, part = self._current_line.pop() + if self._current_line._initial_size > 0: + # There will be a header, so leave it on a line by itself. + self.newline() + if not fws: + # We don't use continuation_ws here because the whitespace + # after a header should always be a space. + fws = ' ' + self._current_line.push(fws, part) + return + remainder = self._current_line.pop_from(i) + self._lines.append(str(self._current_line)) + self._current_line.reset(remainder) + + +class _Accumulator(list): + + def __init__(self, initial_size=0): + self._initial_size = initial_size + super().__init__() + + def push(self, fws, string): + self.append((fws, string)) + + def pop_from(self, i=0): + popped = self[i:] + self[i:] = [] + return popped + + def pop(self): + if self.part_count()==0: + return ('', '') + return super().pop() + + def __len__(self): + return sum((len(fws)+len(part) for fws, part in self), + self._initial_size) + + def __str__(self): + return EMPTYSTRING.join((EMPTYSTRING.join((fws, part)) + for fws, part in self)) + + def reset(self, startval=None): + if startval is None: + startval = [] + self[:] = startval + self._initial_size = 0 + + def is_onlyws(self): + return self._initial_size==0 and (not self or str(self).isspace()) + + def part_count(self): + return super().__len__() diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/headerregistry.py b/minor_project/lib/python3.6/site-packages/future/backports/email/headerregistry.py new file mode 100644 index 0000000..9aaad65 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/headerregistry.py @@ -0,0 +1,592 @@ +"""Representing and manipulating email headers via custom objects. + +This module provides an implementation of the HeaderRegistry API. +The implementation is designed to flexibly follow RFC5322 rules. + +Eventually HeaderRegistry will be a public API, but it isn't yet, +and will probably change some before that happens. 
+ +""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +from future.builtins import super +from future.builtins import str +from future.utils import text_to_native_str +from future.backports.email import utils +from future.backports.email import errors +from future.backports.email import _header_value_parser as parser + +class Address(object): + + def __init__(self, display_name='', username='', domain='', addr_spec=None): + """Create an object represeting a full email address. + + An address can have a 'display_name', a 'username', and a 'domain'. In + addition to specifying the username and domain separately, they may be + specified together by using the addr_spec keyword *instead of* the + username and domain keywords. If an addr_spec string is specified it + must be properly quoted according to RFC 5322 rules; an error will be + raised if it is not. + + An Address object has display_name, username, domain, and addr_spec + attributes, all of which are read-only. The addr_spec and the string + value of the object are both quoted according to RFC5322 rules, but + without any Content Transfer Encoding. + + """ + # This clause with its potential 'raise' may only happen when an + # application program creates an Address object using an addr_spec + # keyword. The email library code itself must always supply username + # and domain. + if addr_spec is not None: + if username or domain: + raise TypeError("addrspec specified when username and/or " + "domain also specified") + a_s, rest = parser.get_addr_spec(addr_spec) + if rest: + raise ValueError("Invalid addr_spec; only '{}' " + "could be parsed from '{}'".format( + a_s, addr_spec)) + if a_s.all_defects: + raise a_s.all_defects[0] + username = a_s.local_part + domain = a_s.domain + self._display_name = display_name + self._username = username + self._domain = domain + + @property + def display_name(self): + return self._display_name + + @property + def username(self): + return self._username + + @property + def domain(self): + return self._domain + + @property + def addr_spec(self): + """The addr_spec (username@domain) portion of the address, quoted + according to RFC 5322 rules, but with no Content Transfer Encoding. + """ + nameset = set(self.username) + if len(nameset) > len(nameset-parser.DOT_ATOM_ENDS): + lp = parser.quote_string(self.username) + else: + lp = self.username + if self.domain: + return lp + '@' + self.domain + if not lp: + return '<>' + return lp + + def __repr__(self): + return "Address(display_name={!r}, username={!r}, domain={!r})".format( + self.display_name, self.username, self.domain) + + def __str__(self): + nameset = set(self.display_name) + if len(nameset) > len(nameset-parser.SPECIALS): + disp = parser.quote_string(self.display_name) + else: + disp = self.display_name + if disp: + addr_spec = '' if self.addr_spec=='<>' else self.addr_spec + return "{} <{}>".format(disp, addr_spec) + return self.addr_spec + + def __eq__(self, other): + if type(other) != type(self): + return False + return (self.display_name == other.display_name and + self.username == other.username and + self.domain == other.domain) + + +class Group(object): + + def __init__(self, display_name=None, addresses=None): + """Create an object representing an address group. + + An address group consists of a display_name followed by colon and an + list of addresses (see Address) terminated by a semi-colon. 
The Group + is created by specifying a display_name and a possibly empty list of + Address objects. A Group can also be used to represent a single + address that is not in a group, which is convenient when manipulating + lists that are a combination of Groups and individual Addresses. In + this case the display_name should be set to None. In particular, the + string representation of a Group whose display_name is None is the same + as the Address object, if there is one and only one Address object in + the addresses list. + + """ + self._display_name = display_name + self._addresses = tuple(addresses) if addresses else tuple() + + @property + def display_name(self): + return self._display_name + + @property + def addresses(self): + return self._addresses + + def __repr__(self): + return "Group(display_name={!r}, addresses={!r}".format( + self.display_name, self.addresses) + + def __str__(self): + if self.display_name is None and len(self.addresses)==1: + return str(self.addresses[0]) + disp = self.display_name + if disp is not None: + nameset = set(disp) + if len(nameset) > len(nameset-parser.SPECIALS): + disp = parser.quote_string(disp) + adrstr = ", ".join(str(x) for x in self.addresses) + adrstr = ' ' + adrstr if adrstr else adrstr + return "{}:{};".format(disp, adrstr) + + def __eq__(self, other): + if type(other) != type(self): + return False + return (self.display_name == other.display_name and + self.addresses == other.addresses) + + +# Header Classes # + +class BaseHeader(str): + + """Base class for message headers. + + Implements generic behavior and provides tools for subclasses. + + A subclass must define a classmethod named 'parse' that takes an unfolded + value string and a dictionary as its arguments. The dictionary will + contain one key, 'defects', initialized to an empty list. After the call + the dictionary must contain two additional keys: parse_tree, set to the + parse tree obtained from parsing the header, and 'decoded', set to the + string value of the idealized representation of the data from the value. + (That is, encoded words are decoded, and values that have canonical + representations are so represented.) + + The defects key is intended to collect parsing defects, which the message + parser will subsequently dispose of as appropriate. The parser should not, + insofar as practical, raise any errors. Defects should be added to the + list instead. The standard header parsers register defects for RFC + compliance issues, for obsolete RFC syntax, and for unrecoverable parsing + errors. + + The parse method may add additional keys to the dictionary. In this case + the subclass must define an 'init' method, which will be passed the + dictionary as its keyword arguments. The method should use (usually by + setting them as the value of similarly named attributes) and remove all the + extra keys added by its parse method, and then use super to call its parent + class with the remaining arguments and keywords. + + The subclass should also make sure that a 'max_count' attribute is defined + that is either None or 1. XXX: need to better define this API. 
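To make the subclass contract described above concrete, a hypothetical minimal header class (illustrative only, mirroring the UnstructuredHeader defined later in this file) could look like:

class XMoodHeader:
    """Hypothetical example of the parse() contract: fill 'parse_tree' and 'decoded'."""
    max_count = 1

    @classmethod
    def parse(cls, value, kwds):
        # 'parser' is the _header_value_parser module imported at the top of this file;
        # 'defects' is already present in kwds when parse() is called.
        kwds['parse_tree'] = parser.get_unstructured(value)
        kwds['decoded'] = str(kwds['parse_tree'])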
+ + """ + + def __new__(cls, name, value): + kwds = {'defects': []} + cls.parse(value, kwds) + if utils._has_surrogates(kwds['decoded']): + kwds['decoded'] = utils._sanitize(kwds['decoded']) + self = str.__new__(cls, kwds['decoded']) + # del kwds['decoded'] + self.init(name, **kwds) + return self + + def init(self, name, **_3to2kwargs): + defects = _3to2kwargs['defects']; del _3to2kwargs['defects'] + parse_tree = _3to2kwargs['parse_tree']; del _3to2kwargs['parse_tree'] + self._name = name + self._parse_tree = parse_tree + self._defects = defects + + @property + def name(self): + return self._name + + @property + def defects(self): + return tuple(self._defects) + + def __reduce__(self): + return ( + _reconstruct_header, + ( + self.__class__.__name__, + self.__class__.__bases__, + str(self), + ), + self.__dict__) + + @classmethod + def _reconstruct(cls, value): + return str.__new__(cls, value) + + def fold(self, **_3to2kwargs): + policy = _3to2kwargs['policy']; del _3to2kwargs['policy'] + """Fold header according to policy. + + The parsed representation of the header is folded according to + RFC5322 rules, as modified by the policy. If the parse tree + contains surrogateescaped bytes, the bytes are CTE encoded using + the charset 'unknown-8bit". + + Any non-ASCII characters in the parse tree are CTE encoded using + charset utf-8. XXX: make this a policy setting. + + The returned value is an ASCII-only string possibly containing linesep + characters, and ending with a linesep character. The string includes + the header name and the ': ' separator. + + """ + # At some point we need to only put fws here if it was in the source. + header = parser.Header([ + parser.HeaderLabel([ + parser.ValueTerminal(self.name, 'header-name'), + parser.ValueTerminal(':', 'header-sep')]), + parser.CFWSList([parser.WhiteSpaceTerminal(' ', 'fws')]), + self._parse_tree]) + return header.fold(policy=policy) + + +def _reconstruct_header(cls_name, bases, value): + return type(text_to_native_str(cls_name), bases, {})._reconstruct(value) + + +class UnstructuredHeader(object): + + max_count = None + value_parser = staticmethod(parser.get_unstructured) + + @classmethod + def parse(cls, value, kwds): + kwds['parse_tree'] = cls.value_parser(value) + kwds['decoded'] = str(kwds['parse_tree']) + + +class UniqueUnstructuredHeader(UnstructuredHeader): + + max_count = 1 + + +class DateHeader(object): + + """Header whose value consists of a single timestamp. + + Provides an additional attribute, datetime, which is either an aware + datetime using a timezone, or a naive datetime if the timezone + in the input string is -0000. Also accepts a datetime as input. + The 'value' attribute is the normalized form of the timestamp, + which means it is the output of format_datetime on the datetime. + """ + + max_count = None + + # This is used only for folding, not for creating 'decoded'. 
+ value_parser = staticmethod(parser.get_unstructured) + + @classmethod + def parse(cls, value, kwds): + if not value: + kwds['defects'].append(errors.HeaderMissingRequiredValue()) + kwds['datetime'] = None + kwds['decoded'] = '' + kwds['parse_tree'] = parser.TokenList() + return + if isinstance(value, str): + value = utils.parsedate_to_datetime(value) + kwds['datetime'] = value + kwds['decoded'] = utils.format_datetime(kwds['datetime']) + kwds['parse_tree'] = cls.value_parser(kwds['decoded']) + + def init(self, *args, **kw): + self._datetime = kw.pop('datetime') + super().init(*args, **kw) + + @property + def datetime(self): + return self._datetime + + +class UniqueDateHeader(DateHeader): + + max_count = 1 + + +class AddressHeader(object): + + max_count = None + + @staticmethod + def value_parser(value): + address_list, value = parser.get_address_list(value) + assert not value, 'this should not happen' + return address_list + + @classmethod + def parse(cls, value, kwds): + if isinstance(value, str): + # We are translating here from the RFC language (address/mailbox) + # to our API language (group/address). + kwds['parse_tree'] = address_list = cls.value_parser(value) + groups = [] + for addr in address_list.addresses: + groups.append(Group(addr.display_name, + [Address(mb.display_name or '', + mb.local_part or '', + mb.domain or '') + for mb in addr.all_mailboxes])) + defects = list(address_list.all_defects) + else: + # Assume it is Address/Group stuff + if not hasattr(value, '__iter__'): + value = [value] + groups = [Group(None, [item]) if not hasattr(item, 'addresses') + else item + for item in value] + defects = [] + kwds['groups'] = groups + kwds['defects'] = defects + kwds['decoded'] = ', '.join([str(item) for item in groups]) + if 'parse_tree' not in kwds: + kwds['parse_tree'] = cls.value_parser(kwds['decoded']) + + def init(self, *args, **kw): + self._groups = tuple(kw.pop('groups')) + self._addresses = None + super().init(*args, **kw) + + @property + def groups(self): + return self._groups + + @property + def addresses(self): + if self._addresses is None: + self._addresses = tuple([address for group in self._groups + for address in group.addresses]) + return self._addresses + + +class UniqueAddressHeader(AddressHeader): + + max_count = 1 + + +class SingleAddressHeader(AddressHeader): + + @property + def address(self): + if len(self.addresses)!=1: + raise ValueError(("value of single address header {} is not " + "a single address").format(self.name)) + return self.addresses[0] + + +class UniqueSingleAddressHeader(SingleAddressHeader): + + max_count = 1 + + +class MIMEVersionHeader(object): + + max_count = 1 + + value_parser = staticmethod(parser.parse_mime_version) + + @classmethod + def parse(cls, value, kwds): + kwds['parse_tree'] = parse_tree = cls.value_parser(value) + kwds['decoded'] = str(parse_tree) + kwds['defects'].extend(parse_tree.all_defects) + kwds['major'] = None if parse_tree.minor is None else parse_tree.major + kwds['minor'] = parse_tree.minor + if parse_tree.minor is not None: + kwds['version'] = '{}.{}'.format(kwds['major'], kwds['minor']) + else: + kwds['version'] = None + + def init(self, *args, **kw): + self._version = kw.pop('version') + self._major = kw.pop('major') + self._minor = kw.pop('minor') + super().init(*args, **kw) + + @property + def major(self): + return self._major + + @property + def minor(self): + return self._minor + + @property + def version(self): + return self._version + + +class ParameterizedMIMEHeader(object): + + # Mixin that handles 
the params dict. Must be subclassed and + # a property value_parser for the specific header provided. + + max_count = 1 + + @classmethod + def parse(cls, value, kwds): + kwds['parse_tree'] = parse_tree = cls.value_parser(value) + kwds['decoded'] = str(parse_tree) + kwds['defects'].extend(parse_tree.all_defects) + if parse_tree.params is None: + kwds['params'] = {} + else: + # The MIME RFCs specify that parameter ordering is arbitrary. + kwds['params'] = dict((utils._sanitize(name).lower(), + utils._sanitize(value)) + for name, value in parse_tree.params) + + def init(self, *args, **kw): + self._params = kw.pop('params') + super().init(*args, **kw) + + @property + def params(self): + return self._params.copy() + + +class ContentTypeHeader(ParameterizedMIMEHeader): + + value_parser = staticmethod(parser.parse_content_type_header) + + def init(self, *args, **kw): + super().init(*args, **kw) + self._maintype = utils._sanitize(self._parse_tree.maintype) + self._subtype = utils._sanitize(self._parse_tree.subtype) + + @property + def maintype(self): + return self._maintype + + @property + def subtype(self): + return self._subtype + + @property + def content_type(self): + return self.maintype + '/' + self.subtype + + +class ContentDispositionHeader(ParameterizedMIMEHeader): + + value_parser = staticmethod(parser.parse_content_disposition_header) + + def init(self, *args, **kw): + super().init(*args, **kw) + cd = self._parse_tree.content_disposition + self._content_disposition = cd if cd is None else utils._sanitize(cd) + + @property + def content_disposition(self): + return self._content_disposition + + +class ContentTransferEncodingHeader(object): + + max_count = 1 + + value_parser = staticmethod(parser.parse_content_transfer_encoding_header) + + @classmethod + def parse(cls, value, kwds): + kwds['parse_tree'] = parse_tree = cls.value_parser(value) + kwds['decoded'] = str(parse_tree) + kwds['defects'].extend(parse_tree.all_defects) + + def init(self, *args, **kw): + super().init(*args, **kw) + self._cte = utils._sanitize(self._parse_tree.cte) + + @property + def cte(self): + return self._cte + + +# The header factory # + +_default_header_map = { + 'subject': UniqueUnstructuredHeader, + 'date': UniqueDateHeader, + 'resent-date': DateHeader, + 'orig-date': UniqueDateHeader, + 'sender': UniqueSingleAddressHeader, + 'resent-sender': SingleAddressHeader, + 'to': UniqueAddressHeader, + 'resent-to': AddressHeader, + 'cc': UniqueAddressHeader, + 'resent-cc': AddressHeader, + 'bcc': UniqueAddressHeader, + 'resent-bcc': AddressHeader, + 'from': UniqueAddressHeader, + 'resent-from': AddressHeader, + 'reply-to': UniqueAddressHeader, + 'mime-version': MIMEVersionHeader, + 'content-type': ContentTypeHeader, + 'content-disposition': ContentDispositionHeader, + 'content-transfer-encoding': ContentTransferEncodingHeader, + } + +class HeaderRegistry(object): + + """A header_factory and header registry.""" + + def __init__(self, base_class=BaseHeader, default_class=UnstructuredHeader, + use_default_map=True): + """Create a header_factory that works with the Policy API. + + base_class is the class that will be the last class in the created + header class's __bases__ list. default_class is the class that will be + used if "name" (see __call__) does not appear in the registry. + use_default_map controls whether or not the default mapping of names to + specialized classes is copied in to the registry when the factory is + created. The default is True. 
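A minimal sketch of the header factory described here, using the standard-library headerregistry API that this module backports:

from email.headerregistry import HeaderRegistry

factory = HeaderRegistry()
to = factory('To', 'Foo Bar <foo@example.com>')
print(to.addresses[0].display_name)   # 'Foo Bar'
print(to.addresses[0].addr_spec)      # 'foo@example.com'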
+ + """ + self.registry = {} + self.base_class = base_class + self.default_class = default_class + if use_default_map: + self.registry.update(_default_header_map) + + def map_to_type(self, name, cls): + """Register cls as the specialized class for handling "name" headers. + + """ + self.registry[name.lower()] = cls + + def __getitem__(self, name): + cls = self.registry.get(name.lower(), self.default_class) + return type(text_to_native_str('_'+cls.__name__), (cls, self.base_class), {}) + + def __call__(self, name, value): + """Create a header instance for header 'name' from 'value'. + + Creates a header instance by creating a specialized class for parsing + and representing the specified header by combining the factory + base_class with a specialized class from the registry or the + default_class, and passing the name and value to the constructed + class's constructor. + + """ + return self[name](name, value) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/iterators.py b/minor_project/lib/python3.6/site-packages/future/backports/email/iterators.py new file mode 100644 index 0000000..82d320f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/iterators.py @@ -0,0 +1,74 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Various types of useful iterators and generators.""" +from __future__ import print_function +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = [ + 'body_line_iterator', + 'typed_subpart_iterator', + 'walk', + # Do not include _structure() since it's part of the debugging API. + ] + +import sys +from io import StringIO + + +# This function will become a method of the Message class +def walk(self): + """Walk over the message tree, yielding each subpart. + + The walk is performed in depth-first order. This method is a + generator. + """ + yield self + if self.is_multipart(): + for subpart in self.get_payload(): + for subsubpart in subpart.walk(): + yield subsubpart + + +# These two functions are imported into the Iterators.py interface module. +def body_line_iterator(msg, decode=False): + """Iterate over the parts, returning string payloads line-by-line. + + Optional decode (default False) is passed through to .get_payload(). + """ + for subpart in msg.walk(): + payload = subpart.get_payload(decode=decode) + if isinstance(payload, str): + for line in StringIO(payload): + yield line + + +def typed_subpart_iterator(msg, maintype='text', subtype=None): + """Iterate over the subparts with a given MIME type. + + Use `maintype' as the main MIME type to match against; this defaults to + "text". Optional `subtype' is the MIME subtype to match against; if + omitted, only the main type is matched. 
+ """ + for subpart in msg.walk(): + if subpart.get_content_maintype() == maintype: + if subtype is None or subpart.get_content_subtype() == subtype: + yield subpart + + +def _structure(msg, fp=None, level=0, include_default=False): + """A handy debugging aid""" + if fp is None: + fp = sys.stdout + tab = ' ' * (level * 4) + print(tab + msg.get_content_type(), end='', file=fp) + if include_default: + print(' [%s]' % msg.get_default_type(), file=fp) + else: + print(file=fp) + if msg.is_multipart(): + for subpart in msg.get_payload(): + _structure(subpart, fp, level+1, include_default) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/message.py b/minor_project/lib/python3.6/site-packages/future/backports/email/message.py new file mode 100644 index 0000000..d8d9615 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/message.py @@ -0,0 +1,882 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2001-2007 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Basic message object for the email package object model.""" +from __future__ import absolute_import, division, unicode_literals +from future.builtins import list, range, str, zip + +__all__ = ['Message'] + +import re +import uu +import base64 +import binascii +from io import BytesIO, StringIO + +# Intrapackage imports +from future.utils import as_native_str +from future.backports.email import utils +from future.backports.email import errors +from future.backports.email._policybase import compat32 +from future.backports.email import charset as _charset +from future.backports.email._encoded_words import decode_b +Charset = _charset.Charset + +SEMISPACE = '; ' + +# Regular expression that matches `special' characters in parameters, the +# existence of which force quoting of the parameter value. +tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]') + + +def _splitparam(param): + # Split header parameters. BAW: this may be too simple. It isn't + # strictly RFC 2045 (section 5.1) compliant, but it catches most headers + # found in the wild. We may eventually need a full fledged parser. + # RDM: we might have a Header here; for now just stringify it. + a, sep, b = str(param).partition(';') + if not sep: + return a.strip(), None + return a.strip(), b.strip() + +def _formatparam(param, value=None, quote=True): + """Convenience function to format and return a key=value pair. + + This will quote the value if needed or if quote is true. If value is a + three tuple (charset, language, value), it will be encoded according + to RFC2231 rules. If it contains non-ascii characters it will likewise + be encoded according to RFC2231 rules, using the utf-8 charset and + a null language. + """ + if value is not None and len(value) > 0: + # A tuple is used for RFC 2231 encoded parameter values where items + # are (charset, language, value). charset is a string, not a Charset + # instance. RFC 2231 encoded values are never quoted, per RFC. + if isinstance(value, tuple): + # Encode as per RFC 2231 + param += '*' + value = utils.encode_rfc2231(value[2], value[0], value[1]) + return '%s=%s' % (param, value) + else: + try: + value.encode('ascii') + except UnicodeEncodeError: + param += '*' + value = utils.encode_rfc2231(value, 'utf-8', '') + return '%s=%s' % (param, value) + # BAW: Please check this. I think that if quote is set it should + # force quoting even if not necessary. 
+ if quote or tspecials.search(value): + return '%s="%s"' % (param, utils.quote(value)) + else: + return '%s=%s' % (param, value) + else: + return param + +def _parseparam(s): + # RDM This might be a Header, so for now stringify it. + s = ';' + str(s) + plist = [] + while s[:1] == ';': + s = s[1:] + end = s.find(';') + while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2: + end = s.find(';', end + 1) + if end < 0: + end = len(s) + f = s[:end] + if '=' in f: + i = f.index('=') + f = f[:i].strip().lower() + '=' + f[i+1:].strip() + plist.append(f.strip()) + s = s[end:] + return plist + + +def _unquotevalue(value): + # This is different than utils.collapse_rfc2231_value() because it doesn't + # try to convert the value to a unicode. Message.get_param() and + # Message.get_params() are both currently defined to return the tuple in + # the face of RFC 2231 parameters. + if isinstance(value, tuple): + return value[0], value[1], utils.unquote(value[2]) + else: + return utils.unquote(value) + + +class Message(object): + """Basic message object. + + A message object is defined as something that has a bunch of RFC 2822 + headers and a payload. It may optionally have an envelope header + (a.k.a. Unix-From or From_ header). If the message is a container (i.e. a + multipart or a message/rfc822), then the payload is a list of Message + objects, otherwise it is a string. + + Message objects implement part of the `mapping' interface, which assumes + there is exactly one occurrence of the header per message. Some headers + do in fact appear multiple times (e.g. Received) and for those headers, + you must use the explicit API to set or get all the headers. Not all of + the mapping methods are implemented. + """ + def __init__(self, policy=compat32): + self.policy = policy + self._headers = list() + self._unixfrom = None + self._payload = None + self._charset = None + # Defaults for multipart messages + self.preamble = self.epilogue = None + self.defects = [] + # Default content type + self._default_type = 'text/plain' + + @as_native_str(encoding='utf-8') + def __str__(self): + """Return the entire formatted message as a string. + This includes the headers, body, and envelope header. + """ + return self.as_string() + + def as_string(self, unixfrom=False, maxheaderlen=0): + """Return the entire formatted message as a (unicode) string. + Optional `unixfrom' when True, means include the Unix From_ envelope + header. + + This is a convenience method and may not generate the message exactly + as you intend. For more flexibility, use the flatten() method of a + Generator instance. + """ + from future.backports.email.generator import Generator + fp = StringIO() + g = Generator(fp, mangle_from_=False, maxheaderlen=maxheaderlen) + g.flatten(self, unixfrom=unixfrom) + return fp.getvalue() + + def is_multipart(self): + """Return True if the message consists of multiple parts.""" + return isinstance(self._payload, list) + + # + # Unix From_ line + # + def set_unixfrom(self, unixfrom): + self._unixfrom = unixfrom + + def get_unixfrom(self): + return self._unixfrom + + # + # Payload manipulation. + # + def attach(self, payload): + """Add the given payload to the current payload. + + The current payload will always be a list of objects after this method + is called. If you want to set the payload to a scalar object, use + set_payload() instead. 
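A sketch of attach() as documented above, using the MIME convenience classes bundled with this package (stdlib-compatible constructors):

from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

outer = MIMEMultipart()
outer.attach(MIMEText('hello, world'))
print(outer.is_multipart())       # True: the payload is now a list of parts
print(len(outer.get_payload()))   # 1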
+ """ + if self._payload is None: + self._payload = [payload] + else: + self._payload.append(payload) + + def get_payload(self, i=None, decode=False): + """Return a reference to the payload. + + The payload will either be a list object or a string. If you mutate + the list object, you modify the message's payload in place. Optional + i returns that index into the payload. + + Optional decode is a flag indicating whether the payload should be + decoded or not, according to the Content-Transfer-Encoding header + (default is False). + + When True and the message is not a multipart, the payload will be + decoded if this header's value is `quoted-printable' or `base64'. If + some other encoding is used, or the header is missing, or if the + payload has bogus data (i.e. bogus base64 or uuencoded data), the + payload is returned as-is. + + If the message is a multipart and the decode flag is True, then None + is returned. + """ + # Here is the logic table for this code, based on the email5.0.0 code: + # i decode is_multipart result + # ------ ------ ------------ ------------------------------ + # None True True None + # i True True None + # None False True _payload (a list) + # i False True _payload element i (a Message) + # i False False error (not a list) + # i True False error (not a list) + # None False False _payload + # None True False _payload decoded (bytes) + # Note that Barry planned to factor out the 'decode' case, but that + # isn't so easy now that we handle the 8 bit data, which needs to be + # converted in both the decode and non-decode path. + if self.is_multipart(): + if decode: + return None + if i is None: + return self._payload + else: + return self._payload[i] + # For backward compatibility, Use isinstance and this error message + # instead of the more logical is_multipart test. + if i is not None and not isinstance(self._payload, list): + raise TypeError('Expected list, got %s' % type(self._payload)) + payload = self._payload + # cte might be a Header, so for now stringify it. + cte = str(self.get('content-transfer-encoding', '')).lower() + # payload may be bytes here. + if isinstance(payload, str): + payload = str(payload) # for Python-Future, so surrogateescape works + if utils._has_surrogates(payload): + bpayload = payload.encode('ascii', 'surrogateescape') + if not decode: + try: + payload = bpayload.decode(self.get_param('charset', 'ascii'), 'replace') + except LookupError: + payload = bpayload.decode('ascii', 'replace') + elif decode: + try: + bpayload = payload.encode('ascii') + except UnicodeError: + # This won't happen for RFC compliant messages (messages + # containing only ASCII codepoints in the unicode input). + # If it does happen, turn the string into bytes in a way + # guaranteed not to fail. + bpayload = payload.encode('raw-unicode-escape') + if not decode: + return payload + if cte == 'quoted-printable': + return utils._qdecode(bpayload) + elif cte == 'base64': + # XXX: this is a bit of a hack; decode_b should probably be factored + # out somewhere, but I haven't figured out where yet. 
+ value, defects = decode_b(b''.join(bpayload.splitlines())) + for defect in defects: + self.policy.handle_defect(self, defect) + return value + elif cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'): + in_file = BytesIO(bpayload) + out_file = BytesIO() + try: + uu.decode(in_file, out_file, quiet=True) + return out_file.getvalue() + except uu.Error: + # Some decoding problem + return bpayload + if isinstance(payload, str): + return bpayload + return payload + + def set_payload(self, payload, charset=None): + """Set the payload to the given value. + + Optional charset sets the message's default character set. See + set_charset() for details. + """ + self._payload = payload + if charset is not None: + self.set_charset(charset) + + def set_charset(self, charset): + """Set the charset of the payload to a given character set. + + charset can be a Charset instance, a string naming a character set, or + None. If it is a string it will be converted to a Charset instance. + If charset is None, the charset parameter will be removed from the + Content-Type field. Anything else will generate a TypeError. + + The message will be assumed to be of type text/* encoded with + charset.input_charset. It will be converted to charset.output_charset + and encoded properly, if needed, when generating the plain text + representation of the message. MIME headers (MIME-Version, + Content-Type, Content-Transfer-Encoding) will be added as needed. + """ + if charset is None: + self.del_param('charset') + self._charset = None + return + if not isinstance(charset, Charset): + charset = Charset(charset) + self._charset = charset + if 'MIME-Version' not in self: + self.add_header('MIME-Version', '1.0') + if 'Content-Type' not in self: + self.add_header('Content-Type', 'text/plain', + charset=charset.get_output_charset()) + else: + self.set_param('charset', charset.get_output_charset()) + if charset != charset.get_output_charset(): + self._payload = charset.body_encode(self._payload) + if 'Content-Transfer-Encoding' not in self: + cte = charset.get_body_encoding() + try: + cte(self) + except TypeError: + self._payload = charset.body_encode(self._payload) + self.add_header('Content-Transfer-Encoding', cte) + + def get_charset(self): + """Return the Charset instance associated with the message's payload. + """ + return self._charset + + # + # MAPPING INTERFACE (partial) + # + def __len__(self): + """Return the total number of headers, including duplicates.""" + return len(self._headers) + + def __getitem__(self, name): + """Get a header value. + + Return None if the header is missing instead of raising an exception. + + Note that if the header appeared multiple times, exactly which + occurrence gets returned is undefined. Use get_all() to get all + the values matching a header field name. + """ + return self.get(name) + + def __setitem__(self, name, val): + """Set the value of a header. + + Note: this does not overwrite an existing header with the same field + name. Use __delitem__() first to delete any existing headers. + """ + max_count = self.policy.header_max_count(name) + if max_count: + lname = name.lower() + found = 0 + for k, v in self._headers: + if k.lower() == lname: + found += 1 + if found >= max_count: + raise ValueError("There may be at most {} {} headers " + "in a message".format(max_count, name)) + self._headers.append(self.policy.header_store_parse(name, val)) + + def __delitem__(self, name): + """Delete all occurrences of a header, if present. + + Does not raise an exception if the header is missing. 
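The mapping semantics documented above are worth spelling out: __setitem__ appends rather than replaces, and __delitem__ removes every occurrence. A short sketch:

from email.message import Message

msg = Message()
msg['Received'] = 'from a.example by b.example'
msg['Received'] = 'from b.example by c.example'   # a second header, not a replacement
print(len(msg.get_all('Received')))               # 2
del msg['Received']                               # silently removes both
print('Received' in msg)                          # False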
+ """ + name = name.lower() + newheaders = list() + for k, v in self._headers: + if k.lower() != name: + newheaders.append((k, v)) + self._headers = newheaders + + def __contains__(self, name): + return name.lower() in [k.lower() for k, v in self._headers] + + def __iter__(self): + for field, value in self._headers: + yield field + + def keys(self): + """Return a list of all the message's header field names. + + These will be sorted in the order they appeared in the original + message, or were added to the message, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + return [k for k, v in self._headers] + + def values(self): + """Return a list of all the message's header values. + + These will be sorted in the order they appeared in the original + message, or were added to the message, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + return [self.policy.header_fetch_parse(k, v) + for k, v in self._headers] + + def items(self): + """Get all the message's header fields and values. + + These will be sorted in the order they appeared in the original + message, or were added to the message, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + return [(k, self.policy.header_fetch_parse(k, v)) + for k, v in self._headers] + + def get(self, name, failobj=None): + """Get a header value. + + Like __getitem__() but return failobj instead of None when the field + is missing. + """ + name = name.lower() + for k, v in self._headers: + if k.lower() == name: + return self.policy.header_fetch_parse(k, v) + return failobj + + # + # "Internal" methods (public API, but only intended for use by a parser + # or generator, not normal application code. + # + + def set_raw(self, name, value): + """Store name and value in the model without modification. + + This is an "internal" API, intended only for use by a parser. + """ + self._headers.append((name, value)) + + def raw_items(self): + """Return the (name, value) header pairs without modification. + + This is an "internal" API, intended only for use by a generator. + """ + return iter(self._headers.copy()) + + # + # Additional useful stuff + # + + def get_all(self, name, failobj=None): + """Return a list of all the values for the named field. + + These will be sorted in the order they appeared in the original + message, and may contain duplicates. Any fields deleted and + re-inserted are always appended to the header list. + + If no such fields exist, failobj is returned (defaults to None). + """ + values = [] + name = name.lower() + for k, v in self._headers: + if k.lower() == name: + values.append(self.policy.header_fetch_parse(k, v)) + if not values: + return failobj + return values + + def add_header(self, _name, _value, **_params): + """Extended header setting. + + name is the header field to add. keyword arguments can be used to set + additional parameters for the header field, with underscores converted + to dashes. Normally the parameter will be added as key="value" unless + value is None, in which case only the key will be added. If a + parameter value contains non-ASCII characters it can be specified as a + three-tuple of (charset, language, value), in which case it will be + encoded according to RFC2231 rules. Otherwise it will be encoded using + the utf-8 charset and a language of ''. 
+ + Examples: + + msg.add_header('content-disposition', 'attachment', filename='bud.gif') + msg.add_header('content-disposition', 'attachment', + filename=('utf-8', '', 'Fußballer.ppt')) + msg.add_header('content-disposition', 'attachment', + filename='Fußballer.ppt')) + """ + parts = [] + for k, v in _params.items(): + if v is None: + parts.append(k.replace('_', '-')) + else: + parts.append(_formatparam(k.replace('_', '-'), v)) + if _value is not None: + parts.insert(0, _value) + self[_name] = SEMISPACE.join(parts) + + def replace_header(self, _name, _value): + """Replace a header. + + Replace the first matching header found in the message, retaining + header order and case. If no matching header was found, a KeyError is + raised. + """ + _name = _name.lower() + for i, (k, v) in zip(range(len(self._headers)), self._headers): + if k.lower() == _name: + self._headers[i] = self.policy.header_store_parse(k, _value) + break + else: + raise KeyError(_name) + + # + # Use these three methods instead of the three above. + # + + def get_content_type(self): + """Return the message's content type. + + The returned string is coerced to lower case of the form + `maintype/subtype'. If there was no Content-Type header in the + message, the default type as given by get_default_type() will be + returned. Since according to RFC 2045, messages always have a default + type this will always return a value. + + RFC 2045 defines a message's default type to be text/plain unless it + appears inside a multipart/digest container, in which case it would be + message/rfc822. + """ + missing = object() + value = self.get('content-type', missing) + if value is missing: + # This should have no parameters + return self.get_default_type() + ctype = _splitparam(value)[0].lower() + # RFC 2045, section 5.2 says if its invalid, use text/plain + if ctype.count('/') != 1: + return 'text/plain' + return ctype + + def get_content_maintype(self): + """Return the message's main content type. + + This is the `maintype' part of the string returned by + get_content_type(). + """ + ctype = self.get_content_type() + return ctype.split('/')[0] + + def get_content_subtype(self): + """Returns the message's sub-content type. + + This is the `subtype' part of the string returned by + get_content_type(). + """ + ctype = self.get_content_type() + return ctype.split('/')[1] + + def get_default_type(self): + """Return the `default' content type. + + Most messages have a default content type of text/plain, except for + messages that are subparts of multipart/digest containers. Such + subparts have a default content type of message/rfc822. + """ + return self._default_type + + def set_default_type(self, ctype): + """Set the `default' content type. + + ctype should be either "text/plain" or "message/rfc822", although this + is not enforced. The default content type is not stored in the + Content-Type header. + """ + self._default_type = ctype + + def _get_params_preserve(self, failobj, header): + # Like get_params() but preserves the quoting of values. BAW: + # should this be part of the public interface? 
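A sketch of the content-type accessors described above:

from email.message import Message

msg = Message()
msg.add_header('Content-Type', 'text/HTML', charset='utf-8')
print(msg.get_content_type())      # 'text/html' (coerced to lower case)
print(msg.get_content_maintype())  # 'text'
print(msg.get_content_subtype())   # 'html'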
+ missing = object() + value = self.get(header, missing) + if value is missing: + return failobj + params = [] + for p in _parseparam(value): + try: + name, val = p.split('=', 1) + name = name.strip() + val = val.strip() + except ValueError: + # Must have been a bare attribute + name = p.strip() + val = '' + params.append((name, val)) + params = utils.decode_params(params) + return params + + def get_params(self, failobj=None, header='content-type', unquote=True): + """Return the message's Content-Type parameters, as a list. + + The elements of the returned list are 2-tuples of key/value pairs, as + split on the `=' sign. The left hand side of the `=' is the key, + while the right hand side is the value. If there is no `=' sign in + the parameter the value is the empty string. The value is as + described in the get_param() method. + + Optional failobj is the object to return if there is no Content-Type + header. Optional header is the header to search instead of + Content-Type. If unquote is True, the value is unquoted. + """ + missing = object() + params = self._get_params_preserve(missing, header) + if params is missing: + return failobj + if unquote: + return [(k, _unquotevalue(v)) for k, v in params] + else: + return params + + def get_param(self, param, failobj=None, header='content-type', + unquote=True): + """Return the parameter value if found in the Content-Type header. + + Optional failobj is the object to return if there is no Content-Type + header, or the Content-Type header has no such parameter. Optional + header is the header to search instead of Content-Type. + + Parameter keys are always compared case insensitively. The return + value can either be a string, or a 3-tuple if the parameter was RFC + 2231 encoded. When it's a 3-tuple, the elements of the value are of + the form (CHARSET, LANGUAGE, VALUE). Note that both CHARSET and + LANGUAGE can be None, in which case you should consider VALUE to be + encoded in the us-ascii charset. You can usually ignore LANGUAGE. + The parameter value (either the returned string, or the VALUE item in + the 3-tuple) is always unquoted, unless unquote is set to False. + + If your application doesn't care whether the parameter was RFC 2231 + encoded, it can turn the return value into a string as follows: + + param = msg.get_param('foo') + param = email.utils.collapse_rfc2231_value(rawparam) + + """ + if header not in self: + return failobj + for k, v in self._get_params_preserve(failobj, header): + if k.lower() == param.lower(): + if unquote: + return _unquotevalue(v) + else: + return v + return failobj + + def set_param(self, param, value, header='Content-Type', requote=True, + charset=None, language=''): + """Set a parameter in the Content-Type header. + + If the parameter already exists in the header, its value will be + replaced with the new value. + + If header is Content-Type and has not yet been defined for this + message, it will be set to "text/plain" and the new parameter and + value will be appended as per RFC 2045. + + An alternate header can specified in the header argument, and all + parameters will be quoted as necessary unless requote is False. + + If charset is specified, the parameter will be encoded according to RFC + 2231. Optional language specifies the RFC 2231 language, defaulting + to the empty string. Both charset and language should be strings. 
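A companion sketch for the parameter accessors (get_param/set_param/del_param):

from email.message import Message

msg = Message()
msg['Content-Type'] = 'text/plain; charset="utf-8"'
print(msg.get_param('charset'))          # 'utf-8' (unquoted by default)
msg.set_param('charset', 'iso-8859-1')   # replaces the existing value in place
msg.del_param('charset')                 # rewrites the header without it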
+ """ + if not isinstance(value, tuple) and charset: + value = (charset, language, value) + + if header not in self and header.lower() == 'content-type': + ctype = 'text/plain' + else: + ctype = self.get(header) + if not self.get_param(param, header=header): + if not ctype: + ctype = _formatparam(param, value, requote) + else: + ctype = SEMISPACE.join( + [ctype, _formatparam(param, value, requote)]) + else: + ctype = '' + for old_param, old_value in self.get_params(header=header, + unquote=requote): + append_param = '' + if old_param.lower() == param.lower(): + append_param = _formatparam(param, value, requote) + else: + append_param = _formatparam(old_param, old_value, requote) + if not ctype: + ctype = append_param + else: + ctype = SEMISPACE.join([ctype, append_param]) + if ctype != self.get(header): + del self[header] + self[header] = ctype + + def del_param(self, param, header='content-type', requote=True): + """Remove the given parameter completely from the Content-Type header. + + The header will be re-written in place without the parameter or its + value. All values will be quoted as necessary unless requote is + False. Optional header specifies an alternative to the Content-Type + header. + """ + if header not in self: + return + new_ctype = '' + for p, v in self.get_params(header=header, unquote=requote): + if p.lower() != param.lower(): + if not new_ctype: + new_ctype = _formatparam(p, v, requote) + else: + new_ctype = SEMISPACE.join([new_ctype, + _formatparam(p, v, requote)]) + if new_ctype != self.get(header): + del self[header] + self[header] = new_ctype + + def set_type(self, type, header='Content-Type', requote=True): + """Set the main type and subtype for the Content-Type header. + + type must be a string in the form "maintype/subtype", otherwise a + ValueError is raised. + + This method replaces the Content-Type header, keeping all the + parameters in place. If requote is False, this leaves the existing + header's quoting as is. Otherwise, the parameters will be quoted (the + default). + + An alternative header can be specified in the header argument. When + the Content-Type header is set, we'll always also add a MIME-Version + header. + """ + # BAW: should we be strict? + if not type.count('/') == 1: + raise ValueError + # Set the Content-Type, you get a MIME-Version + if header.lower() == 'content-type': + del self['mime-version'] + self['MIME-Version'] = '1.0' + if header not in self: + self[header] = type + return + params = self.get_params(header=header, unquote=requote) + del self[header] + self[header] = type + # Skip the first param; it's the old type. + for p, v in params[1:]: + self.set_param(p, v, header, requote) + + def get_filename(self, failobj=None): + """Return the filename associated with the payload if present. + + The filename is extracted from the Content-Disposition header's + `filename' parameter, and it is unquoted. If that header is missing + the `filename' parameter, this method falls back to looking for the + `name' parameter. + """ + missing = object() + filename = self.get_param('filename', missing, 'content-disposition') + if filename is missing: + filename = self.get_param('name', missing, 'content-type') + if filename is missing: + return failobj + return utils.collapse_rfc2231_value(filename).strip() + + def get_boundary(self, failobj=None): + """Return the boundary associated with the payload if present. + + The boundary is extracted from the Content-Type header's `boundary' + parameter, and it is unquoted. 
+ """ + missing = object() + boundary = self.get_param('boundary', missing) + if boundary is missing: + return failobj + # RFC 2046 says that boundaries may begin but not end in w/s + return utils.collapse_rfc2231_value(boundary).rstrip() + + def set_boundary(self, boundary): + """Set the boundary parameter in Content-Type to 'boundary'. + + This is subtly different than deleting the Content-Type header and + adding a new one with a new boundary parameter via add_header(). The + main difference is that using the set_boundary() method preserves the + order of the Content-Type header in the original message. + + HeaderParseError is raised if the message has no Content-Type header. + """ + missing = object() + params = self._get_params_preserve(missing, 'content-type') + if params is missing: + # There was no Content-Type header, and we don't know what type + # to set it to, so raise an exception. + raise errors.HeaderParseError('No Content-Type header found') + newparams = list() + foundp = False + for pk, pv in params: + if pk.lower() == 'boundary': + newparams.append(('boundary', '"%s"' % boundary)) + foundp = True + else: + newparams.append((pk, pv)) + if not foundp: + # The original Content-Type header had no boundary attribute. + # Tack one on the end. BAW: should we raise an exception + # instead??? + newparams.append(('boundary', '"%s"' % boundary)) + # Replace the existing Content-Type header with the new value + newheaders = list() + for h, v in self._headers: + if h.lower() == 'content-type': + parts = list() + for k, v in newparams: + if v == '': + parts.append(k) + else: + parts.append('%s=%s' % (k, v)) + val = SEMISPACE.join(parts) + newheaders.append(self.policy.header_store_parse(h, val)) + + else: + newheaders.append((h, v)) + self._headers = newheaders + + def get_content_charset(self, failobj=None): + """Return the charset parameter of the Content-Type header. + + The returned string is always coerced to lower case. If there is no + Content-Type header, or if that header has no charset parameter, + failobj is returned. + """ + missing = object() + charset = self.get_param('charset', missing) + if charset is missing: + return failobj + if isinstance(charset, tuple): + # RFC 2231 encoded, so decode it, and it better end up as ascii. + pcharset = charset[0] or 'us-ascii' + try: + # LookupError will be raised if the charset isn't known to + # Python. UnicodeError will be raised if the encoded text + # contains a character not in the charset. + as_bytes = charset[2].encode('raw-unicode-escape') + charset = str(as_bytes, pcharset) + except (LookupError, UnicodeError): + charset = charset[2] + # charset characters must be in us-ascii range + try: + charset.encode('us-ascii') + except UnicodeError: + return failobj + # RFC 2046, $4.1.2 says charsets are not case sensitive + return charset.lower() + + def get_charsets(self, failobj=None): + """Return a list containing the charset(s) used in this message. + + The returned list of items describes the Content-Type headers' + charset parameter for this message and all the subparts in its + payload. + + Each item will either be a string (the value of the charset parameter + in the Content-Type header of that part) or the value of the + 'failobj' parameter (defaults to None), if the part does not have a + main MIME type of "text", or the charset is not defined. + + The list will contain one string for each part of the message, plus + one for the container message (i.e. 
self), so that a non-multipart + message will still return a list of length 1. + """ + return [part.get_content_charset(failobj) for part in self.walk()] + + # I.e. def walk(self): ... + from future.backports.email.iterators import walk diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__init__.py b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..f381c05 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/application.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/application.cpython-36.pyc new file mode 100644 index 0000000..a3dd107 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/application.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/audio.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/audio.cpython-36.pyc new file mode 100644 index 0000000..a4910fc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/audio.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/base.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/base.cpython-36.pyc new file mode 100644 index 0000000..9e81447 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/base.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/image.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/image.cpython-36.pyc new file mode 100644 index 0000000..fe88ebc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/image.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/message.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/message.cpython-36.pyc new file mode 100644 index 0000000..d872d8e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/message.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/multipart.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/multipart.cpython-36.pyc new file mode 100644 index 0000000..abbdaeb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/multipart.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/nonmultipart.cpython-36.pyc 
b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/nonmultipart.cpython-36.pyc new file mode 100644 index 0000000..db42fb7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/nonmultipart.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/text.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/text.cpython-36.pyc new file mode 100644 index 0000000..4c903bf Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/__pycache__/text.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/application.py b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/application.py new file mode 100644 index 0000000..5cbfb17 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/application.py @@ -0,0 +1,39 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Keith Dart +# Contact: email-sig@python.org + +"""Class representing application/* type MIME documents.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +from future.backports.email import encoders +from future.backports.email.mime.nonmultipart import MIMENonMultipart + +__all__ = ["MIMEApplication"] + + +class MIMEApplication(MIMENonMultipart): + """Class for generating application/* MIME documents.""" + + def __init__(self, _data, _subtype='octet-stream', + _encoder=encoders.encode_base64, **_params): + """Create an application/* type MIME document. + + _data is a string containing the raw application data. + + _subtype is the MIME content type subtype, defaulting to + 'octet-stream'. + + _encoder is a function which will perform the actual encoding for + transport of the application data, defaulting to base64 encoding. + + Any additional keyword arguments are passed to the base class + constructor, which turns them into parameters on the Content-Type + header. + """ + if _subtype is None: + raise TypeError('Invalid application MIME subtype') + MIMENonMultipart.__init__(self, 'application', _subtype, **_params) + self.set_payload(_data) + _encoder(self) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/audio.py b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/audio.py new file mode 100644 index 0000000..4989c11 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/audio.py @@ -0,0 +1,74 @@ +# Copyright (C) 2001-2007 Python Software Foundation +# Author: Anthony Baxter +# Contact: email-sig@python.org + +"""Class representing audio/* type MIME documents.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMEAudio'] + +import sndhdr + +from io import BytesIO +from future.backports.email import encoders +from future.backports.email.mime.nonmultipart import MIMENonMultipart + + +_sndhdr_MIMEmap = {'au' : 'basic', + 'wav' :'x-wav', + 'aiff':'x-aiff', + 'aifc':'x-aiff', + } + +# There are others in sndhdr that don't have MIME types. :( +# Additional ones to be added to sndhdr? midi, mp3, realaudio, wma?? +def _whatsnd(data): + """Try to identify a sound file type. + + sndhdr.what() has a pretty cruddy interface, unfortunately. 
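A minimal usage sketch of the MIMEApplication class added above (the byte values here are illustrative, not part of the committed file):

    from future.backports.email.mime.application import MIMEApplication
    app = MIMEApplication(b'\x00\x01\x02')   # raw application data
    app.get_content_type()                   # -> 'application/octet-stream'
    app['Content-Transfer-Encoding']         # -> 'base64' (the default encoder)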
This is why + we re-do it here. It would be easier to reverse engineer the Unix 'file' + command and use the standard 'magic' file, as shipped with a modern Unix. + """ + hdr = data[:512] + fakefile = BytesIO(hdr) + for testfn in sndhdr.tests: + res = testfn(hdr, fakefile) + if res is not None: + return _sndhdr_MIMEmap.get(res[0]) + return None + + +class MIMEAudio(MIMENonMultipart): + """Class for generating audio/* MIME documents.""" + + def __init__(self, _audiodata, _subtype=None, + _encoder=encoders.encode_base64, **_params): + """Create an audio/* type MIME document. + + _audiodata is a string containing the raw audio data. If this data + can be decoded by the standard Python `sndhdr' module, then the + subtype will be automatically included in the Content-Type header. + Otherwise, you can specify the specific audio subtype via the + _subtype parameter. If _subtype is not given, and no subtype can be + guessed, a TypeError is raised. + + _encoder is a function which will perform the actual encoding for + transport of the image data. It takes one argument, which is this + Image instance. It should use get_payload() and set_payload() to + change the payload to the encoded form. It should also add any + Content-Transfer-Encoding or other headers to the message as + necessary. The default encoding is Base64. + + Any additional keyword arguments are passed to the base class + constructor, which turns them into parameters on the Content-Type + header. + """ + if _subtype is None: + _subtype = _whatsnd(_audiodata) + if _subtype is None: + raise TypeError('Could not find audio MIME subtype') + MIMENonMultipart.__init__(self, 'audio', _subtype, **_params) + self.set_payload(_audiodata) + _encoder(self) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/base.py b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/base.py new file mode 100644 index 0000000..e77f3ca --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/base.py @@ -0,0 +1,25 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Base class for MIME specializations.""" +from __future__ import absolute_import, division, unicode_literals +from future.backports.email import message + +__all__ = ['MIMEBase'] + + +class MIMEBase(message.Message): + """Base class for MIME specializations.""" + + def __init__(self, _maintype, _subtype, **_params): + """This constructor adds a Content-Type: and a MIME-Version: header. + + The Content-Type: header is taken from the _maintype and _subtype + arguments. Additional parameters for this header are taken from the + keyword arguments. 
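A minimal sketch of the MIMEBase constructor documented above (illustrative values only):

    from future.backports.email.mime.base import MIMEBase
    m = MIMEBase('application', 'octet-stream')
    m['MIME-Version']        # -> '1.0'
    m.get_content_type()     # -> 'application/octet-stream'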
+ """ + message.Message.__init__(self) + ctype = '%s/%s' % (_maintype, _subtype) + self.add_header('Content-Type', ctype, **_params) + self['MIME-Version'] = '1.0' diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/image.py b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/image.py new file mode 100644 index 0000000..a036024 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/image.py @@ -0,0 +1,48 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Class representing image/* type MIME documents.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMEImage'] + +import imghdr + +from future.backports.email import encoders +from future.backports.email.mime.nonmultipart import MIMENonMultipart + + +class MIMEImage(MIMENonMultipart): + """Class for generating image/* type MIME documents.""" + + def __init__(self, _imagedata, _subtype=None, + _encoder=encoders.encode_base64, **_params): + """Create an image/* type MIME document. + + _imagedata is a string containing the raw image data. If this data + can be decoded by the standard Python `imghdr' module, then the + subtype will be automatically included in the Content-Type header. + Otherwise, you can specify the specific image subtype via the _subtype + parameter. + + _encoder is a function which will perform the actual encoding for + transport of the image data. It takes one argument, which is this + Image instance. It should use get_payload() and set_payload() to + change the payload to the encoded form. It should also add any + Content-Transfer-Encoding or other headers to the message as + necessary. The default encoding is Base64. + + Any additional keyword arguments are passed to the base class + constructor, which turns them into parameters on the Content-Type + header. + """ + if _subtype is None: + _subtype = imghdr.what(None, _imagedata) + if _subtype is None: + raise TypeError('Could not guess image MIME subtype') + MIMENonMultipart.__init__(self, 'image', _subtype, **_params) + self.set_payload(_imagedata) + _encoder(self) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/message.py b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/message.py new file mode 100644 index 0000000..7f92075 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/message.py @@ -0,0 +1,36 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Class representing message/* MIME documents.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMEMessage'] + +from future.backports.email import message +from future.backports.email.mime.nonmultipart import MIMENonMultipart + + +class MIMEMessage(MIMENonMultipart): + """Class representing message/* MIME documents.""" + + def __init__(self, _msg, _subtype='rfc822'): + """Create a message/* type MIME document. + + _msg is a message object and must be an instance of Message, or a + derived class of Message, otherwise a TypeError is raised. + + Optional _subtype defines the subtype of the contained message. The + default is "rfc822" (this is defined by the MIME standard, even though + the term "rfc822" is technically outdated by RFC 2822). 
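A minimal sketch of the message/rfc822 wrapper documented above (illustrative values only):

    from future.backports.email.message import Message
    from future.backports.email.mime.message import MIMEMessage
    inner = Message()
    inner['Subject'] = 'wrapped message'
    outer = MIMEMessage(inner)
    outer.get_content_type()           # -> 'message/rfc822'
    outer.get_payload(0) is inner      # -> True
    # MIMEMessage('not a Message')     # would raise TypeError, as documented above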
+ """ + MIMENonMultipart.__init__(self, 'message', _subtype) + if not isinstance(_msg, message.Message): + raise TypeError('Argument is not an instance of Message') + # It's convenient to use this base class method. We need to do it + # this way or we'll get an exception + message.Message.attach(self, _msg) + # And be sure our default type is set correctly + self.set_default_type('message/rfc822') diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/multipart.py b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/multipart.py new file mode 100644 index 0000000..6d7ed3d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/multipart.py @@ -0,0 +1,49 @@ +# Copyright (C) 2002-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Base class for MIME multipart/* type messages.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMEMultipart'] + +from future.backports.email.mime.base import MIMEBase + + +class MIMEMultipart(MIMEBase): + """Base class for MIME multipart/* type messages.""" + + def __init__(self, _subtype='mixed', boundary=None, _subparts=None, + **_params): + """Creates a multipart/* type message. + + By default, creates a multipart/mixed message, with proper + Content-Type and MIME-Version headers. + + _subtype is the subtype of the multipart content type, defaulting to + `mixed'. + + boundary is the multipart boundary string. By default it is + calculated as needed. + + _subparts is a sequence of initial subparts for the payload. It + must be an iterable object, such as a list. You can always + attach new subparts to the message by using the attach() method. + + Additional parameters for the Content-Type header are taken from the + keyword arguments (or passed into the _params argument). + """ + MIMEBase.__init__(self, 'multipart', _subtype, **_params) + + # Initialise _payload to an empty list as the Message superclass's + # implementation of is_multipart assumes that _payload is a list for + # multipart messages. 
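# A hedged usage sketch of this constructor (illustrative values only):
#
#     m = MIMEMultipart('alternative', boundary='BOUNDARY')
#     m.get_content_type()   # -> 'multipart/alternative'
#     m.get_boundary()       # -> 'BOUNDARY'
#     m.is_multipart()       # -> True, because _payload is the list set below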
+ self._payload = [] + + if _subparts: + for p in _subparts: + self.attach(p) + if boundary: + self.set_boundary(boundary) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/nonmultipart.py b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/nonmultipart.py new file mode 100644 index 0000000..08c37c3 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/nonmultipart.py @@ -0,0 +1,24 @@ +# Copyright (C) 2002-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Base class for MIME type messages that are not multipart.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMENonMultipart'] + +from future.backports.email import errors +from future.backports.email.mime.base import MIMEBase + + +class MIMENonMultipart(MIMEBase): + """Base class for MIME multipart/* type messages.""" + + def attach(self, payload): + # The public API prohibits attaching multiple subparts to MIMEBase + # derived subtypes since none of them are, by definition, of content + # type multipart/* + raise errors.MultipartConversionError( + 'Cannot attach additional subparts to non-multipart/*') diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/mime/text.py b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/text.py new file mode 100644 index 0000000..6269f4a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/mime/text.py @@ -0,0 +1,44 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Class representing text/* type MIME documents.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['MIMEText'] + +from future.backports.email.encoders import encode_7or8bit +from future.backports.email.mime.nonmultipart import MIMENonMultipart + + +class MIMEText(MIMENonMultipart): + """Class for generating text/* type MIME documents.""" + + def __init__(self, _text, _subtype='plain', _charset=None): + """Create a text/* type MIME document. + + _text is the string for this message object. + + _subtype is the MIME sub content type, defaulting to "plain". + + _charset is the character set parameter added to the Content-Type + header. This defaults to "us-ascii". Note that as a side-effect, the + Content-Transfer-Encoding header will also be set. + """ + + # If no _charset was specified, check to see if there are non-ascii + # characters present. If not, use 'us-ascii', otherwise use utf-8. + # XXX: This can be removed once #7304 is fixed. 
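# A hedged illustration of the charset auto-detection performed below:
#
#     MIMEText('hello world').get_content_charset()        # -> 'us-ascii'
#     MIMEText('h\xe9llo w\xf6rld').get_content_charset()  # -> 'utf-8'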
+ if _charset is None: + try: + _text.encode('us-ascii') + _charset = 'us-ascii' + except UnicodeEncodeError: + _charset = 'utf-8' + + MIMENonMultipart.__init__(self, 'text', _subtype, + **{'charset': _charset}) + + self.set_payload(_text, _charset) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/parser.py b/minor_project/lib/python3.6/site-packages/future/backports/email/parser.py new file mode 100644 index 0000000..df1c6e2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/parser.py @@ -0,0 +1,135 @@ +# Copyright (C) 2001-2007 Python Software Foundation +# Author: Barry Warsaw, Thomas Wouters, Anthony Baxter +# Contact: email-sig@python.org + +"""A parser of RFC 2822 and MIME email messages.""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import + +__all__ = ['Parser', 'HeaderParser', 'BytesParser', 'BytesHeaderParser'] + +import warnings +from io import StringIO, TextIOWrapper + +from future.backports.email.feedparser import FeedParser, BytesFeedParser +from future.backports.email.message import Message +from future.backports.email._policybase import compat32 + + +class Parser(object): + def __init__(self, _class=Message, **_3to2kwargs): + """Parser of RFC 2822 and MIME email messages. + + Creates an in-memory object tree representing the email message, which + can then be manipulated and turned over to a Generator to return the + textual representation of the message. + + The string must be formatted as a block of RFC 2822 headers and header + continuation lines, optionally preceeded by a `Unix-from' header. The + header block is terminated either by the end of the string or by a + blank line. + + _class is the class to instantiate for new message objects when they + must be created. This class must have a constructor that can take + zero arguments. Default is Message.Message. + + The policy keyword specifies a policy object that controls a number of + aspects of the parser's operation. The default policy maintains + backward compatibility. + + """ + if 'policy' in _3to2kwargs: policy = _3to2kwargs['policy']; del _3to2kwargs['policy'] + else: policy = compat32 + self._class = _class + self.policy = policy + + def parse(self, fp, headersonly=False): + """Create a message structure from the data in a file. + + Reads all the data from the file and returns the root of the message + structure. Optional headersonly is a flag specifying whether to stop + parsing after reading the headers or not. The default is False, + meaning it parses the entire contents of the file. + """ + feedparser = FeedParser(self._class, policy=self.policy) + if headersonly: + feedparser._set_headersonly() + while True: + data = fp.read(8192) + if not data: + break + feedparser.feed(data) + return feedparser.close() + + def parsestr(self, text, headersonly=False): + """Create a message structure from a string. + + Returns the root of the message structure. Optional headersonly is a + flag specifying whether to stop parsing after reading the headers or + not. The default is False, meaning it parses the entire contents of + the file. 
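A minimal sketch of the parsing API described above (the message text is illustrative):

    from future.backports.email.parser import Parser
    msg = Parser().parsestr('From: a@example.com\nSubject: hi\n\nbody\n')
    msg['Subject']       # -> 'hi'
    msg.get_payload()    # -> 'body\n'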
+ """ + return self.parse(StringIO(text), headersonly=headersonly) + + + +class HeaderParser(Parser): + def parse(self, fp, headersonly=True): + return Parser.parse(self, fp, True) + + def parsestr(self, text, headersonly=True): + return Parser.parsestr(self, text, True) + + +class BytesParser(object): + + def __init__(self, *args, **kw): + """Parser of binary RFC 2822 and MIME email messages. + + Creates an in-memory object tree representing the email message, which + can then be manipulated and turned over to a Generator to return the + textual representation of the message. + + The input must be formatted as a block of RFC 2822 headers and header + continuation lines, optionally preceeded by a `Unix-from' header. The + header block is terminated either by the end of the input or by a + blank line. + + _class is the class to instantiate for new message objects when they + must be created. This class must have a constructor that can take + zero arguments. Default is Message.Message. + """ + self.parser = Parser(*args, **kw) + + def parse(self, fp, headersonly=False): + """Create a message structure from the data in a binary file. + + Reads all the data from the file and returns the root of the message + structure. Optional headersonly is a flag specifying whether to stop + parsing after reading the headers or not. The default is False, + meaning it parses the entire contents of the file. + """ + fp = TextIOWrapper(fp, encoding='ascii', errors='surrogateescape') + with fp: + return self.parser.parse(fp, headersonly) + + + def parsebytes(self, text, headersonly=False): + """Create a message structure from a byte string. + + Returns the root of the message structure. Optional headersonly is a + flag specifying whether to stop parsing after reading the headers or + not. The default is False, meaning it parses the entire contents of + the file. + """ + text = text.decode('ASCII', errors='surrogateescape') + return self.parser.parsestr(text, headersonly) + + +class BytesHeaderParser(BytesParser): + def parse(self, fp, headersonly=True): + return BytesParser.parse(self, fp, headersonly=True) + + def parsebytes(self, text, headersonly=True): + return BytesParser.parsebytes(self, text, headersonly=True) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/policy.py b/minor_project/lib/python3.6/site-packages/future/backports/email/policy.py new file mode 100644 index 0000000..2f609a2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/policy.py @@ -0,0 +1,193 @@ +"""This will be the home for the policy that hooks in the new +code that adds all the email6 features. +""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import +from future.builtins import super + +from future.standard_library.email._policybase import (Policy, Compat32, + compat32, _extend_docstrings) +from future.standard_library.email.utils import _has_surrogates +from future.standard_library.email.headerregistry import HeaderRegistry as HeaderRegistry + +__all__ = [ + 'Compat32', + 'compat32', + 'Policy', + 'EmailPolicy', + 'default', + 'strict', + 'SMTP', + 'HTTP', + ] + +@_extend_docstrings +class EmailPolicy(Policy): + + """+ + PROVISIONAL + + The API extensions enabled by this policy are currently provisional. + Refer to the documentation for details. + + This policy adds new header parsing and folding algorithms. 
Instead of + simple strings, headers are custom objects with custom attributes + depending on the type of the field. The folding algorithm fully + implements RFCs 2047 and 5322. + + In addition to the settable attributes listed above that apply to + all Policies, this policy adds the following additional attributes: + + refold_source -- if the value for a header in the Message object + came from the parsing of some source, this attribute + indicates whether or not a generator should refold + that value when transforming the message back into + stream form. The possible values are: + + none -- all source values use original folding + long -- source values that have any line that is + longer than max_line_length will be + refolded + all -- all values are refolded. + + The default is 'long'. + + header_factory -- a callable that takes two arguments, 'name' and + 'value', where 'name' is a header field name and + 'value' is an unfolded header field value, and + returns a string-like object that represents that + header. A default header_factory is provided that + understands some of the RFC5322 header field types. + (Currently address fields and date fields have + special treatment, while all other fields are + treated as unstructured. This list will be + completed before the extension is marked stable.) + """ + + refold_source = 'long' + header_factory = HeaderRegistry() + + def __init__(self, **kw): + # Ensure that each new instance gets a unique header factory + # (as opposed to clones, which share the factory). + if 'header_factory' not in kw: + object.__setattr__(self, 'header_factory', HeaderRegistry()) + super().__init__(**kw) + + def header_max_count(self, name): + """+ + The implementation for this class returns the max_count attribute from + the specialized header class that would be used to construct a header + of type 'name'. + """ + return self.header_factory[name].max_count + + # The logic of the next three methods is chosen such that it is possible to + # switch a Message object between a Compat32 policy and a policy derived + # from this class and have the results stay consistent. This allows a + # Message object constructed with this policy to be passed to a library + # that only handles Compat32 objects, or to receive such an object and + # convert it to use the newer style by just changing its policy. It is + # also chosen because it postpones the relatively expensive full rfc5322 + # parse until as late as possible when parsing from source, since in many + # applications only a few headers will actually be inspected. + + def header_source_parse(self, sourcelines): + """+ + The name is parsed as everything up to the ':' and returned unmodified. + The value is determined by stripping leading whitespace off the + remainder of the first line, joining all subsequent lines together, and + stripping any trailing carriage return or linefeed characters. (This + is the same as Compat32). + + """ + name, value = sourcelines[0].split(':', 1) + value = value.lstrip(' \t') + ''.join(sourcelines[1:]) + return (name, value.rstrip('\r\n')) + + def header_store_parse(self, name, value): + """+ + The name is returned unchanged. If the input value has a 'name' + attribute and it matches the name ignoring case, the value is returned + unchanged. Otherwise the name and value are passed to header_factory + method, and the resulting custom header object is returned as the + value. In this case a ValueError is raised if the input value contains + CR or LF characters. 
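A hedged sketch of the behaviour described above, assuming this backport mirrors the stdlib email.policy API (the names and addresses are examples):

    from future.backports.email.parser import Parser
    from future.backports.email.policy import default
    msg = Parser(policy=default).parsestr('To: Foo Bar <foo@example.com>\n\nhi\n')
    msg['To'].addresses[0].addr_spec   # -> 'foo@example.com' (a header object, not a bare str)
    # msg['Subject'] = 'one\ntwo'      # would raise ValueError under this policy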
+ + """ + if hasattr(value, 'name') and value.name.lower() == name.lower(): + return (name, value) + if isinstance(value, str) and len(value.splitlines())>1: + raise ValueError("Header values may not contain linefeed " + "or carriage return characters") + return (name, self.header_factory(name, value)) + + def header_fetch_parse(self, name, value): + """+ + If the value has a 'name' attribute, it is returned to unmodified. + Otherwise the name and the value with any linesep characters removed + are passed to the header_factory method, and the resulting custom + header object is returned. Any surrogateescaped bytes get turned + into the unicode unknown-character glyph. + + """ + if hasattr(value, 'name'): + return value + return self.header_factory(name, ''.join(value.splitlines())) + + def fold(self, name, value): + """+ + Header folding is controlled by the refold_source policy setting. A + value is considered to be a 'source value' if and only if it does not + have a 'name' attribute (having a 'name' attribute means it is a header + object of some sort). If a source value needs to be refolded according + to the policy, it is converted into a custom header object by passing + the name and the value with any linesep characters removed to the + header_factory method. Folding of a custom header object is done by + calling its fold method with the current policy. + + Source values are split into lines using splitlines. If the value is + not to be refolded, the lines are rejoined using the linesep from the + policy and returned. The exception is lines containing non-ascii + binary data. In that case the value is refolded regardless of the + refold_source setting, which causes the binary data to be CTE encoded + using the unknown-8bit charset. + + """ + return self._fold(name, value, refold_binary=True) + + def fold_binary(self, name, value): + """+ + The same as fold if cte_type is 7bit, except that the returned value is + bytes. + + If cte_type is 8bit, non-ASCII binary data is converted back into + bytes. Headers with binary data are not refolded, regardless of the + refold_header setting, since there is no way to know whether the binary + data consists of single byte characters or multibyte characters. 
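A hedged illustration of folding under the policy instances defined below (short values are not refolded):

    from future.backports.email.policy import default, SMTP, HTTP
    default.fold('Subject', 'hello')     # -> 'Subject: hello\n'
    SMTP.fold('Subject', 'hello')        # -> 'Subject: hello\r\n'
    HTTP.max_line_length is None         # -> True (no line-length limit)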
+ + """ + folded = self._fold(name, value, refold_binary=self.cte_type=='7bit') + return folded.encode('ascii', 'surrogateescape') + + def _fold(self, name, value, refold_binary=False): + if hasattr(value, 'name'): + return value.fold(policy=self) + maxlen = self.max_line_length if self.max_line_length else float('inf') + lines = value.splitlines() + refold = (self.refold_source == 'all' or + self.refold_source == 'long' and + (lines and len(lines[0])+len(name)+2 > maxlen or + any(len(x) > maxlen for x in lines[1:]))) + if refold or refold_binary and _has_surrogates(value): + return self.header_factory(name, ''.join(lines)).fold(policy=self) + return name + ': ' + self.linesep.join(lines) + self.linesep + + +default = EmailPolicy() +# Make the default policy use the class default header_factory +del default.header_factory +strict = default.clone(raise_on_defect=True) +SMTP = default.clone(linesep='\r\n') +HTTP = default.clone(linesep='\r\n', max_line_length=None) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/quoprimime.py b/minor_project/lib/python3.6/site-packages/future/backports/email/quoprimime.py new file mode 100644 index 0000000..b69d158 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/quoprimime.py @@ -0,0 +1,326 @@ +# Copyright (C) 2001-2006 Python Software Foundation +# Author: Ben Gertzfield +# Contact: email-sig@python.org + +"""Quoted-printable content transfer encoding per RFCs 2045-2047. + +This module handles the content transfer encoding method defined in RFC 2045 +to encode US ASCII-like 8-bit data called `quoted-printable'. It is used to +safely encode text that is in a character set similar to the 7-bit US ASCII +character set, but that includes some 8-bit characters that are normally not +allowed in email bodies or headers. + +Quoted-printable is very space-inefficient for encoding binary files; use the +email.base64mime module for that instead. + +This module provides an interface to encode and decode both headers and bodies +with quoted-printable encoding. + +RFC 2045 defines a method for including character set information in an +`encoded-word' in a header. This method is commonly used for 8-bit real names +in To:/From:/Cc: etc. fields, as well as Subject: lines. + +This module does not do the line wrapping or end-of-line character +conversion necessary for proper internationalized headers; it only +does dumb encoding and decoding. To deal with the various line +wrapping issues, use the email.header module. +""" +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import +from future.builtins import bytes, chr, dict, int, range, super + +__all__ = [ + 'body_decode', + 'body_encode', + 'body_length', + 'decode', + 'decodestring', + 'header_decode', + 'header_encode', + 'header_length', + 'quote', + 'unquote', + ] + +import re +import io + +from string import ascii_letters, digits, hexdigits + +CRLF = '\r\n' +NL = '\n' +EMPTYSTRING = '' + +# Build a mapping of octets to the expansion of that octet. Since we're only +# going to have 256 of these things, this isn't terribly inefficient +# space-wise. Remember that headers and bodies have different sets of safe +# characters. Initialize both maps with the full expansion, and then override +# the safe bytes with the more compact form. +_QUOPRI_HEADER_MAP = dict((c, '=%02X' % c) for c in range(256)) +_QUOPRI_BODY_MAP = _QUOPRI_HEADER_MAP.copy() + +# Safe header bytes which need no encoding. 
+for c in bytes(b'-!*+/' + ascii_letters.encode('ascii') + digits.encode('ascii')): + _QUOPRI_HEADER_MAP[c] = chr(c) +# Headers have one other special encoding; spaces become underscores. +_QUOPRI_HEADER_MAP[ord(' ')] = '_' + +# Safe body bytes which need no encoding. +for c in bytes(b' !"#$%&\'()*+,-./0123456789:;<>' + b'?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`' + b'abcdefghijklmnopqrstuvwxyz{|}~\t'): + _QUOPRI_BODY_MAP[c] = chr(c) + + + +# Helpers +def header_check(octet): + """Return True if the octet should be escaped with header quopri.""" + return chr(octet) != _QUOPRI_HEADER_MAP[octet] + + +def body_check(octet): + """Return True if the octet should be escaped with body quopri.""" + return chr(octet) != _QUOPRI_BODY_MAP[octet] + + +def header_length(bytearray): + """Return a header quoted-printable encoding length. + + Note that this does not include any RFC 2047 chrome added by + `header_encode()`. + + :param bytearray: An array of bytes (a.k.a. octets). + :return: The length in bytes of the byte array when it is encoded with + quoted-printable for headers. + """ + return sum(len(_QUOPRI_HEADER_MAP[octet]) for octet in bytearray) + + +def body_length(bytearray): + """Return a body quoted-printable encoding length. + + :param bytearray: An array of bytes (a.k.a. octets). + :return: The length in bytes of the byte array when it is encoded with + quoted-printable for bodies. + """ + return sum(len(_QUOPRI_BODY_MAP[octet]) for octet in bytearray) + + +def _max_append(L, s, maxlen, extra=''): + if not isinstance(s, str): + s = chr(s) + if not L: + L.append(s.lstrip()) + elif len(L[-1]) + len(s) <= maxlen: + L[-1] += extra + s + else: + L.append(s.lstrip()) + + +def unquote(s): + """Turn a string in the form =AB to the ASCII character with value 0xab""" + return chr(int(s[1:3], 16)) + + +def quote(c): + return '=%02X' % ord(c) + + + +def header_encode(header_bytes, charset='iso-8859-1'): + """Encode a single header line with quoted-printable (like) encoding. + + Defined in RFC 2045, this `Q' encoding is similar to quoted-printable, but + used specifically for email header fields to allow charsets with mostly 7 + bit characters (and some 8 bit) to remain more or less readable in non-RFC + 2045 aware mail clients. + + charset names the character set to use in the RFC 2046 header. It + defaults to iso-8859-1. + """ + # Return empty headers as an empty string. + if not header_bytes: + return '' + # Iterate over every byte, encoding if necessary. + encoded = [] + for octet in header_bytes: + encoded.append(_QUOPRI_HEADER_MAP[octet]) + # Now add the RFC chrome to each encoded chunk and glue the chunks + # together. 
+ return '=?%s?q?%s?=' % (charset, EMPTYSTRING.join(encoded)) + + +class _body_accumulator(io.StringIO): + + def __init__(self, maxlinelen, eol, *args, **kw): + super().__init__(*args, **kw) + self.eol = eol + self.maxlinelen = self.room = maxlinelen + + def write_str(self, s): + """Add string s to the accumulated body.""" + self.write(s) + self.room -= len(s) + + def newline(self): + """Write eol, then start new line.""" + self.write_str(self.eol) + self.room = self.maxlinelen + + def write_soft_break(self): + """Write a soft break, then start a new line.""" + self.write_str('=') + self.newline() + + def write_wrapped(self, s, extra_room=0): + """Add a soft line break if needed, then write s.""" + if self.room < len(s) + extra_room: + self.write_soft_break() + self.write_str(s) + + def write_char(self, c, is_last_char): + if not is_last_char: + # Another character follows on this line, so we must leave + # extra room, either for it or a soft break, and whitespace + # need not be quoted. + self.write_wrapped(c, extra_room=1) + elif c not in ' \t': + # For this and remaining cases, no more characters follow, + # so there is no need to reserve extra room (since a hard + # break will immediately follow). + self.write_wrapped(c) + elif self.room >= 3: + # It's a whitespace character at end-of-line, and we have room + # for the three-character quoted encoding. + self.write(quote(c)) + elif self.room == 2: + # There's room for the whitespace character and a soft break. + self.write(c) + self.write_soft_break() + else: + # There's room only for a soft break. The quoted whitespace + # will be the only content on the subsequent line. + self.write_soft_break() + self.write(quote(c)) + + +def body_encode(body, maxlinelen=76, eol=NL): + """Encode with quoted-printable, wrapping at maxlinelen characters. + + Each line of encoded text will end with eol, which defaults to "\\n". Set + this to "\\r\\n" if you will be using the result of this function directly + in an email. + + Each line will be wrapped at, at most, maxlinelen characters before the + eol string (maxlinelen defaults to 76 characters, the maximum value + permitted by RFC 2045). Long lines will have the 'soft line break' + quoted-printable character "=" appended to them, so the decoded text will + be identical to the original text. + + The minimum maxlinelen is 4 to have room for a quoted character ("=XX") + followed by a soft line break. Smaller values will generate a + ValueError. + + """ + + if maxlinelen < 4: + raise ValueError("maxlinelen must be at least 4") + if not body: + return body + + # The last line may or may not end in eol, but all other lines do. + last_has_eol = (body[-1] in '\r\n') + + # This accumulator will make it easier to build the encoded body. + encoded_body = _body_accumulator(maxlinelen, eol) + + lines = body.splitlines() + last_line_no = len(lines) - 1 + for line_no, line in enumerate(lines): + last_char_index = len(line) - 1 + for i, c in enumerate(line): + if body_check(ord(c)): + c = quote(c) + encoded_body.write_char(c, i==last_char_index) + # Add an eol if input line had eol. All input lines have eol except + # possibly the last one. + if line_no < last_line_no or last_has_eol: + encoded_body.newline() + + return encoded_body.getvalue() + + + +# BAW: I'm not sure if the intent was for the signature of this function to be +# the same as base64MIME.decode() or not... +def decode(encoded, eol=NL): + """Decode a quoted-printable string. + + Lines are separated with eol, which defaults to \\n. 
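A hedged sketch of the helpers in this module (the strings are illustrative):

    from future.backports.email import quoprimime
    quoprimime.header_encode(b'hello world')   # -> '=?iso-8859-1?q?hello_world?='
    quoprimime.decode('caf=E9 au lait')        # -> 'caf\xe9 au lait'
    quoprimime.unquote('=3D')                  # -> '='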
+ """ + if not encoded: + return encoded + # BAW: see comment in encode() above. Again, we're building up the + # decoded string with string concatenation, which could be done much more + # efficiently. + decoded = '' + + for line in encoded.splitlines(): + line = line.rstrip() + if not line: + decoded += eol + continue + + i = 0 + n = len(line) + while i < n: + c = line[i] + if c != '=': + decoded += c + i += 1 + # Otherwise, c == "=". Are we at the end of the line? If so, add + # a soft line break. + elif i+1 == n: + i += 1 + continue + # Decode if in form =AB + elif i+2 < n and line[i+1] in hexdigits and line[i+2] in hexdigits: + decoded += unquote(line[i:i+3]) + i += 3 + # Otherwise, not in form =AB, pass literally + else: + decoded += c + i += 1 + + if i == n: + decoded += eol + # Special case if original string did not end with eol + if encoded[-1] not in '\r\n' and decoded.endswith(eol): + decoded = decoded[:-1] + return decoded + + +# For convenience and backwards compatibility w/ standard base64 module +body_decode = decode +decodestring = decode + + + +def _unquote_match(match): + """Turn a match in the form =AB to the ASCII character with value 0xab""" + s = match.group(0) + return unquote(s) + + +# Header decoding is done a bit differently +def header_decode(s): + """Decode a string encoded with RFC 2045 MIME header `Q' encoding. + + This function does not parse a full MIME header value encoded with + quoted-printable (like =?iso-8895-1?q?Hello_World?=) -- please use + the high level email.header class for that functionality. + """ + s = s.replace('_', ' ') + return re.sub(r'=[a-fA-F0-9]{2}', _unquote_match, s, re.ASCII) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/email/utils.py b/minor_project/lib/python3.6/site-packages/future/backports/email/utils.py new file mode 100644 index 0000000..4abebf7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/email/utils.py @@ -0,0 +1,400 @@ +# Copyright (C) 2001-2010 Python Software Foundation +# Author: Barry Warsaw +# Contact: email-sig@python.org + +"""Miscellaneous utilities.""" + +from __future__ import unicode_literals +from __future__ import division +from __future__ import absolute_import +from future import utils +from future.builtins import bytes, int, str + +__all__ = [ + 'collapse_rfc2231_value', + 'decode_params', + 'decode_rfc2231', + 'encode_rfc2231', + 'formataddr', + 'formatdate', + 'format_datetime', + 'getaddresses', + 'make_msgid', + 'mktime_tz', + 'parseaddr', + 'parsedate', + 'parsedate_tz', + 'parsedate_to_datetime', + 'unquote', + ] + +import os +import re +if utils.PY2: + re.ASCII = 0 +import time +import base64 +import random +import socket +from future.backports import datetime +from future.backports.urllib.parse import quote as url_quote, unquote as url_unquote +import warnings +from io import StringIO + +from future.backports.email._parseaddr import quote +from future.backports.email._parseaddr import AddressList as _AddressList +from future.backports.email._parseaddr import mktime_tz + +from future.backports.email._parseaddr import parsedate, parsedate_tz, _parsedate_tz + +from quopri import decodestring as _qdecode + +# Intrapackage imports +from future.backports.email.encoders import _bencode, _qencode +from future.backports.email.charset import Charset + +COMMASPACE = ', ' +EMPTYSTRING = '' +UEMPTYSTRING = '' +CRLF = '\r\n' +TICK = "'" + +specialsre = re.compile(r'[][\\()<>@,:;".]') +escapesre = re.compile(r'[\\"]') + +# How to figure out if we are 
processing strings that come from a byte +# source with undecodable characters. +_has_surrogates = re.compile( + '([^\ud800-\udbff]|\A)[\udc00-\udfff]([^\udc00-\udfff]|\Z)').search + +# How to deal with a string containing bytes before handing it to the +# application through the 'normal' interface. +def _sanitize(string): + # Turn any escaped bytes into unicode 'unknown' char. + original_bytes = string.encode('ascii', 'surrogateescape') + return original_bytes.decode('ascii', 'replace') + + +# Helpers + +def formataddr(pair, charset='utf-8'): + """The inverse of parseaddr(), this takes a 2-tuple of the form + (realname, email_address) and returns the string value suitable + for an RFC 2822 From, To or Cc header. + + If the first element of pair is false, then the second element is + returned unmodified. + + Optional charset if given is the character set that is used to encode + realname in case realname is not ASCII safe. Can be an instance of str or + a Charset-like object which has a header_encode method. Default is + 'utf-8'. + """ + name, address = pair + # The address MUST (per RFC) be ascii, so raise an UnicodeError if it isn't. + address.encode('ascii') + if name: + try: + name.encode('ascii') + except UnicodeEncodeError: + if isinstance(charset, str): + charset = Charset(charset) + encoded_name = charset.header_encode(name) + return "%s <%s>" % (encoded_name, address) + else: + quotes = '' + if specialsre.search(name): + quotes = '"' + name = escapesre.sub(r'\\\g<0>', name) + return '%s%s%s <%s>' % (quotes, name, quotes, address) + return address + + + +def getaddresses(fieldvalues): + """Return a list of (REALNAME, EMAIL) for each fieldvalue.""" + all = COMMASPACE.join(fieldvalues) + a = _AddressList(all) + return a.addresslist + + + +ecre = re.compile(r''' + =\? # literal =? + (?P[^?]*?) # non-greedy up to the next ? is the charset + \? # literal ? + (?P[qb]) # either a "q" or a "b", case insensitive + \? # literal ? + (?P.*?) # non-greedy up to the next ?= is the atom + \?= # literal ?= + ''', re.VERBOSE | re.IGNORECASE) + + +def _format_timetuple_and_zone(timetuple, zone): + return '%s, %02d %s %04d %02d:%02d:%02d %s' % ( + ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][timetuple[6]], + timetuple[2], + ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][timetuple[1] - 1], + timetuple[0], timetuple[3], timetuple[4], timetuple[5], + zone) + +def formatdate(timeval=None, localtime=False, usegmt=False): + """Returns a date string as specified by RFC 2822, e.g.: + + Fri, 09 Nov 2001 01:08:47 -0000 + + Optional timeval if given is a floating point time value as accepted by + gmtime() and localtime(), otherwise the current time is used. + + Optional localtime is a flag that when True, interprets timeval, and + returns a date relative to the local timezone instead of UTC, properly + taking daylight savings time into account. + + Optional argument usegmt means that the timezone is written out as + an ascii string, not numeric one (so "GMT" instead of "+0000"). This + is needed for HTTP, and is only used when localtime==False. + """ + # Note: we cannot use strftime() because that honors the locale and RFC + # 2822 requires that day and month names be the English abbreviations. + if timeval is None: + timeval = time.time() + if localtime: + now = time.localtime(timeval) + # Calculate timezone offset, based on whether the local zone has + # daylight savings time, and whether DST is in effect. 
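# A hedged illustration of the helpers above (epoch time 0 falls on a Thursday):
#
#     formataddr(('Ben', 'ben@example.com'))   # -> 'Ben <ben@example.com>'
#     formatdate(0, usegmt=True)               # -> 'Thu, 01 Jan 1970 00:00:00 GMT'
#     formatdate(0)                            # -> 'Thu, 01 Jan 1970 00:00:00 -0000'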
+ if time.daylight and now[-1]: + offset = time.altzone + else: + offset = time.timezone + hours, minutes = divmod(abs(offset), 3600) + # Remember offset is in seconds west of UTC, but the timezone is in + # minutes east of UTC, so the signs differ. + if offset > 0: + sign = '-' + else: + sign = '+' + zone = '%s%02d%02d' % (sign, hours, minutes // 60) + else: + now = time.gmtime(timeval) + # Timezone offset is always -0000 + if usegmt: + zone = 'GMT' + else: + zone = '-0000' + return _format_timetuple_and_zone(now, zone) + +def format_datetime(dt, usegmt=False): + """Turn a datetime into a date string as specified in RFC 2822. + + If usegmt is True, dt must be an aware datetime with an offset of zero. In + this case 'GMT' will be rendered instead of the normal +0000 required by + RFC2822. This is to support HTTP headers involving date stamps. + """ + now = dt.timetuple() + if usegmt: + if dt.tzinfo is None or dt.tzinfo != datetime.timezone.utc: + raise ValueError("usegmt option requires a UTC datetime") + zone = 'GMT' + elif dt.tzinfo is None: + zone = '-0000' + else: + zone = dt.strftime("%z") + return _format_timetuple_and_zone(now, zone) + + +def make_msgid(idstring=None, domain=None): + """Returns a string suitable for RFC 2822 compliant Message-ID, e.g: + + <20020201195627.33539.96671@nightshade.la.mastaler.com> + + Optional idstring if given is a string used to strengthen the + uniqueness of the message id. Optional domain if given provides the + portion of the message id after the '@'. It defaults to the locally + defined hostname. + """ + timeval = time.time() + utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(timeval)) + pid = os.getpid() + randint = random.randrange(100000) + if idstring is None: + idstring = '' + else: + idstring = '.' + idstring + if domain is None: + domain = socket.getfqdn() + msgid = '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, idstring, domain) + return msgid + + +def parsedate_to_datetime(data): + _3to2list = list(_parsedate_tz(data)) + dtuple, tz, = [_3to2list[:-1]] + _3to2list[-1:] + if tz is None: + return datetime.datetime(*dtuple[:6]) + return datetime.datetime(*dtuple[:6], + tzinfo=datetime.timezone(datetime.timedelta(seconds=tz))) + + +def parseaddr(addr): + addrs = _AddressList(addr).addresslist + if not addrs: + return '', '' + return addrs[0] + + +# rfc822.unquote() doesn't properly de-backslash-ify in Python pre-2.3. +def unquote(str): + """Remove quotes from a string.""" + if len(str) > 1: + if str.startswith('"') and str.endswith('"'): + return str[1:-1].replace('\\\\', '\\').replace('\\"', '"') + if str.startswith('<') and str.endswith('>'): + return str[1:-1] + return str + + + +# RFC2231-related functions - parameter encoding and decoding +def decode_rfc2231(s): + """Decode string according to RFC 2231""" + parts = s.split(TICK, 2) + if len(parts) <= 2: + return None, None, s + return parts + + +def encode_rfc2231(s, charset=None, language=None): + """Encode string according to RFC 2231. + + If neither charset nor language is given, then s is returned as-is. If + charset is given but not language, the string is encoded using the empty + string for language. + """ + s = url_quote(s, safe='', encoding=charset or 'ascii') + if charset is None and language is None: + return s + if language is None: + language = '' + return "%s'%s'%s" % (charset, language, s) + + +rfc2231_continuation = re.compile(r'^(?P\w+)\*((?P[0-9]+)\*?)?$', + re.ASCII) + +def decode_params(params): + """Decode parameters list according to RFC 2231. 
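A hedged sketch of the RFC 2231 helpers defined just above (illustrative values):

    encode_rfc2231('caf\xe9', 'utf-8')     # -> "utf-8''caf%C3%A9"
    decode_rfc2231("utf-8''caf%C3%A9")     # -> ['utf-8', '', 'caf%C3%A9']
    decode_rfc2231('plain-value')          # -> (None, None, 'plain-value')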
+ + params is a sequence of 2-tuples containing (param name, string value). + """ + # Copy params so we don't mess with the original + params = params[:] + new_params = [] + # Map parameter's name to a list of continuations. The values are a + # 3-tuple of the continuation number, the string value, and a flag + # specifying whether a particular segment is %-encoded. + rfc2231_params = {} + name, value = params.pop(0) + new_params.append((name, value)) + while params: + name, value = params.pop(0) + if name.endswith('*'): + encoded = True + else: + encoded = False + value = unquote(value) + mo = rfc2231_continuation.match(name) + if mo: + name, num = mo.group('name', 'num') + if num is not None: + num = int(num) + rfc2231_params.setdefault(name, []).append((num, value, encoded)) + else: + new_params.append((name, '"%s"' % quote(value))) + if rfc2231_params: + for name, continuations in rfc2231_params.items(): + value = [] + extended = False + # Sort by number + continuations.sort() + # And now append all values in numerical order, converting + # %-encodings for the encoded segments. If any of the + # continuation names ends in a *, then the entire string, after + # decoding segments and concatenating, must have the charset and + # language specifiers at the beginning of the string. + for num, s, encoded in continuations: + if encoded: + # Decode as "latin-1", so the characters in s directly + # represent the percent-encoded octet values. + # collapse_rfc2231_value treats this as an octet sequence. + s = url_unquote(s, encoding="latin-1") + extended = True + value.append(s) + value = quote(EMPTYSTRING.join(value)) + if extended: + charset, language, value = decode_rfc2231(value) + new_params.append((name, (charset, language, '"%s"' % value))) + else: + new_params.append((name, '"%s"' % value)) + return new_params + +def collapse_rfc2231_value(value, errors='replace', + fallback_charset='us-ascii'): + if not isinstance(value, tuple) or len(value) != 3: + return unquote(value) + # While value comes to us as a unicode string, we need it to be a bytes + # object. We do not want bytes() normal utf-8 decoder, we want a straight + # interpretation of the string as character bytes. + charset, language, text = value + rawbytes = bytes(text, 'raw-unicode-escape') + try: + return str(rawbytes, charset, errors) + except LookupError: + # charset is not a known codec. + return unquote(text) + + +# +# datetime doesn't provide a localtime function yet, so provide one. Code +# adapted from the patch in issue 9527. This may not be perfect, but it is +# better than not having it. +# + +def localtime(dt=None, isdst=-1): + """Return local time as an aware datetime object. + + If called without arguments, return current time. Otherwise *dt* + argument should be a datetime instance, and it is converted to the + local time zone according to the system time zone database. If *dt* is + naive (that is, dt.tzinfo is None), it is assumed to be in local time. + In this case, a positive or zero value for *isdst* causes localtime to + presume initially that summer time (for example, Daylight Saving Time) + is or is not (respectively) in effect for the specified time. A + negative value for *isdst* causes the localtime() function to attempt + to divine whether summer time is in effect for the specified time. + + """ + if dt is None: + return datetime.datetime.now(datetime.timezone.utc).astimezone() + if dt.tzinfo is not None: + return dt.astimezone() + # We have a naive datetime. 
Convert to a (localtime) timetuple and pass to + # system mktime together with the isdst hint. System mktime will return + # seconds since epoch. + tm = dt.timetuple()[:-1] + (isdst,) + seconds = time.mktime(tm) + localtm = time.localtime(seconds) + try: + delta = datetime.timedelta(seconds=localtm.tm_gmtoff) + tz = datetime.timezone(delta, localtm.tm_zone) + except AttributeError: + # Compute UTC offset and compare with the value implied by tm_isdst. + # If the values match, use the zone name implied by tm_isdst. + delta = dt - datetime.datetime(*time.gmtime(seconds)[:6]) + dst = time.daylight and localtm.tm_isdst > 0 + gmtoff = -(time.altzone if dst else time.timezone) + if delta == datetime.timedelta(seconds=gmtoff): + tz = datetime.timezone(delta, time.tzname[dst]) + else: + tz = datetime.timezone(delta) + return dt.replace(tzinfo=tz) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/html/__init__.py b/minor_project/lib/python3.6/site-packages/future/backports/html/__init__.py new file mode 100644 index 0000000..58e133f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/html/__init__.py @@ -0,0 +1,27 @@ +""" +General functions for HTML manipulation, backported from Py3. + +Note that this uses Python 2.7 code with the corresponding Python 3 +module names and locations. +""" + +from __future__ import unicode_literals + + +_escape_map = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;'} +_escape_map_full = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;', + ord('"'): '&quot;', ord('\''): '&#x27;'} + +# NB: this is a candidate for a bytes/string polymorphic interface + +def escape(s, quote=True): + """ + Replace special characters "&", "<" and ">" to HTML-safe sequences. + If the optional flag quote is true (the default), the quotation mark + characters, both double quote (") and single quote (') characters are also + translated.
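A hedged sketch, assuming escape() behaves like the Python 3 html.escape this file backports:

    escape('<b>"A & B"</b>')                 # -> '&lt;b&gt;&quot;A &amp; B&quot;&lt;/b&gt;'
    escape('<b>"A & B"</b>', quote=False)    # -> '&lt;b&gt;"A &amp; B"&lt;/b&gt;'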
+ """ + assert not isinstance(s, bytes), 'Pass a unicode string' + if quote: + return s.translate(_escape_map_full) + return s.translate(_escape_map) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/html/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/html/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..704a5ff Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/html/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/html/__pycache__/entities.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/html/__pycache__/entities.cpython-36.pyc new file mode 100644 index 0000000..8abd996 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/html/__pycache__/entities.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/html/__pycache__/parser.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/html/__pycache__/parser.cpython-36.pyc new file mode 100644 index 0000000..507253b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/html/__pycache__/parser.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/html/entities.py b/minor_project/lib/python3.6/site-packages/future/backports/html/entities.py new file mode 100644 index 0000000..5c73f69 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/html/entities.py @@ -0,0 +1,2514 @@ +"""HTML character entity references. + +Backported for python-future from Python 3.3 +""" + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future.builtins import * + + +# maps the HTML entity name to the Unicode codepoint +name2codepoint = { + 'AElig': 0x00c6, # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1 + 'Aacute': 0x00c1, # latin capital letter A with acute, U+00C1 ISOlat1 + 'Acirc': 0x00c2, # latin capital letter A with circumflex, U+00C2 ISOlat1 + 'Agrave': 0x00c0, # latin capital letter A with grave = latin capital letter A grave, U+00C0 ISOlat1 + 'Alpha': 0x0391, # greek capital letter alpha, U+0391 + 'Aring': 0x00c5, # latin capital letter A with ring above = latin capital letter A ring, U+00C5 ISOlat1 + 'Atilde': 0x00c3, # latin capital letter A with tilde, U+00C3 ISOlat1 + 'Auml': 0x00c4, # latin capital letter A with diaeresis, U+00C4 ISOlat1 + 'Beta': 0x0392, # greek capital letter beta, U+0392 + 'Ccedil': 0x00c7, # latin capital letter C with cedilla, U+00C7 ISOlat1 + 'Chi': 0x03a7, # greek capital letter chi, U+03A7 + 'Dagger': 0x2021, # double dagger, U+2021 ISOpub + 'Delta': 0x0394, # greek capital letter delta, U+0394 ISOgrk3 + 'ETH': 0x00d0, # latin capital letter ETH, U+00D0 ISOlat1 + 'Eacute': 0x00c9, # latin capital letter E with acute, U+00C9 ISOlat1 + 'Ecirc': 0x00ca, # latin capital letter E with circumflex, U+00CA ISOlat1 + 'Egrave': 0x00c8, # latin capital letter E with grave, U+00C8 ISOlat1 + 'Epsilon': 0x0395, # greek capital letter epsilon, U+0395 + 'Eta': 0x0397, # greek capital letter eta, U+0397 + 'Euml': 0x00cb, # latin capital letter E with diaeresis, U+00CB ISOlat1 + 'Gamma': 0x0393, # greek capital letter gamma, U+0393 ISOgrk3 + 'Iacute': 0x00cd, # latin capital letter I with acute, U+00CD ISOlat1 + 'Icirc': 0x00ce, # latin capital letter I with circumflex, 
U+00CE ISOlat1 + 'Igrave': 0x00cc, # latin capital letter I with grave, U+00CC ISOlat1 + 'Iota': 0x0399, # greek capital letter iota, U+0399 + 'Iuml': 0x00cf, # latin capital letter I with diaeresis, U+00CF ISOlat1 + 'Kappa': 0x039a, # greek capital letter kappa, U+039A + 'Lambda': 0x039b, # greek capital letter lambda, U+039B ISOgrk3 + 'Mu': 0x039c, # greek capital letter mu, U+039C + 'Ntilde': 0x00d1, # latin capital letter N with tilde, U+00D1 ISOlat1 + 'Nu': 0x039d, # greek capital letter nu, U+039D + 'OElig': 0x0152, # latin capital ligature OE, U+0152 ISOlat2 + 'Oacute': 0x00d3, # latin capital letter O with acute, U+00D3 ISOlat1 + 'Ocirc': 0x00d4, # latin capital letter O with circumflex, U+00D4 ISOlat1 + 'Ograve': 0x00d2, # latin capital letter O with grave, U+00D2 ISOlat1 + 'Omega': 0x03a9, # greek capital letter omega, U+03A9 ISOgrk3 + 'Omicron': 0x039f, # greek capital letter omicron, U+039F + 'Oslash': 0x00d8, # latin capital letter O with stroke = latin capital letter O slash, U+00D8 ISOlat1 + 'Otilde': 0x00d5, # latin capital letter O with tilde, U+00D5 ISOlat1 + 'Ouml': 0x00d6, # latin capital letter O with diaeresis, U+00D6 ISOlat1 + 'Phi': 0x03a6, # greek capital letter phi, U+03A6 ISOgrk3 + 'Pi': 0x03a0, # greek capital letter pi, U+03A0 ISOgrk3 + 'Prime': 0x2033, # double prime = seconds = inches, U+2033 ISOtech + 'Psi': 0x03a8, # greek capital letter psi, U+03A8 ISOgrk3 + 'Rho': 0x03a1, # greek capital letter rho, U+03A1 + 'Scaron': 0x0160, # latin capital letter S with caron, U+0160 ISOlat2 + 'Sigma': 0x03a3, # greek capital letter sigma, U+03A3 ISOgrk3 + 'THORN': 0x00de, # latin capital letter THORN, U+00DE ISOlat1 + 'Tau': 0x03a4, # greek capital letter tau, U+03A4 + 'Theta': 0x0398, # greek capital letter theta, U+0398 ISOgrk3 + 'Uacute': 0x00da, # latin capital letter U with acute, U+00DA ISOlat1 + 'Ucirc': 0x00db, # latin capital letter U with circumflex, U+00DB ISOlat1 + 'Ugrave': 0x00d9, # latin capital letter U with grave, U+00D9 ISOlat1 + 'Upsilon': 0x03a5, # greek capital letter upsilon, U+03A5 ISOgrk3 + 'Uuml': 0x00dc, # latin capital letter U with diaeresis, U+00DC ISOlat1 + 'Xi': 0x039e, # greek capital letter xi, U+039E ISOgrk3 + 'Yacute': 0x00dd, # latin capital letter Y with acute, U+00DD ISOlat1 + 'Yuml': 0x0178, # latin capital letter Y with diaeresis, U+0178 ISOlat2 + 'Zeta': 0x0396, # greek capital letter zeta, U+0396 + 'aacute': 0x00e1, # latin small letter a with acute, U+00E1 ISOlat1 + 'acirc': 0x00e2, # latin small letter a with circumflex, U+00E2 ISOlat1 + 'acute': 0x00b4, # acute accent = spacing acute, U+00B4 ISOdia + 'aelig': 0x00e6, # latin small letter ae = latin small ligature ae, U+00E6 ISOlat1 + 'agrave': 0x00e0, # latin small letter a with grave = latin small letter a grave, U+00E0 ISOlat1 + 'alefsym': 0x2135, # alef symbol = first transfinite cardinal, U+2135 NEW + 'alpha': 0x03b1, # greek small letter alpha, U+03B1 ISOgrk3 + 'amp': 0x0026, # ampersand, U+0026 ISOnum + 'and': 0x2227, # logical and = wedge, U+2227 ISOtech + 'ang': 0x2220, # angle, U+2220 ISOamso + 'aring': 0x00e5, # latin small letter a with ring above = latin small letter a ring, U+00E5 ISOlat1 + 'asymp': 0x2248, # almost equal to = asymptotic to, U+2248 ISOamsr + 'atilde': 0x00e3, # latin small letter a with tilde, U+00E3 ISOlat1 + 'auml': 0x00e4, # latin small letter a with diaeresis, U+00E4 ISOlat1 + 'bdquo': 0x201e, # double low-9 quotation mark, U+201E NEW + 'beta': 0x03b2, # greek small letter beta, U+03B2 ISOgrk3 + 'brvbar': 0x00a6, # broken bar = broken 
vertical bar, U+00A6 ISOnum + 'bull': 0x2022, # bullet = black small circle, U+2022 ISOpub + 'cap': 0x2229, # intersection = cap, U+2229 ISOtech + 'ccedil': 0x00e7, # latin small letter c with cedilla, U+00E7 ISOlat1 + 'cedil': 0x00b8, # cedilla = spacing cedilla, U+00B8 ISOdia + 'cent': 0x00a2, # cent sign, U+00A2 ISOnum + 'chi': 0x03c7, # greek small letter chi, U+03C7 ISOgrk3 + 'circ': 0x02c6, # modifier letter circumflex accent, U+02C6 ISOpub + 'clubs': 0x2663, # black club suit = shamrock, U+2663 ISOpub + 'cong': 0x2245, # approximately equal to, U+2245 ISOtech + 'copy': 0x00a9, # copyright sign, U+00A9 ISOnum + 'crarr': 0x21b5, # downwards arrow with corner leftwards = carriage return, U+21B5 NEW + 'cup': 0x222a, # union = cup, U+222A ISOtech + 'curren': 0x00a4, # currency sign, U+00A4 ISOnum + 'dArr': 0x21d3, # downwards double arrow, U+21D3 ISOamsa + 'dagger': 0x2020, # dagger, U+2020 ISOpub + 'darr': 0x2193, # downwards arrow, U+2193 ISOnum + 'deg': 0x00b0, # degree sign, U+00B0 ISOnum + 'delta': 0x03b4, # greek small letter delta, U+03B4 ISOgrk3 + 'diams': 0x2666, # black diamond suit, U+2666 ISOpub + 'divide': 0x00f7, # division sign, U+00F7 ISOnum + 'eacute': 0x00e9, # latin small letter e with acute, U+00E9 ISOlat1 + 'ecirc': 0x00ea, # latin small letter e with circumflex, U+00EA ISOlat1 + 'egrave': 0x00e8, # latin small letter e with grave, U+00E8 ISOlat1 + 'empty': 0x2205, # empty set = null set = diameter, U+2205 ISOamso + 'emsp': 0x2003, # em space, U+2003 ISOpub + 'ensp': 0x2002, # en space, U+2002 ISOpub + 'epsilon': 0x03b5, # greek small letter epsilon, U+03B5 ISOgrk3 + 'equiv': 0x2261, # identical to, U+2261 ISOtech + 'eta': 0x03b7, # greek small letter eta, U+03B7 ISOgrk3 + 'eth': 0x00f0, # latin small letter eth, U+00F0 ISOlat1 + 'euml': 0x00eb, # latin small letter e with diaeresis, U+00EB ISOlat1 + 'euro': 0x20ac, # euro sign, U+20AC NEW + 'exist': 0x2203, # there exists, U+2203 ISOtech + 'fnof': 0x0192, # latin small f with hook = function = florin, U+0192 ISOtech + 'forall': 0x2200, # for all, U+2200 ISOtech + 'frac12': 0x00bd, # vulgar fraction one half = fraction one half, U+00BD ISOnum + 'frac14': 0x00bc, # vulgar fraction one quarter = fraction one quarter, U+00BC ISOnum + 'frac34': 0x00be, # vulgar fraction three quarters = fraction three quarters, U+00BE ISOnum + 'frasl': 0x2044, # fraction slash, U+2044 NEW + 'gamma': 0x03b3, # greek small letter gamma, U+03B3 ISOgrk3 + 'ge': 0x2265, # greater-than or equal to, U+2265 ISOtech + 'gt': 0x003e, # greater-than sign, U+003E ISOnum + 'hArr': 0x21d4, # left right double arrow, U+21D4 ISOamsa + 'harr': 0x2194, # left right arrow, U+2194 ISOamsa + 'hearts': 0x2665, # black heart suit = valentine, U+2665 ISOpub + 'hellip': 0x2026, # horizontal ellipsis = three dot leader, U+2026 ISOpub + 'iacute': 0x00ed, # latin small letter i with acute, U+00ED ISOlat1 + 'icirc': 0x00ee, # latin small letter i with circumflex, U+00EE ISOlat1 + 'iexcl': 0x00a1, # inverted exclamation mark, U+00A1 ISOnum + 'igrave': 0x00ec, # latin small letter i with grave, U+00EC ISOlat1 + 'image': 0x2111, # blackletter capital I = imaginary part, U+2111 ISOamso + 'infin': 0x221e, # infinity, U+221E ISOtech + 'int': 0x222b, # integral, U+222B ISOtech + 'iota': 0x03b9, # greek small letter iota, U+03B9 ISOgrk3 + 'iquest': 0x00bf, # inverted question mark = turned question mark, U+00BF ISOnum + 'isin': 0x2208, # element of, U+2208 ISOtech + 'iuml': 0x00ef, # latin small letter i with diaeresis, U+00EF ISOlat1 + 'kappa': 0x03ba, # greek small letter 
kappa, U+03BA ISOgrk3 + 'lArr': 0x21d0, # leftwards double arrow, U+21D0 ISOtech + 'lambda': 0x03bb, # greek small letter lambda, U+03BB ISOgrk3 + 'lang': 0x2329, # left-pointing angle bracket = bra, U+2329 ISOtech + 'laquo': 0x00ab, # left-pointing double angle quotation mark = left pointing guillemet, U+00AB ISOnum + 'larr': 0x2190, # leftwards arrow, U+2190 ISOnum + 'lceil': 0x2308, # left ceiling = apl upstile, U+2308 ISOamsc + 'ldquo': 0x201c, # left double quotation mark, U+201C ISOnum + 'le': 0x2264, # less-than or equal to, U+2264 ISOtech + 'lfloor': 0x230a, # left floor = apl downstile, U+230A ISOamsc + 'lowast': 0x2217, # asterisk operator, U+2217 ISOtech + 'loz': 0x25ca, # lozenge, U+25CA ISOpub + 'lrm': 0x200e, # left-to-right mark, U+200E NEW RFC 2070 + 'lsaquo': 0x2039, # single left-pointing angle quotation mark, U+2039 ISO proposed + 'lsquo': 0x2018, # left single quotation mark, U+2018 ISOnum + 'lt': 0x003c, # less-than sign, U+003C ISOnum + 'macr': 0x00af, # macron = spacing macron = overline = APL overbar, U+00AF ISOdia + 'mdash': 0x2014, # em dash, U+2014 ISOpub + 'micro': 0x00b5, # micro sign, U+00B5 ISOnum + 'middot': 0x00b7, # middle dot = Georgian comma = Greek middle dot, U+00B7 ISOnum + 'minus': 0x2212, # minus sign, U+2212 ISOtech + 'mu': 0x03bc, # greek small letter mu, U+03BC ISOgrk3 + 'nabla': 0x2207, # nabla = backward difference, U+2207 ISOtech + 'nbsp': 0x00a0, # no-break space = non-breaking space, U+00A0 ISOnum + 'ndash': 0x2013, # en dash, U+2013 ISOpub + 'ne': 0x2260, # not equal to, U+2260 ISOtech + 'ni': 0x220b, # contains as member, U+220B ISOtech + 'not': 0x00ac, # not sign, U+00AC ISOnum + 'notin': 0x2209, # not an element of, U+2209 ISOtech + 'nsub': 0x2284, # not a subset of, U+2284 ISOamsn + 'ntilde': 0x00f1, # latin small letter n with tilde, U+00F1 ISOlat1 + 'nu': 0x03bd, # greek small letter nu, U+03BD ISOgrk3 + 'oacute': 0x00f3, # latin small letter o with acute, U+00F3 ISOlat1 + 'ocirc': 0x00f4, # latin small letter o with circumflex, U+00F4 ISOlat1 + 'oelig': 0x0153, # latin small ligature oe, U+0153 ISOlat2 + 'ograve': 0x00f2, # latin small letter o with grave, U+00F2 ISOlat1 + 'oline': 0x203e, # overline = spacing overscore, U+203E NEW + 'omega': 0x03c9, # greek small letter omega, U+03C9 ISOgrk3 + 'omicron': 0x03bf, # greek small letter omicron, U+03BF NEW + 'oplus': 0x2295, # circled plus = direct sum, U+2295 ISOamsb + 'or': 0x2228, # logical or = vee, U+2228 ISOtech + 'ordf': 0x00aa, # feminine ordinal indicator, U+00AA ISOnum + 'ordm': 0x00ba, # masculine ordinal indicator, U+00BA ISOnum + 'oslash': 0x00f8, # latin small letter o with stroke, = latin small letter o slash, U+00F8 ISOlat1 + 'otilde': 0x00f5, # latin small letter o with tilde, U+00F5 ISOlat1 + 'otimes': 0x2297, # circled times = vector product, U+2297 ISOamsb + 'ouml': 0x00f6, # latin small letter o with diaeresis, U+00F6 ISOlat1 + 'para': 0x00b6, # pilcrow sign = paragraph sign, U+00B6 ISOnum + 'part': 0x2202, # partial differential, U+2202 ISOtech + 'permil': 0x2030, # per mille sign, U+2030 ISOtech + 'perp': 0x22a5, # up tack = orthogonal to = perpendicular, U+22A5 ISOtech + 'phi': 0x03c6, # greek small letter phi, U+03C6 ISOgrk3 + 'pi': 0x03c0, # greek small letter pi, U+03C0 ISOgrk3 + 'piv': 0x03d6, # greek pi symbol, U+03D6 ISOgrk3 + 'plusmn': 0x00b1, # plus-minus sign = plus-or-minus sign, U+00B1 ISOnum + 'pound': 0x00a3, # pound sign, U+00A3 ISOnum + 'prime': 0x2032, # prime = minutes = feet, U+2032 ISOtech + 'prod': 0x220f, # n-ary product = product sign, 
U+220F ISOamsb + 'prop': 0x221d, # proportional to, U+221D ISOtech + 'psi': 0x03c8, # greek small letter psi, U+03C8 ISOgrk3 + 'quot': 0x0022, # quotation mark = APL quote, U+0022 ISOnum + 'rArr': 0x21d2, # rightwards double arrow, U+21D2 ISOtech + 'radic': 0x221a, # square root = radical sign, U+221A ISOtech + 'rang': 0x232a, # right-pointing angle bracket = ket, U+232A ISOtech + 'raquo': 0x00bb, # right-pointing double angle quotation mark = right pointing guillemet, U+00BB ISOnum + 'rarr': 0x2192, # rightwards arrow, U+2192 ISOnum + 'rceil': 0x2309, # right ceiling, U+2309 ISOamsc + 'rdquo': 0x201d, # right double quotation mark, U+201D ISOnum + 'real': 0x211c, # blackletter capital R = real part symbol, U+211C ISOamso + 'reg': 0x00ae, # registered sign = registered trade mark sign, U+00AE ISOnum + 'rfloor': 0x230b, # right floor, U+230B ISOamsc + 'rho': 0x03c1, # greek small letter rho, U+03C1 ISOgrk3 + 'rlm': 0x200f, # right-to-left mark, U+200F NEW RFC 2070 + 'rsaquo': 0x203a, # single right-pointing angle quotation mark, U+203A ISO proposed + 'rsquo': 0x2019, # right single quotation mark, U+2019 ISOnum + 'sbquo': 0x201a, # single low-9 quotation mark, U+201A NEW + 'scaron': 0x0161, # latin small letter s with caron, U+0161 ISOlat2 + 'sdot': 0x22c5, # dot operator, U+22C5 ISOamsb + 'sect': 0x00a7, # section sign, U+00A7 ISOnum + 'shy': 0x00ad, # soft hyphen = discretionary hyphen, U+00AD ISOnum + 'sigma': 0x03c3, # greek small letter sigma, U+03C3 ISOgrk3 + 'sigmaf': 0x03c2, # greek small letter final sigma, U+03C2 ISOgrk3 + 'sim': 0x223c, # tilde operator = varies with = similar to, U+223C ISOtech + 'spades': 0x2660, # black spade suit, U+2660 ISOpub + 'sub': 0x2282, # subset of, U+2282 ISOtech + 'sube': 0x2286, # subset of or equal to, U+2286 ISOtech + 'sum': 0x2211, # n-ary sumation, U+2211 ISOamsb + 'sup': 0x2283, # superset of, U+2283 ISOtech + 'sup1': 0x00b9, # superscript one = superscript digit one, U+00B9 ISOnum + 'sup2': 0x00b2, # superscript two = superscript digit two = squared, U+00B2 ISOnum + 'sup3': 0x00b3, # superscript three = superscript digit three = cubed, U+00B3 ISOnum + 'supe': 0x2287, # superset of or equal to, U+2287 ISOtech + 'szlig': 0x00df, # latin small letter sharp s = ess-zed, U+00DF ISOlat1 + 'tau': 0x03c4, # greek small letter tau, U+03C4 ISOgrk3 + 'there4': 0x2234, # therefore, U+2234 ISOtech + 'theta': 0x03b8, # greek small letter theta, U+03B8 ISOgrk3 + 'thetasym': 0x03d1, # greek small letter theta symbol, U+03D1 NEW + 'thinsp': 0x2009, # thin space, U+2009 ISOpub + 'thorn': 0x00fe, # latin small letter thorn with, U+00FE ISOlat1 + 'tilde': 0x02dc, # small tilde, U+02DC ISOdia + 'times': 0x00d7, # multiplication sign, U+00D7 ISOnum + 'trade': 0x2122, # trade mark sign, U+2122 ISOnum + 'uArr': 0x21d1, # upwards double arrow, U+21D1 ISOamsa + 'uacute': 0x00fa, # latin small letter u with acute, U+00FA ISOlat1 + 'uarr': 0x2191, # upwards arrow, U+2191 ISOnum + 'ucirc': 0x00fb, # latin small letter u with circumflex, U+00FB ISOlat1 + 'ugrave': 0x00f9, # latin small letter u with grave, U+00F9 ISOlat1 + 'uml': 0x00a8, # diaeresis = spacing diaeresis, U+00A8 ISOdia + 'upsih': 0x03d2, # greek upsilon with hook symbol, U+03D2 NEW + 'upsilon': 0x03c5, # greek small letter upsilon, U+03C5 ISOgrk3 + 'uuml': 0x00fc, # latin small letter u with diaeresis, U+00FC ISOlat1 + 'weierp': 0x2118, # script capital P = power set = Weierstrass p, U+2118 ISOamso + 'xi': 0x03be, # greek small letter xi, U+03BE ISOgrk3 + 'yacute': 0x00fd, # latin small letter y with acute, 
U+00FD ISOlat1 + 'yen': 0x00a5, # yen sign = yuan sign, U+00A5 ISOnum + 'yuml': 0x00ff, # latin small letter y with diaeresis, U+00FF ISOlat1 + 'zeta': 0x03b6, # greek small letter zeta, U+03B6 ISOgrk3 + 'zwj': 0x200d, # zero width joiner, U+200D NEW RFC 2070 + 'zwnj': 0x200c, # zero width non-joiner, U+200C NEW RFC 2070 +} + + +# maps the HTML5 named character references to the equivalent Unicode character(s) +html5 = { + 'Aacute': '\xc1', + 'aacute': '\xe1', + 'Aacute;': '\xc1', + 'aacute;': '\xe1', + 'Abreve;': '\u0102', + 'abreve;': '\u0103', + 'ac;': '\u223e', + 'acd;': '\u223f', + 'acE;': '\u223e\u0333', + 'Acirc': '\xc2', + 'acirc': '\xe2', + 'Acirc;': '\xc2', + 'acirc;': '\xe2', + 'acute': '\xb4', + 'acute;': '\xb4', + 'Acy;': '\u0410', + 'acy;': '\u0430', + 'AElig': '\xc6', + 'aelig': '\xe6', + 'AElig;': '\xc6', + 'aelig;': '\xe6', + 'af;': '\u2061', + 'Afr;': '\U0001d504', + 'afr;': '\U0001d51e', + 'Agrave': '\xc0', + 'agrave': '\xe0', + 'Agrave;': '\xc0', + 'agrave;': '\xe0', + 'alefsym;': '\u2135', + 'aleph;': '\u2135', + 'Alpha;': '\u0391', + 'alpha;': '\u03b1', + 'Amacr;': '\u0100', + 'amacr;': '\u0101', + 'amalg;': '\u2a3f', + 'AMP': '&', + 'amp': '&', + 'AMP;': '&', + 'amp;': '&', + 'And;': '\u2a53', + 'and;': '\u2227', + 'andand;': '\u2a55', + 'andd;': '\u2a5c', + 'andslope;': '\u2a58', + 'andv;': '\u2a5a', + 'ang;': '\u2220', + 'ange;': '\u29a4', + 'angle;': '\u2220', + 'angmsd;': '\u2221', + 'angmsdaa;': '\u29a8', + 'angmsdab;': '\u29a9', + 'angmsdac;': '\u29aa', + 'angmsdad;': '\u29ab', + 'angmsdae;': '\u29ac', + 'angmsdaf;': '\u29ad', + 'angmsdag;': '\u29ae', + 'angmsdah;': '\u29af', + 'angrt;': '\u221f', + 'angrtvb;': '\u22be', + 'angrtvbd;': '\u299d', + 'angsph;': '\u2222', + 'angst;': '\xc5', + 'angzarr;': '\u237c', + 'Aogon;': '\u0104', + 'aogon;': '\u0105', + 'Aopf;': '\U0001d538', + 'aopf;': '\U0001d552', + 'ap;': '\u2248', + 'apacir;': '\u2a6f', + 'apE;': '\u2a70', + 'ape;': '\u224a', + 'apid;': '\u224b', + 'apos;': "'", + 'ApplyFunction;': '\u2061', + 'approx;': '\u2248', + 'approxeq;': '\u224a', + 'Aring': '\xc5', + 'aring': '\xe5', + 'Aring;': '\xc5', + 'aring;': '\xe5', + 'Ascr;': '\U0001d49c', + 'ascr;': '\U0001d4b6', + 'Assign;': '\u2254', + 'ast;': '*', + 'asymp;': '\u2248', + 'asympeq;': '\u224d', + 'Atilde': '\xc3', + 'atilde': '\xe3', + 'Atilde;': '\xc3', + 'atilde;': '\xe3', + 'Auml': '\xc4', + 'auml': '\xe4', + 'Auml;': '\xc4', + 'auml;': '\xe4', + 'awconint;': '\u2233', + 'awint;': '\u2a11', + 'backcong;': '\u224c', + 'backepsilon;': '\u03f6', + 'backprime;': '\u2035', + 'backsim;': '\u223d', + 'backsimeq;': '\u22cd', + 'Backslash;': '\u2216', + 'Barv;': '\u2ae7', + 'barvee;': '\u22bd', + 'Barwed;': '\u2306', + 'barwed;': '\u2305', + 'barwedge;': '\u2305', + 'bbrk;': '\u23b5', + 'bbrktbrk;': '\u23b6', + 'bcong;': '\u224c', + 'Bcy;': '\u0411', + 'bcy;': '\u0431', + 'bdquo;': '\u201e', + 'becaus;': '\u2235', + 'Because;': '\u2235', + 'because;': '\u2235', + 'bemptyv;': '\u29b0', + 'bepsi;': '\u03f6', + 'bernou;': '\u212c', + 'Bernoullis;': '\u212c', + 'Beta;': '\u0392', + 'beta;': '\u03b2', + 'beth;': '\u2136', + 'between;': '\u226c', + 'Bfr;': '\U0001d505', + 'bfr;': '\U0001d51f', + 'bigcap;': '\u22c2', + 'bigcirc;': '\u25ef', + 'bigcup;': '\u22c3', + 'bigodot;': '\u2a00', + 'bigoplus;': '\u2a01', + 'bigotimes;': '\u2a02', + 'bigsqcup;': '\u2a06', + 'bigstar;': '\u2605', + 'bigtriangledown;': '\u25bd', + 'bigtriangleup;': '\u25b3', + 'biguplus;': '\u2a04', + 'bigvee;': '\u22c1', + 'bigwedge;': '\u22c0', + 'bkarow;': '\u290d', + 'blacklozenge;': 
'\u29eb', + 'blacksquare;': '\u25aa', + 'blacktriangle;': '\u25b4', + 'blacktriangledown;': '\u25be', + 'blacktriangleleft;': '\u25c2', + 'blacktriangleright;': '\u25b8', + 'blank;': '\u2423', + 'blk12;': '\u2592', + 'blk14;': '\u2591', + 'blk34;': '\u2593', + 'block;': '\u2588', + 'bne;': '=\u20e5', + 'bnequiv;': '\u2261\u20e5', + 'bNot;': '\u2aed', + 'bnot;': '\u2310', + 'Bopf;': '\U0001d539', + 'bopf;': '\U0001d553', + 'bot;': '\u22a5', + 'bottom;': '\u22a5', + 'bowtie;': '\u22c8', + 'boxbox;': '\u29c9', + 'boxDL;': '\u2557', + 'boxDl;': '\u2556', + 'boxdL;': '\u2555', + 'boxdl;': '\u2510', + 'boxDR;': '\u2554', + 'boxDr;': '\u2553', + 'boxdR;': '\u2552', + 'boxdr;': '\u250c', + 'boxH;': '\u2550', + 'boxh;': '\u2500', + 'boxHD;': '\u2566', + 'boxHd;': '\u2564', + 'boxhD;': '\u2565', + 'boxhd;': '\u252c', + 'boxHU;': '\u2569', + 'boxHu;': '\u2567', + 'boxhU;': '\u2568', + 'boxhu;': '\u2534', + 'boxminus;': '\u229f', + 'boxplus;': '\u229e', + 'boxtimes;': '\u22a0', + 'boxUL;': '\u255d', + 'boxUl;': '\u255c', + 'boxuL;': '\u255b', + 'boxul;': '\u2518', + 'boxUR;': '\u255a', + 'boxUr;': '\u2559', + 'boxuR;': '\u2558', + 'boxur;': '\u2514', + 'boxV;': '\u2551', + 'boxv;': '\u2502', + 'boxVH;': '\u256c', + 'boxVh;': '\u256b', + 'boxvH;': '\u256a', + 'boxvh;': '\u253c', + 'boxVL;': '\u2563', + 'boxVl;': '\u2562', + 'boxvL;': '\u2561', + 'boxvl;': '\u2524', + 'boxVR;': '\u2560', + 'boxVr;': '\u255f', + 'boxvR;': '\u255e', + 'boxvr;': '\u251c', + 'bprime;': '\u2035', + 'Breve;': '\u02d8', + 'breve;': '\u02d8', + 'brvbar': '\xa6', + 'brvbar;': '\xa6', + 'Bscr;': '\u212c', + 'bscr;': '\U0001d4b7', + 'bsemi;': '\u204f', + 'bsim;': '\u223d', + 'bsime;': '\u22cd', + 'bsol;': '\\', + 'bsolb;': '\u29c5', + 'bsolhsub;': '\u27c8', + 'bull;': '\u2022', + 'bullet;': '\u2022', + 'bump;': '\u224e', + 'bumpE;': '\u2aae', + 'bumpe;': '\u224f', + 'Bumpeq;': '\u224e', + 'bumpeq;': '\u224f', + 'Cacute;': '\u0106', + 'cacute;': '\u0107', + 'Cap;': '\u22d2', + 'cap;': '\u2229', + 'capand;': '\u2a44', + 'capbrcup;': '\u2a49', + 'capcap;': '\u2a4b', + 'capcup;': '\u2a47', + 'capdot;': '\u2a40', + 'CapitalDifferentialD;': '\u2145', + 'caps;': '\u2229\ufe00', + 'caret;': '\u2041', + 'caron;': '\u02c7', + 'Cayleys;': '\u212d', + 'ccaps;': '\u2a4d', + 'Ccaron;': '\u010c', + 'ccaron;': '\u010d', + 'Ccedil': '\xc7', + 'ccedil': '\xe7', + 'Ccedil;': '\xc7', + 'ccedil;': '\xe7', + 'Ccirc;': '\u0108', + 'ccirc;': '\u0109', + 'Cconint;': '\u2230', + 'ccups;': '\u2a4c', + 'ccupssm;': '\u2a50', + 'Cdot;': '\u010a', + 'cdot;': '\u010b', + 'cedil': '\xb8', + 'cedil;': '\xb8', + 'Cedilla;': '\xb8', + 'cemptyv;': '\u29b2', + 'cent': '\xa2', + 'cent;': '\xa2', + 'CenterDot;': '\xb7', + 'centerdot;': '\xb7', + 'Cfr;': '\u212d', + 'cfr;': '\U0001d520', + 'CHcy;': '\u0427', + 'chcy;': '\u0447', + 'check;': '\u2713', + 'checkmark;': '\u2713', + 'Chi;': '\u03a7', + 'chi;': '\u03c7', + 'cir;': '\u25cb', + 'circ;': '\u02c6', + 'circeq;': '\u2257', + 'circlearrowleft;': '\u21ba', + 'circlearrowright;': '\u21bb', + 'circledast;': '\u229b', + 'circledcirc;': '\u229a', + 'circleddash;': '\u229d', + 'CircleDot;': '\u2299', + 'circledR;': '\xae', + 'circledS;': '\u24c8', + 'CircleMinus;': '\u2296', + 'CirclePlus;': '\u2295', + 'CircleTimes;': '\u2297', + 'cirE;': '\u29c3', + 'cire;': '\u2257', + 'cirfnint;': '\u2a10', + 'cirmid;': '\u2aef', + 'cirscir;': '\u29c2', + 'ClockwiseContourIntegral;': '\u2232', + 'CloseCurlyDoubleQuote;': '\u201d', + 'CloseCurlyQuote;': '\u2019', + 'clubs;': '\u2663', + 'clubsuit;': '\u2663', + 'Colon;': '\u2237', + 
'colon;': ':', + 'Colone;': '\u2a74', + 'colone;': '\u2254', + 'coloneq;': '\u2254', + 'comma;': ',', + 'commat;': '@', + 'comp;': '\u2201', + 'compfn;': '\u2218', + 'complement;': '\u2201', + 'complexes;': '\u2102', + 'cong;': '\u2245', + 'congdot;': '\u2a6d', + 'Congruent;': '\u2261', + 'Conint;': '\u222f', + 'conint;': '\u222e', + 'ContourIntegral;': '\u222e', + 'Copf;': '\u2102', + 'copf;': '\U0001d554', + 'coprod;': '\u2210', + 'Coproduct;': '\u2210', + 'COPY': '\xa9', + 'copy': '\xa9', + 'COPY;': '\xa9', + 'copy;': '\xa9', + 'copysr;': '\u2117', + 'CounterClockwiseContourIntegral;': '\u2233', + 'crarr;': '\u21b5', + 'Cross;': '\u2a2f', + 'cross;': '\u2717', + 'Cscr;': '\U0001d49e', + 'cscr;': '\U0001d4b8', + 'csub;': '\u2acf', + 'csube;': '\u2ad1', + 'csup;': '\u2ad0', + 'csupe;': '\u2ad2', + 'ctdot;': '\u22ef', + 'cudarrl;': '\u2938', + 'cudarrr;': '\u2935', + 'cuepr;': '\u22de', + 'cuesc;': '\u22df', + 'cularr;': '\u21b6', + 'cularrp;': '\u293d', + 'Cup;': '\u22d3', + 'cup;': '\u222a', + 'cupbrcap;': '\u2a48', + 'CupCap;': '\u224d', + 'cupcap;': '\u2a46', + 'cupcup;': '\u2a4a', + 'cupdot;': '\u228d', + 'cupor;': '\u2a45', + 'cups;': '\u222a\ufe00', + 'curarr;': '\u21b7', + 'curarrm;': '\u293c', + 'curlyeqprec;': '\u22de', + 'curlyeqsucc;': '\u22df', + 'curlyvee;': '\u22ce', + 'curlywedge;': '\u22cf', + 'curren': '\xa4', + 'curren;': '\xa4', + 'curvearrowleft;': '\u21b6', + 'curvearrowright;': '\u21b7', + 'cuvee;': '\u22ce', + 'cuwed;': '\u22cf', + 'cwconint;': '\u2232', + 'cwint;': '\u2231', + 'cylcty;': '\u232d', + 'Dagger;': '\u2021', + 'dagger;': '\u2020', + 'daleth;': '\u2138', + 'Darr;': '\u21a1', + 'dArr;': '\u21d3', + 'darr;': '\u2193', + 'dash;': '\u2010', + 'Dashv;': '\u2ae4', + 'dashv;': '\u22a3', + 'dbkarow;': '\u290f', + 'dblac;': '\u02dd', + 'Dcaron;': '\u010e', + 'dcaron;': '\u010f', + 'Dcy;': '\u0414', + 'dcy;': '\u0434', + 'DD;': '\u2145', + 'dd;': '\u2146', + 'ddagger;': '\u2021', + 'ddarr;': '\u21ca', + 'DDotrahd;': '\u2911', + 'ddotseq;': '\u2a77', + 'deg': '\xb0', + 'deg;': '\xb0', + 'Del;': '\u2207', + 'Delta;': '\u0394', + 'delta;': '\u03b4', + 'demptyv;': '\u29b1', + 'dfisht;': '\u297f', + 'Dfr;': '\U0001d507', + 'dfr;': '\U0001d521', + 'dHar;': '\u2965', + 'dharl;': '\u21c3', + 'dharr;': '\u21c2', + 'DiacriticalAcute;': '\xb4', + 'DiacriticalDot;': '\u02d9', + 'DiacriticalDoubleAcute;': '\u02dd', + 'DiacriticalGrave;': '`', + 'DiacriticalTilde;': '\u02dc', + 'diam;': '\u22c4', + 'Diamond;': '\u22c4', + 'diamond;': '\u22c4', + 'diamondsuit;': '\u2666', + 'diams;': '\u2666', + 'die;': '\xa8', + 'DifferentialD;': '\u2146', + 'digamma;': '\u03dd', + 'disin;': '\u22f2', + 'div;': '\xf7', + 'divide': '\xf7', + 'divide;': '\xf7', + 'divideontimes;': '\u22c7', + 'divonx;': '\u22c7', + 'DJcy;': '\u0402', + 'djcy;': '\u0452', + 'dlcorn;': '\u231e', + 'dlcrop;': '\u230d', + 'dollar;': '$', + 'Dopf;': '\U0001d53b', + 'dopf;': '\U0001d555', + 'Dot;': '\xa8', + 'dot;': '\u02d9', + 'DotDot;': '\u20dc', + 'doteq;': '\u2250', + 'doteqdot;': '\u2251', + 'DotEqual;': '\u2250', + 'dotminus;': '\u2238', + 'dotplus;': '\u2214', + 'dotsquare;': '\u22a1', + 'doublebarwedge;': '\u2306', + 'DoubleContourIntegral;': '\u222f', + 'DoubleDot;': '\xa8', + 'DoubleDownArrow;': '\u21d3', + 'DoubleLeftArrow;': '\u21d0', + 'DoubleLeftRightArrow;': '\u21d4', + 'DoubleLeftTee;': '\u2ae4', + 'DoubleLongLeftArrow;': '\u27f8', + 'DoubleLongLeftRightArrow;': '\u27fa', + 'DoubleLongRightArrow;': '\u27f9', + 'DoubleRightArrow;': '\u21d2', + 'DoubleRightTee;': '\u22a8', + 'DoubleUpArrow;': '\u21d1', + 
'DoubleUpDownArrow;': '\u21d5', + 'DoubleVerticalBar;': '\u2225', + 'DownArrow;': '\u2193', + 'Downarrow;': '\u21d3', + 'downarrow;': '\u2193', + 'DownArrowBar;': '\u2913', + 'DownArrowUpArrow;': '\u21f5', + 'DownBreve;': '\u0311', + 'downdownarrows;': '\u21ca', + 'downharpoonleft;': '\u21c3', + 'downharpoonright;': '\u21c2', + 'DownLeftRightVector;': '\u2950', + 'DownLeftTeeVector;': '\u295e', + 'DownLeftVector;': '\u21bd', + 'DownLeftVectorBar;': '\u2956', + 'DownRightTeeVector;': '\u295f', + 'DownRightVector;': '\u21c1', + 'DownRightVectorBar;': '\u2957', + 'DownTee;': '\u22a4', + 'DownTeeArrow;': '\u21a7', + 'drbkarow;': '\u2910', + 'drcorn;': '\u231f', + 'drcrop;': '\u230c', + 'Dscr;': '\U0001d49f', + 'dscr;': '\U0001d4b9', + 'DScy;': '\u0405', + 'dscy;': '\u0455', + 'dsol;': '\u29f6', + 'Dstrok;': '\u0110', + 'dstrok;': '\u0111', + 'dtdot;': '\u22f1', + 'dtri;': '\u25bf', + 'dtrif;': '\u25be', + 'duarr;': '\u21f5', + 'duhar;': '\u296f', + 'dwangle;': '\u29a6', + 'DZcy;': '\u040f', + 'dzcy;': '\u045f', + 'dzigrarr;': '\u27ff', + 'Eacute': '\xc9', + 'eacute': '\xe9', + 'Eacute;': '\xc9', + 'eacute;': '\xe9', + 'easter;': '\u2a6e', + 'Ecaron;': '\u011a', + 'ecaron;': '\u011b', + 'ecir;': '\u2256', + 'Ecirc': '\xca', + 'ecirc': '\xea', + 'Ecirc;': '\xca', + 'ecirc;': '\xea', + 'ecolon;': '\u2255', + 'Ecy;': '\u042d', + 'ecy;': '\u044d', + 'eDDot;': '\u2a77', + 'Edot;': '\u0116', + 'eDot;': '\u2251', + 'edot;': '\u0117', + 'ee;': '\u2147', + 'efDot;': '\u2252', + 'Efr;': '\U0001d508', + 'efr;': '\U0001d522', + 'eg;': '\u2a9a', + 'Egrave': '\xc8', + 'egrave': '\xe8', + 'Egrave;': '\xc8', + 'egrave;': '\xe8', + 'egs;': '\u2a96', + 'egsdot;': '\u2a98', + 'el;': '\u2a99', + 'Element;': '\u2208', + 'elinters;': '\u23e7', + 'ell;': '\u2113', + 'els;': '\u2a95', + 'elsdot;': '\u2a97', + 'Emacr;': '\u0112', + 'emacr;': '\u0113', + 'empty;': '\u2205', + 'emptyset;': '\u2205', + 'EmptySmallSquare;': '\u25fb', + 'emptyv;': '\u2205', + 'EmptyVerySmallSquare;': '\u25ab', + 'emsp13;': '\u2004', + 'emsp14;': '\u2005', + 'emsp;': '\u2003', + 'ENG;': '\u014a', + 'eng;': '\u014b', + 'ensp;': '\u2002', + 'Eogon;': '\u0118', + 'eogon;': '\u0119', + 'Eopf;': '\U0001d53c', + 'eopf;': '\U0001d556', + 'epar;': '\u22d5', + 'eparsl;': '\u29e3', + 'eplus;': '\u2a71', + 'epsi;': '\u03b5', + 'Epsilon;': '\u0395', + 'epsilon;': '\u03b5', + 'epsiv;': '\u03f5', + 'eqcirc;': '\u2256', + 'eqcolon;': '\u2255', + 'eqsim;': '\u2242', + 'eqslantgtr;': '\u2a96', + 'eqslantless;': '\u2a95', + 'Equal;': '\u2a75', + 'equals;': '=', + 'EqualTilde;': '\u2242', + 'equest;': '\u225f', + 'Equilibrium;': '\u21cc', + 'equiv;': '\u2261', + 'equivDD;': '\u2a78', + 'eqvparsl;': '\u29e5', + 'erarr;': '\u2971', + 'erDot;': '\u2253', + 'Escr;': '\u2130', + 'escr;': '\u212f', + 'esdot;': '\u2250', + 'Esim;': '\u2a73', + 'esim;': '\u2242', + 'Eta;': '\u0397', + 'eta;': '\u03b7', + 'ETH': '\xd0', + 'eth': '\xf0', + 'ETH;': '\xd0', + 'eth;': '\xf0', + 'Euml': '\xcb', + 'euml': '\xeb', + 'Euml;': '\xcb', + 'euml;': '\xeb', + 'euro;': '\u20ac', + 'excl;': '!', + 'exist;': '\u2203', + 'Exists;': '\u2203', + 'expectation;': '\u2130', + 'ExponentialE;': '\u2147', + 'exponentiale;': '\u2147', + 'fallingdotseq;': '\u2252', + 'Fcy;': '\u0424', + 'fcy;': '\u0444', + 'female;': '\u2640', + 'ffilig;': '\ufb03', + 'fflig;': '\ufb00', + 'ffllig;': '\ufb04', + 'Ffr;': '\U0001d509', + 'ffr;': '\U0001d523', + 'filig;': '\ufb01', + 'FilledSmallSquare;': '\u25fc', + 'FilledVerySmallSquare;': '\u25aa', + 'fjlig;': 'fj', + 'flat;': '\u266d', + 'fllig;': '\ufb02', + 
'fltns;': '\u25b1', + 'fnof;': '\u0192', + 'Fopf;': '\U0001d53d', + 'fopf;': '\U0001d557', + 'ForAll;': '\u2200', + 'forall;': '\u2200', + 'fork;': '\u22d4', + 'forkv;': '\u2ad9', + 'Fouriertrf;': '\u2131', + 'fpartint;': '\u2a0d', + 'frac12': '\xbd', + 'frac12;': '\xbd', + 'frac13;': '\u2153', + 'frac14': '\xbc', + 'frac14;': '\xbc', + 'frac15;': '\u2155', + 'frac16;': '\u2159', + 'frac18;': '\u215b', + 'frac23;': '\u2154', + 'frac25;': '\u2156', + 'frac34': '\xbe', + 'frac34;': '\xbe', + 'frac35;': '\u2157', + 'frac38;': '\u215c', + 'frac45;': '\u2158', + 'frac56;': '\u215a', + 'frac58;': '\u215d', + 'frac78;': '\u215e', + 'frasl;': '\u2044', + 'frown;': '\u2322', + 'Fscr;': '\u2131', + 'fscr;': '\U0001d4bb', + 'gacute;': '\u01f5', + 'Gamma;': '\u0393', + 'gamma;': '\u03b3', + 'Gammad;': '\u03dc', + 'gammad;': '\u03dd', + 'gap;': '\u2a86', + 'Gbreve;': '\u011e', + 'gbreve;': '\u011f', + 'Gcedil;': '\u0122', + 'Gcirc;': '\u011c', + 'gcirc;': '\u011d', + 'Gcy;': '\u0413', + 'gcy;': '\u0433', + 'Gdot;': '\u0120', + 'gdot;': '\u0121', + 'gE;': '\u2267', + 'ge;': '\u2265', + 'gEl;': '\u2a8c', + 'gel;': '\u22db', + 'geq;': '\u2265', + 'geqq;': '\u2267', + 'geqslant;': '\u2a7e', + 'ges;': '\u2a7e', + 'gescc;': '\u2aa9', + 'gesdot;': '\u2a80', + 'gesdoto;': '\u2a82', + 'gesdotol;': '\u2a84', + 'gesl;': '\u22db\ufe00', + 'gesles;': '\u2a94', + 'Gfr;': '\U0001d50a', + 'gfr;': '\U0001d524', + 'Gg;': '\u22d9', + 'gg;': '\u226b', + 'ggg;': '\u22d9', + 'gimel;': '\u2137', + 'GJcy;': '\u0403', + 'gjcy;': '\u0453', + 'gl;': '\u2277', + 'gla;': '\u2aa5', + 'glE;': '\u2a92', + 'glj;': '\u2aa4', + 'gnap;': '\u2a8a', + 'gnapprox;': '\u2a8a', + 'gnE;': '\u2269', + 'gne;': '\u2a88', + 'gneq;': '\u2a88', + 'gneqq;': '\u2269', + 'gnsim;': '\u22e7', + 'Gopf;': '\U0001d53e', + 'gopf;': '\U0001d558', + 'grave;': '`', + 'GreaterEqual;': '\u2265', + 'GreaterEqualLess;': '\u22db', + 'GreaterFullEqual;': '\u2267', + 'GreaterGreater;': '\u2aa2', + 'GreaterLess;': '\u2277', + 'GreaterSlantEqual;': '\u2a7e', + 'GreaterTilde;': '\u2273', + 'Gscr;': '\U0001d4a2', + 'gscr;': '\u210a', + 'gsim;': '\u2273', + 'gsime;': '\u2a8e', + 'gsiml;': '\u2a90', + 'GT': '>', + 'gt': '>', + 'GT;': '>', + 'Gt;': '\u226b', + 'gt;': '>', + 'gtcc;': '\u2aa7', + 'gtcir;': '\u2a7a', + 'gtdot;': '\u22d7', + 'gtlPar;': '\u2995', + 'gtquest;': '\u2a7c', + 'gtrapprox;': '\u2a86', + 'gtrarr;': '\u2978', + 'gtrdot;': '\u22d7', + 'gtreqless;': '\u22db', + 'gtreqqless;': '\u2a8c', + 'gtrless;': '\u2277', + 'gtrsim;': '\u2273', + 'gvertneqq;': '\u2269\ufe00', + 'gvnE;': '\u2269\ufe00', + 'Hacek;': '\u02c7', + 'hairsp;': '\u200a', + 'half;': '\xbd', + 'hamilt;': '\u210b', + 'HARDcy;': '\u042a', + 'hardcy;': '\u044a', + 'hArr;': '\u21d4', + 'harr;': '\u2194', + 'harrcir;': '\u2948', + 'harrw;': '\u21ad', + 'Hat;': '^', + 'hbar;': '\u210f', + 'Hcirc;': '\u0124', + 'hcirc;': '\u0125', + 'hearts;': '\u2665', + 'heartsuit;': '\u2665', + 'hellip;': '\u2026', + 'hercon;': '\u22b9', + 'Hfr;': '\u210c', + 'hfr;': '\U0001d525', + 'HilbertSpace;': '\u210b', + 'hksearow;': '\u2925', + 'hkswarow;': '\u2926', + 'hoarr;': '\u21ff', + 'homtht;': '\u223b', + 'hookleftarrow;': '\u21a9', + 'hookrightarrow;': '\u21aa', + 'Hopf;': '\u210d', + 'hopf;': '\U0001d559', + 'horbar;': '\u2015', + 'HorizontalLine;': '\u2500', + 'Hscr;': '\u210b', + 'hscr;': '\U0001d4bd', + 'hslash;': '\u210f', + 'Hstrok;': '\u0126', + 'hstrok;': '\u0127', + 'HumpDownHump;': '\u224e', + 'HumpEqual;': '\u224f', + 'hybull;': '\u2043', + 'hyphen;': '\u2010', + 'Iacute': '\xcd', + 'iacute': '\xed', + 
'Iacute;': '\xcd', + 'iacute;': '\xed', + 'ic;': '\u2063', + 'Icirc': '\xce', + 'icirc': '\xee', + 'Icirc;': '\xce', + 'icirc;': '\xee', + 'Icy;': '\u0418', + 'icy;': '\u0438', + 'Idot;': '\u0130', + 'IEcy;': '\u0415', + 'iecy;': '\u0435', + 'iexcl': '\xa1', + 'iexcl;': '\xa1', + 'iff;': '\u21d4', + 'Ifr;': '\u2111', + 'ifr;': '\U0001d526', + 'Igrave': '\xcc', + 'igrave': '\xec', + 'Igrave;': '\xcc', + 'igrave;': '\xec', + 'ii;': '\u2148', + 'iiiint;': '\u2a0c', + 'iiint;': '\u222d', + 'iinfin;': '\u29dc', + 'iiota;': '\u2129', + 'IJlig;': '\u0132', + 'ijlig;': '\u0133', + 'Im;': '\u2111', + 'Imacr;': '\u012a', + 'imacr;': '\u012b', + 'image;': '\u2111', + 'ImaginaryI;': '\u2148', + 'imagline;': '\u2110', + 'imagpart;': '\u2111', + 'imath;': '\u0131', + 'imof;': '\u22b7', + 'imped;': '\u01b5', + 'Implies;': '\u21d2', + 'in;': '\u2208', + 'incare;': '\u2105', + 'infin;': '\u221e', + 'infintie;': '\u29dd', + 'inodot;': '\u0131', + 'Int;': '\u222c', + 'int;': '\u222b', + 'intcal;': '\u22ba', + 'integers;': '\u2124', + 'Integral;': '\u222b', + 'intercal;': '\u22ba', + 'Intersection;': '\u22c2', + 'intlarhk;': '\u2a17', + 'intprod;': '\u2a3c', + 'InvisibleComma;': '\u2063', + 'InvisibleTimes;': '\u2062', + 'IOcy;': '\u0401', + 'iocy;': '\u0451', + 'Iogon;': '\u012e', + 'iogon;': '\u012f', + 'Iopf;': '\U0001d540', + 'iopf;': '\U0001d55a', + 'Iota;': '\u0399', + 'iota;': '\u03b9', + 'iprod;': '\u2a3c', + 'iquest': '\xbf', + 'iquest;': '\xbf', + 'Iscr;': '\u2110', + 'iscr;': '\U0001d4be', + 'isin;': '\u2208', + 'isindot;': '\u22f5', + 'isinE;': '\u22f9', + 'isins;': '\u22f4', + 'isinsv;': '\u22f3', + 'isinv;': '\u2208', + 'it;': '\u2062', + 'Itilde;': '\u0128', + 'itilde;': '\u0129', + 'Iukcy;': '\u0406', + 'iukcy;': '\u0456', + 'Iuml': '\xcf', + 'iuml': '\xef', + 'Iuml;': '\xcf', + 'iuml;': '\xef', + 'Jcirc;': '\u0134', + 'jcirc;': '\u0135', + 'Jcy;': '\u0419', + 'jcy;': '\u0439', + 'Jfr;': '\U0001d50d', + 'jfr;': '\U0001d527', + 'jmath;': '\u0237', + 'Jopf;': '\U0001d541', + 'jopf;': '\U0001d55b', + 'Jscr;': '\U0001d4a5', + 'jscr;': '\U0001d4bf', + 'Jsercy;': '\u0408', + 'jsercy;': '\u0458', + 'Jukcy;': '\u0404', + 'jukcy;': '\u0454', + 'Kappa;': '\u039a', + 'kappa;': '\u03ba', + 'kappav;': '\u03f0', + 'Kcedil;': '\u0136', + 'kcedil;': '\u0137', + 'Kcy;': '\u041a', + 'kcy;': '\u043a', + 'Kfr;': '\U0001d50e', + 'kfr;': '\U0001d528', + 'kgreen;': '\u0138', + 'KHcy;': '\u0425', + 'khcy;': '\u0445', + 'KJcy;': '\u040c', + 'kjcy;': '\u045c', + 'Kopf;': '\U0001d542', + 'kopf;': '\U0001d55c', + 'Kscr;': '\U0001d4a6', + 'kscr;': '\U0001d4c0', + 'lAarr;': '\u21da', + 'Lacute;': '\u0139', + 'lacute;': '\u013a', + 'laemptyv;': '\u29b4', + 'lagran;': '\u2112', + 'Lambda;': '\u039b', + 'lambda;': '\u03bb', + 'Lang;': '\u27ea', + 'lang;': '\u27e8', + 'langd;': '\u2991', + 'langle;': '\u27e8', + 'lap;': '\u2a85', + 'Laplacetrf;': '\u2112', + 'laquo': '\xab', + 'laquo;': '\xab', + 'Larr;': '\u219e', + 'lArr;': '\u21d0', + 'larr;': '\u2190', + 'larrb;': '\u21e4', + 'larrbfs;': '\u291f', + 'larrfs;': '\u291d', + 'larrhk;': '\u21a9', + 'larrlp;': '\u21ab', + 'larrpl;': '\u2939', + 'larrsim;': '\u2973', + 'larrtl;': '\u21a2', + 'lat;': '\u2aab', + 'lAtail;': '\u291b', + 'latail;': '\u2919', + 'late;': '\u2aad', + 'lates;': '\u2aad\ufe00', + 'lBarr;': '\u290e', + 'lbarr;': '\u290c', + 'lbbrk;': '\u2772', + 'lbrace;': '{', + 'lbrack;': '[', + 'lbrke;': '\u298b', + 'lbrksld;': '\u298f', + 'lbrkslu;': '\u298d', + 'Lcaron;': '\u013d', + 'lcaron;': '\u013e', + 'Lcedil;': '\u013b', + 'lcedil;': '\u013c', + 'lceil;': 
'\u2308', + 'lcub;': '{', + 'Lcy;': '\u041b', + 'lcy;': '\u043b', + 'ldca;': '\u2936', + 'ldquo;': '\u201c', + 'ldquor;': '\u201e', + 'ldrdhar;': '\u2967', + 'ldrushar;': '\u294b', + 'ldsh;': '\u21b2', + 'lE;': '\u2266', + 'le;': '\u2264', + 'LeftAngleBracket;': '\u27e8', + 'LeftArrow;': '\u2190', + 'Leftarrow;': '\u21d0', + 'leftarrow;': '\u2190', + 'LeftArrowBar;': '\u21e4', + 'LeftArrowRightArrow;': '\u21c6', + 'leftarrowtail;': '\u21a2', + 'LeftCeiling;': '\u2308', + 'LeftDoubleBracket;': '\u27e6', + 'LeftDownTeeVector;': '\u2961', + 'LeftDownVector;': '\u21c3', + 'LeftDownVectorBar;': '\u2959', + 'LeftFloor;': '\u230a', + 'leftharpoondown;': '\u21bd', + 'leftharpoonup;': '\u21bc', + 'leftleftarrows;': '\u21c7', + 'LeftRightArrow;': '\u2194', + 'Leftrightarrow;': '\u21d4', + 'leftrightarrow;': '\u2194', + 'leftrightarrows;': '\u21c6', + 'leftrightharpoons;': '\u21cb', + 'leftrightsquigarrow;': '\u21ad', + 'LeftRightVector;': '\u294e', + 'LeftTee;': '\u22a3', + 'LeftTeeArrow;': '\u21a4', + 'LeftTeeVector;': '\u295a', + 'leftthreetimes;': '\u22cb', + 'LeftTriangle;': '\u22b2', + 'LeftTriangleBar;': '\u29cf', + 'LeftTriangleEqual;': '\u22b4', + 'LeftUpDownVector;': '\u2951', + 'LeftUpTeeVector;': '\u2960', + 'LeftUpVector;': '\u21bf', + 'LeftUpVectorBar;': '\u2958', + 'LeftVector;': '\u21bc', + 'LeftVectorBar;': '\u2952', + 'lEg;': '\u2a8b', + 'leg;': '\u22da', + 'leq;': '\u2264', + 'leqq;': '\u2266', + 'leqslant;': '\u2a7d', + 'les;': '\u2a7d', + 'lescc;': '\u2aa8', + 'lesdot;': '\u2a7f', + 'lesdoto;': '\u2a81', + 'lesdotor;': '\u2a83', + 'lesg;': '\u22da\ufe00', + 'lesges;': '\u2a93', + 'lessapprox;': '\u2a85', + 'lessdot;': '\u22d6', + 'lesseqgtr;': '\u22da', + 'lesseqqgtr;': '\u2a8b', + 'LessEqualGreater;': '\u22da', + 'LessFullEqual;': '\u2266', + 'LessGreater;': '\u2276', + 'lessgtr;': '\u2276', + 'LessLess;': '\u2aa1', + 'lesssim;': '\u2272', + 'LessSlantEqual;': '\u2a7d', + 'LessTilde;': '\u2272', + 'lfisht;': '\u297c', + 'lfloor;': '\u230a', + 'Lfr;': '\U0001d50f', + 'lfr;': '\U0001d529', + 'lg;': '\u2276', + 'lgE;': '\u2a91', + 'lHar;': '\u2962', + 'lhard;': '\u21bd', + 'lharu;': '\u21bc', + 'lharul;': '\u296a', + 'lhblk;': '\u2584', + 'LJcy;': '\u0409', + 'ljcy;': '\u0459', + 'Ll;': '\u22d8', + 'll;': '\u226a', + 'llarr;': '\u21c7', + 'llcorner;': '\u231e', + 'Lleftarrow;': '\u21da', + 'llhard;': '\u296b', + 'lltri;': '\u25fa', + 'Lmidot;': '\u013f', + 'lmidot;': '\u0140', + 'lmoust;': '\u23b0', + 'lmoustache;': '\u23b0', + 'lnap;': '\u2a89', + 'lnapprox;': '\u2a89', + 'lnE;': '\u2268', + 'lne;': '\u2a87', + 'lneq;': '\u2a87', + 'lneqq;': '\u2268', + 'lnsim;': '\u22e6', + 'loang;': '\u27ec', + 'loarr;': '\u21fd', + 'lobrk;': '\u27e6', + 'LongLeftArrow;': '\u27f5', + 'Longleftarrow;': '\u27f8', + 'longleftarrow;': '\u27f5', + 'LongLeftRightArrow;': '\u27f7', + 'Longleftrightarrow;': '\u27fa', + 'longleftrightarrow;': '\u27f7', + 'longmapsto;': '\u27fc', + 'LongRightArrow;': '\u27f6', + 'Longrightarrow;': '\u27f9', + 'longrightarrow;': '\u27f6', + 'looparrowleft;': '\u21ab', + 'looparrowright;': '\u21ac', + 'lopar;': '\u2985', + 'Lopf;': '\U0001d543', + 'lopf;': '\U0001d55d', + 'loplus;': '\u2a2d', + 'lotimes;': '\u2a34', + 'lowast;': '\u2217', + 'lowbar;': '_', + 'LowerLeftArrow;': '\u2199', + 'LowerRightArrow;': '\u2198', + 'loz;': '\u25ca', + 'lozenge;': '\u25ca', + 'lozf;': '\u29eb', + 'lpar;': '(', + 'lparlt;': '\u2993', + 'lrarr;': '\u21c6', + 'lrcorner;': '\u231f', + 'lrhar;': '\u21cb', + 'lrhard;': '\u296d', + 'lrm;': '\u200e', + 'lrtri;': '\u22bf', + 'lsaquo;': '\u2039', 
+ 'Lscr;': '\u2112', + 'lscr;': '\U0001d4c1', + 'Lsh;': '\u21b0', + 'lsh;': '\u21b0', + 'lsim;': '\u2272', + 'lsime;': '\u2a8d', + 'lsimg;': '\u2a8f', + 'lsqb;': '[', + 'lsquo;': '\u2018', + 'lsquor;': '\u201a', + 'Lstrok;': '\u0141', + 'lstrok;': '\u0142', + 'LT': '<', + 'lt': '<', + 'LT;': '<', + 'Lt;': '\u226a', + 'lt;': '<', + 'ltcc;': '\u2aa6', + 'ltcir;': '\u2a79', + 'ltdot;': '\u22d6', + 'lthree;': '\u22cb', + 'ltimes;': '\u22c9', + 'ltlarr;': '\u2976', + 'ltquest;': '\u2a7b', + 'ltri;': '\u25c3', + 'ltrie;': '\u22b4', + 'ltrif;': '\u25c2', + 'ltrPar;': '\u2996', + 'lurdshar;': '\u294a', + 'luruhar;': '\u2966', + 'lvertneqq;': '\u2268\ufe00', + 'lvnE;': '\u2268\ufe00', + 'macr': '\xaf', + 'macr;': '\xaf', + 'male;': '\u2642', + 'malt;': '\u2720', + 'maltese;': '\u2720', + 'Map;': '\u2905', + 'map;': '\u21a6', + 'mapsto;': '\u21a6', + 'mapstodown;': '\u21a7', + 'mapstoleft;': '\u21a4', + 'mapstoup;': '\u21a5', + 'marker;': '\u25ae', + 'mcomma;': '\u2a29', + 'Mcy;': '\u041c', + 'mcy;': '\u043c', + 'mdash;': '\u2014', + 'mDDot;': '\u223a', + 'measuredangle;': '\u2221', + 'MediumSpace;': '\u205f', + 'Mellintrf;': '\u2133', + 'Mfr;': '\U0001d510', + 'mfr;': '\U0001d52a', + 'mho;': '\u2127', + 'micro': '\xb5', + 'micro;': '\xb5', + 'mid;': '\u2223', + 'midast;': '*', + 'midcir;': '\u2af0', + 'middot': '\xb7', + 'middot;': '\xb7', + 'minus;': '\u2212', + 'minusb;': '\u229f', + 'minusd;': '\u2238', + 'minusdu;': '\u2a2a', + 'MinusPlus;': '\u2213', + 'mlcp;': '\u2adb', + 'mldr;': '\u2026', + 'mnplus;': '\u2213', + 'models;': '\u22a7', + 'Mopf;': '\U0001d544', + 'mopf;': '\U0001d55e', + 'mp;': '\u2213', + 'Mscr;': '\u2133', + 'mscr;': '\U0001d4c2', + 'mstpos;': '\u223e', + 'Mu;': '\u039c', + 'mu;': '\u03bc', + 'multimap;': '\u22b8', + 'mumap;': '\u22b8', + 'nabla;': '\u2207', + 'Nacute;': '\u0143', + 'nacute;': '\u0144', + 'nang;': '\u2220\u20d2', + 'nap;': '\u2249', + 'napE;': '\u2a70\u0338', + 'napid;': '\u224b\u0338', + 'napos;': '\u0149', + 'napprox;': '\u2249', + 'natur;': '\u266e', + 'natural;': '\u266e', + 'naturals;': '\u2115', + 'nbsp': '\xa0', + 'nbsp;': '\xa0', + 'nbump;': '\u224e\u0338', + 'nbumpe;': '\u224f\u0338', + 'ncap;': '\u2a43', + 'Ncaron;': '\u0147', + 'ncaron;': '\u0148', + 'Ncedil;': '\u0145', + 'ncedil;': '\u0146', + 'ncong;': '\u2247', + 'ncongdot;': '\u2a6d\u0338', + 'ncup;': '\u2a42', + 'Ncy;': '\u041d', + 'ncy;': '\u043d', + 'ndash;': '\u2013', + 'ne;': '\u2260', + 'nearhk;': '\u2924', + 'neArr;': '\u21d7', + 'nearr;': '\u2197', + 'nearrow;': '\u2197', + 'nedot;': '\u2250\u0338', + 'NegativeMediumSpace;': '\u200b', + 'NegativeThickSpace;': '\u200b', + 'NegativeThinSpace;': '\u200b', + 'NegativeVeryThinSpace;': '\u200b', + 'nequiv;': '\u2262', + 'nesear;': '\u2928', + 'nesim;': '\u2242\u0338', + 'NestedGreaterGreater;': '\u226b', + 'NestedLessLess;': '\u226a', + 'NewLine;': '\n', + 'nexist;': '\u2204', + 'nexists;': '\u2204', + 'Nfr;': '\U0001d511', + 'nfr;': '\U0001d52b', + 'ngE;': '\u2267\u0338', + 'nge;': '\u2271', + 'ngeq;': '\u2271', + 'ngeqq;': '\u2267\u0338', + 'ngeqslant;': '\u2a7e\u0338', + 'nges;': '\u2a7e\u0338', + 'nGg;': '\u22d9\u0338', + 'ngsim;': '\u2275', + 'nGt;': '\u226b\u20d2', + 'ngt;': '\u226f', + 'ngtr;': '\u226f', + 'nGtv;': '\u226b\u0338', + 'nhArr;': '\u21ce', + 'nharr;': '\u21ae', + 'nhpar;': '\u2af2', + 'ni;': '\u220b', + 'nis;': '\u22fc', + 'nisd;': '\u22fa', + 'niv;': '\u220b', + 'NJcy;': '\u040a', + 'njcy;': '\u045a', + 'nlArr;': '\u21cd', + 'nlarr;': '\u219a', + 'nldr;': '\u2025', + 'nlE;': '\u2266\u0338', + 'nle;': '\u2270', + 
'nLeftarrow;': '\u21cd', + 'nleftarrow;': '\u219a', + 'nLeftrightarrow;': '\u21ce', + 'nleftrightarrow;': '\u21ae', + 'nleq;': '\u2270', + 'nleqq;': '\u2266\u0338', + 'nleqslant;': '\u2a7d\u0338', + 'nles;': '\u2a7d\u0338', + 'nless;': '\u226e', + 'nLl;': '\u22d8\u0338', + 'nlsim;': '\u2274', + 'nLt;': '\u226a\u20d2', + 'nlt;': '\u226e', + 'nltri;': '\u22ea', + 'nltrie;': '\u22ec', + 'nLtv;': '\u226a\u0338', + 'nmid;': '\u2224', + 'NoBreak;': '\u2060', + 'NonBreakingSpace;': '\xa0', + 'Nopf;': '\u2115', + 'nopf;': '\U0001d55f', + 'not': '\xac', + 'Not;': '\u2aec', + 'not;': '\xac', + 'NotCongruent;': '\u2262', + 'NotCupCap;': '\u226d', + 'NotDoubleVerticalBar;': '\u2226', + 'NotElement;': '\u2209', + 'NotEqual;': '\u2260', + 'NotEqualTilde;': '\u2242\u0338', + 'NotExists;': '\u2204', + 'NotGreater;': '\u226f', + 'NotGreaterEqual;': '\u2271', + 'NotGreaterFullEqual;': '\u2267\u0338', + 'NotGreaterGreater;': '\u226b\u0338', + 'NotGreaterLess;': '\u2279', + 'NotGreaterSlantEqual;': '\u2a7e\u0338', + 'NotGreaterTilde;': '\u2275', + 'NotHumpDownHump;': '\u224e\u0338', + 'NotHumpEqual;': '\u224f\u0338', + 'notin;': '\u2209', + 'notindot;': '\u22f5\u0338', + 'notinE;': '\u22f9\u0338', + 'notinva;': '\u2209', + 'notinvb;': '\u22f7', + 'notinvc;': '\u22f6', + 'NotLeftTriangle;': '\u22ea', + 'NotLeftTriangleBar;': '\u29cf\u0338', + 'NotLeftTriangleEqual;': '\u22ec', + 'NotLess;': '\u226e', + 'NotLessEqual;': '\u2270', + 'NotLessGreater;': '\u2278', + 'NotLessLess;': '\u226a\u0338', + 'NotLessSlantEqual;': '\u2a7d\u0338', + 'NotLessTilde;': '\u2274', + 'NotNestedGreaterGreater;': '\u2aa2\u0338', + 'NotNestedLessLess;': '\u2aa1\u0338', + 'notni;': '\u220c', + 'notniva;': '\u220c', + 'notnivb;': '\u22fe', + 'notnivc;': '\u22fd', + 'NotPrecedes;': '\u2280', + 'NotPrecedesEqual;': '\u2aaf\u0338', + 'NotPrecedesSlantEqual;': '\u22e0', + 'NotReverseElement;': '\u220c', + 'NotRightTriangle;': '\u22eb', + 'NotRightTriangleBar;': '\u29d0\u0338', + 'NotRightTriangleEqual;': '\u22ed', + 'NotSquareSubset;': '\u228f\u0338', + 'NotSquareSubsetEqual;': '\u22e2', + 'NotSquareSuperset;': '\u2290\u0338', + 'NotSquareSupersetEqual;': '\u22e3', + 'NotSubset;': '\u2282\u20d2', + 'NotSubsetEqual;': '\u2288', + 'NotSucceeds;': '\u2281', + 'NotSucceedsEqual;': '\u2ab0\u0338', + 'NotSucceedsSlantEqual;': '\u22e1', + 'NotSucceedsTilde;': '\u227f\u0338', + 'NotSuperset;': '\u2283\u20d2', + 'NotSupersetEqual;': '\u2289', + 'NotTilde;': '\u2241', + 'NotTildeEqual;': '\u2244', + 'NotTildeFullEqual;': '\u2247', + 'NotTildeTilde;': '\u2249', + 'NotVerticalBar;': '\u2224', + 'npar;': '\u2226', + 'nparallel;': '\u2226', + 'nparsl;': '\u2afd\u20e5', + 'npart;': '\u2202\u0338', + 'npolint;': '\u2a14', + 'npr;': '\u2280', + 'nprcue;': '\u22e0', + 'npre;': '\u2aaf\u0338', + 'nprec;': '\u2280', + 'npreceq;': '\u2aaf\u0338', + 'nrArr;': '\u21cf', + 'nrarr;': '\u219b', + 'nrarrc;': '\u2933\u0338', + 'nrarrw;': '\u219d\u0338', + 'nRightarrow;': '\u21cf', + 'nrightarrow;': '\u219b', + 'nrtri;': '\u22eb', + 'nrtrie;': '\u22ed', + 'nsc;': '\u2281', + 'nsccue;': '\u22e1', + 'nsce;': '\u2ab0\u0338', + 'Nscr;': '\U0001d4a9', + 'nscr;': '\U0001d4c3', + 'nshortmid;': '\u2224', + 'nshortparallel;': '\u2226', + 'nsim;': '\u2241', + 'nsime;': '\u2244', + 'nsimeq;': '\u2244', + 'nsmid;': '\u2224', + 'nspar;': '\u2226', + 'nsqsube;': '\u22e2', + 'nsqsupe;': '\u22e3', + 'nsub;': '\u2284', + 'nsubE;': '\u2ac5\u0338', + 'nsube;': '\u2288', + 'nsubset;': '\u2282\u20d2', + 'nsubseteq;': '\u2288', + 'nsubseteqq;': '\u2ac5\u0338', + 'nsucc;': '\u2281', + 
'nsucceq;': '\u2ab0\u0338', + 'nsup;': '\u2285', + 'nsupE;': '\u2ac6\u0338', + 'nsupe;': '\u2289', + 'nsupset;': '\u2283\u20d2', + 'nsupseteq;': '\u2289', + 'nsupseteqq;': '\u2ac6\u0338', + 'ntgl;': '\u2279', + 'Ntilde': '\xd1', + 'ntilde': '\xf1', + 'Ntilde;': '\xd1', + 'ntilde;': '\xf1', + 'ntlg;': '\u2278', + 'ntriangleleft;': '\u22ea', + 'ntrianglelefteq;': '\u22ec', + 'ntriangleright;': '\u22eb', + 'ntrianglerighteq;': '\u22ed', + 'Nu;': '\u039d', + 'nu;': '\u03bd', + 'num;': '#', + 'numero;': '\u2116', + 'numsp;': '\u2007', + 'nvap;': '\u224d\u20d2', + 'nVDash;': '\u22af', + 'nVdash;': '\u22ae', + 'nvDash;': '\u22ad', + 'nvdash;': '\u22ac', + 'nvge;': '\u2265\u20d2', + 'nvgt;': '>\u20d2', + 'nvHarr;': '\u2904', + 'nvinfin;': '\u29de', + 'nvlArr;': '\u2902', + 'nvle;': '\u2264\u20d2', + 'nvlt;': '<\u20d2', + 'nvltrie;': '\u22b4\u20d2', + 'nvrArr;': '\u2903', + 'nvrtrie;': '\u22b5\u20d2', + 'nvsim;': '\u223c\u20d2', + 'nwarhk;': '\u2923', + 'nwArr;': '\u21d6', + 'nwarr;': '\u2196', + 'nwarrow;': '\u2196', + 'nwnear;': '\u2927', + 'Oacute': '\xd3', + 'oacute': '\xf3', + 'Oacute;': '\xd3', + 'oacute;': '\xf3', + 'oast;': '\u229b', + 'ocir;': '\u229a', + 'Ocirc': '\xd4', + 'ocirc': '\xf4', + 'Ocirc;': '\xd4', + 'ocirc;': '\xf4', + 'Ocy;': '\u041e', + 'ocy;': '\u043e', + 'odash;': '\u229d', + 'Odblac;': '\u0150', + 'odblac;': '\u0151', + 'odiv;': '\u2a38', + 'odot;': '\u2299', + 'odsold;': '\u29bc', + 'OElig;': '\u0152', + 'oelig;': '\u0153', + 'ofcir;': '\u29bf', + 'Ofr;': '\U0001d512', + 'ofr;': '\U0001d52c', + 'ogon;': '\u02db', + 'Ograve': '\xd2', + 'ograve': '\xf2', + 'Ograve;': '\xd2', + 'ograve;': '\xf2', + 'ogt;': '\u29c1', + 'ohbar;': '\u29b5', + 'ohm;': '\u03a9', + 'oint;': '\u222e', + 'olarr;': '\u21ba', + 'olcir;': '\u29be', + 'olcross;': '\u29bb', + 'oline;': '\u203e', + 'olt;': '\u29c0', + 'Omacr;': '\u014c', + 'omacr;': '\u014d', + 'Omega;': '\u03a9', + 'omega;': '\u03c9', + 'Omicron;': '\u039f', + 'omicron;': '\u03bf', + 'omid;': '\u29b6', + 'ominus;': '\u2296', + 'Oopf;': '\U0001d546', + 'oopf;': '\U0001d560', + 'opar;': '\u29b7', + 'OpenCurlyDoubleQuote;': '\u201c', + 'OpenCurlyQuote;': '\u2018', + 'operp;': '\u29b9', + 'oplus;': '\u2295', + 'Or;': '\u2a54', + 'or;': '\u2228', + 'orarr;': '\u21bb', + 'ord;': '\u2a5d', + 'order;': '\u2134', + 'orderof;': '\u2134', + 'ordf': '\xaa', + 'ordf;': '\xaa', + 'ordm': '\xba', + 'ordm;': '\xba', + 'origof;': '\u22b6', + 'oror;': '\u2a56', + 'orslope;': '\u2a57', + 'orv;': '\u2a5b', + 'oS;': '\u24c8', + 'Oscr;': '\U0001d4aa', + 'oscr;': '\u2134', + 'Oslash': '\xd8', + 'oslash': '\xf8', + 'Oslash;': '\xd8', + 'oslash;': '\xf8', + 'osol;': '\u2298', + 'Otilde': '\xd5', + 'otilde': '\xf5', + 'Otilde;': '\xd5', + 'otilde;': '\xf5', + 'Otimes;': '\u2a37', + 'otimes;': '\u2297', + 'otimesas;': '\u2a36', + 'Ouml': '\xd6', + 'ouml': '\xf6', + 'Ouml;': '\xd6', + 'ouml;': '\xf6', + 'ovbar;': '\u233d', + 'OverBar;': '\u203e', + 'OverBrace;': '\u23de', + 'OverBracket;': '\u23b4', + 'OverParenthesis;': '\u23dc', + 'par;': '\u2225', + 'para': '\xb6', + 'para;': '\xb6', + 'parallel;': '\u2225', + 'parsim;': '\u2af3', + 'parsl;': '\u2afd', + 'part;': '\u2202', + 'PartialD;': '\u2202', + 'Pcy;': '\u041f', + 'pcy;': '\u043f', + 'percnt;': '%', + 'period;': '.', + 'permil;': '\u2030', + 'perp;': '\u22a5', + 'pertenk;': '\u2031', + 'Pfr;': '\U0001d513', + 'pfr;': '\U0001d52d', + 'Phi;': '\u03a6', + 'phi;': '\u03c6', + 'phiv;': '\u03d5', + 'phmmat;': '\u2133', + 'phone;': '\u260e', + 'Pi;': '\u03a0', + 'pi;': '\u03c0', + 'pitchfork;': '\u22d4', + 
'piv;': '\u03d6', + 'planck;': '\u210f', + 'planckh;': '\u210e', + 'plankv;': '\u210f', + 'plus;': '+', + 'plusacir;': '\u2a23', + 'plusb;': '\u229e', + 'pluscir;': '\u2a22', + 'plusdo;': '\u2214', + 'plusdu;': '\u2a25', + 'pluse;': '\u2a72', + 'PlusMinus;': '\xb1', + 'plusmn': '\xb1', + 'plusmn;': '\xb1', + 'plussim;': '\u2a26', + 'plustwo;': '\u2a27', + 'pm;': '\xb1', + 'Poincareplane;': '\u210c', + 'pointint;': '\u2a15', + 'Popf;': '\u2119', + 'popf;': '\U0001d561', + 'pound': '\xa3', + 'pound;': '\xa3', + 'Pr;': '\u2abb', + 'pr;': '\u227a', + 'prap;': '\u2ab7', + 'prcue;': '\u227c', + 'prE;': '\u2ab3', + 'pre;': '\u2aaf', + 'prec;': '\u227a', + 'precapprox;': '\u2ab7', + 'preccurlyeq;': '\u227c', + 'Precedes;': '\u227a', + 'PrecedesEqual;': '\u2aaf', + 'PrecedesSlantEqual;': '\u227c', + 'PrecedesTilde;': '\u227e', + 'preceq;': '\u2aaf', + 'precnapprox;': '\u2ab9', + 'precneqq;': '\u2ab5', + 'precnsim;': '\u22e8', + 'precsim;': '\u227e', + 'Prime;': '\u2033', + 'prime;': '\u2032', + 'primes;': '\u2119', + 'prnap;': '\u2ab9', + 'prnE;': '\u2ab5', + 'prnsim;': '\u22e8', + 'prod;': '\u220f', + 'Product;': '\u220f', + 'profalar;': '\u232e', + 'profline;': '\u2312', + 'profsurf;': '\u2313', + 'prop;': '\u221d', + 'Proportion;': '\u2237', + 'Proportional;': '\u221d', + 'propto;': '\u221d', + 'prsim;': '\u227e', + 'prurel;': '\u22b0', + 'Pscr;': '\U0001d4ab', + 'pscr;': '\U0001d4c5', + 'Psi;': '\u03a8', + 'psi;': '\u03c8', + 'puncsp;': '\u2008', + 'Qfr;': '\U0001d514', + 'qfr;': '\U0001d52e', + 'qint;': '\u2a0c', + 'Qopf;': '\u211a', + 'qopf;': '\U0001d562', + 'qprime;': '\u2057', + 'Qscr;': '\U0001d4ac', + 'qscr;': '\U0001d4c6', + 'quaternions;': '\u210d', + 'quatint;': '\u2a16', + 'quest;': '?', + 'questeq;': '\u225f', + 'QUOT': '"', + 'quot': '"', + 'QUOT;': '"', + 'quot;': '"', + 'rAarr;': '\u21db', + 'race;': '\u223d\u0331', + 'Racute;': '\u0154', + 'racute;': '\u0155', + 'radic;': '\u221a', + 'raemptyv;': '\u29b3', + 'Rang;': '\u27eb', + 'rang;': '\u27e9', + 'rangd;': '\u2992', + 'range;': '\u29a5', + 'rangle;': '\u27e9', + 'raquo': '\xbb', + 'raquo;': '\xbb', + 'Rarr;': '\u21a0', + 'rArr;': '\u21d2', + 'rarr;': '\u2192', + 'rarrap;': '\u2975', + 'rarrb;': '\u21e5', + 'rarrbfs;': '\u2920', + 'rarrc;': '\u2933', + 'rarrfs;': '\u291e', + 'rarrhk;': '\u21aa', + 'rarrlp;': '\u21ac', + 'rarrpl;': '\u2945', + 'rarrsim;': '\u2974', + 'Rarrtl;': '\u2916', + 'rarrtl;': '\u21a3', + 'rarrw;': '\u219d', + 'rAtail;': '\u291c', + 'ratail;': '\u291a', + 'ratio;': '\u2236', + 'rationals;': '\u211a', + 'RBarr;': '\u2910', + 'rBarr;': '\u290f', + 'rbarr;': '\u290d', + 'rbbrk;': '\u2773', + 'rbrace;': '}', + 'rbrack;': ']', + 'rbrke;': '\u298c', + 'rbrksld;': '\u298e', + 'rbrkslu;': '\u2990', + 'Rcaron;': '\u0158', + 'rcaron;': '\u0159', + 'Rcedil;': '\u0156', + 'rcedil;': '\u0157', + 'rceil;': '\u2309', + 'rcub;': '}', + 'Rcy;': '\u0420', + 'rcy;': '\u0440', + 'rdca;': '\u2937', + 'rdldhar;': '\u2969', + 'rdquo;': '\u201d', + 'rdquor;': '\u201d', + 'rdsh;': '\u21b3', + 'Re;': '\u211c', + 'real;': '\u211c', + 'realine;': '\u211b', + 'realpart;': '\u211c', + 'reals;': '\u211d', + 'rect;': '\u25ad', + 'REG': '\xae', + 'reg': '\xae', + 'REG;': '\xae', + 'reg;': '\xae', + 'ReverseElement;': '\u220b', + 'ReverseEquilibrium;': '\u21cb', + 'ReverseUpEquilibrium;': '\u296f', + 'rfisht;': '\u297d', + 'rfloor;': '\u230b', + 'Rfr;': '\u211c', + 'rfr;': '\U0001d52f', + 'rHar;': '\u2964', + 'rhard;': '\u21c1', + 'rharu;': '\u21c0', + 'rharul;': '\u296c', + 'Rho;': '\u03a1', + 'rho;': '\u03c1', + 'rhov;': '\u03f1', + 
'RightAngleBracket;': '\u27e9', + 'RightArrow;': '\u2192', + 'Rightarrow;': '\u21d2', + 'rightarrow;': '\u2192', + 'RightArrowBar;': '\u21e5', + 'RightArrowLeftArrow;': '\u21c4', + 'rightarrowtail;': '\u21a3', + 'RightCeiling;': '\u2309', + 'RightDoubleBracket;': '\u27e7', + 'RightDownTeeVector;': '\u295d', + 'RightDownVector;': '\u21c2', + 'RightDownVectorBar;': '\u2955', + 'RightFloor;': '\u230b', + 'rightharpoondown;': '\u21c1', + 'rightharpoonup;': '\u21c0', + 'rightleftarrows;': '\u21c4', + 'rightleftharpoons;': '\u21cc', + 'rightrightarrows;': '\u21c9', + 'rightsquigarrow;': '\u219d', + 'RightTee;': '\u22a2', + 'RightTeeArrow;': '\u21a6', + 'RightTeeVector;': '\u295b', + 'rightthreetimes;': '\u22cc', + 'RightTriangle;': '\u22b3', + 'RightTriangleBar;': '\u29d0', + 'RightTriangleEqual;': '\u22b5', + 'RightUpDownVector;': '\u294f', + 'RightUpTeeVector;': '\u295c', + 'RightUpVector;': '\u21be', + 'RightUpVectorBar;': '\u2954', + 'RightVector;': '\u21c0', + 'RightVectorBar;': '\u2953', + 'ring;': '\u02da', + 'risingdotseq;': '\u2253', + 'rlarr;': '\u21c4', + 'rlhar;': '\u21cc', + 'rlm;': '\u200f', + 'rmoust;': '\u23b1', + 'rmoustache;': '\u23b1', + 'rnmid;': '\u2aee', + 'roang;': '\u27ed', + 'roarr;': '\u21fe', + 'robrk;': '\u27e7', + 'ropar;': '\u2986', + 'Ropf;': '\u211d', + 'ropf;': '\U0001d563', + 'roplus;': '\u2a2e', + 'rotimes;': '\u2a35', + 'RoundImplies;': '\u2970', + 'rpar;': ')', + 'rpargt;': '\u2994', + 'rppolint;': '\u2a12', + 'rrarr;': '\u21c9', + 'Rrightarrow;': '\u21db', + 'rsaquo;': '\u203a', + 'Rscr;': '\u211b', + 'rscr;': '\U0001d4c7', + 'Rsh;': '\u21b1', + 'rsh;': '\u21b1', + 'rsqb;': ']', + 'rsquo;': '\u2019', + 'rsquor;': '\u2019', + 'rthree;': '\u22cc', + 'rtimes;': '\u22ca', + 'rtri;': '\u25b9', + 'rtrie;': '\u22b5', + 'rtrif;': '\u25b8', + 'rtriltri;': '\u29ce', + 'RuleDelayed;': '\u29f4', + 'ruluhar;': '\u2968', + 'rx;': '\u211e', + 'Sacute;': '\u015a', + 'sacute;': '\u015b', + 'sbquo;': '\u201a', + 'Sc;': '\u2abc', + 'sc;': '\u227b', + 'scap;': '\u2ab8', + 'Scaron;': '\u0160', + 'scaron;': '\u0161', + 'sccue;': '\u227d', + 'scE;': '\u2ab4', + 'sce;': '\u2ab0', + 'Scedil;': '\u015e', + 'scedil;': '\u015f', + 'Scirc;': '\u015c', + 'scirc;': '\u015d', + 'scnap;': '\u2aba', + 'scnE;': '\u2ab6', + 'scnsim;': '\u22e9', + 'scpolint;': '\u2a13', + 'scsim;': '\u227f', + 'Scy;': '\u0421', + 'scy;': '\u0441', + 'sdot;': '\u22c5', + 'sdotb;': '\u22a1', + 'sdote;': '\u2a66', + 'searhk;': '\u2925', + 'seArr;': '\u21d8', + 'searr;': '\u2198', + 'searrow;': '\u2198', + 'sect': '\xa7', + 'sect;': '\xa7', + 'semi;': ';', + 'seswar;': '\u2929', + 'setminus;': '\u2216', + 'setmn;': '\u2216', + 'sext;': '\u2736', + 'Sfr;': '\U0001d516', + 'sfr;': '\U0001d530', + 'sfrown;': '\u2322', + 'sharp;': '\u266f', + 'SHCHcy;': '\u0429', + 'shchcy;': '\u0449', + 'SHcy;': '\u0428', + 'shcy;': '\u0448', + 'ShortDownArrow;': '\u2193', + 'ShortLeftArrow;': '\u2190', + 'shortmid;': '\u2223', + 'shortparallel;': '\u2225', + 'ShortRightArrow;': '\u2192', + 'ShortUpArrow;': '\u2191', + 'shy': '\xad', + 'shy;': '\xad', + 'Sigma;': '\u03a3', + 'sigma;': '\u03c3', + 'sigmaf;': '\u03c2', + 'sigmav;': '\u03c2', + 'sim;': '\u223c', + 'simdot;': '\u2a6a', + 'sime;': '\u2243', + 'simeq;': '\u2243', + 'simg;': '\u2a9e', + 'simgE;': '\u2aa0', + 'siml;': '\u2a9d', + 'simlE;': '\u2a9f', + 'simne;': '\u2246', + 'simplus;': '\u2a24', + 'simrarr;': '\u2972', + 'slarr;': '\u2190', + 'SmallCircle;': '\u2218', + 'smallsetminus;': '\u2216', + 'smashp;': '\u2a33', + 'smeparsl;': '\u29e4', + 'smid;': '\u2223', + 'smile;': 
'\u2323', + 'smt;': '\u2aaa', + 'smte;': '\u2aac', + 'smtes;': '\u2aac\ufe00', + 'SOFTcy;': '\u042c', + 'softcy;': '\u044c', + 'sol;': '/', + 'solb;': '\u29c4', + 'solbar;': '\u233f', + 'Sopf;': '\U0001d54a', + 'sopf;': '\U0001d564', + 'spades;': '\u2660', + 'spadesuit;': '\u2660', + 'spar;': '\u2225', + 'sqcap;': '\u2293', + 'sqcaps;': '\u2293\ufe00', + 'sqcup;': '\u2294', + 'sqcups;': '\u2294\ufe00', + 'Sqrt;': '\u221a', + 'sqsub;': '\u228f', + 'sqsube;': '\u2291', + 'sqsubset;': '\u228f', + 'sqsubseteq;': '\u2291', + 'sqsup;': '\u2290', + 'sqsupe;': '\u2292', + 'sqsupset;': '\u2290', + 'sqsupseteq;': '\u2292', + 'squ;': '\u25a1', + 'Square;': '\u25a1', + 'square;': '\u25a1', + 'SquareIntersection;': '\u2293', + 'SquareSubset;': '\u228f', + 'SquareSubsetEqual;': '\u2291', + 'SquareSuperset;': '\u2290', + 'SquareSupersetEqual;': '\u2292', + 'SquareUnion;': '\u2294', + 'squarf;': '\u25aa', + 'squf;': '\u25aa', + 'srarr;': '\u2192', + 'Sscr;': '\U0001d4ae', + 'sscr;': '\U0001d4c8', + 'ssetmn;': '\u2216', + 'ssmile;': '\u2323', + 'sstarf;': '\u22c6', + 'Star;': '\u22c6', + 'star;': '\u2606', + 'starf;': '\u2605', + 'straightepsilon;': '\u03f5', + 'straightphi;': '\u03d5', + 'strns;': '\xaf', + 'Sub;': '\u22d0', + 'sub;': '\u2282', + 'subdot;': '\u2abd', + 'subE;': '\u2ac5', + 'sube;': '\u2286', + 'subedot;': '\u2ac3', + 'submult;': '\u2ac1', + 'subnE;': '\u2acb', + 'subne;': '\u228a', + 'subplus;': '\u2abf', + 'subrarr;': '\u2979', + 'Subset;': '\u22d0', + 'subset;': '\u2282', + 'subseteq;': '\u2286', + 'subseteqq;': '\u2ac5', + 'SubsetEqual;': '\u2286', + 'subsetneq;': '\u228a', + 'subsetneqq;': '\u2acb', + 'subsim;': '\u2ac7', + 'subsub;': '\u2ad5', + 'subsup;': '\u2ad3', + 'succ;': '\u227b', + 'succapprox;': '\u2ab8', + 'succcurlyeq;': '\u227d', + 'Succeeds;': '\u227b', + 'SucceedsEqual;': '\u2ab0', + 'SucceedsSlantEqual;': '\u227d', + 'SucceedsTilde;': '\u227f', + 'succeq;': '\u2ab0', + 'succnapprox;': '\u2aba', + 'succneqq;': '\u2ab6', + 'succnsim;': '\u22e9', + 'succsim;': '\u227f', + 'SuchThat;': '\u220b', + 'Sum;': '\u2211', + 'sum;': '\u2211', + 'sung;': '\u266a', + 'sup1': '\xb9', + 'sup1;': '\xb9', + 'sup2': '\xb2', + 'sup2;': '\xb2', + 'sup3': '\xb3', + 'sup3;': '\xb3', + 'Sup;': '\u22d1', + 'sup;': '\u2283', + 'supdot;': '\u2abe', + 'supdsub;': '\u2ad8', + 'supE;': '\u2ac6', + 'supe;': '\u2287', + 'supedot;': '\u2ac4', + 'Superset;': '\u2283', + 'SupersetEqual;': '\u2287', + 'suphsol;': '\u27c9', + 'suphsub;': '\u2ad7', + 'suplarr;': '\u297b', + 'supmult;': '\u2ac2', + 'supnE;': '\u2acc', + 'supne;': '\u228b', + 'supplus;': '\u2ac0', + 'Supset;': '\u22d1', + 'supset;': '\u2283', + 'supseteq;': '\u2287', + 'supseteqq;': '\u2ac6', + 'supsetneq;': '\u228b', + 'supsetneqq;': '\u2acc', + 'supsim;': '\u2ac8', + 'supsub;': '\u2ad4', + 'supsup;': '\u2ad6', + 'swarhk;': '\u2926', + 'swArr;': '\u21d9', + 'swarr;': '\u2199', + 'swarrow;': '\u2199', + 'swnwar;': '\u292a', + 'szlig': '\xdf', + 'szlig;': '\xdf', + 'Tab;': '\t', + 'target;': '\u2316', + 'Tau;': '\u03a4', + 'tau;': '\u03c4', + 'tbrk;': '\u23b4', + 'Tcaron;': '\u0164', + 'tcaron;': '\u0165', + 'Tcedil;': '\u0162', + 'tcedil;': '\u0163', + 'Tcy;': '\u0422', + 'tcy;': '\u0442', + 'tdot;': '\u20db', + 'telrec;': '\u2315', + 'Tfr;': '\U0001d517', + 'tfr;': '\U0001d531', + 'there4;': '\u2234', + 'Therefore;': '\u2234', + 'therefore;': '\u2234', + 'Theta;': '\u0398', + 'theta;': '\u03b8', + 'thetasym;': '\u03d1', + 'thetav;': '\u03d1', + 'thickapprox;': '\u2248', + 'thicksim;': '\u223c', + 'ThickSpace;': '\u205f\u200a', + 'thinsp;': 
'\u2009', + 'ThinSpace;': '\u2009', + 'thkap;': '\u2248', + 'thksim;': '\u223c', + 'THORN': '\xde', + 'thorn': '\xfe', + 'THORN;': '\xde', + 'thorn;': '\xfe', + 'Tilde;': '\u223c', + 'tilde;': '\u02dc', + 'TildeEqual;': '\u2243', + 'TildeFullEqual;': '\u2245', + 'TildeTilde;': '\u2248', + 'times': '\xd7', + 'times;': '\xd7', + 'timesb;': '\u22a0', + 'timesbar;': '\u2a31', + 'timesd;': '\u2a30', + 'tint;': '\u222d', + 'toea;': '\u2928', + 'top;': '\u22a4', + 'topbot;': '\u2336', + 'topcir;': '\u2af1', + 'Topf;': '\U0001d54b', + 'topf;': '\U0001d565', + 'topfork;': '\u2ada', + 'tosa;': '\u2929', + 'tprime;': '\u2034', + 'TRADE;': '\u2122', + 'trade;': '\u2122', + 'triangle;': '\u25b5', + 'triangledown;': '\u25bf', + 'triangleleft;': '\u25c3', + 'trianglelefteq;': '\u22b4', + 'triangleq;': '\u225c', + 'triangleright;': '\u25b9', + 'trianglerighteq;': '\u22b5', + 'tridot;': '\u25ec', + 'trie;': '\u225c', + 'triminus;': '\u2a3a', + 'TripleDot;': '\u20db', + 'triplus;': '\u2a39', + 'trisb;': '\u29cd', + 'tritime;': '\u2a3b', + 'trpezium;': '\u23e2', + 'Tscr;': '\U0001d4af', + 'tscr;': '\U0001d4c9', + 'TScy;': '\u0426', + 'tscy;': '\u0446', + 'TSHcy;': '\u040b', + 'tshcy;': '\u045b', + 'Tstrok;': '\u0166', + 'tstrok;': '\u0167', + 'twixt;': '\u226c', + 'twoheadleftarrow;': '\u219e', + 'twoheadrightarrow;': '\u21a0', + 'Uacute': '\xda', + 'uacute': '\xfa', + 'Uacute;': '\xda', + 'uacute;': '\xfa', + 'Uarr;': '\u219f', + 'uArr;': '\u21d1', + 'uarr;': '\u2191', + 'Uarrocir;': '\u2949', + 'Ubrcy;': '\u040e', + 'ubrcy;': '\u045e', + 'Ubreve;': '\u016c', + 'ubreve;': '\u016d', + 'Ucirc': '\xdb', + 'ucirc': '\xfb', + 'Ucirc;': '\xdb', + 'ucirc;': '\xfb', + 'Ucy;': '\u0423', + 'ucy;': '\u0443', + 'udarr;': '\u21c5', + 'Udblac;': '\u0170', + 'udblac;': '\u0171', + 'udhar;': '\u296e', + 'ufisht;': '\u297e', + 'Ufr;': '\U0001d518', + 'ufr;': '\U0001d532', + 'Ugrave': '\xd9', + 'ugrave': '\xf9', + 'Ugrave;': '\xd9', + 'ugrave;': '\xf9', + 'uHar;': '\u2963', + 'uharl;': '\u21bf', + 'uharr;': '\u21be', + 'uhblk;': '\u2580', + 'ulcorn;': '\u231c', + 'ulcorner;': '\u231c', + 'ulcrop;': '\u230f', + 'ultri;': '\u25f8', + 'Umacr;': '\u016a', + 'umacr;': '\u016b', + 'uml': '\xa8', + 'uml;': '\xa8', + 'UnderBar;': '_', + 'UnderBrace;': '\u23df', + 'UnderBracket;': '\u23b5', + 'UnderParenthesis;': '\u23dd', + 'Union;': '\u22c3', + 'UnionPlus;': '\u228e', + 'Uogon;': '\u0172', + 'uogon;': '\u0173', + 'Uopf;': '\U0001d54c', + 'uopf;': '\U0001d566', + 'UpArrow;': '\u2191', + 'Uparrow;': '\u21d1', + 'uparrow;': '\u2191', + 'UpArrowBar;': '\u2912', + 'UpArrowDownArrow;': '\u21c5', + 'UpDownArrow;': '\u2195', + 'Updownarrow;': '\u21d5', + 'updownarrow;': '\u2195', + 'UpEquilibrium;': '\u296e', + 'upharpoonleft;': '\u21bf', + 'upharpoonright;': '\u21be', + 'uplus;': '\u228e', + 'UpperLeftArrow;': '\u2196', + 'UpperRightArrow;': '\u2197', + 'Upsi;': '\u03d2', + 'upsi;': '\u03c5', + 'upsih;': '\u03d2', + 'Upsilon;': '\u03a5', + 'upsilon;': '\u03c5', + 'UpTee;': '\u22a5', + 'UpTeeArrow;': '\u21a5', + 'upuparrows;': '\u21c8', + 'urcorn;': '\u231d', + 'urcorner;': '\u231d', + 'urcrop;': '\u230e', + 'Uring;': '\u016e', + 'uring;': '\u016f', + 'urtri;': '\u25f9', + 'Uscr;': '\U0001d4b0', + 'uscr;': '\U0001d4ca', + 'utdot;': '\u22f0', + 'Utilde;': '\u0168', + 'utilde;': '\u0169', + 'utri;': '\u25b5', + 'utrif;': '\u25b4', + 'uuarr;': '\u21c8', + 'Uuml': '\xdc', + 'uuml': '\xfc', + 'Uuml;': '\xdc', + 'uuml;': '\xfc', + 'uwangle;': '\u29a7', + 'vangrt;': '\u299c', + 'varepsilon;': '\u03f5', + 'varkappa;': '\u03f0', + 'varnothing;': 
'\u2205', + 'varphi;': '\u03d5', + 'varpi;': '\u03d6', + 'varpropto;': '\u221d', + 'vArr;': '\u21d5', + 'varr;': '\u2195', + 'varrho;': '\u03f1', + 'varsigma;': '\u03c2', + 'varsubsetneq;': '\u228a\ufe00', + 'varsubsetneqq;': '\u2acb\ufe00', + 'varsupsetneq;': '\u228b\ufe00', + 'varsupsetneqq;': '\u2acc\ufe00', + 'vartheta;': '\u03d1', + 'vartriangleleft;': '\u22b2', + 'vartriangleright;': '\u22b3', + 'Vbar;': '\u2aeb', + 'vBar;': '\u2ae8', + 'vBarv;': '\u2ae9', + 'Vcy;': '\u0412', + 'vcy;': '\u0432', + 'VDash;': '\u22ab', + 'Vdash;': '\u22a9', + 'vDash;': '\u22a8', + 'vdash;': '\u22a2', + 'Vdashl;': '\u2ae6', + 'Vee;': '\u22c1', + 'vee;': '\u2228', + 'veebar;': '\u22bb', + 'veeeq;': '\u225a', + 'vellip;': '\u22ee', + 'Verbar;': '\u2016', + 'verbar;': '|', + 'Vert;': '\u2016', + 'vert;': '|', + 'VerticalBar;': '\u2223', + 'VerticalLine;': '|', + 'VerticalSeparator;': '\u2758', + 'VerticalTilde;': '\u2240', + 'VeryThinSpace;': '\u200a', + 'Vfr;': '\U0001d519', + 'vfr;': '\U0001d533', + 'vltri;': '\u22b2', + 'vnsub;': '\u2282\u20d2', + 'vnsup;': '\u2283\u20d2', + 'Vopf;': '\U0001d54d', + 'vopf;': '\U0001d567', + 'vprop;': '\u221d', + 'vrtri;': '\u22b3', + 'Vscr;': '\U0001d4b1', + 'vscr;': '\U0001d4cb', + 'vsubnE;': '\u2acb\ufe00', + 'vsubne;': '\u228a\ufe00', + 'vsupnE;': '\u2acc\ufe00', + 'vsupne;': '\u228b\ufe00', + 'Vvdash;': '\u22aa', + 'vzigzag;': '\u299a', + 'Wcirc;': '\u0174', + 'wcirc;': '\u0175', + 'wedbar;': '\u2a5f', + 'Wedge;': '\u22c0', + 'wedge;': '\u2227', + 'wedgeq;': '\u2259', + 'weierp;': '\u2118', + 'Wfr;': '\U0001d51a', + 'wfr;': '\U0001d534', + 'Wopf;': '\U0001d54e', + 'wopf;': '\U0001d568', + 'wp;': '\u2118', + 'wr;': '\u2240', + 'wreath;': '\u2240', + 'Wscr;': '\U0001d4b2', + 'wscr;': '\U0001d4cc', + 'xcap;': '\u22c2', + 'xcirc;': '\u25ef', + 'xcup;': '\u22c3', + 'xdtri;': '\u25bd', + 'Xfr;': '\U0001d51b', + 'xfr;': '\U0001d535', + 'xhArr;': '\u27fa', + 'xharr;': '\u27f7', + 'Xi;': '\u039e', + 'xi;': '\u03be', + 'xlArr;': '\u27f8', + 'xlarr;': '\u27f5', + 'xmap;': '\u27fc', + 'xnis;': '\u22fb', + 'xodot;': '\u2a00', + 'Xopf;': '\U0001d54f', + 'xopf;': '\U0001d569', + 'xoplus;': '\u2a01', + 'xotime;': '\u2a02', + 'xrArr;': '\u27f9', + 'xrarr;': '\u27f6', + 'Xscr;': '\U0001d4b3', + 'xscr;': '\U0001d4cd', + 'xsqcup;': '\u2a06', + 'xuplus;': '\u2a04', + 'xutri;': '\u25b3', + 'xvee;': '\u22c1', + 'xwedge;': '\u22c0', + 'Yacute': '\xdd', + 'yacute': '\xfd', + 'Yacute;': '\xdd', + 'yacute;': '\xfd', + 'YAcy;': '\u042f', + 'yacy;': '\u044f', + 'Ycirc;': '\u0176', + 'ycirc;': '\u0177', + 'Ycy;': '\u042b', + 'ycy;': '\u044b', + 'yen': '\xa5', + 'yen;': '\xa5', + 'Yfr;': '\U0001d51c', + 'yfr;': '\U0001d536', + 'YIcy;': '\u0407', + 'yicy;': '\u0457', + 'Yopf;': '\U0001d550', + 'yopf;': '\U0001d56a', + 'Yscr;': '\U0001d4b4', + 'yscr;': '\U0001d4ce', + 'YUcy;': '\u042e', + 'yucy;': '\u044e', + 'yuml': '\xff', + 'Yuml;': '\u0178', + 'yuml;': '\xff', + 'Zacute;': '\u0179', + 'zacute;': '\u017a', + 'Zcaron;': '\u017d', + 'zcaron;': '\u017e', + 'Zcy;': '\u0417', + 'zcy;': '\u0437', + 'Zdot;': '\u017b', + 'zdot;': '\u017c', + 'zeetrf;': '\u2128', + 'ZeroWidthSpace;': '\u200b', + 'Zeta;': '\u0396', + 'zeta;': '\u03b6', + 'Zfr;': '\u2128', + 'zfr;': '\U0001d537', + 'ZHcy;': '\u0416', + 'zhcy;': '\u0436', + 'zigrarr;': '\u21dd', + 'Zopf;': '\u2124', + 'zopf;': '\U0001d56b', + 'Zscr;': '\U0001d4b5', + 'zscr;': '\U0001d4cf', + 'zwj;': '\u200d', + 'zwnj;': '\u200c', +} + +# maps the Unicode codepoint to the HTML entity name +codepoint2name = {} + +# maps the HTML entity name to the character 
+# (or a character reference if the character is outside the Latin-1 range) +entitydefs = {} + +for (name, codepoint) in name2codepoint.items(): + codepoint2name[codepoint] = name + entitydefs[name] = chr(codepoint) + +del name, codepoint diff --git a/minor_project/lib/python3.6/site-packages/future/backports/html/parser.py b/minor_project/lib/python3.6/site-packages/future/backports/html/parser.py new file mode 100644 index 0000000..fb65263 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/html/parser.py @@ -0,0 +1,536 @@ +"""A parser for HTML and XHTML. + +Backported for python-future from Python 3.3. +""" + +# This file is based on sgmllib.py, but the API is slightly different. + +# XXX There should be a way to distinguish between PCDATA (parsed +# character data -- the normal case), RCDATA (replaceable character +# data -- only char and entity references and end tags are special) +# and CDATA (character data -- only end tags are special). + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future.builtins import * +from future.backports import _markupbase +import re +import warnings + +# Regular expressions used for parsing + +interesting_normal = re.compile('[&<]') +incomplete = re.compile('&[a-zA-Z#]') + +entityref = re.compile('&([a-zA-Z][-.a-zA-Z0-9]*)[^a-zA-Z0-9]') +charref = re.compile('&#(?:[0-9]+|[xX][0-9a-fA-F]+)[^0-9a-fA-F]') + +starttagopen = re.compile('<[a-zA-Z]') +piclose = re.compile('>') +commentclose = re.compile(r'--\s*>') +tagfind = re.compile('([a-zA-Z][-.a-zA-Z0-9:_]*)(?:\s|/(?!>))*') +# see http://www.w3.org/TR/html5/tokenization.html#tag-open-state +# and http://www.w3.org/TR/html5/tokenization.html#tag-name-state +tagfind_tolerant = re.compile('[a-zA-Z][^\t\n\r\f />\x00]*') +# Note: +# 1) the strict attrfind isn't really strict, but we can't make it +# correctly strict without breaking backward compatibility; +# 2) if you change attrfind remember to update locatestarttagend too; +# 3) if you change attrfind and/or locatestarttagend the parser will +# explode, so don't do it. +attrfind = re.compile( + r'\s*([a-zA-Z_][-.:a-zA-Z_0-9]*)(\s*=\s*' + r'(\'[^\']*\'|"[^"]*"|[^\s"\'=<>`]*))?') +attrfind_tolerant = re.compile( + r'((?<=[\'"\s/])[^\s/>][^\s/=>]*)(\s*=+\s*' + r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?(?:\s|/(?!>))*') +locatestarttagend = re.compile(r""" + <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name + (?:\s+ # whitespace before attribute name + (?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name + (?:\s*=\s* # value indicator + (?:'[^']*' # LITA-enclosed value + |\"[^\"]*\" # LIT-enclosed value + |[^'\">\s]+ # bare value + ) + )? + ) + )* + \s* # trailing whitespace +""", re.VERBOSE) +locatestarttagend_tolerant = re.compile(r""" + <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name + (?:[\s/]* # optional whitespace before attribute name + (?:(?<=['"\s/])[^\s/>][^\s/=>]* # attribute name + (?:\s*=+\s* # value indicator + (?:'[^']*' # LITA-enclosed value + |"[^"]*" # LIT-enclosed value + |(?!['"])[^>\s]* # bare value + ) + (?:\s*,)* # possibly followed by a comma + )?(?:\s|/(?!>))* + )* + )? 
+  \s*                                # trailing whitespace
+  """, re.VERBOSE)
+endendtag = re.compile('>')
+# the HTML 5 spec, section 8.1.2.2, doesn't allow spaces between
+# <!-- and --> and no space before the -->
+endtagfind = re.compile('</\s*([a-zA-Z][-.a-zA-Z0-9:_]*)\s*>')
+
+
+class HTMLParseError(Exception):
+    """Exception raised for all parse errors."""
+
+    def __init__(self, msg, position=(None, None)):
+        assert msg
+        self.msg = msg
+        self.lineno = position[0]
+        self.offset = position[1]
+
+    def __str__(self):
+        result = self.msg
+        if self.lineno is not None:
+            result = result + ", at line %d" % self.lineno
+        if self.offset is not None:
+            result = result + ", column %d" % (self.offset + 1)
+        return result
+
+
+class HTMLParser(_markupbase.ParserBase):
+    """Find tags and other markup and call handler functions.
+
+    Usage:
+        p = HTMLParser()
+        p.feed(data)
+        ...
+        p.close()
+
+    Start tags are handled by calling self.handle_starttag() or
+    self.handle_startendtag(); end tags by self.handle_endtag().  The
+    data between tags is passed from the parser to the derived class
+    by calling self.handle_data() with the data as argument (the data
+    may be split up in arbitrary chunks).  Entity references are
+    passed by calling self.handle_entityref() with the entity
+    reference as the argument.  Numeric character references are
+    passed to self.handle_charref() with the string containing the
+    reference as the argument.
+    """
+
+    CDATA_CONTENT_ELEMENTS = ("script", "style")
+
+    def __init__(self, strict=False):
+        """Initialize and reset this instance.
+
+        If strict is set to False (the default) the parser will parse invalid
+        markup, otherwise it will raise an error.  Note that the strict mode
+        is deprecated.
+        """
+        if strict:
+            warnings.warn("The strict mode is deprecated.",
+                          DeprecationWarning, stacklevel=2)
+        self.strict = strict
+        self.reset()
+
+    def reset(self):
+        """Reset this instance.  Loses all unprocessed data."""
+        self.rawdata = ''
+        self.lasttag = '???'
+        self.interesting = interesting_normal
+        self.cdata_elem = None
+        _markupbase.ParserBase.reset(self)
+
+    def feed(self, data):
+        r"""Feed data to the parser.
+
+        Call this as often as you want, with as little or as much text
+        as you want (may include '\n').
+        """
+        self.rawdata = self.rawdata + data
+        self.goahead(0)
+
+    def close(self):
+        """Handle any buffered data."""
+        self.goahead(1)
+
+    def error(self, message):
+        raise HTMLParseError(message, self.getpos())
+
+    __starttag_text = None
+
+    def get_starttag_text(self):
+        """Return full source of start tag: '<...>'."""
+        return self.__starttag_text
+
+    def set_cdata_mode(self, elem):
+        self.cdata_elem = elem.lower()
+        self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)
+
+    def clear_cdata_mode(self):
+        self.interesting = interesting_normal
+        self.cdata_elem = None
+
+    # Internal -- handle data as far as reasonable.  May leave state
+    # and data to be processed by a subsequent call.  If 'end' is
+    # true, force handling all data as if followed by EOF marker.
+    def goahead(self, end):
+        rawdata = self.rawdata
+        i = 0
+        n = len(rawdata)
+        while i < n:
+            match = self.interesting.search(rawdata, i) # < or &
+            if match:
+                j = match.start()
+            else:
+                if self.cdata_elem:
+                    break
+                j = n
+            if i < j: self.handle_data(rawdata[i:j])
+            i = self.updatepos(i, j)
+            if i == n: break
+            startswith = rawdata.startswith
+            if startswith('<', i):
+                if starttagopen.match(rawdata, i): # < + letter
+                    k = self.parse_starttag(i)
+                elif startswith("</", i):
+                    k = self.parse_endtag(i)
+                elif startswith("<!--", i):
+                    k = self.parse_comment(i)
+                elif startswith("<?", i):
+                    k = self.parse_pi(i)
+                elif startswith("<!", i):
+                    if self.strict:
+                        k = self.parse_declaration(i)
+                    else:
+                        k = self.parse_html_declaration(i)
+                elif (i + 1) < n:
+                    self.handle_data("<")
+                    k = i + 1
+                else:
+                    break
+                if k < 0:
+                    if not end:
+                        break
+                    if self.strict:
+                        self.error("EOF in middle of construct")
+                    k = rawdata.find('>', i + 1)
+                    if k < 0:
+                        k = rawdata.find('<', i + 1)
+                        if k < 0:
+                            k = i + 1
+                    else:
+                        k += 1
+                    self.handle_data(rawdata[i:k])
+                i = self.updatepos(i, k)
+            elif startswith("&#", i):
+                match = charref.match(rawdata, i)
+                if match:
+                    name = match.group()[2:-1]
+                    self.handle_charref(name)
+                    k = match.end()
+                    if not startswith(';', k-1):
+                        k = k - 1
+                    i = self.updatepos(i, k)
+                    continue
+                else:
+                    if ";" in rawdata[i:]: #bail by consuming &#
+                        self.handle_data(rawdata[0:2])
+                        i = self.updatepos(i, 2)
+                    break
+            elif startswith('&', i):
+                match = entityref.match(rawdata, i)
+                if match:
+                    name = match.group(1)
+                    self.handle_entityref(name)
+                    k = match.end()
+                    if not startswith(';', k-1):
+                        k = k - 1
+                    i = self.updatepos(i, k)
+                    continue
+                match = incomplete.match(rawdata, i)
+                if match:
+                    # match.group() will contain at least 2 chars
+                    if end and match.group() == rawdata[i:]:
+                        if self.strict:
+                            self.error("EOF in middle of entity or char ref")
+                        else:
+                            k = match.end()
+                            if k <= i:
+                                k = n
+                            i = self.updatepos(i, i + 1)
+                    # incomplete
+                    break
+                elif (i + 1) < n:
+                    # not the end of the buffer, and can't be confused
+                    # with some other construct
+                    self.handle_data("&")
+                    i = self.updatepos(i, i + 1)
+                else:
+                    break
+            else:
+                assert 0, "interesting.search() lied"
+        # end while
+        if end and i < n and not self.cdata_elem:
+            self.handle_data(rawdata[i:n])
+            i = self.updatepos(i, n)
+        self.rawdata = rawdata[i:]
+
+    # Internal -- parse html declarations, return length or -1 if not terminated
+    # See w3.org/TR/html5/tokenization.html#markup-declaration-open-state
+    # See also parse_declaration in _markupbase
+    def parse_html_declaration(self, i):
+        rawdata = self.rawdata
+        assert rawdata[i:i+2] == '<!', ('unexpected call to '
+                                        'parse_html_declaration()')
+        if rawdata[i:i+4] == '<!--':
+            # this case is actually already handled in goahead()
+            return self.parse_comment(i)
+        elif rawdata[i:i+3] == '<![':
+            return self.parse_marked_section(i)
+        elif rawdata[i:i+9].lower() == '<!doctype':
+            # find the closing >
+            gtpos = rawdata.find('>', i+9)
+            if gtpos == -1:
+                return -1
+            self.handle_decl(rawdata[i+2:gtpos])
+            return gtpos+1
+        else:
+            return self.parse_bogus_comment(i)
+
+    # Internal -- parse bogus comment, return length or -1 if not terminated
+    # see http://www.w3.org/TR/html5/tokenization.html#bogus-comment-state
+    def parse_bogus_comment(self, i, report=1):
+        rawdata = self.rawdata
+        assert rawdata[i:i+2] in ('<!', '</'), ('unexpected call to '
+                                                'parse_comment()')
+        pos = rawdata.find('>', i+2)
+        if pos == -1:
+            return -1
+        if report:
+            self.handle_comment(rawdata[i+2:pos])
+        return pos + 1
+
+    # Internal -- parse processing instr, return end or -1 if not terminated
+    def parse_pi(self, i):
+        rawdata = self.rawdata
+        assert rawdata[i:i+2] == '<?', 'unexpected call to parse_pi()'
+        match = piclose.search(rawdata, i+2) # >
+        if not match:
+            return -1
+        j = match.start()
+        self.handle_pi(rawdata[i+2: j])
+        j = match.end()
+        return j
+
+    # Internal -- handle starttag, return end or -1 if not terminated
+    def parse_starttag(self, i):
+        self.__starttag_text = None
+        endpos = self.check_for_whole_start_tag(i)
+        if endpos < 0:
+            return endpos
+        rawdata = self.rawdata
+        self.__starttag_text = rawdata[i:endpos]
+
+        # Now parse the data between i+1 and j into a tag and attrs
+        attrs = []
+        match = tagfind.match(rawdata, i+1)
+        assert match, 'unexpected call to parse_starttag()'
+        k = match.end()
+        self.lasttag = tag = match.group(1).lower()
+        while k < endpos:
+            if self.strict:
+                m = attrfind.match(rawdata, k)
+            else:
+                m = attrfind_tolerant.match(rawdata, k)
+            if not m:
+                break
+            attrname, rest, attrvalue = m.group(1, 2, 3)
+            if not rest:
+                attrvalue = None
+            elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
+                 attrvalue[:1] == '"' == attrvalue[-1:]:
+                attrvalue = attrvalue[1:-1]
+            if attrvalue:
+                attrvalue = self.unescape(attrvalue)
+            attrs.append((attrname.lower(), attrvalue))
+            k = m.end()
+
+        end = rawdata[k:endpos].strip()
+        if end not in (">", "/>"):
+            lineno, offset = self.getpos()
+            if "\n" in self.__starttag_text:
+                lineno = lineno + self.__starttag_text.count("\n")
+                offset = len(self.__starttag_text) \
+                         - self.__starttag_text.rfind("\n")
+            else:
+                offset = offset + len(self.__starttag_text)
+            if self.strict:
+                self.error("junk characters in start tag: %r"
+                           % (rawdata[k:endpos][:20],))
+            self.handle_data(rawdata[i:endpos])
+            return endpos
+        if end.endswith('/>'):
+            # XHTML-style empty tag: <span attr="value" />
+            self.handle_startendtag(tag, attrs)
+        else:
+            self.handle_starttag(tag, attrs)
+            if tag in self.CDATA_CONTENT_ELEMENTS:
+                self.set_cdata_mode(tag)
+        return endpos
+
+    # Internal -- check to see if we have a complete starttag; return end
+    # or -1 if incomplete.
+    def check_for_whole_start_tag(self, i):
+        rawdata = self.rawdata
+        if self.strict:
+            m = locatestarttagend.match(rawdata, i)
+        else:
+            m = locatestarttagend_tolerant.match(rawdata, i)
+        if m:
+            j = m.end()
+            next = rawdata[j:j+1]
+            if next == ">":
+                return j + 1
+            if next == "/":
+                if rawdata.startswith("/>", j):
+                    return j + 2
+                if rawdata.startswith("/", j):
+                    # buffer boundary
+                    return -1
+                # else bogus input
+                if self.strict:
+                    self.updatepos(i, j + 1)
+                    self.error("malformed empty start tag")
+                if j > i:
+                    return j
+                else:
+                    return i + 1
+            if next == "":
+                # end of input
+                return -1
+            if next in ("abcdefghijklmnopqrstuvwxyz=/"
+                        "ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
+                # end of input in or before attribute value, or we have the
+                # '/' from a '/>' ending
+                return -1
+            if self.strict:
+                self.updatepos(i, j)
+                self.error("malformed start tag")
+            if j > i:
+                return j
+            else:
+                return i + 1
+        raise AssertionError("we should not get here!")
+
+    # Internal -- parse endtag, return end or -1 if incomplete
+    def parse_endtag(self, i):
+        rawdata = self.rawdata
+        assert rawdata[i:i+2] == "</", "unexpected call to parse_endtag"
+        match = endendtag.search(rawdata, i+1) # >
+        if not match:
+            return -1
+        gtpos = match.end()
+        match = endtagfind.match(rawdata, i) # </ + tag + >
+        if not match:
+            if self.cdata_elem is not None:
+                self.handle_data(rawdata[i:gtpos])
+                return gtpos
+            if self.strict:
+                self.error("bad end tag: %r" % (rawdata[i:gtpos],))
+            # find the name: w3.org/TR/html5/tokenization.html#tag-name-state
+            namematch = tagfind_tolerant.match(rawdata, i+2)
+            if not namematch:
+                # w3.org/TR/html5/tokenization.html#end-tag-open-state
+                if rawdata[i:i+3] == '</>':
+                    return i+3
+                else:
+                    return self.parse_bogus_comment(i)
+            tagname = namematch.group().lower()
+            # consume and ignore other stuff between the name and the >
+            # Note: this is not 100% correct, since we might have things like
+            # </tag attr=">">, but looking for > after the name should cover
+            # most of the cases and is much simpler
+            gtpos = rawdata.find('>', namematch.end())
+            self.handle_endtag(tagname)
+            return gtpos+1
+
+        elem = match.group(1).lower() # script or style
+        if self.cdata_elem is not None:
+            if elem != self.cdata_elem:
+                self.handle_data(rawdata[i:gtpos])
+                return gtpos
+
+        self.handle_endtag(elem.lower())
+        self.clear_cdata_mode()
+        return gtpos
+
+    # Overridable -- finish processing of start+end tag: <tag.../>
+    def handle_startendtag(self, tag, attrs):
+        self.handle_starttag(tag, attrs)
self.handle_endtag(tag) + + # Overridable -- handle start tag + def handle_starttag(self, tag, attrs): + pass + + # Overridable -- handle end tag + def handle_endtag(self, tag): + pass + + # Overridable -- handle character reference + def handle_charref(self, name): + pass + + # Overridable -- handle entity reference + def handle_entityref(self, name): + pass + + # Overridable -- handle data + def handle_data(self, data): + pass + + # Overridable -- handle comment + def handle_comment(self, data): + pass + + # Overridable -- handle declaration + def handle_decl(self, decl): + pass + + # Overridable -- handle processing instruction + def handle_pi(self, data): + pass + + def unknown_decl(self, data): + if self.strict: + self.error("unknown declaration: %r" % (data,)) + + # Internal -- helper to remove special character quoting + def unescape(self, s): + if '&' not in s: + return s + def replaceEntities(s): + s = s.groups()[0] + try: + if s[0] == "#": + s = s[1:] + if s[0] in ['x','X']: + c = int(s[1:].rstrip(';'), 16) + else: + c = int(s.rstrip(';')) + return chr(c) + except ValueError: + return '&#' + s + else: + from future.backports.html.entities import html5 + if s in html5: + return html5[s] + elif s.endswith(';'): + return '&' + s + for x in range(2, len(s)): + if s[:x] in html5: + return html5[s[:x]] + s[x:] + else: + return '&' + s + + return re.sub(r"&(#?[xX]?(?:[0-9a-fA-F]+;|\w{1,32};?))", + replaceEntities, s) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/http/__init__.py b/minor_project/lib/python3.6/site-packages/future/backports/http/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..b94cfbe Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/client.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/client.cpython-36.pyc new file mode 100644 index 0000000..a5d94d9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/client.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/cookiejar.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/cookiejar.cpython-36.pyc new file mode 100644 index 0000000..54d7a5b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/cookiejar.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/cookies.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/cookies.cpython-36.pyc new file mode 100644 index 0000000..6ac3107 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/cookies.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/server.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/server.cpython-36.pyc new file mode 100644 index 0000000..618fb91 Binary files /dev/null and 
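The HTMLParser backport above is driven entirely by overriding its handle_* hooks, so a short usage sketch may help; it is illustrative only (not part of the vendored file) and assumes python-future is importable under the package path added by this diff.

# Illustrative only: collect href attributes with the backported parser.
from future.backports.html.parser import HTMLParser

class LinkCollector(HTMLParser):
    """Collect href values from anchor start tags."""
    def __init__(self):
        HTMLParser.__init__(self)      # strict=False by default
        self.links = []

    def handle_starttag(self, tag, attrs):
        if tag == "a":
            for name, value in attrs:
                if name == "href" and value:
                    self.links.append(value)

collector = LinkCollector()
collector.feed('<p>See <a href="https://example.com">example</a>.</p>')
collector.close()
print(collector.links)                 # ['https://example.com']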
b/minor_project/lib/python3.6/site-packages/future/backports/http/__pycache__/server.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/http/client.py b/minor_project/lib/python3.6/site-packages/future/backports/http/client.py new file mode 100644 index 0000000..e663d12 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/http/client.py @@ -0,0 +1,1346 @@ +"""HTTP/1.1 client library + +A backport of the Python 3.3 http/client.py module for python-future. + + + + +HTTPConnection goes through a number of "states", which define when a client +may legally make another request or fetch the response for a particular +request. This diagram details these state transitions: + + (null) + | + | HTTPConnection() + v + Idle + | + | putrequest() + v + Request-started + | + | ( putheader() )* endheaders() + v + Request-sent + | + | response = getresponse() + v + Unread-response [Response-headers-read] + |\____________________ + | | + | response.read() | putrequest() + v v + Idle Req-started-unread-response + ______/| + / | + response.read() | | ( putheader() )* endheaders() + v v + Request-started Req-sent-unread-response + | + | response.read() + v + Request-sent + +This diagram presents the following rules: + -- a second request may not be started until {response-headers-read} + -- a response [object] cannot be retrieved until {request-sent} + -- there is no differentiation between an unread response body and a + partially read response body + +Note: this enforcement is applied by the HTTPConnection class. The + HTTPResponse class does not enforce this state machine, which + implies sophisticated clients may accelerate the request/response + pipeline. Caution should be taken, though: accelerating the states + beyond the above pattern may imply knowledge of the server's + connection-close behavior for certain requests. For example, it + is impossible to tell whether the server will close the connection + UNTIL the response headers have been read; this means that further + requests cannot be placed into the pipeline until it is known that + the server will NOT be closing the connection. 
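As a concrete illustration of the Idle / Request-sent / Unread-response cycle described above, a minimal session might look like the sketch below; it is not part of the vendored module, and the host is a placeholder.

# Illustrative only: one request/response cycle on the backported client.
from future.backports.http.client import HTTPConnection

conn = HTTPConnection("www.example.com", 80, timeout=10)   # logical state: Idle
conn.request("GET", "/")                                   # putrequest/putheader/endheaders -> Request-sent
resp = conn.getresponse()                                  # Unread-response
body = resp.read()                                         # back to Idle once the body is consumed
print(resp.status, resp.reason, len(body))
conn.close()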
+ +Logical State __state __response +------------- ------- ---------- +Idle _CS_IDLE None +Request-started _CS_REQ_STARTED None +Request-sent _CS_REQ_SENT None +Unread-response _CS_IDLE +Req-started-unread-response _CS_REQ_STARTED +Req-sent-unread-response _CS_REQ_SENT +""" + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future.builtins import bytes, int, str, super +from future.utils import PY2 + +from future.backports.email import parser as email_parser +from future.backports.email import message as email_message +from future.backports.misc import create_connection as socket_create_connection +import io +import os +import socket +from future.backports.urllib.parse import urlsplit +import warnings +from array import array + +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + +__all__ = ["HTTPResponse", "HTTPConnection", + "HTTPException", "NotConnected", "UnknownProtocol", + "UnknownTransferEncoding", "UnimplementedFileMode", + "IncompleteRead", "InvalidURL", "ImproperConnectionState", + "CannotSendRequest", "CannotSendHeader", "ResponseNotReady", + "BadStatusLine", "error", "responses"] + +HTTP_PORT = 80 +HTTPS_PORT = 443 + +_UNKNOWN = 'UNKNOWN' + +# connection states +_CS_IDLE = 'Idle' +_CS_REQ_STARTED = 'Request-started' +_CS_REQ_SENT = 'Request-sent' + +# status codes +# informational +CONTINUE = 100 +SWITCHING_PROTOCOLS = 101 +PROCESSING = 102 + +# successful +OK = 200 +CREATED = 201 +ACCEPTED = 202 +NON_AUTHORITATIVE_INFORMATION = 203 +NO_CONTENT = 204 +RESET_CONTENT = 205 +PARTIAL_CONTENT = 206 +MULTI_STATUS = 207 +IM_USED = 226 + +# redirection +MULTIPLE_CHOICES = 300 +MOVED_PERMANENTLY = 301 +FOUND = 302 +SEE_OTHER = 303 +NOT_MODIFIED = 304 +USE_PROXY = 305 +TEMPORARY_REDIRECT = 307 + +# client error +BAD_REQUEST = 400 +UNAUTHORIZED = 401 +PAYMENT_REQUIRED = 402 +FORBIDDEN = 403 +NOT_FOUND = 404 +METHOD_NOT_ALLOWED = 405 +NOT_ACCEPTABLE = 406 +PROXY_AUTHENTICATION_REQUIRED = 407 +REQUEST_TIMEOUT = 408 +CONFLICT = 409 +GONE = 410 +LENGTH_REQUIRED = 411 +PRECONDITION_FAILED = 412 +REQUEST_ENTITY_TOO_LARGE = 413 +REQUEST_URI_TOO_LONG = 414 +UNSUPPORTED_MEDIA_TYPE = 415 +REQUESTED_RANGE_NOT_SATISFIABLE = 416 +EXPECTATION_FAILED = 417 +UNPROCESSABLE_ENTITY = 422 +LOCKED = 423 +FAILED_DEPENDENCY = 424 +UPGRADE_REQUIRED = 426 +PRECONDITION_REQUIRED = 428 +TOO_MANY_REQUESTS = 429 +REQUEST_HEADER_FIELDS_TOO_LARGE = 431 + +# server error +INTERNAL_SERVER_ERROR = 500 +NOT_IMPLEMENTED = 501 +BAD_GATEWAY = 502 +SERVICE_UNAVAILABLE = 503 +GATEWAY_TIMEOUT = 504 +HTTP_VERSION_NOT_SUPPORTED = 505 +INSUFFICIENT_STORAGE = 507 +NOT_EXTENDED = 510 +NETWORK_AUTHENTICATION_REQUIRED = 511 + +# Mapping status codes to official W3C names +responses = { + 100: 'Continue', + 101: 'Switching Protocols', + + 200: 'OK', + 201: 'Created', + 202: 'Accepted', + 203: 'Non-Authoritative Information', + 204: 'No Content', + 205: 'Reset Content', + 206: 'Partial Content', + + 300: 'Multiple Choices', + 301: 'Moved Permanently', + 302: 'Found', + 303: 'See Other', + 304: 'Not Modified', + 305: 'Use Proxy', + 306: '(Unused)', + 307: 'Temporary Redirect', + + 400: 'Bad Request', + 401: 'Unauthorized', + 402: 'Payment Required', + 403: 'Forbidden', + 404: 'Not Found', + 405: 'Method Not Allowed', + 406: 'Not Acceptable', + 407: 'Proxy Authentication Required', + 408: 'Request Timeout', + 409: 'Conflict', + 410: 'Gone', + 411: 'Length Required', + 412: 'Precondition Failed', + 413: 'Request Entity Too Large', + 414: 'Request-URI Too 
Long', + 415: 'Unsupported Media Type', + 416: 'Requested Range Not Satisfiable', + 417: 'Expectation Failed', + 428: 'Precondition Required', + 429: 'Too Many Requests', + 431: 'Request Header Fields Too Large', + + 500: 'Internal Server Error', + 501: 'Not Implemented', + 502: 'Bad Gateway', + 503: 'Service Unavailable', + 504: 'Gateway Timeout', + 505: 'HTTP Version Not Supported', + 511: 'Network Authentication Required', +} + +# maximal amount of data to read at one time in _safe_read +MAXAMOUNT = 1048576 + +# maximal line length when calling readline(). +_MAXLINE = 65536 +_MAXHEADERS = 100 + + +class HTTPMessage(email_message.Message): + # XXX The only usage of this method is in + # http.server.CGIHTTPRequestHandler. Maybe move the code there so + # that it doesn't need to be part of the public API. The API has + # never been defined so this could cause backwards compatibility + # issues. + + def getallmatchingheaders(self, name): + """Find all header lines matching a given header name. + + Look through the list of headers and find all lines matching a given + header name (and their continuation lines). A list of the lines is + returned, without interpretation. If the header does not occur, an + empty list is returned. If the header occurs multiple times, all + occurrences are returned. Case is not important in the header name. + + """ + name = name.lower() + ':' + n = len(name) + lst = [] + hit = 0 + for line in self.keys(): + if line[:n].lower() == name: + hit = 1 + elif not line[:1].isspace(): + hit = 0 + if hit: + lst.append(line) + return lst + +def parse_headers(fp, _class=HTTPMessage): + """Parses only RFC2822 headers from a file pointer. + + email Parser wants to see strings rather than bytes. + But a TextIOWrapper around self.rfile would buffer too many bytes + from the stream, bytes which we later need to read as bytes. + So we read the correct bytes here, as bytes, for email Parser + to parse. + + """ + headers = [] + while True: + line = fp.readline(_MAXLINE + 1) + if len(line) > _MAXLINE: + raise LineTooLong("header line") + headers.append(line) + if len(headers) > _MAXHEADERS: + raise HTTPException("got more than %d headers" % _MAXHEADERS) + if line in (b'\r\n', b'\n', b''): + break + hstring = bytes(b'').join(headers).decode('iso-8859-1') + return email_parser.Parser(_class=_class).parsestr(hstring) + + +_strict_sentinel = object() + +class HTTPResponse(io.RawIOBase): + + # See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details. + + # The bytes from the socket object are iso-8859-1 strings. + # See RFC 2616 sec 2.2 which notes an exception for MIME-encoded + # text following RFC 2047. The basic status line parsing only + # accepts iso-8859-1. + + def __init__(self, sock, debuglevel=0, strict=_strict_sentinel, method=None, url=None): + # If the response includes a content-length header, we need to + # make sure that the client doesn't read more than the + # specified number of bytes. If it does, it will block until + # the server times out and closes the connection. This will + # happen if a self.fp.read() is done (without a size) whether + # self.fp is buffered or not. So, no self.fp.read() by + # clients unless they know what they are doing. + self.fp = sock.makefile("rb") + self.debuglevel = debuglevel + if strict is not _strict_sentinel: + warnings.warn("the 'strict' argument isn't supported anymore; " + "http.client now always assumes HTTP/1.x compliant servers.", + DeprecationWarning, 2) + self._method = method + + # The HTTPResponse object is returned via urllib. 
The clients + # of http and urllib expect different attributes for the + # headers. headers is used here and supports urllib. msg is + # provided as a backwards compatibility layer for http + # clients. + + self.headers = self.msg = None + + # from the Status-Line of the response + self.version = _UNKNOWN # HTTP-Version + self.status = _UNKNOWN # Status-Code + self.reason = _UNKNOWN # Reason-Phrase + + self.chunked = _UNKNOWN # is "chunked" being used? + self.chunk_left = _UNKNOWN # bytes left to read in current chunk + self.length = _UNKNOWN # number of bytes left in response + self.will_close = _UNKNOWN # conn will close at end of response + + def _read_status(self): + line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1") + if len(line) > _MAXLINE: + raise LineTooLong("status line") + if self.debuglevel > 0: + print("reply:", repr(line)) + if not line: + # Presumably, the server closed the connection before + # sending a valid response. + raise BadStatusLine(line) + try: + version, status, reason = line.split(None, 2) + except ValueError: + try: + version, status = line.split(None, 1) + reason = "" + except ValueError: + # empty version will cause next test to fail. + version = "" + if not version.startswith("HTTP/"): + self._close_conn() + raise BadStatusLine(line) + + # The status code is a three-digit number + try: + status = int(status) + if status < 100 or status > 999: + raise BadStatusLine(line) + except ValueError: + raise BadStatusLine(line) + return version, status, reason + + def begin(self): + if self.headers is not None: + # we've already started reading the response + return + + # read until we get a non-100 response + while True: + version, status, reason = self._read_status() + if status != CONTINUE: + break + # skip the header from the 100 response + while True: + skip = self.fp.readline(_MAXLINE + 1) + if len(skip) > _MAXLINE: + raise LineTooLong("header line") + skip = skip.strip() + if not skip: + break + if self.debuglevel > 0: + print("header:", skip) + + self.code = self.status = status + self.reason = reason.strip() + if version in ("HTTP/1.0", "HTTP/0.9"): + # Some servers might still return "0.9", treat it as 1.0 anyway + self.version = 10 + elif version.startswith("HTTP/1."): + self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1 + else: + raise UnknownProtocol(version) + + self.headers = self.msg = parse_headers(self.fp) + + if self.debuglevel > 0: + for hdr in self.headers: + print("header:", hdr, end=" ") + + # are we using the chunked-style of transfer encoding? + tr_enc = self.headers.get("transfer-encoding") + if tr_enc and tr_enc.lower() == "chunked": + self.chunked = True + self.chunk_left = None + else: + self.chunked = False + + # will the connection close at the end of the response? + self.will_close = self._check_close() + + # do we have a Content-Length? + # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked" + self.length = None + length = self.headers.get("content-length") + + # are we using the chunked-style of transfer encoding? + tr_enc = self.headers.get("transfer-encoding") + if length and not self.chunked: + try: + self.length = int(length) + except ValueError: + self.length = None + else: + if self.length < 0: # ignore nonsensical negative lengths + self.length = None + else: + self.length = None + + # does the body have a fixed length? 
(of zero) + if (status == NO_CONTENT or status == NOT_MODIFIED or + 100 <= status < 200 or # 1xx codes + self._method == "HEAD"): + self.length = 0 + + # if the connection remains open, and we aren't using chunked, and + # a content-length was not provided, then assume that the connection + # WILL close. + if (not self.will_close and + not self.chunked and + self.length is None): + self.will_close = True + + def _check_close(self): + conn = self.headers.get("connection") + if self.version == 11: + # An HTTP/1.1 proxy is assumed to stay open unless + # explicitly closed. + conn = self.headers.get("connection") + if conn and "close" in conn.lower(): + return True + return False + + # Some HTTP/1.0 implementations have support for persistent + # connections, using rules different than HTTP/1.1. + + # For older HTTP, Keep-Alive indicates persistent connection. + if self.headers.get("keep-alive"): + return False + + # At least Akamai returns a "Connection: Keep-Alive" header, + # which was supposed to be sent by the client. + if conn and "keep-alive" in conn.lower(): + return False + + # Proxy-Connection is a netscape hack. + pconn = self.headers.get("proxy-connection") + if pconn and "keep-alive" in pconn.lower(): + return False + + # otherwise, assume it will close + return True + + def _close_conn(self): + fp = self.fp + self.fp = None + fp.close() + + def close(self): + super().close() # set "closed" flag + if self.fp: + self._close_conn() + + # These implementations are for the benefit of io.BufferedReader. + + # XXX This class should probably be revised to act more like + # the "raw stream" that BufferedReader expects. + + def flush(self): + super().flush() + if self.fp: + self.fp.flush() + + def readable(self): + return True + + # End of "raw stream" methods + + def isclosed(self): + """True if the connection is closed.""" + # NOTE: it is possible that we will not ever call self.close(). This + # case occurs when will_close is TRUE, length is None, and we + # read up to the last byte, but NOT past it. + # + # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be + # called, meaning self.isclosed() is meaningful. 
+ return self.fp is None + + def read(self, amt=None): + if self.fp is None: + return bytes(b"") + + if self._method == "HEAD": + self._close_conn() + return bytes(b"") + + if amt is not None: + # Amount is given, so call base class version + # (which is implemented in terms of self.readinto) + return bytes(super(HTTPResponse, self).read(amt)) + else: + # Amount is not given (unbounded read) so we must check self.length + # and self.chunked + + if self.chunked: + return self._readall_chunked() + + if self.length is None: + s = self.fp.read() + else: + try: + s = self._safe_read(self.length) + except IncompleteRead: + self._close_conn() + raise + self.length = 0 + self._close_conn() # we read everything + return bytes(s) + + def readinto(self, b): + if self.fp is None: + return 0 + + if self._method == "HEAD": + self._close_conn() + return 0 + + if self.chunked: + return self._readinto_chunked(b) + + if self.length is not None: + if len(b) > self.length: + # clip the read to the "end of response" + b = memoryview(b)[0:self.length] + + # we do not use _safe_read() here because this may be a .will_close + # connection, and the user is reading more bytes than will be provided + # (for example, reading in 1k chunks) + + if PY2: + data = self.fp.read(len(b)) + n = len(data) + b[:n] = data + else: + n = self.fp.readinto(b) + + if not n and b: + # Ideally, we would raise IncompleteRead if the content-length + # wasn't satisfied, but it might break compatibility. + self._close_conn() + elif self.length is not None: + self.length -= n + if not self.length: + self._close_conn() + return n + + def _read_next_chunk_size(self): + # Read the next chunk size from the file + line = self.fp.readline(_MAXLINE + 1) + if len(line) > _MAXLINE: + raise LineTooLong("chunk size") + i = line.find(b";") + if i >= 0: + line = line[:i] # strip chunk-extensions + try: + return int(line, 16) + except ValueError: + # close the connection as protocol synchronisation is + # probably lost + self._close_conn() + raise + + def _read_and_discard_trailer(self): + # read and discard trailer up to the CRLF terminator + ### note: we shouldn't have any trailers! 
+ while True: + line = self.fp.readline(_MAXLINE + 1) + if len(line) > _MAXLINE: + raise LineTooLong("trailer line") + if not line: + # a vanishingly small number of sites EOF without + # sending the trailer + break + if line in (b'\r\n', b'\n', b''): + break + + def _readall_chunked(self): + assert self.chunked != _UNKNOWN + chunk_left = self.chunk_left + value = [] + while True: + if chunk_left is None: + try: + chunk_left = self._read_next_chunk_size() + if chunk_left == 0: + break + except ValueError: + raise IncompleteRead(bytes(b'').join(value)) + value.append(self._safe_read(chunk_left)) + + # we read the whole chunk, get another + self._safe_read(2) # toss the CRLF at the end of the chunk + chunk_left = None + + self._read_and_discard_trailer() + + # we read everything; close the "file" + self._close_conn() + + return bytes(b'').join(value) + + def _readinto_chunked(self, b): + assert self.chunked != _UNKNOWN + chunk_left = self.chunk_left + + total_bytes = 0 + mvb = memoryview(b) + while True: + if chunk_left is None: + try: + chunk_left = self._read_next_chunk_size() + if chunk_left == 0: + break + except ValueError: + raise IncompleteRead(bytes(b[0:total_bytes])) + + if len(mvb) < chunk_left: + n = self._safe_readinto(mvb) + self.chunk_left = chunk_left - n + return total_bytes + n + elif len(mvb) == chunk_left: + n = self._safe_readinto(mvb) + self._safe_read(2) # toss the CRLF at the end of the chunk + self.chunk_left = None + return total_bytes + n + else: + temp_mvb = mvb[0:chunk_left] + n = self._safe_readinto(temp_mvb) + mvb = mvb[n:] + total_bytes += n + + # we read the whole chunk, get another + self._safe_read(2) # toss the CRLF at the end of the chunk + chunk_left = None + + self._read_and_discard_trailer() + + # we read everything; close the "file" + self._close_conn() + + return total_bytes + + def _safe_read(self, amt): + """Read the number of bytes requested, compensating for partial reads. + + Normally, we have a blocking socket, but a read() can be interrupted + by a signal (resulting in a partial read). + + Note that we cannot distinguish between EOF and an interrupt when zero + bytes have been read. IncompleteRead() will be raised in this + situation. + + This function should be used when bytes "should" be present for + reading. If the bytes are truly not available (due to EOF), then the + IncompleteRead exception can be used to detect the problem. 
+ """ + s = [] + while amt > 0: + chunk = self.fp.read(min(amt, MAXAMOUNT)) + if not chunk: + raise IncompleteRead(bytes(b'').join(s), amt) + s.append(chunk) + amt -= len(chunk) + return bytes(b"").join(s) + + def _safe_readinto(self, b): + """Same as _safe_read, but for reading into a buffer.""" + total_bytes = 0 + mvb = memoryview(b) + while total_bytes < len(b): + if MAXAMOUNT < len(mvb): + temp_mvb = mvb[0:MAXAMOUNT] + if PY2: + data = self.fp.read(len(temp_mvb)) + n = len(data) + temp_mvb[:n] = data + else: + n = self.fp.readinto(temp_mvb) + else: + if PY2: + data = self.fp.read(len(mvb)) + n = len(data) + mvb[:n] = data + else: + n = self.fp.readinto(mvb) + if not n: + raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b)) + mvb = mvb[n:] + total_bytes += n + return total_bytes + + def fileno(self): + return self.fp.fileno() + + def getheader(self, name, default=None): + if self.headers is None: + raise ResponseNotReady() + headers = self.headers.get_all(name) or default + if isinstance(headers, str) or not hasattr(headers, '__iter__'): + return headers + else: + return ', '.join(headers) + + def getheaders(self): + """Return list of (header, value) tuples.""" + if self.headers is None: + raise ResponseNotReady() + return list(self.headers.items()) + + # We override IOBase.__iter__ so that it doesn't check for closed-ness + + def __iter__(self): + return self + + # For compatibility with old-style urllib responses. + + def info(self): + return self.headers + + def geturl(self): + return self.url + + def getcode(self): + return self.status + +class HTTPConnection(object): + + _http_vsn = 11 + _http_vsn_str = 'HTTP/1.1' + + response_class = HTTPResponse + default_port = HTTP_PORT + auto_open = 1 + debuglevel = 0 + + def __init__(self, host, port=None, strict=_strict_sentinel, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None): + if strict is not _strict_sentinel: + warnings.warn("the 'strict' argument isn't supported anymore; " + "http.client now always assumes HTTP/1.x compliant servers.", + DeprecationWarning, 2) + self.timeout = timeout + self.source_address = source_address + self.sock = None + self._buffer = [] + self.__response = None + self.__state = _CS_IDLE + self._method = None + self._tunnel_host = None + self._tunnel_port = None + self._tunnel_headers = {} + + self._set_hostport(host, port) + + def set_tunnel(self, host, port=None, headers=None): + """ Sets up the host and the port for the HTTP CONNECT Tunnelling. + + The headers argument should be a mapping of extra HTTP headers + to send with the CONNECT request. + """ + self._tunnel_host = host + self._tunnel_port = port + if headers: + self._tunnel_headers = headers + else: + self._tunnel_headers.clear() + + def _set_hostport(self, host, port): + if port is None: + i = host.rfind(':') + j = host.rfind(']') # ipv6 addresses have [...] 
+ if i > j: + try: + port = int(host[i+1:]) + except ValueError: + if host[i+1:] == "": # http://foo.com:/ == http://foo.com/ + port = self.default_port + else: + raise InvalidURL("nonnumeric port: '%s'" % host[i+1:]) + host = host[:i] + else: + port = self.default_port + if host and host[0] == '[' and host[-1] == ']': + host = host[1:-1] + self.host = host + self.port = port + + def set_debuglevel(self, level): + self.debuglevel = level + + def _tunnel(self): + self._set_hostport(self._tunnel_host, self._tunnel_port) + connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (self.host, self.port) + connect_bytes = connect_str.encode("ascii") + self.send(connect_bytes) + for header, value in self._tunnel_headers.items(): + header_str = "%s: %s\r\n" % (header, value) + header_bytes = header_str.encode("latin-1") + self.send(header_bytes) + self.send(bytes(b'\r\n')) + + response = self.response_class(self.sock, method=self._method) + (version, code, message) = response._read_status() + + if code != 200: + self.close() + raise socket.error("Tunnel connection failed: %d %s" % (code, + message.strip())) + while True: + line = response.fp.readline(_MAXLINE + 1) + if len(line) > _MAXLINE: + raise LineTooLong("header line") + if not line: + # for sites which EOF without sending a trailer + break + if line in (b'\r\n', b'\n', b''): + break + + def connect(self): + """Connect to the host and port specified in __init__.""" + self.sock = socket_create_connection((self.host,self.port), + self.timeout, self.source_address) + if self._tunnel_host: + self._tunnel() + + def close(self): + """Close the connection to the HTTP server.""" + if self.sock: + self.sock.close() # close it manually... there may be other refs + self.sock = None + if self.__response: + self.__response.close() + self.__response = None + self.__state = _CS_IDLE + + def send(self, data): + """Send `data' to the server. + ``data`` can be a string object, a bytes object, an array object, a + file-like object that supports a .read() method, or an iterable object. + """ + + if self.sock is None: + if self.auto_open: + self.connect() + else: + raise NotConnected() + + if self.debuglevel > 0: + print("send:", repr(data)) + blocksize = 8192 + # Python 2.7 array objects have a read method which is incompatible + # with the 2-arg calling syntax below. + if hasattr(data, "read") and not isinstance(data, array): + if self.debuglevel > 0: + print("sendIng a read()able") + encode = False + try: + mode = data.mode + except AttributeError: + # io.BytesIO and other file-like objects don't have a `mode` + # attribute. + pass + else: + if "b" not in mode: + encode = True + if self.debuglevel > 0: + print("encoding file using iso-8859-1") + while 1: + datablock = data.read(blocksize) + if not datablock: + break + if encode: + datablock = datablock.encode("iso-8859-1") + self.sock.sendall(datablock) + return + try: + self.sock.sendall(data) + except TypeError: + if isinstance(data, Iterable): + for d in data: + self.sock.sendall(d) + else: + raise TypeError("data should be a bytes-like object " + "or an iterable, got %r" % type(data)) + + def _output(self, s): + """Add a line of output to the current request buffer. + + Assumes that the line does *not* end with \\r\\n. + """ + self._buffer.append(s) + + def _send_output(self, message_body=None): + """Send the currently buffered request and clear the buffer. + + Appends an extra \\r\\n to the buffer. + A message_body may be specified, to be appended to the request. 
+ """ + self._buffer.extend((bytes(b""), bytes(b""))) + msg = bytes(b"\r\n").join(self._buffer) + del self._buffer[:] + # If msg and message_body are sent in a single send() call, + # it will avoid performance problems caused by the interaction + # between delayed ack and the Nagle algorithm. + if isinstance(message_body, bytes): + msg += message_body + message_body = None + self.send(msg) + if message_body is not None: + # message_body was not a string (i.e. it is a file), and + # we must run the risk of Nagle. + self.send(message_body) + + def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0): + """Send a request to the server. + + `method' specifies an HTTP request method, e.g. 'GET'. + `url' specifies the object being requested, e.g. '/index.html'. + `skip_host' if True does not add automatically a 'Host:' header + `skip_accept_encoding' if True does not add automatically an + 'Accept-Encoding:' header + """ + + # if a prior response has been completed, then forget about it. + if self.__response and self.__response.isclosed(): + self.__response = None + + + # in certain cases, we cannot issue another request on this connection. + # this occurs when: + # 1) we are in the process of sending a request. (_CS_REQ_STARTED) + # 2) a response to a previous request has signalled that it is going + # to close the connection upon completion. + # 3) the headers for the previous response have not been read, thus + # we cannot determine whether point (2) is true. (_CS_REQ_SENT) + # + # if there is no prior response, then we can request at will. + # + # if point (2) is true, then we will have passed the socket to the + # response (effectively meaning, "there is no prior response"), and + # will open a new one when a new request is made. + # + # Note: if a prior response exists, then we *can* start a new request. + # We are not allowed to begin fetching the response to this new + # request, however, until that prior response is complete. + # + if self.__state == _CS_IDLE: + self.__state = _CS_REQ_STARTED + else: + raise CannotSendRequest(self.__state) + + # Save the method we use, we need it later in the response phase + self._method = method + if not url: + url = '/' + request = '%s %s %s' % (method, url, self._http_vsn_str) + + # Non-ASCII characters should have been eliminated earlier + self._output(request.encode('ascii')) + + if self._http_vsn == 11: + # Issue some standard headers for better HTTP/1.1 compliance + + if not skip_host: + # this header is issued *only* for HTTP/1.1 + # connections. more specifically, this means it is + # only issued when the client uses the new + # HTTPConnection() class. backwards-compat clients + # will be using HTTP/1.0 and those clients may be + # issuing this header themselves. we should NOT issue + # it twice; some web servers (such as Apache) barf + # when they see two Host: headers + + # If we need a non-standard port,include it in the + # header. If the request is going through a proxy, + # but the host of the actual URL, not the host of the + # proxy. 
+ + netloc = '' + if url.startswith('http'): + nil, netloc, nil, nil, nil = urlsplit(url) + + if netloc: + try: + netloc_enc = netloc.encode("ascii") + except UnicodeEncodeError: + netloc_enc = netloc.encode("idna") + self.putheader('Host', netloc_enc) + else: + try: + host_enc = self.host.encode("ascii") + except UnicodeEncodeError: + host_enc = self.host.encode("idna") + + # As per RFC 273, IPv6 address should be wrapped with [] + # when used as Host header + + if self.host.find(':') >= 0: + host_enc = bytes(b'[' + host_enc + b']') + + if self.port == self.default_port: + self.putheader('Host', host_enc) + else: + host_enc = host_enc.decode("ascii") + self.putheader('Host', "%s:%s" % (host_enc, self.port)) + + # note: we are assuming that clients will not attempt to set these + # headers since *this* library must deal with the + # consequences. this also means that when the supporting + # libraries are updated to recognize other forms, then this + # code should be changed (removed or updated). + + # we only want a Content-Encoding of "identity" since we don't + # support encodings such as x-gzip or x-deflate. + if not skip_accept_encoding: + self.putheader('Accept-Encoding', 'identity') + + # we can accept "chunked" Transfer-Encodings, but no others + # NOTE: no TE header implies *only* "chunked" + #self.putheader('TE', 'chunked') + + # if TE is supplied in the header, then it must appear in a + # Connection header. + #self.putheader('Connection', 'TE') + + else: + # For HTTP/1.0, the server will assume "not chunked" + pass + + def putheader(self, header, *values): + """Send a request header line to the server. + + For example: h.putheader('Accept', 'text/html') + """ + if self.__state != _CS_REQ_STARTED: + raise CannotSendHeader() + + if hasattr(header, 'encode'): + header = header.encode('ascii') + values = list(values) + for i, one_value in enumerate(values): + if hasattr(one_value, 'encode'): + values[i] = one_value.encode('latin-1') + elif isinstance(one_value, int): + values[i] = str(one_value).encode('ascii') + value = bytes(b'\r\n\t').join(values) + header = header + bytes(b': ') + value + self._output(header) + + def endheaders(self, message_body=None): + """Indicate that the last header line has been sent to the server. + + This method sends the request to the server. The optional message_body + argument can be used to pass a message body associated with the + request. The message body will be sent in the same packet as the + message headers if it is a string, otherwise it is sent as a separate + packet. + """ + if self.__state == _CS_REQ_STARTED: + self.__state = _CS_REQ_SENT + else: + raise CannotSendHeader() + self._send_output(message_body) + + def request(self, method, url, body=None, headers={}): + """Send a complete request to the server.""" + self._send_request(method, url, body, headers) + + def _set_content_length(self, body): + # Set the content-length based on the body. + thelen = None + try: + thelen = str(len(body)) + except TypeError as te: + # If this is a file-like object, try to + # fstat its file descriptor + try: + thelen = str(os.fstat(body.fileno()).st_size) + except (AttributeError, OSError): + # Don't send a length if this failed + if self.debuglevel > 0: print("Cannot stat!!") + + if thelen is not None: + self.putheader('Content-Length', thelen) + + def _send_request(self, method, url, body, headers): + # Honor explicitly requested Host: and Accept-Encoding: headers. 
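The same request can also be driven with the lower-level calls that request() wraps; the sketch below is illustrative only, uses placeholder host and path, and annotates the connection-state transitions enforced above.

# Illustrative only: equivalent to conn.request("POST", "/submit", body, headers).
from future.backports.http.client import HTTPConnection

conn = HTTPConnection("www.example.com")            # placeholder host
body = b"key=value"
conn.putrequest("POST", "/submit")                  # _CS_IDLE -> _CS_REQ_STARTED
conn.putheader("Content-Type", "application/x-www-form-urlencoded")
conn.putheader("Content-Length", str(len(body)))
conn.endheaders(body)                               # _CS_REQ_STARTED -> _CS_REQ_SENT
print(conn.getresponse().status)                    # getresponse() returns the connection to _CS_IDLE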
+ header_names = dict.fromkeys([k.lower() for k in headers]) + skips = {} + if 'host' in header_names: + skips['skip_host'] = 1 + if 'accept-encoding' in header_names: + skips['skip_accept_encoding'] = 1 + + self.putrequest(method, url, **skips) + + if body is not None and ('content-length' not in header_names): + self._set_content_length(body) + for hdr, value in headers.items(): + self.putheader(hdr, value) + if isinstance(body, str): + # RFC 2616 Section 3.7.1 says that text default has a + # default charset of iso-8859-1. + body = body.encode('iso-8859-1') + self.endheaders(body) + + def getresponse(self): + """Get the response from the server. + + If the HTTPConnection is in the correct state, returns an + instance of HTTPResponse or of whatever object is returned by + class the response_class variable. + + If a request has not been sent or if a previous response has + not be handled, ResponseNotReady is raised. If the HTTP + response indicates that the connection should be closed, then + it will be closed before the response is returned. When the + connection is closed, the underlying socket is closed. + """ + + # if a prior response has been completed, then forget about it. + if self.__response and self.__response.isclosed(): + self.__response = None + + # if a prior response exists, then it must be completed (otherwise, we + # cannot read this response's header to determine the connection-close + # behavior) + # + # note: if a prior response existed, but was connection-close, then the + # socket and response were made independent of this HTTPConnection + # object since a new request requires that we open a whole new + # connection + # + # this means the prior response had one of two states: + # 1) will_close: this connection was reset and the prior socket and + # response operate independently + # 2) persistent: the response was retained and we await its + # isclosed() status to become true. + # + if self.__state != _CS_REQ_SENT or self.__response: + raise ResponseNotReady(self.__state) + + if self.debuglevel > 0: + response = self.response_class(self.sock, self.debuglevel, + method=self._method) + else: + response = self.response_class(self.sock, method=self._method) + + response.begin() + assert response.will_close != _UNKNOWN + self.__state = _CS_IDLE + + if response.will_close: + # this effectively passes the connection to the response + self.close() + else: + # remember this, so we can tell when it is complete + self.__response = response + + return response + +try: + import ssl + from ssl import SSLContext +except ImportError: + pass +else: + class HTTPSConnection(HTTPConnection): + "This class allows communication via SSL." + + default_port = HTTPS_PORT + + # XXX Should key_file and cert_file be deprecated in favour of context? 
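Because the HTTPSConnection constructor that follows accepts an ssl.SSLContext through the context keyword and a check_hostname flag, the following sketch shows one way to use it; it is illustrative only, assumes the standard ssl module is available (Python 3.4+ for load_default_certs), and uses a placeholder host.

# Illustrative only: verified HTTPS with an explicit SSL context.
import ssl
from future.backports.http.client import HTTPSConnection

context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED
context.load_default_certs()                        # system CA store; Python 3.4+

conn = HTTPSConnection("www.example.com", context=context, check_hostname=True)
conn.request("GET", "/")
print(conn.getresponse().status)
conn.close()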
+ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=_strict_sentinel, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + source_address=None, **_3to2kwargs): + if 'check_hostname' in _3to2kwargs: check_hostname = _3to2kwargs['check_hostname']; del _3to2kwargs['check_hostname'] + else: check_hostname = None + if 'context' in _3to2kwargs: context = _3to2kwargs['context']; del _3to2kwargs['context'] + else: context = None + super(HTTPSConnection, self).__init__(host, port, strict, timeout, + source_address) + self.key_file = key_file + self.cert_file = cert_file + if context is None: + # Some reasonable defaults + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.options |= ssl.OP_NO_SSLv2 + will_verify = context.verify_mode != ssl.CERT_NONE + if check_hostname is None: + check_hostname = will_verify + elif check_hostname and not will_verify: + raise ValueError("check_hostname needs a SSL context with " + "either CERT_OPTIONAL or CERT_REQUIRED") + if key_file or cert_file: + context.load_cert_chain(cert_file, key_file) + self._context = context + self._check_hostname = check_hostname + + def connect(self): + "Connect to a host on a given (SSL) port." + + sock = socket_create_connection((self.host, self.port), + self.timeout, self.source_address) + + if self._tunnel_host: + self.sock = sock + self._tunnel() + + server_hostname = self.host if ssl.HAS_SNI else None + self.sock = self._context.wrap_socket(sock, + server_hostname=server_hostname) + try: + if self._check_hostname: + ssl.match_hostname(self.sock.getpeercert(), self.host) + except Exception: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + __all__.append("HTTPSConnection") + + + # ###################################### + # # We use the old HTTPSConnection class from Py2.7, because ssl.SSLContext + # # doesn't exist in the Py2.7 stdlib + # class HTTPSConnection(HTTPConnection): + # "This class allows communication via SSL." + + # default_port = HTTPS_PORT + + # def __init__(self, host, port=None, key_file=None, cert_file=None, + # strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + # source_address=None): + # HTTPConnection.__init__(self, host, port, strict, timeout, + # source_address) + # self.key_file = key_file + # self.cert_file = cert_file + + # def connect(self): + # "Connect to a host on a given (SSL) port." + + # sock = socket_create_connection((self.host, self.port), + # self.timeout, self.source_address) + # if self._tunnel_host: + # self.sock = sock + # self._tunnel() + # self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file) + + # __all__.append("HTTPSConnection") + # ###################################### + + +class HTTPException(Exception): + # Subclasses that define an __init__ must call Exception.__init__ + # or define self.args. Otherwise, str() will fail. 
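Client code usually catches these exceptions around getresponse(); the sketch below is illustrative only (host and path are placeholders) and shows a typical handling pattern for the hierarchy declared here.

# Illustrative only: typical handling of the client exception hierarchy.
from future.backports.http.client import (HTTPConnection, HTTPException,
                                           BadStatusLine, IncompleteRead)

conn = HTTPConnection("www.example.com")
try:
    conn.request("GET", "/")
    data = conn.getresponse().read()
except IncompleteRead as err:
    data = err.partial        # bytes received before the connection dropped
except BadStatusLine:
    data = b""                # the server closed the connection or did not speak HTTP
except HTTPException as err:
    raise RuntimeError("request failed: %r" % err)
finally:
    conn.close()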
+ pass + +class NotConnected(HTTPException): + pass + +class InvalidURL(HTTPException): + pass + +class UnknownProtocol(HTTPException): + def __init__(self, version): + self.args = version, + self.version = version + +class UnknownTransferEncoding(HTTPException): + pass + +class UnimplementedFileMode(HTTPException): + pass + +class IncompleteRead(HTTPException): + def __init__(self, partial, expected=None): + self.args = partial, + self.partial = partial + self.expected = expected + def __repr__(self): + if self.expected is not None: + e = ', %i more expected' % self.expected + else: + e = '' + return 'IncompleteRead(%i bytes read%s)' % (len(self.partial), e) + def __str__(self): + return repr(self) + +class ImproperConnectionState(HTTPException): + pass + +class CannotSendRequest(ImproperConnectionState): + pass + +class CannotSendHeader(ImproperConnectionState): + pass + +class ResponseNotReady(ImproperConnectionState): + pass + +class BadStatusLine(HTTPException): + def __init__(self, line): + if not line: + line = repr(line) + self.args = line, + self.line = line + +class LineTooLong(HTTPException): + def __init__(self, line_type): + HTTPException.__init__(self, "got more than %d bytes when reading %s" + % (_MAXLINE, line_type)) + +# for backwards compatibility +error = HTTPException diff --git a/minor_project/lib/python3.6/site-packages/future/backports/http/cookiejar.py b/minor_project/lib/python3.6/site-packages/future/backports/http/cookiejar.py new file mode 100644 index 0000000..af3ef41 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/http/cookiejar.py @@ -0,0 +1,2110 @@ +r"""HTTP cookie handling for web clients. + +This is a backport of the Py3.3 ``http.cookiejar`` module for +python-future. + +This module has (now fairly distant) origins in Gisle Aas' Perl module +HTTP::Cookies, from the libwww-perl library. + +Docstrings, comments and debug strings in this code refer to the +attributes of the HTTP cookie system as cookie-attributes, to distinguish +them clearly from Python attributes. 
+ +Class diagram (note that BSDDBCookieJar and the MSIE* classes are not +distributed with the Python standard library, but are available from +http://wwwsearch.sf.net/): + + CookieJar____ + / \ \ + FileCookieJar \ \ + / | \ \ \ + MozillaCookieJar | LWPCookieJar \ \ + | | \ + | ---MSIEBase | \ + | / | | \ + | / MSIEDBCookieJar BSDDBCookieJar + |/ + MSIECookieJar + +""" + +from __future__ import unicode_literals +from __future__ import print_function +from __future__ import division +from __future__ import absolute_import +from future.builtins import filter, int, map, open, str +from future.utils import as_native_str, PY2 + +__all__ = ['Cookie', 'CookieJar', 'CookiePolicy', 'DefaultCookiePolicy', + 'FileCookieJar', 'LWPCookieJar', 'LoadError', 'MozillaCookieJar'] + +import copy +import datetime +import re +if PY2: + re.ASCII = 0 +import time +from future.backports.urllib.parse import urlparse, urlsplit, quote +from future.backports.http.client import HTTP_PORT +try: + import threading as _threading +except ImportError: + import dummy_threading as _threading +from calendar import timegm + +debug = False # set to True to enable debugging via the logging module +logger = None + +def _debug(*args): + if not debug: + return + global logger + if not logger: + import logging + logger = logging.getLogger("http.cookiejar") + return logger.debug(*args) + + +DEFAULT_HTTP_PORT = str(HTTP_PORT) +MISSING_FILENAME_TEXT = ("a filename was not supplied (nor was the CookieJar " + "instance initialised with one)") + +def _warn_unhandled_exception(): + # There are a few catch-all except: statements in this module, for + # catching input that's bad in unexpected ways. Warn if any + # exceptions are caught there. + import io, warnings, traceback + f = io.StringIO() + traceback.print_exc(None, f) + msg = f.getvalue() + warnings.warn("http.cookiejar bug!\n%s" % msg, stacklevel=2) + + +# Date/time conversion +# ----------------------------------------------------------------------------- + +EPOCH_YEAR = 1970 +def _timegm(tt): + year, month, mday, hour, min, sec = tt[:6] + if ((year >= EPOCH_YEAR) and (1 <= month <= 12) and (1 <= mday <= 31) and + (0 <= hour <= 24) and (0 <= min <= 59) and (0 <= sec <= 61)): + return timegm(tt) + else: + return None + +DAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] +MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", + "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] +MONTHS_LOWER = [] +for month in MONTHS: MONTHS_LOWER.append(month.lower()) + +def time2isoz(t=None): + """Return a string representing time in seconds since epoch, t. + + If the function is called without an argument, it will use the current + time. + + The format of the returned string is like "YYYY-MM-DD hh:mm:ssZ", + representing Universal Time (UTC, aka GMT). An example of this format is: + + 1994-11-24 08:49:37Z + + """ + if t is None: + dt = datetime.datetime.utcnow() + else: + dt = datetime.datetime.utcfromtimestamp(t) + return "%04d-%02d-%02d %02d:%02d:%02dZ" % ( + dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) + +def time2netscape(t=None): + """Return a string representing time in seconds since epoch, t. + + If the function is called without an argument, it will use the current + time. 
+ + The format of the returned string is like this: + + Wed, DD-Mon-YYYY HH:MM:SS GMT + + """ + if t is None: + dt = datetime.datetime.utcnow() + else: + dt = datetime.datetime.utcfromtimestamp(t) + return "%s %02d-%s-%04d %02d:%02d:%02d GMT" % ( + DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1], + dt.year, dt.hour, dt.minute, dt.second) + + +UTC_ZONES = {"GMT": None, "UTC": None, "UT": None, "Z": None} + +TIMEZONE_RE = re.compile(r"^([-+])?(\d\d?):?(\d\d)?$", re.ASCII) +def offset_from_tz_string(tz): + offset = None + if tz in UTC_ZONES: + offset = 0 + else: + m = TIMEZONE_RE.search(tz) + if m: + offset = 3600 * int(m.group(2)) + if m.group(3): + offset = offset + 60 * int(m.group(3)) + if m.group(1) == '-': + offset = -offset + return offset + +def _str2time(day, mon, yr, hr, min, sec, tz): + # translate month name to number + # month numbers start with 1 (January) + try: + mon = MONTHS_LOWER.index(mon.lower())+1 + except ValueError: + # maybe it's already a number + try: + imon = int(mon) + except ValueError: + return None + if 1 <= imon <= 12: + mon = imon + else: + return None + + # make sure clock elements are defined + if hr is None: hr = 0 + if min is None: min = 0 + if sec is None: sec = 0 + + yr = int(yr) + day = int(day) + hr = int(hr) + min = int(min) + sec = int(sec) + + if yr < 1000: + # find "obvious" year + cur_yr = time.localtime(time.time())[0] + m = cur_yr % 100 + tmp = yr + yr = yr + cur_yr - m + m = m - tmp + if abs(m) > 50: + if m > 0: yr = yr + 100 + else: yr = yr - 100 + + # convert UTC time tuple to seconds since epoch (not timezone-adjusted) + t = _timegm((yr, mon, day, hr, min, sec, tz)) + + if t is not None: + # adjust time using timezone string, to get absolute time since epoch + if tz is None: + tz = "UTC" + tz = tz.upper() + offset = offset_from_tz_string(tz) + if offset is None: + return None + t = t - offset + + return t + +STRICT_DATE_RE = re.compile( + r"^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) " + "(\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$", re.ASCII) +WEEKDAY_RE = re.compile( + r"^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*", re.I | re.ASCII) +LOOSE_HTTP_DATE_RE = re.compile( + r"""^ + (\d\d?) # day + (?:\s+|[-\/]) + (\w+) # month + (?:\s+|[-\/]) + (\d+) # year + (?: + (?:\s+|:) # separator before clock + (\d\d?):(\d\d) # hour:min + (?::(\d\d))? # optional seconds + )? # optional clock + \s* + ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone + \s* + (?:\(\w+\))? # ASCII representation of timezone in parens. + \s*$""", re.X | re.ASCII) +def http2time(text): + """Returns time in seconds since epoch of time represented by a string. + + Return value is an integer. + + None is returned if the format of str is unrecognized, the time is outside + the representable range, or the timezone string is not recognized. If the + string contains no timezone, UTC is assumed. + + The timezone in the string may be numerical (like "-0800" or "+0100") or a + string timezone (like "UTC", "GMT", "BST" or "EST"). Currently, only the + timezone strings equivalent to UTC (zero offset) are known to the function. + + The function loosely parses the following formats: + + Wed, 09 Feb 1994 22:23:32 GMT -- HTTP format + Tuesday, 08-Feb-94 14:15:29 GMT -- old rfc850 HTTP format + Tuesday, 08-Feb-1994 14:15:29 GMT -- broken rfc850 HTTP format + 09 Feb 1994 22:23:32 GMT -- HTTP format (no weekday) + 08-Feb-94 14:15:29 GMT -- rfc850 format (no weekday) + 08-Feb-1994 14:15:29 GMT -- broken rfc850 format (no weekday) + + The parser ignores leading and trailing whitespace. 
The time may be + absent. + + If the year is given with only 2 digits, the function will select the + century that makes the year closest to the current date. + + """ + # fast exit for strictly conforming string + m = STRICT_DATE_RE.search(text) + if m: + g = m.groups() + mon = MONTHS_LOWER.index(g[1].lower()) + 1 + tt = (int(g[2]), mon, int(g[0]), + int(g[3]), int(g[4]), float(g[5])) + return _timegm(tt) + + # No, we need some messy parsing... + + # clean up + text = text.lstrip() + text = WEEKDAY_RE.sub("", text, 1) # Useless weekday + + # tz is time zone specifier string + day, mon, yr, hr, min, sec, tz = [None]*7 + + # loose regexp parse + m = LOOSE_HTTP_DATE_RE.search(text) + if m is not None: + day, mon, yr, hr, min, sec, tz = m.groups() + else: + return None # bad format + + return _str2time(day, mon, yr, hr, min, sec, tz) + +ISO_DATE_RE = re.compile( + """^ + (\d{4}) # year + [-\/]? + (\d\d?) # numerical month + [-\/]? + (\d\d?) # day + (?: + (?:\s+|[-:Tt]) # separator before clock + (\d\d?):?(\d\d) # hour:min + (?::?(\d\d(?:\.\d*)?))? # optional seconds (and fractional) + )? # optional clock + \s* + ([-+]?\d\d?:?(:?\d\d)? + |Z|z)? # timezone (Z is "zero meridian", i.e. GMT) + \s*$""", re.X | re. ASCII) +def iso2time(text): + """ + As for http2time, but parses the ISO 8601 formats: + + 1994-02-03 14:15:29 -0100 -- ISO 8601 format + 1994-02-03 14:15:29 -- zone is optional + 1994-02-03 -- only date + 1994-02-03T14:15:29 -- Use T as separator + 19940203T141529Z -- ISO 8601 compact format + 19940203 -- only date + + """ + # clean up + text = text.lstrip() + + # tz is time zone specifier string + day, mon, yr, hr, min, sec, tz = [None]*7 + + # loose regexp parse + m = ISO_DATE_RE.search(text) + if m is not None: + # XXX there's an extra bit of the timezone I'm ignoring here: is + # this the right thing to do? + yr, mon, day, hr, min, sec, tz, _ = m.groups() + else: + return None # bad format + + return _str2time(day, mon, yr, hr, min, sec, tz) + + +# Header parsing +# ----------------------------------------------------------------------------- + +def unmatched(match): + """Return unmatched part of re.Match object.""" + start, end = match.span(0) + return match.string[:start]+match.string[end:] + +HEADER_TOKEN_RE = re.compile(r"^\s*([^=\s;,]+)") +HEADER_QUOTED_VALUE_RE = re.compile(r"^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"") +HEADER_VALUE_RE = re.compile(r"^\s*=\s*([^\s;,]*)") +HEADER_ESCAPE_RE = re.compile(r"\\(.)") +def split_header_words(header_values): + r"""Parse header values into a list of lists containing key,value pairs. + + The function knows how to deal with ",", ";" and "=" as well as quoted + values after "=". A list of space separated tokens are parsed as if they + were separated by ";". + + If the header_values passed as argument contains multiple values, then they + are treated as if they were a single value separated by comma ",". + + This means that this function is useful for parsing header fields that + follow this syntax (BNF as from the HTTP/1.1 specification, but we relax + the requirement for tokens). + + headers = #header + header = (token | parameter) *( [";"] (token | parameter)) + + token = 1* + separators = "(" | ")" | "<" | ">" | "@" + | "," | ";" | ":" | "\" | <"> + | "/" | "[" | "]" | "?" 
| "=" + | "{" | "}" | SP | HT + + quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) + qdtext = > + quoted-pair = "\" CHAR + + parameter = attribute "=" value + attribute = token + value = token | quoted-string + + Each header is represented by a list of key/value pairs. The value for a + simple token (not part of a parameter) is None. Syntactically incorrect + headers will not necessarily be parsed as you would want. + + This is easier to describe with some examples: + + >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz']) + [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]] + >>> split_header_words(['text/html; charset="iso-8859-1"']) + [[('text/html', None), ('charset', 'iso-8859-1')]] + >>> split_header_words([r'Basic realm="\"foo\bar\""']) + [[('Basic', None), ('realm', '"foobar"')]] + + """ + assert not isinstance(header_values, str) + result = [] + for text in header_values: + orig_text = text + pairs = [] + while text: + m = HEADER_TOKEN_RE.search(text) + if m: + text = unmatched(m) + name = m.group(1) + m = HEADER_QUOTED_VALUE_RE.search(text) + if m: # quoted value + text = unmatched(m) + value = m.group(1) + value = HEADER_ESCAPE_RE.sub(r"\1", value) + else: + m = HEADER_VALUE_RE.search(text) + if m: # unquoted value + text = unmatched(m) + value = m.group(1) + value = value.rstrip() + else: + # no value, a lone token + value = None + pairs.append((name, value)) + elif text.lstrip().startswith(","): + # concatenated headers, as per RFC 2616 section 4.2 + text = text.lstrip()[1:] + if pairs: result.append(pairs) + pairs = [] + else: + # skip junk + non_junk, nr_junk_chars = re.subn("^[=\s;]*", "", text) + assert nr_junk_chars > 0, ( + "split_header_words bug: '%s', '%s', %s" % + (orig_text, text, pairs)) + text = non_junk + if pairs: result.append(pairs) + return result + +HEADER_JOIN_ESCAPE_RE = re.compile(r"([\"\\])") +def join_header_words(lists): + """Do the inverse (almost) of the conversion done by split_header_words. + + Takes a list of lists of (key, value) pairs and produces a single header + value. Attribute values are quoted if needed. + + >>> join_header_words([[("text/plain", None), ("charset", "iso-8859/1")]]) + 'text/plain; charset="iso-8859/1"' + >>> join_header_words([[("text/plain", None)], [("charset", "iso-8859/1")]]) + 'text/plain, charset="iso-8859/1"' + + """ + headers = [] + for pairs in lists: + attr = [] + for k, v in pairs: + if v is not None: + if not re.search(r"^\w+$", v): + v = HEADER_JOIN_ESCAPE_RE.sub(r"\\\1", v) # escape " and \ + v = '"%s"' % v + k = "%s=%s" % (k, v) + attr.append(k) + if attr: headers.append("; ".join(attr)) + return ", ".join(headers) + +def strip_quotes(text): + if text.startswith('"'): + text = text[1:] + if text.endswith('"'): + text = text[:-1] + return text + +def parse_ns_headers(ns_headers): + """Ad-hoc parser for Netscape protocol cookie-attributes. + + The old Netscape cookie format for Set-Cookie can for instance contain + an unquoted "," in the expires field, so we have to use this ad-hoc + parser instead of split_header_words. + + XXX This may not make the best possible effort to parse all the crap + that Netscape Cookie headers contain. Ronald Tschalar's HTTPClient + parser is probably better, so could do worse than following that if + this ever gives any trouble. + + Currently, this is also used for parsing RFC 2109 cookies. 
+ + """ + known_attrs = ("expires", "domain", "path", "secure", + # RFC 2109 attrs (may turn up in Netscape cookies, too) + "version", "port", "max-age") + + result = [] + for ns_header in ns_headers: + pairs = [] + version_set = False + for ii, param in enumerate(re.split(r";\s*", ns_header)): + param = param.rstrip() + if param == "": continue + if "=" not in param: + k, v = param, None + else: + k, v = re.split(r"\s*=\s*", param, 1) + k = k.lstrip() + if ii != 0: + lc = k.lower() + if lc in known_attrs: + k = lc + if k == "version": + # This is an RFC 2109 cookie. + v = strip_quotes(v) + version_set = True + if k == "expires": + # convert expires date to seconds since epoch + v = http2time(strip_quotes(v)) # None if invalid + pairs.append((k, v)) + + if pairs: + if not version_set: + pairs.append(("version", "0")) + result.append(pairs) + + return result + + +IPV4_RE = re.compile(r"\.\d+$", re.ASCII) +def is_HDN(text): + """Return True if text is a host domain name.""" + # XXX + # This may well be wrong. Which RFC is HDN defined in, if any (for + # the purposes of RFC 2965)? + # For the current implementation, what about IPv6? Remember to look + # at other uses of IPV4_RE also, if change this. + if IPV4_RE.search(text): + return False + if text == "": + return False + if text[0] == "." or text[-1] == ".": + return False + return True + +def domain_match(A, B): + """Return True if domain A domain-matches domain B, according to RFC 2965. + + A and B may be host domain names or IP addresses. + + RFC 2965, section 1: + + Host names can be specified either as an IP address or a HDN string. + Sometimes we compare one host name with another. (Such comparisons SHALL + be case-insensitive.) Host A's name domain-matches host B's if + + * their host name strings string-compare equal; or + + * A is a HDN string and has the form NB, where N is a non-empty + name string, B has the form .B', and B' is a HDN string. (So, + x.y.com domain-matches .Y.com but not Y.com.) + + Note that domain-match is not a commutative operation: a.b.c.com + domain-matches .c.com, but not the reverse. + + """ + # Note that, if A or B are IP addresses, the only relevant part of the + # definition of the domain-match algorithm is the direct string-compare. + A = A.lower() + B = B.lower() + if A == B: + return True + if not is_HDN(A): + return False + i = A.rfind(B) + if i == -1 or i == 0: + # A does not have form NB, or N is the empty string + return False + if not B.startswith("."): + return False + if not is_HDN(B[1:]): + return False + return True + +def liberal_is_HDN(text): + """Return True if text is a sort-of-like a host domain name. + + For accepting/blocking domains. + + """ + if IPV4_RE.search(text): + return False + return True + +def user_domain_match(A, B): + """For blocking/accepting domains. + + A and B may be host domain names or IP addresses. + + """ + A = A.lower() + B = B.lower() + if not (liberal_is_HDN(A) and liberal_is_HDN(B)): + if A == B: + # equal IP addresses + return True + return False + initial_dot = B.startswith(".") + if initial_dot and A.endswith(B): + return True + if not initial_dot and A == B: + return True + return False + +cut_port_re = re.compile(r":\d+$", re.ASCII) +def request_host(request): + """Return request-host, as defined by RFC 2965. + + Variation from RFC: returned value is lowercased, for convenient + comparison. 
+ + """ + url = request.get_full_url() + host = urlparse(url)[1] + if host == "": + host = request.get_header("Host", "") + + # remove port, if present + host = cut_port_re.sub("", host, 1) + return host.lower() + +def eff_request_host(request): + """Return a tuple (request-host, effective request-host name). + + As defined by RFC 2965, except both are lowercased. + + """ + erhn = req_host = request_host(request) + if req_host.find(".") == -1 and not IPV4_RE.search(req_host): + erhn = req_host + ".local" + return req_host, erhn + +def request_path(request): + """Path component of request-URI, as defined by RFC 2965.""" + url = request.get_full_url() + parts = urlsplit(url) + path = escape_path(parts.path) + if not path.startswith("/"): + # fix bad RFC 2396 absoluteURI + path = "/" + path + return path + +def request_port(request): + host = request.host + i = host.find(':') + if i >= 0: + port = host[i+1:] + try: + int(port) + except ValueError: + _debug("nonnumeric port: '%s'", port) + return None + else: + port = DEFAULT_HTTP_PORT + return port + +# Characters in addition to A-Z, a-z, 0-9, '_', '.', and '-' that don't +# need to be escaped to form a valid HTTP URL (RFCs 2396 and 1738). +HTTP_PATH_SAFE = "%/;:@&=+$,!~*'()" +ESCAPED_CHAR_RE = re.compile(r"%([0-9a-fA-F][0-9a-fA-F])") +def uppercase_escaped_char(match): + return "%%%s" % match.group(1).upper() +def escape_path(path): + """Escape any invalid characters in HTTP URL, and uppercase all escapes.""" + # There's no knowing what character encoding was used to create URLs + # containing %-escapes, but since we have to pick one to escape invalid + # path characters, we pick UTF-8, as recommended in the HTML 4.0 + # specification: + # http://www.w3.org/TR/REC-html40/appendix/notes.html#h-B.2.1 + # And here, kind of: draft-fielding-uri-rfc2396bis-03 + # (And in draft IRI specification: draft-duerst-iri-05) + # (And here, for new URI schemes: RFC 2718) + path = quote(path, HTTP_PATH_SAFE) + path = ESCAPED_CHAR_RE.sub(uppercase_escaped_char, path) + return path + +def reach(h): + """Return reach of host h, as defined by RFC 2965, section 1. + + The reach R of a host name H is defined as follows: + + * If + + - H is the host domain name of a host; and, + + - H has the form A.B; and + + - A has no embedded (that is, interior) dots; and + + - B has at least one embedded dot, or B is the string "local". + then the reach of H is .B. + + * Otherwise, the reach of H is H. + + >>> reach("www.acme.com") + '.acme.com' + >>> reach("acme.com") + 'acme.com' + >>> reach("acme.local") + '.local' + + """ + i = h.find(".") + if i >= 0: + #a = h[:i] # this line is only here to show what a is + b = h[i+1:] + i = b.find(".") + if is_HDN(h) and (i >= 0 or b == "local"): + return "."+b + return h + +def is_third_party(request): + """ + + RFC 2965, section 3.3.6: + + An unverifiable transaction is to a third-party host if its request- + host U does not domain-match the reach R of the request-host O in the + origin transaction. + + """ + req_host = request_host(request) + if not domain_match(req_host, reach(request.get_origin_req_host())): + return True + else: + return False + + +class Cookie(object): + """HTTP Cookie. + + This class represents both Netscape and RFC 2965 cookies. + + This is deliberately a very simple class. It just holds attributes. It's + possible to construct Cookie instances that don't comply with the cookie + standards. 
CookieJar.make_cookies is the factory function for Cookie + objects -- it deals with cookie parsing, supplying defaults, and + normalising to the representation used in this class. CookiePolicy is + responsible for checking them to see whether they should be accepted from + and returned to the server. + + Note that the port may be present in the headers, but unspecified ("Port" + rather than"Port=80", for example); if this is the case, port is None. + + """ + + def __init__(self, version, name, value, + port, port_specified, + domain, domain_specified, domain_initial_dot, + path, path_specified, + secure, + expires, + discard, + comment, + comment_url, + rest, + rfc2109=False, + ): + + if version is not None: version = int(version) + if expires is not None: expires = int(expires) + if port is None and port_specified is True: + raise ValueError("if port is None, port_specified must be false") + + self.version = version + self.name = name + self.value = value + self.port = port + self.port_specified = port_specified + # normalise case, as per RFC 2965 section 3.3.3 + self.domain = domain.lower() + self.domain_specified = domain_specified + # Sigh. We need to know whether the domain given in the + # cookie-attribute had an initial dot, in order to follow RFC 2965 + # (as clarified in draft errata). Needed for the returned $Domain + # value. + self.domain_initial_dot = domain_initial_dot + self.path = path + self.path_specified = path_specified + self.secure = secure + self.expires = expires + self.discard = discard + self.comment = comment + self.comment_url = comment_url + self.rfc2109 = rfc2109 + + self._rest = copy.copy(rest) + + def has_nonstandard_attr(self, name): + return name in self._rest + def get_nonstandard_attr(self, name, default=None): + return self._rest.get(name, default) + def set_nonstandard_attr(self, name, value): + self._rest[name] = value + + def is_expired(self, now=None): + if now is None: now = time.time() + if (self.expires is not None) and (self.expires <= now): + return True + return False + + def __str__(self): + if self.port is None: p = "" + else: p = ":"+self.port + limit = self.domain + p + self.path + if self.value is not None: + namevalue = "%s=%s" % (self.name, self.value) + else: + namevalue = self.name + return "" % (namevalue, limit) + + @as_native_str() + def __repr__(self): + args = [] + for name in ("version", "name", "value", + "port", "port_specified", + "domain", "domain_specified", "domain_initial_dot", + "path", "path_specified", + "secure", "expires", "discard", "comment", "comment_url", + ): + attr = getattr(self, name) + ### Python-Future: + # Avoid u'...' prefixes for unicode strings: + if isinstance(attr, str): + attr = str(attr) + ### + args.append(str("%s=%s") % (name, repr(attr))) + args.append("rest=%s" % repr(self._rest)) + args.append("rfc2109=%s" % repr(self.rfc2109)) + return "Cookie(%s)" % ", ".join(args) + + +class CookiePolicy(object): + """Defines which cookies get accepted from and returned to server. + + May also modify cookies, though this is probably a bad idea. + + The subclass DefaultCookiePolicy defines the standard rules for Netscape + and RFC 2965 cookies -- override that if you want a customised policy. + + """ + def set_ok(self, cookie, request): + """Return true if (and only if) cookie should be accepted from server. + + Currently, pre-expired cookies never get this far -- the CookieJar + class deletes such cookies itself. 
+ + """ + raise NotImplementedError() + + def return_ok(self, cookie, request): + """Return true if (and only if) cookie should be returned to server.""" + raise NotImplementedError() + + def domain_return_ok(self, domain, request): + """Return false if cookies should not be returned, given cookie domain. + """ + return True + + def path_return_ok(self, path, request): + """Return false if cookies should not be returned, given cookie path. + """ + return True + + +class DefaultCookiePolicy(CookiePolicy): + """Implements the standard rules for accepting and returning cookies.""" + + DomainStrictNoDots = 1 + DomainStrictNonDomain = 2 + DomainRFC2965Match = 4 + + DomainLiberal = 0 + DomainStrict = DomainStrictNoDots|DomainStrictNonDomain + + def __init__(self, + blocked_domains=None, allowed_domains=None, + netscape=True, rfc2965=False, + rfc2109_as_netscape=None, + hide_cookie2=False, + strict_domain=False, + strict_rfc2965_unverifiable=True, + strict_ns_unverifiable=False, + strict_ns_domain=DomainLiberal, + strict_ns_set_initial_dollar=False, + strict_ns_set_path=False, + ): + """Constructor arguments should be passed as keyword arguments only.""" + self.netscape = netscape + self.rfc2965 = rfc2965 + self.rfc2109_as_netscape = rfc2109_as_netscape + self.hide_cookie2 = hide_cookie2 + self.strict_domain = strict_domain + self.strict_rfc2965_unverifiable = strict_rfc2965_unverifiable + self.strict_ns_unverifiable = strict_ns_unverifiable + self.strict_ns_domain = strict_ns_domain + self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar + self.strict_ns_set_path = strict_ns_set_path + + if blocked_domains is not None: + self._blocked_domains = tuple(blocked_domains) + else: + self._blocked_domains = () + + if allowed_domains is not None: + allowed_domains = tuple(allowed_domains) + self._allowed_domains = allowed_domains + + def blocked_domains(self): + """Return the sequence of blocked domains (as a tuple).""" + return self._blocked_domains + def set_blocked_domains(self, blocked_domains): + """Set the sequence of blocked domains.""" + self._blocked_domains = tuple(blocked_domains) + + def is_blocked(self, domain): + for blocked_domain in self._blocked_domains: + if user_domain_match(domain, blocked_domain): + return True + return False + + def allowed_domains(self): + """Return None, or the sequence of allowed domains (as a tuple).""" + return self._allowed_domains + def set_allowed_domains(self, allowed_domains): + """Set the sequence of allowed domains, or None.""" + if allowed_domains is not None: + allowed_domains = tuple(allowed_domains) + self._allowed_domains = allowed_domains + + def is_not_allowed(self, domain): + if self._allowed_domains is None: + return False + for allowed_domain in self._allowed_domains: + if user_domain_match(domain, allowed_domain): + return False + return True + + def set_ok(self, cookie, request): + """ + If you override .set_ok(), be sure to call this method. If it returns + false, so should your subclass (assuming your subclass wants to be more + strict about which cookies to accept). 
+ + """ + _debug(" - checking cookie %s=%s", cookie.name, cookie.value) + + assert cookie.name is not None + + for n in "version", "verifiability", "name", "path", "domain", "port": + fn_name = "set_ok_"+n + fn = getattr(self, fn_name) + if not fn(cookie, request): + return False + + return True + + def set_ok_version(self, cookie, request): + if cookie.version is None: + # Version is always set to 0 by parse_ns_headers if it's a Netscape + # cookie, so this must be an invalid RFC 2965 cookie. + _debug(" Set-Cookie2 without version attribute (%s=%s)", + cookie.name, cookie.value) + return False + if cookie.version > 0 and not self.rfc2965: + _debug(" RFC 2965 cookies are switched off") + return False + elif cookie.version == 0 and not self.netscape: + _debug(" Netscape cookies are switched off") + return False + return True + + def set_ok_verifiability(self, cookie, request): + if request.unverifiable and is_third_party(request): + if cookie.version > 0 and self.strict_rfc2965_unverifiable: + _debug(" third-party RFC 2965 cookie during " + "unverifiable transaction") + return False + elif cookie.version == 0 and self.strict_ns_unverifiable: + _debug(" third-party Netscape cookie during " + "unverifiable transaction") + return False + return True + + def set_ok_name(self, cookie, request): + # Try and stop servers setting V0 cookies designed to hack other + # servers that know both V0 and V1 protocols. + if (cookie.version == 0 and self.strict_ns_set_initial_dollar and + cookie.name.startswith("$")): + _debug(" illegal name (starts with '$'): '%s'", cookie.name) + return False + return True + + def set_ok_path(self, cookie, request): + if cookie.path_specified: + req_path = request_path(request) + if ((cookie.version > 0 or + (cookie.version == 0 and self.strict_ns_set_path)) and + not req_path.startswith(cookie.path)): + _debug(" path attribute %s is not a prefix of request " + "path %s", cookie.path, req_path) + return False + return True + + def set_ok_domain(self, cookie, request): + if self.is_blocked(cookie.domain): + _debug(" domain %s is in user block-list", cookie.domain) + return False + if self.is_not_allowed(cookie.domain): + _debug(" domain %s is not in user allow-list", cookie.domain) + return False + if cookie.domain_specified: + req_host, erhn = eff_request_host(request) + domain = cookie.domain + if self.strict_domain and (domain.count(".") >= 2): + # XXX This should probably be compared with the Konqueror + # (kcookiejar.cpp) and Mozilla implementations, but it's a + # losing battle. 
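The per-attribute set_ok_* checks above are all switched by DefaultCookiePolicy's constructor flags; a small sketch of configuring a stricter policy (the domain names are placeholders):

    from future.backports.http.cookiejar import CookieJar, DefaultCookiePolicy

    policy = DefaultCookiePolicy(
        blocked_domains=[".ads.example"],                    # placeholder block-list
        strict_ns_domain=DefaultCookiePolicy.DomainStrict,
        rfc2965=True)
    policy.is_blocked("banner.ads.example")                  # -> True, via user_domain_match
    jar = CookieJar(policy)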
+ i = domain.rfind(".") + j = domain.rfind(".", 0, i) + if j == 0: # domain like .foo.bar + tld = domain[i+1:] + sld = domain[j+1:i] + if sld.lower() in ("co", "ac", "com", "edu", "org", "net", + "gov", "mil", "int", "aero", "biz", "cat", "coop", + "info", "jobs", "mobi", "museum", "name", "pro", + "travel", "eu") and len(tld) == 2: + # domain like .co.uk + _debug(" country-code second level domain %s", domain) + return False + if domain.startswith("."): + undotted_domain = domain[1:] + else: + undotted_domain = domain + embedded_dots = (undotted_domain.find(".") >= 0) + if not embedded_dots and domain != ".local": + _debug(" non-local domain %s contains no embedded dot", + domain) + return False + if cookie.version == 0: + if (not erhn.endswith(domain) and + (not erhn.startswith(".") and + not ("."+erhn).endswith(domain))): + _debug(" effective request-host %s (even with added " + "initial dot) does not end with %s", + erhn, domain) + return False + if (cookie.version > 0 or + (self.strict_ns_domain & self.DomainRFC2965Match)): + if not domain_match(erhn, domain): + _debug(" effective request-host %s does not domain-match " + "%s", erhn, domain) + return False + if (cookie.version > 0 or + (self.strict_ns_domain & self.DomainStrictNoDots)): + host_prefix = req_host[:-len(domain)] + if (host_prefix.find(".") >= 0 and + not IPV4_RE.search(req_host)): + _debug(" host prefix %s for domain %s contains a dot", + host_prefix, domain) + return False + return True + + def set_ok_port(self, cookie, request): + if cookie.port_specified: + req_port = request_port(request) + if req_port is None: + req_port = "80" + else: + req_port = str(req_port) + for p in cookie.port.split(","): + try: + int(p) + except ValueError: + _debug(" bad port %s (not numeric)", p) + return False + if p == req_port: + break + else: + _debug(" request port (%s) not found in %s", + req_port, cookie.port) + return False + return True + + def return_ok(self, cookie, request): + """ + If you override .return_ok(), be sure to call this method. If it + returns false, so should your subclass (assuming your subclass wants to + be more strict about which cookies to return). + + """ + # Path has already been checked by .path_return_ok(), and domain + # blocking done by .domain_return_ok(). 
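The domain checks in this policy lean on the domain_match helper defined earlier in this module; its asymmetry is easy to see in isolation:

    from future.backports.http.cookiejar import domain_match

    domain_match("www.example.com", ".example.com")   # -> True
    domain_match("example.com", ".example.com")       # -> False: no non-empty prefix N
    domain_match("www.example.com", "example.com")    # -> False: B lacks the leading dot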
+ _debug(" - checking cookie %s=%s", cookie.name, cookie.value) + + for n in "version", "verifiability", "secure", "expires", "port", "domain": + fn_name = "return_ok_"+n + fn = getattr(self, fn_name) + if not fn(cookie, request): + return False + return True + + def return_ok_version(self, cookie, request): + if cookie.version > 0 and not self.rfc2965: + _debug(" RFC 2965 cookies are switched off") + return False + elif cookie.version == 0 and not self.netscape: + _debug(" Netscape cookies are switched off") + return False + return True + + def return_ok_verifiability(self, cookie, request): + if request.unverifiable and is_third_party(request): + if cookie.version > 0 and self.strict_rfc2965_unverifiable: + _debug(" third-party RFC 2965 cookie during unverifiable " + "transaction") + return False + elif cookie.version == 0 and self.strict_ns_unverifiable: + _debug(" third-party Netscape cookie during unverifiable " + "transaction") + return False + return True + + def return_ok_secure(self, cookie, request): + if cookie.secure and request.type != "https": + _debug(" secure cookie with non-secure request") + return False + return True + + def return_ok_expires(self, cookie, request): + if cookie.is_expired(self._now): + _debug(" cookie expired") + return False + return True + + def return_ok_port(self, cookie, request): + if cookie.port: + req_port = request_port(request) + if req_port is None: + req_port = "80" + for p in cookie.port.split(","): + if p == req_port: + break + else: + _debug(" request port %s does not match cookie port %s", + req_port, cookie.port) + return False + return True + + def return_ok_domain(self, cookie, request): + req_host, erhn = eff_request_host(request) + domain = cookie.domain + + # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't + if (cookie.version == 0 and + (self.strict_ns_domain & self.DomainStrictNonDomain) and + not cookie.domain_specified and domain != erhn): + _debug(" cookie with unspecified domain does not string-compare " + "equal to request domain") + return False + + if cookie.version > 0 and not domain_match(erhn, domain): + _debug(" effective request-host name %s does not domain-match " + "RFC 2965 cookie domain %s", erhn, domain) + return False + if cookie.version == 0 and not ("."+erhn).endswith(domain): + _debug(" request-host %s does not match Netscape cookie domain " + "%s", req_host, domain) + return False + return True + + def domain_return_ok(self, domain, request): + # Liberal check of. This is here as an optimization to avoid + # having to load lots of MSIE cookie files unless necessary. 
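return_ok_expires above delegates to Cookie.is_expired, which compares the expires attribute (seconds since the epoch) against the current time; a sketch constructing a Cookie by hand with arbitrary values:

    import time
    from future.backports.http.cookiejar import Cookie, http2time

    expires = http2time("Wed, 09-Feb-2030 14:15:29 GMT")   # seconds since the epoch
    c = Cookie(0, "sessionid", "abc123",
               None, False,                      # port, port_specified
               "www.example.com", True, False,   # domain, domain_specified, domain_initial_dot
               "/", True,                        # path, path_specified
               False,                            # secure
               expires, False,                   # expires, discard
               None, None, {})                   # comment, comment_url, rest
    c.is_expired(time.time())                    # -> False until the expiry time passes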
+ req_host, erhn = eff_request_host(request) + if not req_host.startswith("."): + req_host = "."+req_host + if not erhn.startswith("."): + erhn = "."+erhn + if not (req_host.endswith(domain) or erhn.endswith(domain)): + #_debug(" request domain %s does not match cookie domain %s", + # req_host, domain) + return False + + if self.is_blocked(domain): + _debug(" domain %s is in user block-list", domain) + return False + if self.is_not_allowed(domain): + _debug(" domain %s is not in user allow-list", domain) + return False + + return True + + def path_return_ok(self, path, request): + _debug("- checking cookie path=%s", path) + req_path = request_path(request) + if not req_path.startswith(path): + _debug(" %s does not path-match %s", req_path, path) + return False + return True + + +def vals_sorted_by_key(adict): + keys = sorted(adict.keys()) + return map(adict.get, keys) + +def deepvalues(mapping): + """Iterates over nested mapping, depth-first, in sorted order by key.""" + values = vals_sorted_by_key(mapping) + for obj in values: + mapping = False + try: + obj.items + except AttributeError: + pass + else: + mapping = True + for subobj in deepvalues(obj): + yield subobj + if not mapping: + yield obj + + +# Used as second parameter to dict.get() method, to distinguish absent +# dict key from one with a None value. +class Absent(object): pass + +class CookieJar(object): + """Collection of HTTP cookies. + + You may not need to know about this class: try + urllib.request.build_opener(HTTPCookieProcessor).open(url). + """ + + non_word_re = re.compile(r"\W") + quote_re = re.compile(r"([\"\\])") + strict_domain_re = re.compile(r"\.?[^.]*") + domain_re = re.compile(r"[^.]*") + dots_re = re.compile(r"^\.+") + + magic_re = re.compile(r"^\#LWP-Cookies-(\d+\.\d+)", re.ASCII) + + def __init__(self, policy=None): + if policy is None: + policy = DefaultCookiePolicy() + self._policy = policy + + self._cookies_lock = _threading.RLock() + self._cookies = {} + + def set_policy(self, policy): + self._policy = policy + + def _cookies_for_domain(self, domain, request): + cookies = [] + if not self._policy.domain_return_ok(domain, request): + return [] + _debug("Checking %s for cookies to return", domain) + cookies_by_path = self._cookies[domain] + for path in cookies_by_path.keys(): + if not self._policy.path_return_ok(path, request): + continue + cookies_by_name = cookies_by_path[path] + for cookie in cookies_by_name.values(): + if not self._policy.return_ok(cookie, request): + _debug(" not returning cookie") + continue + _debug(" it's a match") + cookies.append(cookie) + return cookies + + def _cookies_for_request(self, request): + """Return a list of cookies to be returned to server.""" + cookies = [] + for domain in self._cookies.keys(): + cookies.extend(self._cookies_for_domain(domain, request)) + return cookies + + def _cookie_attrs(self, cookies): + """Return a list of cookie-attributes to be returned to server. + + like ['foo="bar"; $Path="/"', ...] + + The $Version attribute is also added when appropriate (currently only + once per request). + + """ + # add cookies in order of most specific (ie. longest) path first + cookies.sort(key=lambda a: len(a.path), reverse=True) + + version_set = False + + attrs = [] + for cookie in cookies: + # set version of Cookie header + # XXX + # What should it be if multiple matching Set-Cookie headers have + # different versions themselves? + # Answer: there is no answer; was supposed to be settled by + # RFC 2965 errata, but that may never appear... 
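As the CookieJar docstring above suggests, the jar is normally wired into an urllib opener rather than used directly; a sketch assuming the sibling urllib backport in this package provides build_opener and HTTPCookieProcessor (the URL is a placeholder):

    from future.backports.http.cookiejar import CookieJar
    from future.backports.urllib.request import build_opener, HTTPCookieProcessor

    jar = CookieJar()
    opener = build_opener(HTTPCookieProcessor(jar))
    opener.open("http://example.com/")   # Set-Cookie headers from the response land in `jar`
    for cookie in jar:                   # iteration walks domain -> path -> name via deepvalues
        print(cookie.name, cookie.domain, cookie.path)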
+ version = cookie.version + if not version_set: + version_set = True + if version > 0: + attrs.append("$Version=%s" % version) + + # quote cookie value if necessary + # (not for Netscape protocol, which already has any quotes + # intact, due to the poorly-specified Netscape Cookie: syntax) + if ((cookie.value is not None) and + self.non_word_re.search(cookie.value) and version > 0): + value = self.quote_re.sub(r"\\\1", cookie.value) + else: + value = cookie.value + + # add cookie-attributes to be returned in Cookie header + if cookie.value is None: + attrs.append(cookie.name) + else: + attrs.append("%s=%s" % (cookie.name, value)) + if version > 0: + if cookie.path_specified: + attrs.append('$Path="%s"' % cookie.path) + if cookie.domain.startswith("."): + domain = cookie.domain + if (not cookie.domain_initial_dot and + domain.startswith(".")): + domain = domain[1:] + attrs.append('$Domain="%s"' % domain) + if cookie.port is not None: + p = "$Port" + if cookie.port_specified: + p = p + ('="%s"' % cookie.port) + attrs.append(p) + + return attrs + + def add_cookie_header(self, request): + """Add correct Cookie: header to request (urllib.request.Request object). + + The Cookie2 header is also added unless policy.hide_cookie2 is true. + + """ + _debug("add_cookie_header") + self._cookies_lock.acquire() + try: + + self._policy._now = self._now = int(time.time()) + + cookies = self._cookies_for_request(request) + + attrs = self._cookie_attrs(cookies) + if attrs: + if not request.has_header("Cookie"): + request.add_unredirected_header( + "Cookie", "; ".join(attrs)) + + # if necessary, advertise that we know RFC 2965 + if (self._policy.rfc2965 and not self._policy.hide_cookie2 and + not request.has_header("Cookie2")): + for cookie in cookies: + if cookie.version != 1: + request.add_unredirected_header("Cookie2", '$Version="1"') + break + + finally: + self._cookies_lock.release() + + self.clear_expired_cookies() + + def _normalized_cookie_tuples(self, attrs_set): + """Return list of tuples containing normalised cookie information. + + attrs_set is the list of lists of key,value pairs extracted from + the Set-Cookie or Set-Cookie2 headers. + + Tuples are name, value, standard, rest, where name and value are the + cookie name and value, standard is a dictionary containing the standard + cookie-attributes (discard, secure, version, expires or max-age, + domain, path and port) and rest is a dictionary containing the rest of + the cookie-attributes. + + """ + cookie_tuples = [] + + boolean_attrs = "discard", "secure" + value_attrs = ("version", + "expires", "max-age", + "domain", "path", "port", + "comment", "commenturl") + + for cookie_attrs in attrs_set: + name, value = cookie_attrs[0] + + # Build dictionary of standard cookie-attributes (standard) and + # dictionary of other cookie-attributes (rest). + + # Note: expiry time is normalised to seconds since epoch. V0 + # cookies should have the Expires cookie-attribute, and V1 cookies + # should have Max-Age, but since V1 includes RFC 2109 cookies (and + # since V0 cookies may be a mish-mash of Netscape and RFC 2109), we + # accept either (but prefer Max-Age). 
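add_cookie_header above is the outbound half of the jar: it assembles the Cookie header with _cookie_attrs and attaches it to a request object. Called by hand it looks roughly like this, assuming the Request class from the sibling urllib backport (the URL is a placeholder):

    from future.backports.http.cookiejar import CookieJar
    from future.backports.urllib.request import Request

    jar = CookieJar()                           # normally already populated from responses
    req = Request("http://www.example.com/")    # placeholder URL
    jar.add_cookie_header(req)
    print(req.get_header("Cookie"))             # None when no stored cookie matches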
+ max_age_set = False + + bad_cookie = False + + standard = {} + rest = {} + for k, v in cookie_attrs[1:]: + lc = k.lower() + # don't lose case distinction for unknown fields + if lc in value_attrs or lc in boolean_attrs: + k = lc + if k in boolean_attrs and v is None: + # boolean cookie-attribute is present, but has no value + # (like "discard", rather than "port=80") + v = True + if k in standard: + # only first value is significant + continue + if k == "domain": + if v is None: + _debug(" missing value for domain attribute") + bad_cookie = True + break + # RFC 2965 section 3.3.3 + v = v.lower() + if k == "expires": + if max_age_set: + # Prefer max-age to expires (like Mozilla) + continue + if v is None: + _debug(" missing or invalid value for expires " + "attribute: treating as session cookie") + continue + if k == "max-age": + max_age_set = True + try: + v = int(v) + except ValueError: + _debug(" missing or invalid (non-numeric) value for " + "max-age attribute") + bad_cookie = True + break + # convert RFC 2965 Max-Age to seconds since epoch + # XXX Strictly you're supposed to follow RFC 2616 + # age-calculation rules. Remember that zero Max-Age is a + # is a request to discard (old and new) cookie, though. + k = "expires" + v = self._now + v + if (k in value_attrs) or (k in boolean_attrs): + if (v is None and + k not in ("port", "comment", "commenturl")): + _debug(" missing value for %s attribute" % k) + bad_cookie = True + break + standard[k] = v + else: + rest[k] = v + + if bad_cookie: + continue + + cookie_tuples.append((name, value, standard, rest)) + + return cookie_tuples + + def _cookie_from_cookie_tuple(self, tup, request): + # standard is dict of standard cookie-attributes, rest is dict of the + # rest of them + name, value, standard, rest = tup + + domain = standard.get("domain", Absent) + path = standard.get("path", Absent) + port = standard.get("port", Absent) + expires = standard.get("expires", Absent) + + # set the easy defaults + version = standard.get("version", None) + if version is not None: + try: + version = int(version) + except ValueError: + return None # invalid version, ignore cookie + secure = standard.get("secure", False) + # (discard is also set if expires is Absent) + discard = standard.get("discard", False) + comment = standard.get("comment", None) + comment_url = standard.get("commenturl", None) + + # set default path + if path is not Absent and path != "": + path_specified = True + path = escape_path(path) + else: + path_specified = False + path = request_path(request) + i = path.rfind("/") + if i != -1: + if version == 0: + # Netscape spec parts company from reality here + path = path[:i] + else: + path = path[:i+1] + if len(path) == 0: path = "/" + + # set default domain + domain_specified = domain is not Absent + # but first we have to remember whether it starts with a dot + domain_initial_dot = False + if domain_specified: + domain_initial_dot = bool(domain.startswith(".")) + if domain is Absent: + req_host, erhn = eff_request_host(request) + domain = erhn + elif not domain.startswith("."): + domain = "."+domain + + # set default port + port_specified = False + if port is not Absent: + if port is None: + # Port attr present, but has no value: default to request port. + # Cookie should then only be sent back on that port. + port = request_port(request) + else: + port_specified = True + port = re.sub(r"\s+", "", port) + else: + # No port attr present. Cookie can be sent back on any port. 
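The attrs_set normalised above comes from split_header_words (Set-Cookie2) or parse_ns_headers (Set-Cookie); the Netscape parser defined earlier in this module behaves roughly like this:

    from future.backports.http.cookiejar import parse_ns_headers

    parse_ns_headers(['sid=abc123; Path=/; expires=Wed, 09-Feb-2030 14:15:29 GMT; secure'])
    # -> [[('sid', 'abc123'), ('path', '/'),
    #      ('expires', <seconds since the epoch>), ('secure', None),
    #      ('version', '0')]]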
+ port = None + + # set default expires and discard + if expires is Absent: + expires = None + discard = True + elif expires <= self._now: + # Expiry date in past is request to delete cookie. This can't be + # in DefaultCookiePolicy, because can't delete cookies there. + try: + self.clear(domain, path, name) + except KeyError: + pass + _debug("Expiring cookie, domain='%s', path='%s', name='%s'", + domain, path, name) + return None + + return Cookie(version, + name, value, + port, port_specified, + domain, domain_specified, domain_initial_dot, + path, path_specified, + secure, + expires, + discard, + comment, + comment_url, + rest) + + def _cookies_from_attrs_set(self, attrs_set, request): + cookie_tuples = self._normalized_cookie_tuples(attrs_set) + + cookies = [] + for tup in cookie_tuples: + cookie = self._cookie_from_cookie_tuple(tup, request) + if cookie: cookies.append(cookie) + return cookies + + def _process_rfc2109_cookies(self, cookies): + rfc2109_as_ns = getattr(self._policy, 'rfc2109_as_netscape', None) + if rfc2109_as_ns is None: + rfc2109_as_ns = not self._policy.rfc2965 + for cookie in cookies: + if cookie.version == 1: + cookie.rfc2109 = True + if rfc2109_as_ns: + # treat 2109 cookies as Netscape cookies rather than + # as RFC2965 cookies + cookie.version = 0 + + def make_cookies(self, response, request): + """Return sequence of Cookie objects extracted from response object.""" + # get cookie-attributes for RFC 2965 and Netscape protocols + headers = response.info() + rfc2965_hdrs = headers.get_all("Set-Cookie2", []) + ns_hdrs = headers.get_all("Set-Cookie", []) + + rfc2965 = self._policy.rfc2965 + netscape = self._policy.netscape + + if ((not rfc2965_hdrs and not ns_hdrs) or + (not ns_hdrs and not rfc2965) or + (not rfc2965_hdrs and not netscape) or + (not netscape and not rfc2965)): + return [] # no relevant cookie headers: quick exit + + try: + cookies = self._cookies_from_attrs_set( + split_header_words(rfc2965_hdrs), request) + except Exception: + _warn_unhandled_exception() + cookies = [] + + if ns_hdrs and netscape: + try: + # RFC 2109 and Netscape cookies + ns_cookies = self._cookies_from_attrs_set( + parse_ns_headers(ns_hdrs), request) + except Exception: + _warn_unhandled_exception() + ns_cookies = [] + self._process_rfc2109_cookies(ns_cookies) + + # Look for Netscape cookies (from Set-Cookie headers) that match + # corresponding RFC 2965 cookies (from Set-Cookie2 headers). + # For each match, keep the RFC 2965 cookie and ignore the Netscape + # cookie (RFC 2965 section 9.1). Actually, RFC 2109 cookies are + # bundled in with the Netscape cookies for this purpose, which is + # reasonable behaviour. 
+ if rfc2965: + lookup = {} + for cookie in cookies: + lookup[(cookie.domain, cookie.path, cookie.name)] = None + + def no_matching_rfc2965(ns_cookie, lookup=lookup): + key = ns_cookie.domain, ns_cookie.path, ns_cookie.name + return key not in lookup + ns_cookies = filter(no_matching_rfc2965, ns_cookies) + + if ns_cookies: + cookies.extend(ns_cookies) + + return cookies + + def set_cookie_if_ok(self, cookie, request): + """Set a cookie if policy says it's OK to do so.""" + self._cookies_lock.acquire() + try: + self._policy._now = self._now = int(time.time()) + + if self._policy.set_ok(cookie, request): + self.set_cookie(cookie) + + + finally: + self._cookies_lock.release() + + def set_cookie(self, cookie): + """Set a cookie, without checking whether or not it should be set.""" + c = self._cookies + self._cookies_lock.acquire() + try: + if cookie.domain not in c: c[cookie.domain] = {} + c2 = c[cookie.domain] + if cookie.path not in c2: c2[cookie.path] = {} + c3 = c2[cookie.path] + c3[cookie.name] = cookie + finally: + self._cookies_lock.release() + + def extract_cookies(self, response, request): + """Extract cookies from response, where allowable given the request.""" + _debug("extract_cookies: %s", response.info()) + self._cookies_lock.acquire() + try: + self._policy._now = self._now = int(time.time()) + + for cookie in self.make_cookies(response, request): + if self._policy.set_ok(cookie, request): + _debug(" setting cookie: %s", cookie) + self.set_cookie(cookie) + finally: + self._cookies_lock.release() + + def clear(self, domain=None, path=None, name=None): + """Clear some cookies. + + Invoking this method without arguments will clear all cookies. If + given a single argument, only cookies belonging to that domain will be + removed. If given two arguments, cookies belonging to the specified + path within that domain are removed. If given three arguments, then + the cookie with the specified name, path and domain is removed. + + Raises KeyError if no matching cookie exists. + + """ + if name is not None: + if (domain is None) or (path is None): + raise ValueError( + "domain and path must be given to remove a cookie by name") + del self._cookies[domain][path][name] + elif path is not None: + if domain is None: + raise ValueError( + "domain must be given to remove cookies by path") + del self._cookies[domain][path] + elif domain is not None: + del self._cookies[domain] + else: + self._cookies = {} + + def clear_session_cookies(self): + """Discard all session cookies. + + Note that the .save() method won't save session cookies anyway, unless + you ask otherwise by passing a true ignore_discard argument. + + """ + self._cookies_lock.acquire() + try: + for cookie in self: + if cookie.discard: + self.clear(cookie.domain, cookie.path, cookie.name) + finally: + self._cookies_lock.release() + + def clear_expired_cookies(self): + """Discard all expired cookies. + + You probably don't need to call this method: expired cookies are never + sent back to the server (provided you're using DefaultCookiePolicy), + this method is called by CookieJar itself every so often, and the + .save() method won't save expired cookies anyway (unless you ask + otherwise by passing a true ignore_expires argument). 
+ + """ + self._cookies_lock.acquire() + try: + now = time.time() + for cookie in self: + if cookie.is_expired(now): + self.clear(cookie.domain, cookie.path, cookie.name) + finally: + self._cookies_lock.release() + + def __iter__(self): + return deepvalues(self._cookies) + + def __len__(self): + """Return number of contained cookies.""" + i = 0 + for cookie in self: i = i + 1 + return i + + @as_native_str() + def __repr__(self): + r = [] + for cookie in self: r.append(repr(cookie)) + return "<%s[%s]>" % (self.__class__, ", ".join(r)) + + def __str__(self): + r = [] + for cookie in self: r.append(str(cookie)) + return "<%s[%s]>" % (self.__class__, ", ".join(r)) + + +# derives from IOError for backwards-compatibility with Python 2.4.0 +class LoadError(IOError): pass + +class FileCookieJar(CookieJar): + """CookieJar that can be loaded from and saved to a file.""" + + def __init__(self, filename=None, delayload=False, policy=None): + """ + Cookies are NOT loaded from the named file until either the .load() or + .revert() method is called. + + """ + CookieJar.__init__(self, policy) + if filename is not None: + try: + filename+"" + except: + raise ValueError("filename must be string-like") + self.filename = filename + self.delayload = bool(delayload) + + def save(self, filename=None, ignore_discard=False, ignore_expires=False): + """Save cookies to a file.""" + raise NotImplementedError() + + def load(self, filename=None, ignore_discard=False, ignore_expires=False): + """Load cookies from a file.""" + if filename is None: + if self.filename is not None: filename = self.filename + else: raise ValueError(MISSING_FILENAME_TEXT) + + f = open(filename) + try: + self._really_load(f, filename, ignore_discard, ignore_expires) + finally: + f.close() + + def revert(self, filename=None, + ignore_discard=False, ignore_expires=False): + """Clear all cookies and reload cookies from a saved file. + + Raises LoadError (or IOError) if reversion is not successful; the + object's state will not be altered if this happens. + + """ + if filename is None: + if self.filename is not None: filename = self.filename + else: raise ValueError(MISSING_FILENAME_TEXT) + + self._cookies_lock.acquire() + try: + + old_state = copy.deepcopy(self._cookies) + self._cookies = {} + try: + self.load(filename, ignore_discard, ignore_expires) + except (LoadError, IOError): + self._cookies = old_state + raise + + finally: + self._cookies_lock.release() + + +def lwp_cookie_str(cookie): + """Return string representation of Cookie in an the LWP cookie file format. + + Actually, the format is extended a bit -- see module docstring. + + """ + h = [(cookie.name, cookie.value), + ("path", cookie.path), + ("domain", cookie.domain)] + if cookie.port is not None: h.append(("port", cookie.port)) + if cookie.path_specified: h.append(("path_spec", None)) + if cookie.port_specified: h.append(("port_spec", None)) + if cookie.domain_initial_dot: h.append(("domain_dot", None)) + if cookie.secure: h.append(("secure", None)) + if cookie.expires: h.append(("expires", + time2isoz(float(cookie.expires)))) + if cookie.discard: h.append(("discard", None)) + if cookie.comment: h.append(("comment", cookie.comment)) + if cookie.comment_url: h.append(("commenturl", cookie.comment_url)) + + keys = sorted(cookie._rest.keys()) + for k in keys: + h.append((k, str(cookie._rest[k]))) + + h.append(("version", str(cookie.version))) + + return join_header_words([h]) + +class LWPCookieJar(FileCookieJar): + """ + The LWPCookieJar saves a sequence of "Set-Cookie3" lines. 
+ "Set-Cookie3" is the format used by the libwww-perl libary, not known + to be compatible with any browser, but which is easy to read and + doesn't lose information about RFC 2965 cookies. + + Additional methods + + as_lwp_str(ignore_discard=True, ignore_expired=True) + + """ + + def as_lwp_str(self, ignore_discard=True, ignore_expires=True): + """Return cookies as a string of "\\n"-separated "Set-Cookie3" headers. + + ignore_discard and ignore_expires: see docstring for FileCookieJar.save + + """ + now = time.time() + r = [] + for cookie in self: + if not ignore_discard and cookie.discard: + continue + if not ignore_expires and cookie.is_expired(now): + continue + r.append("Set-Cookie3: %s" % lwp_cookie_str(cookie)) + return "\n".join(r+[""]) + + def save(self, filename=None, ignore_discard=False, ignore_expires=False): + if filename is None: + if self.filename is not None: filename = self.filename + else: raise ValueError(MISSING_FILENAME_TEXT) + + f = open(filename, "w") + try: + # There really isn't an LWP Cookies 2.0 format, but this indicates + # that there is extra information in here (domain_dot and + # port_spec) while still being compatible with libwww-perl, I hope. + f.write("#LWP-Cookies-2.0\n") + f.write(self.as_lwp_str(ignore_discard, ignore_expires)) + finally: + f.close() + + def _really_load(self, f, filename, ignore_discard, ignore_expires): + magic = f.readline() + if not self.magic_re.search(magic): + msg = ("%r does not look like a Set-Cookie3 (LWP) format " + "file" % filename) + raise LoadError(msg) + + now = time.time() + + header = "Set-Cookie3:" + boolean_attrs = ("port_spec", "path_spec", "domain_dot", + "secure", "discard") + value_attrs = ("version", + "port", "path", "domain", + "expires", + "comment", "commenturl") + + try: + while 1: + line = f.readline() + if line == "": break + if not line.startswith(header): + continue + line = line[len(header):].strip() + + for data in split_header_words([line]): + name, value = data[0] + standard = {} + rest = {} + for k in boolean_attrs: + standard[k] = False + for k, v in data[1:]: + if k is not None: + lc = k.lower() + else: + lc = None + # don't lose case distinction for unknown fields + if (lc in value_attrs) or (lc in boolean_attrs): + k = lc + if k in boolean_attrs: + if v is None: v = True + standard[k] = v + elif k in value_attrs: + standard[k] = v + else: + rest[k] = v + + h = standard.get + expires = h("expires") + discard = h("discard") + if expires is not None: + expires = iso2time(expires) + if expires is None: + discard = True + domain = h("domain") + domain_specified = domain.startswith(".") + c = Cookie(h("version"), name, value, + h("port"), h("port_spec"), + domain, domain_specified, h("domain_dot"), + h("path"), h("path_spec"), + h("secure"), + expires, + discard, + h("comment"), + h("commenturl"), + rest) + if not ignore_discard and c.discard: + continue + if not ignore_expires and c.is_expired(now): + continue + self.set_cookie(c) + + except IOError: + raise + except Exception: + _warn_unhandled_exception() + raise LoadError("invalid Set-Cookie3 format file %r: %r" % + (filename, line)) + + +class MozillaCookieJar(FileCookieJar): + """ + + WARNING: you may want to backup your browser's cookies file if you use + this class to save cookies. I *think* it works, but there have been + bugs in the past! + + This class differs from CookieJar only in the format it uses to save and + load cookies to and from a file. This class uses the Mozilla/Netscape + `cookies.txt' format. 
lynx uses this file format, too. + + Don't expect cookies saved while the browser is running to be noticed by + the browser (in fact, Mozilla on unix will overwrite your saved cookies if + you change them on disk while it's running; on Windows, you probably can't + save at all while the browser is running). + + Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to + Netscape cookies on saving. + + In particular, the cookie version and port number information is lost, + together with information about whether or not Path, Port and Discard were + specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the + domain as set in the HTTP header started with a dot (yes, I'm aware some + domains in Netscape files start with a dot and some don't -- trust me, you + really don't want to know any more about this). + + Note that though Mozilla and Netscape use the same format, they use + slightly different headers. The class saves cookies using the Netscape + header by default (Mozilla can cope with that). + + """ + magic_re = re.compile("#( Netscape)? HTTP Cookie File") + header = """\ +# Netscape HTTP Cookie File +# http://www.netscape.com/newsref/std/cookie_spec.html +# This is a generated file! Do not edit. + +""" + + def _really_load(self, f, filename, ignore_discard, ignore_expires): + now = time.time() + + magic = f.readline() + if not self.magic_re.search(magic): + f.close() + raise LoadError( + "%r does not look like a Netscape format cookies file" % + filename) + + try: + while 1: + line = f.readline() + if line == "": break + + # last field may be absent, so keep any trailing tab + if line.endswith("\n"): line = line[:-1] + + # skip comments and blank lines XXX what is $ for? + if (line.strip().startswith(("#", "$")) or + line.strip() == ""): + continue + + domain, domain_specified, path, secure, expires, name, value = \ + line.split("\t") + secure = (secure == "TRUE") + domain_specified = (domain_specified == "TRUE") + if name == "": + # cookies.txt regards 'Set-Cookie: foo' as a cookie + # with no name, whereas http.cookiejar regards it as a + # cookie with no value. 
+ name = value + value = None + + initial_dot = domain.startswith(".") + assert domain_specified == initial_dot + + discard = False + if expires == "": + expires = None + discard = True + + # assume path_specified is false + c = Cookie(0, name, value, + None, False, + domain, domain_specified, initial_dot, + path, False, + secure, + expires, + discard, + None, + None, + {}) + if not ignore_discard and c.discard: + continue + if not ignore_expires and c.is_expired(now): + continue + self.set_cookie(c) + + except IOError: + raise + except Exception: + _warn_unhandled_exception() + raise LoadError("invalid Netscape format cookies file %r: %r" % + (filename, line)) + + def save(self, filename=None, ignore_discard=False, ignore_expires=False): + if filename is None: + if self.filename is not None: filename = self.filename + else: raise ValueError(MISSING_FILENAME_TEXT) + + f = open(filename, "w") + try: + f.write(self.header) + now = time.time() + for cookie in self: + if not ignore_discard and cookie.discard: + continue + if not ignore_expires and cookie.is_expired(now): + continue + if cookie.secure: secure = "TRUE" + else: secure = "FALSE" + if cookie.domain.startswith("."): initial_dot = "TRUE" + else: initial_dot = "FALSE" + if cookie.expires is not None: + expires = str(cookie.expires) + else: + expires = "" + if cookie.value is None: + # cookies.txt regards 'Set-Cookie: foo' as a cookie + # with no name, whereas http.cookiejar regards it as a + # cookie with no value. + name = "" + value = cookie.name + else: + name = cookie.name + value = cookie.value + f.write( + "\t".join([cookie.domain, initial_dot, cookie.path, + secure, expires, name, value])+ + "\n") + finally: + f.close() diff --git a/minor_project/lib/python3.6/site-packages/future/backports/http/cookies.py b/minor_project/lib/python3.6/site-packages/future/backports/http/cookies.py new file mode 100644 index 0000000..8bb61e2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/http/cookies.py @@ -0,0 +1,598 @@ +#### +# Copyright 2000 by Timothy O'Malley +# +# All Rights Reserved +# +# Permission to use, copy, modify, and distribute this software +# and its documentation for any purpose and without fee is hereby +# granted, provided that the above copyright notice appear in all +# copies and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Timothy O'Malley not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR +# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +#### +# +# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp +# by Timothy O'Malley +# +# Cookie.py is a Python module for the handling of HTTP +# cookies as a Python dictionary. See RFC 2109 for more +# information on cookies. +# +# The original idea to treat Cookies as a dictionary came from +# Dave Mitchell (davem@magnet.com) in 1995, when he released the +# first version of nscookie.py. 
+# +#### + +r""" +http.cookies module ported to python-future from Py3.3 + +Here's a sample session to show how to use this module. +At the moment, this is the only documentation. + +The Basics +---------- + +Importing is easy... + + >>> from http import cookies + +Most of the time you start by creating a cookie. + + >>> C = cookies.SimpleCookie() + +Once you've created your Cookie, you can add values just as if it were +a dictionary. + + >>> C = cookies.SimpleCookie() + >>> C["fig"] = "newton" + >>> C["sugar"] = "wafer" + >>> C.output() + 'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer' + +Notice that the printable representation of a Cookie is the +appropriate format for a Set-Cookie: header. This is the +default behavior. You can change the header and printed +attributes by using the .output() function + + >>> C = cookies.SimpleCookie() + >>> C["rocky"] = "road" + >>> C["rocky"]["path"] = "/cookie" + >>> print(C.output(header="Cookie:")) + Cookie: rocky=road; Path=/cookie + >>> print(C.output(attrs=[], header="Cookie:")) + Cookie: rocky=road + +The load() method of a Cookie extracts cookies from a string. In a +CGI script, you would use this method to extract the cookies from the +HTTP_COOKIE environment variable. + + >>> C = cookies.SimpleCookie() + >>> C.load("chips=ahoy; vienna=finger") + >>> C.output() + 'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger' + +The load() method is darn-tootin smart about identifying cookies +within a string. Escaped quotation marks, nested semicolons, and other +such trickeries do not confuse it. + + >>> C = cookies.SimpleCookie() + >>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";') + >>> print(C) + Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;" + +Each element of the Cookie also supports all of the RFC 2109 +Cookie attributes. Here's an example which sets the Path +attribute. + + >>> C = cookies.SimpleCookie() + >>> C["oreo"] = "doublestuff" + >>> C["oreo"]["path"] = "/" + >>> print(C) + Set-Cookie: oreo=doublestuff; Path=/ + +Each dictionary element has a 'value' attribute, which gives you +back the value associated with the key. + + >>> C = cookies.SimpleCookie() + >>> C["twix"] = "none for you" + >>> C["twix"].value + 'none for you' + +The SimpleCookie expects that all values should be standard strings. +Just to be sure, SimpleCookie invokes the str() builtin to convert +the value to a string, when the values are set dictionary-style. + + >>> C = cookies.SimpleCookie() + >>> C["number"] = 7 + >>> C["string"] = "seven" + >>> C["number"].value + '7' + >>> C["string"].value + 'seven' + >>> C.output() + 'Set-Cookie: number=7\r\nSet-Cookie: string=seven' + +Finis. +""" +from __future__ import unicode_literals +from __future__ import print_function +from __future__ import division +from __future__ import absolute_import +from future.builtins import chr, dict, int, str +from future.utils import PY2, as_native_str + +# +# Import our required modules +# +import re +if PY2: + re.ASCII = 0 # for py2 compatibility +import string + +__all__ = ["CookieError", "BaseCookie", "SimpleCookie"] + +_nulljoin = ''.join +_semispacejoin = '; '.join +_spacejoin = ' '.join + +# +# Define an exception visible to External modules +# +class CookieError(Exception): + pass + + +# These quoting routines conform to the RFC2109 specification, which in +# turn references the character definitions from RFC2068. They provide +# a two-way quoting algorithm. 
Any non-text character is translated +# into a 4 character sequence: a forward-slash followed by the +# three-digit octal equivalent of the character. Any '\' or '"' is +# quoted with a preceeding '\' slash. +# +# These are taken from RFC2068 and RFC2109. +# _LegalChars is the list of chars which don't require "'s +# _Translator hash-table for fast quoting +# +_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:" +_Translator = { + '\000' : '\\000', '\001' : '\\001', '\002' : '\\002', + '\003' : '\\003', '\004' : '\\004', '\005' : '\\005', + '\006' : '\\006', '\007' : '\\007', '\010' : '\\010', + '\011' : '\\011', '\012' : '\\012', '\013' : '\\013', + '\014' : '\\014', '\015' : '\\015', '\016' : '\\016', + '\017' : '\\017', '\020' : '\\020', '\021' : '\\021', + '\022' : '\\022', '\023' : '\\023', '\024' : '\\024', + '\025' : '\\025', '\026' : '\\026', '\027' : '\\027', + '\030' : '\\030', '\031' : '\\031', '\032' : '\\032', + '\033' : '\\033', '\034' : '\\034', '\035' : '\\035', + '\036' : '\\036', '\037' : '\\037', + + # Because of the way browsers really handle cookies (as opposed + # to what the RFC says) we also encode , and ; + + ',' : '\\054', ';' : '\\073', + + '"' : '\\"', '\\' : '\\\\', + + '\177' : '\\177', '\200' : '\\200', '\201' : '\\201', + '\202' : '\\202', '\203' : '\\203', '\204' : '\\204', + '\205' : '\\205', '\206' : '\\206', '\207' : '\\207', + '\210' : '\\210', '\211' : '\\211', '\212' : '\\212', + '\213' : '\\213', '\214' : '\\214', '\215' : '\\215', + '\216' : '\\216', '\217' : '\\217', '\220' : '\\220', + '\221' : '\\221', '\222' : '\\222', '\223' : '\\223', + '\224' : '\\224', '\225' : '\\225', '\226' : '\\226', + '\227' : '\\227', '\230' : '\\230', '\231' : '\\231', + '\232' : '\\232', '\233' : '\\233', '\234' : '\\234', + '\235' : '\\235', '\236' : '\\236', '\237' : '\\237', + '\240' : '\\240', '\241' : '\\241', '\242' : '\\242', + '\243' : '\\243', '\244' : '\\244', '\245' : '\\245', + '\246' : '\\246', '\247' : '\\247', '\250' : '\\250', + '\251' : '\\251', '\252' : '\\252', '\253' : '\\253', + '\254' : '\\254', '\255' : '\\255', '\256' : '\\256', + '\257' : '\\257', '\260' : '\\260', '\261' : '\\261', + '\262' : '\\262', '\263' : '\\263', '\264' : '\\264', + '\265' : '\\265', '\266' : '\\266', '\267' : '\\267', + '\270' : '\\270', '\271' : '\\271', '\272' : '\\272', + '\273' : '\\273', '\274' : '\\274', '\275' : '\\275', + '\276' : '\\276', '\277' : '\\277', '\300' : '\\300', + '\301' : '\\301', '\302' : '\\302', '\303' : '\\303', + '\304' : '\\304', '\305' : '\\305', '\306' : '\\306', + '\307' : '\\307', '\310' : '\\310', '\311' : '\\311', + '\312' : '\\312', '\313' : '\\313', '\314' : '\\314', + '\315' : '\\315', '\316' : '\\316', '\317' : '\\317', + '\320' : '\\320', '\321' : '\\321', '\322' : '\\322', + '\323' : '\\323', '\324' : '\\324', '\325' : '\\325', + '\326' : '\\326', '\327' : '\\327', '\330' : '\\330', + '\331' : '\\331', '\332' : '\\332', '\333' : '\\333', + '\334' : '\\334', '\335' : '\\335', '\336' : '\\336', + '\337' : '\\337', '\340' : '\\340', '\341' : '\\341', + '\342' : '\\342', '\343' : '\\343', '\344' : '\\344', + '\345' : '\\345', '\346' : '\\346', '\347' : '\\347', + '\350' : '\\350', '\351' : '\\351', '\352' : '\\352', + '\353' : '\\353', '\354' : '\\354', '\355' : '\\355', + '\356' : '\\356', '\357' : '\\357', '\360' : '\\360', + '\361' : '\\361', '\362' : '\\362', '\363' : '\\363', + '\364' : '\\364', '\365' : '\\365', '\366' : '\\366', + '\367' : '\\367', '\370' : '\\370', '\371' : '\\371', + '\372' : '\\372', 
'\373' : '\\373', '\374' : '\\374', + '\375' : '\\375', '\376' : '\\376', '\377' : '\\377' + } + +def _quote(str, LegalChars=_LegalChars): + r"""Quote a string for use in a cookie header. + + If the string does not need to be double-quoted, then just return the + string. Otherwise, surround the string in doublequotes and quote + (with a \) special characters. + """ + if all(c in LegalChars for c in str): + return str + else: + return '"' + _nulljoin(_Translator.get(s, s) for s in str) + '"' + + +_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") +_QuotePatt = re.compile(r"[\\].") + +def _unquote(mystr): + # If there aren't any doublequotes, + # then there can't be any special characters. See RFC 2109. + if len(mystr) < 2: + return mystr + if mystr[0] != '"' or mystr[-1] != '"': + return mystr + + # We have to assume that we must decode this string. + # Down to work. + + # Remove the "s + mystr = mystr[1:-1] + + # Check for special sequences. Examples: + # \012 --> \n + # \" --> " + # + i = 0 + n = len(mystr) + res = [] + while 0 <= i < n: + o_match = _OctalPatt.search(mystr, i) + q_match = _QuotePatt.search(mystr, i) + if not o_match and not q_match: # Neither matched + res.append(mystr[i:]) + break + # else: + j = k = -1 + if o_match: + j = o_match.start(0) + if q_match: + k = q_match.start(0) + if q_match and (not o_match or k < j): # QuotePatt matched + res.append(mystr[i:k]) + res.append(mystr[k+1]) + i = k + 2 + else: # OctalPatt matched + res.append(mystr[i:j]) + res.append(chr(int(mystr[j+1:j+4], 8))) + i = j + 4 + return _nulljoin(res) + +# The _getdate() routine is used to set the expiration time in the cookie's HTTP +# header. By default, _getdate() returns the current time in the appropriate +# "expires" format for a Set-Cookie header. The one optional argument is an +# offset from now, in seconds. For example, an offset of -3600 means "one hour +# ago". The offset may be a floating point number. +# + +_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + +_monthname = [None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] + +def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname): + from time import gmtime, time + now = time() + year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future) + return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \ + (weekdayname[wd], day, monthname[month], year, hh, mm, ss) + + +class Morsel(dict): + """A class to hold ONE (key, value) pair. + + In a cookie, each such pair may have several attributes, so this class is + used to keep the attributes associated with the appropriate key,value pair. + This class also includes a coded_value attribute, which is used to hold + the network representation of the value. This is most useful when Python + objects are pickled for network transit. + """ + # RFC 2109 lists these attributes as reserved: + # path comment domain + # max-age secure version + # + # For historical reasons, these attributes are also reserved: + # expires + # + # This is an extension from Microsoft: + # httponly + # + # This dictionary provides a mapping from the lowercase + # variant on the left to the appropriate traditional + # formatting on the right. 
+ _reserved = { + "expires" : "expires", + "path" : "Path", + "comment" : "Comment", + "domain" : "Domain", + "max-age" : "Max-Age", + "secure" : "secure", + "httponly" : "httponly", + "version" : "Version", + } + + _flags = set(['secure', 'httponly']) + + def __init__(self): + # Set defaults + self.key = self.value = self.coded_value = None + + # Set default attributes + for key in self._reserved: + dict.__setitem__(self, key, "") + + def __setitem__(self, K, V): + K = K.lower() + if not K in self._reserved: + raise CookieError("Invalid Attribute %s" % K) + dict.__setitem__(self, K, V) + + def isReservedKey(self, K): + return K.lower() in self._reserved + + def set(self, key, val, coded_val, LegalChars=_LegalChars): + # First we verify that the key isn't a reserved word + # Second we make sure it only contains legal characters + if key.lower() in self._reserved: + raise CookieError("Attempt to set a reserved key: %s" % key) + if any(c not in LegalChars for c in key): + raise CookieError("Illegal key value: %s" % key) + + # It's a good key, so save it. + self.key = key + self.value = val + self.coded_value = coded_val + + def output(self, attrs=None, header="Set-Cookie:"): + return "%s %s" % (header, self.OutputString(attrs)) + + __str__ = output + + @as_native_str() + def __repr__(self): + if PY2 and isinstance(self.value, unicode): + val = str(self.value) # make it a newstr to remove the u prefix + else: + val = self.value + return '<%s: %s=%s>' % (self.__class__.__name__, + str(self.key), repr(val)) + + def js_output(self, attrs=None): + # Print javascript + return """ + + """ % (self.OutputString(attrs).replace('"', r'\"')) + + def OutputString(self, attrs=None): + # Build up our result + # + result = [] + append = result.append + + # First, the key=value pair + append("%s=%s" % (self.key, self.coded_value)) + + # Now add any defined attributes + if attrs is None: + attrs = self._reserved + items = sorted(self.items()) + for key, value in items: + if value == "": + continue + if key not in attrs: + continue + if key == "expires" and isinstance(value, int): + append("%s=%s" % (self._reserved[key], _getdate(value))) + elif key == "max-age" and isinstance(value, int): + append("%s=%d" % (self._reserved[key], value)) + elif key == "secure": + append(str(self._reserved[key])) + elif key == "httponly": + append(str(self._reserved[key])) + else: + append("%s=%s" % (self._reserved[key], value)) + + # Return the result + return _semispacejoin(result) + + +# +# Pattern for finding cookie +# +# This used to be strict parsing based on the RFC2109 and RFC2068 +# specifications. I have since discovered that MSIE 3.0x doesn't +# follow the character rules outlined in those specs. As a +# result, the parsing rules here are less strict. +# + +_LegalCharsPatt = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]" +_CookiePattern = re.compile(r""" + (?x) # This is a verbose pattern + (?P # Start of group 'key' + """ + _LegalCharsPatt + r"""+? # Any word of at least one letter + ) # End of group 'key' + ( # Optional group: there may not be a value. + \s*=\s* # Equal Sign + (?P # Start of group 'val' + "(?:[^\\"]|\\.)*" # Any doublequoted string + | # or + \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr + | # or + """ + _LegalCharsPatt + r"""* # Any word or empty string + ) # End of group 'val' + )? # End of optional value group + \s* # Any number of spaces. + (\s+|;|$) # Ending either at space, semicolon, or EOS. + """, re.ASCII) # May be removed if safe. 
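To make the quoting rules described above concrete, here is a small round trip (a sketch only; it assumes this module is importable as future.backports.http.cookies, and _quote/_unquote are internal helpers used here purely for illustration):

    from future.backports.http.cookies import _quote, _unquote

    raw = 'gingersnap; with "quotes"'
    quoted = _quote(raw)              # ';' is encoded as \073, '"' as \", and the result is wrapped in "..."
    assert _unquote(quoted) == raw    # the octal escapes decode back to the original value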
+ + +# At long last, here is the cookie class. Using this class is almost just like +# using a dictionary. See this module's docstring for example usage. +# +class BaseCookie(dict): + """A container class for a set of Morsels.""" + + def value_decode(self, val): + """real_value, coded_value = value_decode(STRING) + Called prior to setting a cookie's value from the network + representation. The VALUE is the value read from HTTP + header. + Override this function to modify the behavior of cookies. + """ + return val, val + + def value_encode(self, val): + """real_value, coded_value = value_encode(VALUE) + Called prior to setting a cookie's value from the dictionary + representation. The VALUE is the value being assigned. + Override this function to modify the behavior of cookies. + """ + strval = str(val) + return strval, strval + + def __init__(self, input=None): + if input: + self.load(input) + + def __set(self, key, real_value, coded_value): + """Private method for setting a cookie's value""" + M = self.get(key, Morsel()) + M.set(key, real_value, coded_value) + dict.__setitem__(self, key, M) + + def __setitem__(self, key, value): + """Dictionary style assignment.""" + rval, cval = self.value_encode(value) + self.__set(key, rval, cval) + + def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"): + """Return a string suitable for HTTP.""" + result = [] + items = sorted(self.items()) + for key, value in items: + result.append(value.output(attrs, header)) + return sep.join(result) + + __str__ = output + + @as_native_str() + def __repr__(self): + l = [] + items = sorted(self.items()) + for key, value in items: + if PY2 and isinstance(value.value, unicode): + val = str(value.value) # make it a newstr to remove the u prefix + else: + val = value.value + l.append('%s=%s' % (str(key), repr(val))) + return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l)) + + def js_output(self, attrs=None): + """Return a string suitable for JavaScript.""" + result = [] + items = sorted(self.items()) + for key, value in items: + result.append(value.js_output(attrs)) + return _nulljoin(result) + + def load(self, rawdata): + """Load cookies from a string (presumably HTTP_COOKIE) or + from a dictionary. Loading cookies from a dictionary 'd' + is equivalent to calling: + map(Cookie.__setitem__, d.keys(), d.values()) + """ + if isinstance(rawdata, str): + self.__parse_string(rawdata) + else: + # self.update() wouldn't call our custom __setitem__ + for key, value in rawdata.items(): + self[key] = value + return + + def __parse_string(self, mystr, patt=_CookiePattern): + i = 0 # Our starting point + n = len(mystr) # Length of string + M = None # current morsel + + while 0 <= i < n: + # Start looking for a cookie + match = patt.search(mystr, i) + if not match: + # No more cookies + break + + key, value = match.group("key"), match.group("val") + + i = match.end(0) + + # Parse the key, value in case it's metainfo + if key[0] == "$": + # We ignore attributes which pertain to the cookie + # mechanism as a whole. See RFC 2109. + # (Does anyone care?) + if M: + M[key[1:]] = value + elif key.lower() in Morsel._reserved: + if M: + if value is None: + if key.lower() in Morsel._flags: + M[key] = True + else: + M[key] = _unquote(value) + elif value is not None: + rval, cval = self.value_decode(value) + self.__set(key, rval, cval) + M = self[key] + + +class SimpleCookie(BaseCookie): + """ + SimpleCookie supports strings as cookie values. 
When setting + the value using the dictionary assignment notation, SimpleCookie + calls the builtin str() to convert the value to a string. Values + received from HTTP are kept as strings. + """ + def value_decode(self, val): + return _unquote(val), val + + def value_encode(self, val): + strval = str(val) + return strval, _quote(strval) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/http/server.py b/minor_project/lib/python3.6/site-packages/future/backports/http/server.py new file mode 100644 index 0000000..b1c11e0 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/http/server.py @@ -0,0 +1,1226 @@ +"""HTTP server classes. + +From Python 3.3 + +Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see +SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, +and CGIHTTPRequestHandler for CGI scripts. + +It does, however, optionally implement HTTP/1.1 persistent connections, +as of version 0.3. + +Notes on CGIHTTPRequestHandler +------------------------------ + +This class implements GET and POST requests to cgi-bin scripts. + +If the os.fork() function is not present (e.g. on Windows), +subprocess.Popen() is used as a fallback, with slightly altered semantics. + +In all cases, the implementation is intentionally naive -- all +requests are executed synchronously. + +SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL +-- it may execute arbitrary Python code or external programs. + +Note that status code 200 is sent prior to execution of a CGI script, so +scripts cannot send other status codes such as 302 (redirect). + +XXX To do: + +- log requests even later (to capture byte count) +- log user-agent header and other interesting goodies +- send error log to separate file +""" + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future import utils +from future.builtins import * + + +# See also: +# +# HTTP Working Group T. Berners-Lee +# INTERNET-DRAFT R. T. Fielding +# H. Frystyk Nielsen +# Expires September 8, 1995 March 8, 1995 +# +# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt +# +# and +# +# Network Working Group R. Fielding +# Request for Comments: 2616 et al +# Obsoletes: 2068 June 1999 +# Category: Standards Track +# +# URL: http://www.faqs.org/rfcs/rfc2616.html + +# Log files +# --------- +# +# Here's a quote from the NCSA httpd docs about log file format. +# +# | The logfile format is as follows. Each line consists of: +# | +# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb +# | +# | host: Either the DNS name or the IP number of the remote client +# | rfc931: Any information returned by identd for this person, +# | - otherwise. +# | authuser: If user sent a userid for authentication, the user name, +# | - otherwise. +# | DD: Day +# | Mon: Month (calendar name) +# | YYYY: Year +# | hh: hour (24-hour format, the machine's timezone) +# | mm: minutes +# | ss: seconds +# | request: The first line of the HTTP request as sent by the client. +# | ddd: the status code returned by the server, - if not available. +# | bbbb: the total number of bytes sent, +# | *not including the HTTP/1.0 header*, - if not available +# | +# | You can determine the name of the file accessed through request. +# +# (Actually, the latter is only true if you know the server configuration +# at the time the request was made!) 
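Before the code, the quickest way to exercise the classes defined below is roughly the following (a sketch; the address and port are illustrative, and the stdlib http.server equivalents behave the same way):

    from future.backports.http.server import HTTPServer, SimpleHTTPRequestHandler

    httpd = HTTPServer(("127.0.0.1", 8000), SimpleHTTPRequestHandler)
    print("Serving the current directory on http://127.0.0.1:8000/")
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        httpd.server_close()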
+ +__version__ = "0.6" + +__all__ = ["HTTPServer", "BaseHTTPRequestHandler"] + +from future.backports import html +from future.backports.http import client as http_client +from future.backports.urllib import parse as urllib_parse +from future.backports import socketserver + +import io +import mimetypes +import os +import posixpath +import select +import shutil +import socket # For gethostbyaddr() +import sys +import time +import copy +import argparse + + +# Default error message template +DEFAULT_ERROR_MESSAGE = """\ + + + + + Error response + + +

Error response

+

Error code: %(code)d

+

Message: %(message)s.

+

Error code explanation: %(code)s - %(explain)s.

+ + +""" + +DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8" + +def _quote_html(html): + return html.replace("&", "&").replace("<", "<").replace(">", ">") + +class HTTPServer(socketserver.TCPServer): + + allow_reuse_address = 1 # Seems to make sense in testing environment + + def server_bind(self): + """Override server_bind to store the server name.""" + socketserver.TCPServer.server_bind(self) + host, port = self.socket.getsockname()[:2] + self.server_name = socket.getfqdn(host) + self.server_port = port + + +class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): + + """HTTP request handler base class. + + The following explanation of HTTP serves to guide you through the + code as well as to expose any misunderstandings I may have about + HTTP (so you don't need to read the code to figure out I'm wrong + :-). + + HTTP (HyperText Transfer Protocol) is an extensible protocol on + top of a reliable stream transport (e.g. TCP/IP). The protocol + recognizes three parts to a request: + + 1. One line identifying the request type and path + 2. An optional set of RFC-822-style headers + 3. An optional data part + + The headers and data are separated by a blank line. + + The first line of the request has the form + + + + where is a (case-sensitive) keyword such as GET or POST, + is a string containing path information for the request, + and should be the string "HTTP/1.0" or "HTTP/1.1". + is encoded using the URL encoding scheme (using %xx to signify + the ASCII character with hex code xx). + + The specification specifies that lines are separated by CRLF but + for compatibility with the widest range of clients recommends + servers also handle LF. Similarly, whitespace in the request line + is treated sensibly (allowing multiple spaces between components + and allowing trailing whitespace). + + Similarly, for output, lines ought to be separated by CRLF pairs + but most clients grok LF characters just fine. + + If the first line of the request has the form + + + + (i.e. is left out) then this is assumed to be an HTTP + 0.9 request; this form has no optional headers and data part and + the reply consists of just the data. + + The reply form of the HTTP 1.x protocol again has three parts: + + 1. One line giving the response code + 2. An optional set of RFC-822-style headers + 3. The data + + Again, the headers and data are separated by a blank line. + + The response code line has the form + + + + where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), + is a 3-digit response code indicating success or + failure of the request, and is an optional + human-readable string explaining what the response code means. + + This server parses the request and the headers, and then calls a + function specific to the request type (). Specifically, + a request SPAM will be handled by a method do_SPAM(). If no + such method exists the server sends an error response to the + client. If it exists, it is called with no arguments: + + do_SPAM() + + Note that the request name is case sensitive (i.e. SPAM and spam + are different requests). + + The various request details are stored in instance variables: + + - client_address is the client IP address in the form (host, + port); + + - command, path and version are the broken-down request line; + + - headers is an instance of email.message.Message (or a derived + class) containing the header information; + + - rfile is a file object open for reading positioned at the + start of the optional input data part; + + - wfile is a file object open for writing. 
+ + IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! + + The first thing to be written must be the response line. Then + follow 0 or more header lines, then a blank line, and then the + actual data (if any). The meaning of the header lines depends on + the command executed by the server; in most cases, when data is + returned, there should be at least one header line of the form + + Content-type: / + + where and should be registered MIME types, + e.g. "text/html" or "text/plain". + + """ + + # The Python system version, truncated to its first component. + sys_version = "Python/" + sys.version.split()[0] + + # The server software version. You may want to override this. + # The format is multiple whitespace-separated strings, + # where each string is of the form name[/version]. + server_version = "BaseHTTP/" + __version__ + + error_message_format = DEFAULT_ERROR_MESSAGE + error_content_type = DEFAULT_ERROR_CONTENT_TYPE + + # The default request version. This only affects responses up until + # the point where the request line is parsed, so it mainly decides what + # the client gets back when sending a malformed request line. + # Most web servers default to HTTP 0.9, i.e. don't send a status line. + default_request_version = "HTTP/0.9" + + def parse_request(self): + """Parse a request (internal). + + The request should be stored in self.raw_requestline; the results + are in self.command, self.path, self.request_version and + self.headers. + + Return True for success, False for failure; on failure, an + error is sent back. + + """ + self.command = None # set in case of error on the first line + self.request_version = version = self.default_request_version + self.close_connection = 1 + requestline = str(self.raw_requestline, 'iso-8859-1') + requestline = requestline.rstrip('\r\n') + self.requestline = requestline + words = requestline.split() + if len(words) == 3: + command, path, version = words + if version[:5] != 'HTTP/': + self.send_error(400, "Bad request version (%r)" % version) + return False + try: + base_version_number = version.split('/', 1)[1] + version_number = base_version_number.split(".") + # RFC 2145 section 3.1 says there can be only one "." and + # - major and minor numbers MUST be treated as + # separate integers; + # - HTTP/2.4 is a lower version than HTTP/2.13, which in + # turn is lower than HTTP/12.3; + # - Leading zeros MUST be ignored by recipients. + if len(version_number) != 2: + raise ValueError + version_number = int(version_number[0]), int(version_number[1]) + except (ValueError, IndexError): + self.send_error(400, "Bad request version (%r)" % version) + return False + if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1": + self.close_connection = 0 + if version_number >= (2, 0): + self.send_error(505, + "Invalid HTTP Version (%s)" % base_version_number) + return False + elif len(words) == 2: + command, path = words + self.close_connection = 1 + if command != 'GET': + self.send_error(400, + "Bad HTTP/0.9 request type (%r)" % command) + return False + elif not words: + return False + else: + self.send_error(400, "Bad request syntax (%r)" % requestline) + return False + self.command, self.path, self.request_version = command, path, version + + # Examine the headers and look for a Connection directive. 
+ try: + self.headers = http_client.parse_headers(self.rfile, + _class=self.MessageClass) + except http_client.LineTooLong: + self.send_error(400, "Line too long") + return False + + conntype = self.headers.get('Connection', "") + if conntype.lower() == 'close': + self.close_connection = 1 + elif (conntype.lower() == 'keep-alive' and + self.protocol_version >= "HTTP/1.1"): + self.close_connection = 0 + # Examine the headers and look for an Expect directive + expect = self.headers.get('Expect', "") + if (expect.lower() == "100-continue" and + self.protocol_version >= "HTTP/1.1" and + self.request_version >= "HTTP/1.1"): + if not self.handle_expect_100(): + return False + return True + + def handle_expect_100(self): + """Decide what to do with an "Expect: 100-continue" header. + + If the client is expecting a 100 Continue response, we must + respond with either a 100 Continue or a final response before + waiting for the request body. The default is to always respond + with a 100 Continue. You can behave differently (for example, + reject unauthorized requests) by overriding this method. + + This method should either return True (possibly after sending + a 100 Continue response) or send an error response and return + False. + + """ + self.send_response_only(100) + self.flush_headers() + return True + + def handle_one_request(self): + """Handle a single HTTP request. + + You normally don't need to override this method; see the class + __doc__ string for information on how to handle specific HTTP + commands such as GET and POST. + + """ + try: + self.raw_requestline = self.rfile.readline(65537) + if len(self.raw_requestline) > 65536: + self.requestline = '' + self.request_version = '' + self.command = '' + self.send_error(414) + return + if not self.raw_requestline: + self.close_connection = 1 + return + if not self.parse_request(): + # An error code has been sent, just exit + return + mname = 'do_' + self.command + if not hasattr(self, mname): + self.send_error(501, "Unsupported method (%r)" % self.command) + return + method = getattr(self, mname) + method() + self.wfile.flush() #actually send the response if not already done. + except socket.timeout as e: + #a read or a write timed out. Discard this connection + self.log_error("Request timed out: %r", e) + self.close_connection = 1 + return + + def handle(self): + """Handle multiple requests if necessary.""" + self.close_connection = 1 + + self.handle_one_request() + while not self.close_connection: + self.handle_one_request() + + def send_error(self, code, message=None): + """Send and log an error reply. + + Arguments are the error code, and a detailed message. + The detailed message defaults to the short entry matching the + response code. + + This sends an error response (so it must be called before any + output has been generated), logs the error, and finally sends + a piece of HTML explaining the error to the user. + + """ + + try: + shortmsg, longmsg = self.responses[code] + except KeyError: + shortmsg, longmsg = '???', '???' 
+ if message is None: + message = shortmsg + explain = longmsg + self.log_error("code %d, message %s", code, message) + # using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201) + content = (self.error_message_format % + {'code': code, 'message': _quote_html(message), 'explain': explain}) + self.send_response(code, message) + self.send_header("Content-Type", self.error_content_type) + self.send_header('Connection', 'close') + self.end_headers() + if self.command != 'HEAD' and code >= 200 and code not in (204, 304): + self.wfile.write(content.encode('UTF-8', 'replace')) + + def send_response(self, code, message=None): + """Add the response header to the headers buffer and log the + response code. + + Also send two standard headers with the server software + version and the current date. + + """ + self.log_request(code) + self.send_response_only(code, message) + self.send_header('Server', self.version_string()) + self.send_header('Date', self.date_time_string()) + + def send_response_only(self, code, message=None): + """Send the response header only.""" + if message is None: + if code in self.responses: + message = self.responses[code][0] + else: + message = '' + if self.request_version != 'HTTP/0.9': + if not hasattr(self, '_headers_buffer'): + self._headers_buffer = [] + self._headers_buffer.append(("%s %d %s\r\n" % + (self.protocol_version, code, message)).encode( + 'latin-1', 'strict')) + + def send_header(self, keyword, value): + """Send a MIME header to the headers buffer.""" + if self.request_version != 'HTTP/0.9': + if not hasattr(self, '_headers_buffer'): + self._headers_buffer = [] + self._headers_buffer.append( + ("%s: %s\r\n" % (keyword, value)).encode('latin-1', 'strict')) + + if keyword.lower() == 'connection': + if value.lower() == 'close': + self.close_connection = 1 + elif value.lower() == 'keep-alive': + self.close_connection = 0 + + def end_headers(self): + """Send the blank line ending the MIME headers.""" + if self.request_version != 'HTTP/0.9': + self._headers_buffer.append(b"\r\n") + self.flush_headers() + + def flush_headers(self): + if hasattr(self, '_headers_buffer'): + self.wfile.write(b"".join(self._headers_buffer)) + self._headers_buffer = [] + + def log_request(self, code='-', size='-'): + """Log an accepted request. + + This is called by send_response(). + + """ + + self.log_message('"%s" %s %s', + self.requestline, str(code), str(size)) + + def log_error(self, format, *args): + """Log an error. + + This is called when a request cannot be fulfilled. By + default it passes the message on to log_message(). + + Arguments are the same as for log_message(). + + XXX This should go to the separate error log. + + """ + + self.log_message(format, *args) + + def log_message(self, format, *args): + """Log an arbitrary message. + + This is used by all other logging functions. Override + it if you have specific logging wishes. + + The first argument, FORMAT, is a format string for the + message to be logged. If the format string contains + any % escapes requiring parameters, they should be + specified as subsequent arguments (it's just like + printf!). + + The client ip and current date/time are prefixed to + every message. 
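For example, a subclass that routes these messages to the logging module instead of stderr could look roughly like this (QuietHandler is an illustrative name):

    import logging
    from future.backports.http.server import SimpleHTTPRequestHandler

    class QuietHandler(SimpleHTTPRequestHandler):
        def log_message(self, format, *args):
            logging.getLogger("httpd").info("%s %s",
                                            self.address_string(), format % args)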
+ + """ + + sys.stderr.write("%s - - [%s] %s\n" % + (self.address_string(), + self.log_date_time_string(), + format%args)) + + def version_string(self): + """Return the server software version string.""" + return self.server_version + ' ' + self.sys_version + + def date_time_string(self, timestamp=None): + """Return the current date and time formatted for a message header.""" + if timestamp is None: + timestamp = time.time() + year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp) + s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + self.weekdayname[wd], + day, self.monthname[month], year, + hh, mm, ss) + return s + + def log_date_time_string(self): + """Return the current time formatted for logging.""" + now = time.time() + year, month, day, hh, mm, ss, x, y, z = time.localtime(now) + s = "%02d/%3s/%04d %02d:%02d:%02d" % ( + day, self.monthname[month], year, hh, mm, ss) + return s + + weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + + monthname = [None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] + + def address_string(self): + """Return the client address.""" + + return self.client_address[0] + + # Essentially static class variables + + # The version of the HTTP protocol we support. + # Set this to HTTP/1.1 to enable automatic keepalive + protocol_version = "HTTP/1.0" + + # MessageClass used to parse headers + MessageClass = http_client.HTTPMessage + + # Table mapping response codes to messages; entries have the + # form {code: (shortmessage, longmessage)}. + # See RFC 2616 and 6585. + responses = { + 100: ('Continue', 'Request received, please continue'), + 101: ('Switching Protocols', + 'Switching to new protocol; obey Upgrade header'), + + 200: ('OK', 'Request fulfilled, document follows'), + 201: ('Created', 'Document created, URL follows'), + 202: ('Accepted', + 'Request accepted, processing continues off-line'), + 203: ('Non-Authoritative Information', 'Request fulfilled from cache'), + 204: ('No Content', 'Request fulfilled, nothing follows'), + 205: ('Reset Content', 'Clear input form for further input.'), + 206: ('Partial Content', 'Partial content follows.'), + + 300: ('Multiple Choices', + 'Object has several resources -- see URI list'), + 301: ('Moved Permanently', 'Object moved permanently -- see URI list'), + 302: ('Found', 'Object moved temporarily -- see URI list'), + 303: ('See Other', 'Object moved -- see Method and URL list'), + 304: ('Not Modified', + 'Document has not changed since given time'), + 305: ('Use Proxy', + 'You must use proxy specified in Location to access this ' + 'resource.'), + 307: ('Temporary Redirect', + 'Object moved temporarily -- see URI list'), + + 400: ('Bad Request', + 'Bad request syntax or unsupported method'), + 401: ('Unauthorized', + 'No permission -- see authorization schemes'), + 402: ('Payment Required', + 'No payment -- see charging schemes'), + 403: ('Forbidden', + 'Request forbidden -- authorization will not help'), + 404: ('Not Found', 'Nothing matches the given URI'), + 405: ('Method Not Allowed', + 'Specified method is invalid for this resource.'), + 406: ('Not Acceptable', 'URI not available in preferred format.'), + 407: ('Proxy Authentication Required', 'You must authenticate with ' + 'this proxy before proceeding.'), + 408: ('Request Timeout', 'Request timed out; try again later.'), + 409: ('Conflict', 'Request conflict.'), + 410: ('Gone', + 'URI no longer exists and has been permanently removed.'), + 411: ('Length Required', 'Client must specify 
Content-Length.'), + 412: ('Precondition Failed', 'Precondition in headers is false.'), + 413: ('Request Entity Too Large', 'Entity is too large.'), + 414: ('Request-URI Too Long', 'URI is too long.'), + 415: ('Unsupported Media Type', 'Entity body in unsupported format.'), + 416: ('Requested Range Not Satisfiable', + 'Cannot satisfy request range.'), + 417: ('Expectation Failed', + 'Expect condition could not be satisfied.'), + 428: ('Precondition Required', + 'The origin server requires the request to be conditional.'), + 429: ('Too Many Requests', 'The user has sent too many requests ' + 'in a given amount of time ("rate limiting").'), + 431: ('Request Header Fields Too Large', 'The server is unwilling to ' + 'process the request because its header fields are too large.'), + + 500: ('Internal Server Error', 'Server got itself in trouble'), + 501: ('Not Implemented', + 'Server does not support this operation'), + 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'), + 503: ('Service Unavailable', + 'The server cannot process the request due to a high load'), + 504: ('Gateway Timeout', + 'The gateway server did not receive a timely response'), + 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'), + 511: ('Network Authentication Required', + 'The client needs to authenticate to gain network access.'), + } + + +class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): + + """Simple HTTP request handler with GET and HEAD commands. + + This serves files from the current directory and any of its + subdirectories. The MIME type for files is determined by + calling the .guess_type() method. + + The GET and HEAD requests are identical except that the HEAD + request omits the actual contents of the file. + + """ + + server_version = "SimpleHTTP/" + __version__ + + def do_GET(self): + """Serve a GET request.""" + f = self.send_head() + if f: + self.copyfile(f, self.wfile) + f.close() + + def do_HEAD(self): + """Serve a HEAD request.""" + f = self.send_head() + if f: + f.close() + + def send_head(self): + """Common code for GET and HEAD commands. + + This sends the response code and MIME headers. + + Return value is either a file object (which has to be copied + to the outputfile by the caller unless the command was HEAD, + and must be closed by the caller under all circumstances), or + None, in which case the caller has nothing further to do. + + """ + path = self.translate_path(self.path) + f = None + if os.path.isdir(path): + if not self.path.endswith('/'): + # redirect browser - doing basically what apache does + self.send_response(301) + self.send_header("Location", self.path + "/") + self.end_headers() + return None + for index in "index.html", "index.htm": + index = os.path.join(path, index) + if os.path.exists(index): + path = index + break + else: + return self.list_directory(path) + ctype = self.guess_type(path) + try: + f = open(path, 'rb') + except IOError: + self.send_error(404, "File not found") + return None + self.send_response(200) + self.send_header("Content-type", ctype) + fs = os.fstat(f.fileno()) + self.send_header("Content-Length", str(fs[6])) + self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) + self.end_headers() + return f + + def list_directory(self, path): + """Helper to produce a directory listing (absent index.html). + + Return value is either a file object, or None (indicating an + error). In either case, the headers are sent, making the + interface the same as for send_head(). 
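As an aside on the MIME handling mentioned in the class docstring above: guess_type() consults the class-level extensions_map defined further down, so adding or overriding a mapping is a one-liner (a sketch; the '.md' entry is illustrative):

    from future.backports.http.server import SimpleHTTPRequestHandler

    SimpleHTTPRequestHandler.extensions_map['.md'] = 'text/markdown'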
+ + """ + try: + list = os.listdir(path) + except os.error: + self.send_error(404, "No permission to list directory") + return None + list.sort(key=lambda a: a.lower()) + r = [] + displaypath = html.escape(urllib_parse.unquote(self.path)) + enc = sys.getfilesystemencoding() + title = 'Directory listing for %s' % displaypath + r.append('') + r.append('\n') + r.append('' % enc) + r.append('%s\n' % title) + r.append('\n

<h1>%s</h1>' % title)
+        r.append('<hr>\n<ul>')
+        for name in list:
+            fullname = os.path.join(path, name)
+            displayname = linkname = name
+            # Append / for directories or @ for symbolic links
+            if os.path.isdir(fullname):
+                displayname = name + "/"
+                linkname = name + "/"
+            if os.path.islink(fullname):
+                displayname = name + "@"
+                # Note: a link to a directory displays with @ and links with /
+            r.append('<li><a href="%s">%s</a>'
+                    % (urllib_parse.quote(linkname), html.escape(displayname)))
+            # # Use this instead:
+            # r.append('<li><a href="%s">%s</a>'
+            #         % (urllib.quote(linkname), cgi.escape(displayname)))
+        r.append('</ul>\n<hr>\n</body>\n</html>
\n\n\n') + encoded = '\n'.join(r).encode(enc) + f = io.BytesIO() + f.write(encoded) + f.seek(0) + self.send_response(200) + self.send_header("Content-type", "text/html; charset=%s" % enc) + self.send_header("Content-Length", str(len(encoded))) + self.end_headers() + return f + + def translate_path(self, path): + """Translate a /-separated PATH to the local filename syntax. + + Components that mean special things to the local file system + (e.g. drive or directory names) are ignored. (XXX They should + probably be diagnosed.) + + """ + # abandon query parameters + path = path.split('?',1)[0] + path = path.split('#',1)[0] + path = posixpath.normpath(urllib_parse.unquote(path)) + words = path.split('/') + words = filter(None, words) + path = os.getcwd() + for word in words: + drive, word = os.path.splitdrive(word) + head, word = os.path.split(word) + if word in (os.curdir, os.pardir): continue + path = os.path.join(path, word) + return path + + def copyfile(self, source, outputfile): + """Copy all data between two file objects. + + The SOURCE argument is a file object open for reading + (or anything with a read() method) and the DESTINATION + argument is a file object open for writing (or + anything with a write() method). + + The only reason for overriding this would be to change + the block size or perhaps to replace newlines by CRLF + -- note however that this the default server uses this + to copy binary data as well. + + """ + shutil.copyfileobj(source, outputfile) + + def guess_type(self, path): + """Guess the type of a file. + + Argument is a PATH (a filename). + + Return value is a string of the form type/subtype, + usable for a MIME Content-type header. + + The default implementation looks the file's extension + up in the table self.extensions_map, using application/octet-stream + as a default; however it would be permissible (if + slow) to look inside the data to make a better guess. + + """ + + base, ext = posixpath.splitext(path) + if ext in self.extensions_map: + return self.extensions_map[ext] + ext = ext.lower() + if ext in self.extensions_map: + return self.extensions_map[ext] + else: + return self.extensions_map[''] + + if not mimetypes.inited: + mimetypes.init() # try to read system mime.types + extensions_map = mimetypes.types_map.copy() + extensions_map.update({ + '': 'application/octet-stream', # Default + '.py': 'text/plain', + '.c': 'text/plain', + '.h': 'text/plain', + }) + + +# Utilities for CGIHTTPRequestHandler + +def _url_collapse_path(path): + """ + Given a URL path, remove extra '/'s and '.' path elements and collapse + any '..' references and returns a colllapsed path. + + Implements something akin to RFC-2396 5.2 step 6 to parse relative paths. + The utility of this function is limited to is_cgi method and helps + preventing some security attacks. + + Returns: A tuple of (head, tail) where tail is everything after the final / + and head is everything before it. Head will always start with a '/' and, + if it contains anything else, never have a trailing '/'. + + Raises: IndexError if too many '..' occur within the path. + + """ + # Similar to os.path.split(os.path.normpath(path)) but specific to URL + # path semantics rather than local operating system semantics. + path_parts = path.split('/') + head_parts = [] + for part in path_parts[:-1]: + if part == '..': + head_parts.pop() # IndexError if more '..' 
than prior parts + elif part and part != '.': + head_parts.append( part ) + if path_parts: + tail_part = path_parts.pop() + if tail_part: + if tail_part == '..': + head_parts.pop() + tail_part = '' + elif tail_part == '.': + tail_part = '' + else: + tail_part = '' + + splitpath = ('/' + '/'.join(head_parts), tail_part) + collapsed_path = "/".join(splitpath) + + return collapsed_path + + + +nobody = None + +def nobody_uid(): + """Internal routine to get nobody's uid""" + global nobody + if nobody: + return nobody + try: + import pwd + except ImportError: + return -1 + try: + nobody = pwd.getpwnam('nobody')[2] + except KeyError: + nobody = 1 + max(x[2] for x in pwd.getpwall()) + return nobody + + +def executable(path): + """Test for executable file.""" + return os.access(path, os.X_OK) + + +class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): + + """Complete HTTP server with GET, HEAD and POST commands. + + GET and HEAD also support running CGI scripts. + + The POST command is *only* implemented for CGI scripts. + + """ + + # Determine platform specifics + have_fork = hasattr(os, 'fork') + + # Make rfile unbuffered -- we need to read one line and then pass + # the rest to a subprocess, so we can't use buffered input. + rbufsize = 0 + + def do_POST(self): + """Serve a POST request. + + This is only implemented for CGI scripts. + + """ + + if self.is_cgi(): + self.run_cgi() + else: + self.send_error(501, "Can only POST to CGI scripts") + + def send_head(self): + """Version of send_head that support CGI scripts""" + if self.is_cgi(): + return self.run_cgi() + else: + return SimpleHTTPRequestHandler.send_head(self) + + def is_cgi(self): + """Test whether self.path corresponds to a CGI script. + + Returns True and updates the cgi_info attribute to the tuple + (dir, rest) if self.path requires running a CGI script. + Returns False otherwise. + + If any exception is raised, the caller should assume that + self.path was rejected as invalid and act accordingly. + + The default implementation tests whether the normalized url + path begins with one of the strings in self.cgi_directories + (and the next character is a '/' or the end of the string). + + """ + collapsed_path = _url_collapse_path(self.path) + dir_sep = collapsed_path.find('/', 1) + head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:] + if head in self.cgi_directories: + self.cgi_info = head, tail + return True + return False + + + cgi_directories = ['/cgi-bin', '/htbin'] + + def is_executable(self, path): + """Test whether argument path is an executable file.""" + return executable(path) + + def is_python(self, path): + """Test whether argument path is a Python script.""" + head, tail = os.path.splitext(path) + return tail.lower() in (".py", ".pyw") + + def run_cgi(self): + """Execute a CGI script.""" + path = self.path + dir, rest = self.cgi_info + + i = path.find('/', len(dir) + 1) + while i >= 0: + nextdir = path[:i] + nextrest = path[i+1:] + + scriptdir = self.translate_path(nextdir) + if os.path.isdir(scriptdir): + dir, rest = nextdir, nextrest + i = path.find('/', len(dir) + 1) + else: + break + + # find an explicit query string, if present. + i = rest.rfind('?') + if i >= 0: + rest, query = rest[:i], rest[i+1:] + else: + query = '' + + # dissect the part after the directory name into a script name & + # a possible additional path, to be stored in PATH_INFO. 
+ i = rest.find('/') + if i >= 0: + script, rest = rest[:i], rest[i:] + else: + script, rest = rest, '' + + scriptname = dir + '/' + script + scriptfile = self.translate_path(scriptname) + if not os.path.exists(scriptfile): + self.send_error(404, "No such CGI script (%r)" % scriptname) + return + if not os.path.isfile(scriptfile): + self.send_error(403, "CGI script is not a plain file (%r)" % + scriptname) + return + ispy = self.is_python(scriptname) + if self.have_fork or not ispy: + if not self.is_executable(scriptfile): + self.send_error(403, "CGI script is not executable (%r)" % + scriptname) + return + + # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html + # XXX Much of the following could be prepared ahead of time! + env = copy.deepcopy(os.environ) + env['SERVER_SOFTWARE'] = self.version_string() + env['SERVER_NAME'] = self.server.server_name + env['GATEWAY_INTERFACE'] = 'CGI/1.1' + env['SERVER_PROTOCOL'] = self.protocol_version + env['SERVER_PORT'] = str(self.server.server_port) + env['REQUEST_METHOD'] = self.command + uqrest = urllib_parse.unquote(rest) + env['PATH_INFO'] = uqrest + env['PATH_TRANSLATED'] = self.translate_path(uqrest) + env['SCRIPT_NAME'] = scriptname + if query: + env['QUERY_STRING'] = query + env['REMOTE_ADDR'] = self.client_address[0] + authorization = self.headers.get("authorization") + if authorization: + authorization = authorization.split() + if len(authorization) == 2: + import base64, binascii + env['AUTH_TYPE'] = authorization[0] + if authorization[0].lower() == "basic": + try: + authorization = authorization[1].encode('ascii') + if utils.PY3: + # In Py3.3, was: + authorization = base64.decodebytes(authorization).\ + decode('ascii') + else: + # Backport to Py2.7: + authorization = base64.decodestring(authorization).\ + decode('ascii') + except (binascii.Error, UnicodeError): + pass + else: + authorization = authorization.split(':') + if len(authorization) == 2: + env['REMOTE_USER'] = authorization[0] + # XXX REMOTE_IDENT + if self.headers.get('content-type') is None: + env['CONTENT_TYPE'] = self.headers.get_content_type() + else: + env['CONTENT_TYPE'] = self.headers['content-type'] + length = self.headers.get('content-length') + if length: + env['CONTENT_LENGTH'] = length + referer = self.headers.get('referer') + if referer: + env['HTTP_REFERER'] = referer + accept = [] + for line in self.headers.getallmatchingheaders('accept'): + if line[:1] in "\t\n\r ": + accept.append(line.strip()) + else: + accept = accept + line[7:].split(',') + env['HTTP_ACCEPT'] = ','.join(accept) + ua = self.headers.get('user-agent') + if ua: + env['HTTP_USER_AGENT'] = ua + co = filter(None, self.headers.get_all('cookie', [])) + cookie_str = ', '.join(co) + if cookie_str: + env['HTTP_COOKIE'] = cookie_str + # XXX Other HTTP_* headers + # Since we're setting the env in the parent, provide empty + # values to override previously set values + for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', + 'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'): + env.setdefault(k, "") + + self.send_response(200, "Script output follows") + self.flush_headers() + + decoded_query = query.replace('+', ' ') + + if self.have_fork: + # Unix -- fork as we should + args = [script] + if '=' not in decoded_query: + args.append(decoded_query) + nobody = nobody_uid() + self.wfile.flush() # Always flush before forking + pid = os.fork() + if pid != 0: + # Parent + pid, sts = os.waitpid(pid, 0) + # throw away additional data [see bug #427345] + while select.select([self.rfile], [], [], 0)[0]: + if not 
self.rfile.read(1): + break + if sts: + self.log_error("CGI script exit status %#x", sts) + return + # Child + try: + try: + os.setuid(nobody) + except os.error: + pass + os.dup2(self.rfile.fileno(), 0) + os.dup2(self.wfile.fileno(), 1) + os.execve(scriptfile, args, env) + except: + self.server.handle_error(self.request, self.client_address) + os._exit(127) + + else: + # Non-Unix -- use subprocess + import subprocess + cmdline = [scriptfile] + if self.is_python(scriptfile): + interp = sys.executable + if interp.lower().endswith("w.exe"): + # On Windows, use python.exe, not pythonw.exe + interp = interp[:-5] + interp[-4:] + cmdline = [interp, '-u'] + cmdline + if '=' not in query: + cmdline.append(query) + self.log_message("command: %s", subprocess.list2cmdline(cmdline)) + try: + nbytes = int(length) + except (TypeError, ValueError): + nbytes = 0 + p = subprocess.Popen(cmdline, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env = env + ) + if self.command.lower() == "post" and nbytes > 0: + data = self.rfile.read(nbytes) + else: + data = None + # throw away additional data [see bug #427345] + while select.select([self.rfile._sock], [], [], 0)[0]: + if not self.rfile._sock.recv(1): + break + stdout, stderr = p.communicate(data) + self.wfile.write(stdout) + if stderr: + self.log_error('%s', stderr) + p.stderr.close() + p.stdout.close() + status = p.returncode + if status: + self.log_error("CGI script exit status %#x", status) + else: + self.log_message("CGI script exited OK") + + +def test(HandlerClass = BaseHTTPRequestHandler, + ServerClass = HTTPServer, protocol="HTTP/1.0", port=8000): + """Test the HTTP request handler class. + + This runs an HTTP server on port 8000 (or the first command line + argument). + + """ + server_address = ('', port) + + HandlerClass.protocol_version = protocol + httpd = ServerClass(server_address, HandlerClass) + + sa = httpd.socket.getsockname() + print("Serving HTTP on", sa[0], "port", sa[1], "...") + try: + httpd.serve_forever() + except KeyboardInterrupt: + print("\nKeyboard interrupt received, exiting.") + httpd.server_close() + sys.exit(0) + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--cgi', action='store_true', + help='Run as CGI Server') + parser.add_argument('port', action='store', + default=8000, type=int, + nargs='?', + help='Specify alternate port [default: 8000]') + args = parser.parse_args() + if args.cgi: + test(HandlerClass=CGIHTTPRequestHandler, port=args.port) + else: + test(HandlerClass=SimpleHTTPRequestHandler, port=args.port) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/misc.py b/minor_project/lib/python3.6/site-packages/future/backports/misc.py new file mode 100644 index 0000000..098a066 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/misc.py @@ -0,0 +1,944 @@ +""" +Miscellaneous function (re)definitions from the Py3.4+ standard library +for Python 2.6/2.7. 
+ +- math.ceil (for Python 2.7) +- collections.OrderedDict (for Python 2.6) +- collections.Counter (for Python 2.6) +- collections.ChainMap (for all versions prior to Python 3.3) +- itertools.count (for Python 2.6, with step parameter) +- subprocess.check_output (for Python 2.6) +- reprlib.recursive_repr (for Python 2.6+) +- functools.cmp_to_key (for Python 2.6) +""" + +from __future__ import absolute_import + +import subprocess +from math import ceil as oldceil + +from operator import itemgetter as _itemgetter, eq as _eq +import sys +import heapq as _heapq +from _weakref import proxy as _proxy +from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +from socket import getaddrinfo, SOCK_STREAM, error, socket + +from future.utils import iteritems, itervalues, PY2, PY26, PY3 + +if PY2: + from collections import Mapping, MutableMapping +else: + from collections.abc import Mapping, MutableMapping + + +def ceil(x): + """ + Return the ceiling of x as an int. + This is the smallest integral value >= x. + """ + return int(oldceil(x)) + + +######################################################################## +### reprlib.recursive_repr decorator from Py3.4 +######################################################################## + +from itertools import islice + +if PY3: + try: + from _thread import get_ident + except ImportError: + from _dummy_thread import get_ident +else: + try: + from thread import get_ident + except ImportError: + from dummy_thread import get_ident + + +def recursive_repr(fillvalue='...'): + 'Decorator to make a repr function return fillvalue for a recursive call' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + return wrapper + + return decorating_function + + +################################################################################ +### OrderedDict +################################################################################ + +class _Link(object): + __slots__ = 'prev', 'next', 'key', '__weakref__' + +class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as regular dictionaries. + + # The internal self.__map dict maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # The sentinel is in self.__hardroot with a weakref proxy in self.__root. + # The prev links are weakref proxies (to prevent circular references). + # Individual links are kept alive by the hard reference in self.__map. + # Those hard references disappear when a key is deleted from an OrderedDict. + + def __init__(*args, **kwds): + '''Initialize an ordered dictionary. 
The signature is the same as + regular dictionaries, but keyword arguments are not recommended because + their insertion order is arbitrary. + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__hardroot = _Link() + self.__root = root = _proxy(self.__hardroot) + root.prev = root.next = root + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, + dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link at the end of the linked list, + # and the inherited dictionary is updated with the new key/value pair. + if key not in self: + self.__map[key] = link = Link() + root = self.__root + last = root.prev + link.prev, link.next, link.key = last, root, key + last.next = link + root.prev = proxy(link) + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which gets + # removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link = self.__map.pop(key) + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + # Traverse the linked list in order. + root = self.__root + curr = root.next + while curr is not root: + yield curr.key + curr = curr.next + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + # Traverse the linked list in reverse order. + root = self.__root + curr = root.prev + while curr is not root: + yield curr.key + curr = curr.prev + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + root = self.__root + root.prev = root.next = root + self.__map.clear() + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root.prev + link_prev = link.prev + link_prev.next = root + root.prev = link_prev + else: + link = root.next + link_next = link.next + root.next = link_next + link_next.prev = root + key = link.key + del self.__map[key] + value = dict.pop(self, key) + return key, value + + def move_to_end(self, key, last=True): + '''Move an existing element to the end (or beginning if last==False). + + Raises KeyError if the element does not exist. + When last=True, acts like a fast version of self[key]=self.pop(key). 
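Since the linked-list bookkeeping above is easy to lose track of, here is a brief usage sketch of the backported OrderedDict (editor's illustration; assumes the class defined in this module is in scope, and the keys and values are arbitrary).

od = OrderedDict()
od['a'] = 1
od['b'] = 2
od['c'] = 3
od.move_to_end('a')             # re-links 'a' just before the root sentinel
print(list(od))                 # ['b', 'c', 'a']
print(od.popitem(last=False))   # ('b', 2)  -- FIFO end of the list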
+ + ''' + link = self.__map[key] + link_prev = link.prev + link_next = link.next + link_prev.next = link_next + link_next.prev = link_prev + root = self.__root + if last: + last = root.prev + link.prev = last + link.next = root + last.next = root.prev = link + else: + first = root.next + link.prev = root + link.next = first + root.next = first.prev = link + + def __sizeof__(self): + sizeof = sys.getsizeof + n = len(self) + 1 # number of links including root + size = sizeof(self.__dict__) # instance dictionary + size += sizeof(self.__map) * 2 # internal dict and inherited dict + size += sizeof(self.__hardroot) * n # link objects + size += sizeof(self.__root) * n # proxy objects + return size + + update = __update = MutableMapping.update + keys = MutableMapping.keys + values = MutableMapping.values + items = MutableMapping.items + __ne__ = MutableMapping.__ne__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding + value. If key is not found, d is returned if given, otherwise KeyError + is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + @recursive_repr() + def __repr__(self): + 'od.__repr__() <==> repr(od)' + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self.items())) + + def __reduce__(self): + 'Return state information for pickling' + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + return self.__class__, (), inst_dict or None, None, iter(self.items()) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S. + If not specified, the value defaults to None. + + ''' + self = cls() + for key in iterable: + self[key] = value + return self + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return dict.__eq__(self, other) and all(map(_eq, self, other)) + return dict.__eq__(self, other) + + +# {{{ http://code.activestate.com/recipes/576611/ (r11) + +try: + from operator import itemgetter + from heapq import nlargest +except ImportError: + pass + +######################################################################## +### Counter +######################################################################## + +def _count_elements(mapping, iterable): + 'Tally elements from the iterable.' + mapping_get = mapping.get + for elem in iterable: + mapping[elem] = mapping_get(elem, 0) + 1 + +class Counter(dict): + '''Dict subclass for counting hashable items. Sometimes called a bag + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. 
+ + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + ''' + # References: + # http://en.wikipedia.org/wiki/Multiset + # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html + # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm + # http://code.activestate.com/recipes/259174/ + # Knuth, TAOCP Vol. II section 4.6.3 + + def __init__(*args, **kwds): + '''Create a new, empty Counter object. And if given, count elements + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. + + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + ''' + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) + + def __missing__(self, key): + 'The count of elements not in the Counter is zero.' + # Needed so that self[missing_item] does not raise KeyError + return 0 + + def most_common(self, n=None): + '''List the n most common elements and their counts from the most + common to the least. If n is None, then list all element counts. + + >>> Counter('abcdeabcdabcaba').most_common(3) + [('a', 5), ('b', 4), ('c', 3)] + + ''' + # Emulate Bag.sortedByCount from Smalltalk + if n is None: + return sorted(self.items(), key=_itemgetter(1), reverse=True) + return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) + + def elements(self): + '''Iterator over elements repeating each as many times as its count. + + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] + + # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> product = 1 + >>> for factor in prime_factors.elements(): # loop over factors + ... product *= factor # and multiply them + >>> product + 1836 + + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + ''' + # Emulate Bag.do from Smalltalk and Multiset.begin from C++. 
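# Editor's note (illustrative only): the return expression below is equivalent
# to this hand-written generator, for some Counter c:
#
#     for elem, count in c.items():
#         for _ in range(count):          # counts <= 0 contribute nothing
#             yield elem
#
# _repeat(elem, count) yields elem `count` times, _starmap feeds it the
# (elem, count) pairs from items(), and _chain.from_iterable flattens the
# per-element iterators into a single stream.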
+ return _chain.from_iterable(_starmap(_repeat, self.items())) + + # Override dict methods where necessary + + @classmethod + def fromkeys(cls, iterable, v=None): + # There is no equivalent method for counters because setting v=1 + # means that no element can have a count greater than one. + raise NotImplementedError( + 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + + def update(*args, **kwds): + '''Like dict.update() but add counts instead of replacing them. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.update('witch') # add elements from another iterable + >>> d = Counter('watch') + >>> c.update(d) # add elements from another counter + >>> c['h'] # four 'h' in which, witch, and watch + 4 + + ''' + # The regular dict.update() operation makes no sense here because the + # replace behavior results in the some of original untouched counts + # being mixed-in with all of the other counts for a mismash that + # doesn't have a straight-forward interpretation in most counting + # contexts. Instead, we implement straight-addition. Both the inputs + # and outputs are allowed to contain zero and negative counts. + + if not args: + raise TypeError("descriptor 'update' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + if isinstance(iterable, Mapping): + if self: + self_get = self.get + for elem, count in iterable.items(): + self[elem] = count + self_get(elem, 0) + else: + super(Counter, self).update(iterable) # fast path when counter is empty + else: + _count_elements(self, iterable) + if kwds: + self.update(kwds) + + def subtract(*args, **kwds): + '''Like dict.update() but subtracts counts instead of replacing them. + Counts can be reduced below zero. Both the inputs and outputs are + allowed to contain zero and negative counts. + + Source can be an iterable, a dictionary, or another Counter instance. + + >>> c = Counter('which') + >>> c.subtract('witch') # subtract elements from another iterable + >>> c.subtract(Counter('watch')) # subtract elements from another counter + >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch + 0 + >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch + -1 + + ''' + if not args: + raise TypeError("descriptor 'subtract' of 'Counter' object " + "needs an argument") + self = args[0] + args = args[1:] + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None + if iterable is not None: + self_get = self.get + if isinstance(iterable, Mapping): + for elem, count in iterable.items(): + self[elem] = self_get(elem, 0) - count + else: + for elem in iterable: + self[elem] = self_get(elem, 0) - 1 + if kwds: + self.subtract(kwds) + + def copy(self): + 'Return a shallow copy.' + return self.__class__(self) + + def __reduce__(self): + return self.__class__, (dict(self),) + + def __delitem__(self, elem): + 'Like dict.__delitem__() but does not raise KeyError for missing values.' 
+ if elem in self: + super(Counter, self).__delitem__(elem) + + def __repr__(self): + if not self: + return '%s()' % self.__class__.__name__ + try: + items = ', '.join(map('%r: %r'.__mod__, self.most_common())) + return '%s({%s})' % (self.__class__.__name__, items) + except TypeError: + # handle case where values are not orderable + return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) + + # Multiset-style mathematical operations discussed in: + # Knuth TAOCP Volume II section 4.6.3 exercise 19 + # and at http://en.wikipedia.org/wiki/Multiset + # + # Outputs guaranteed to only include positive counts. + # + # To strip negative and zero counts, add-in an empty counter: + # c += Counter() + + def __add__(self, other): + '''Add counts from two counters. + + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count + other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __sub__(self, other): + ''' Subtract count, but keep only results with positive counts. + + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + newcount = count - other[elem] + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count < 0: + result[elem] = 0 - count + return result + + def __or__(self, other): + '''Union is the maximum of value in either of the input counters. + + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = other_count if count < other_count else count + if newcount > 0: + result[elem] = newcount + for elem, count in other.items(): + if elem not in self and count > 0: + result[elem] = count + return result + + def __and__(self, other): + ''' Intersection is the minimum of corresponding counts. + + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + ''' + if not isinstance(other, Counter): + return NotImplemented + result = Counter() + for elem, count in self.items(): + other_count = other[elem] + newcount = count if count < other_count else other_count + if newcount > 0: + result[elem] = newcount + return result + + def __pos__(self): + 'Adds an empty counter, effectively stripping negative and zero counts' + return self + Counter() + + def __neg__(self): + '''Subtracts from an empty counter. Strips positive and zero counts, + and flips the sign on negative counts. + + ''' + return Counter() - self + + def _keep_positive(self): + '''Internal method to strip elements with a negative or zero count''' + nonpositive = [elem for elem, count in self.items() if not count > 0] + for elem in nonpositive: + del self[elem] + return self + + def __iadd__(self, other): + '''Inplace add from another counter, keeping only positive counts. + + >>> c = Counter('abbb') + >>> c += Counter('bcc') + >>> c + Counter({'b': 4, 'c': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] += count + return self._keep_positive() + + def __isub__(self, other): + '''Inplace subtract counter, but keep only results with positive counts. 
+ + >>> c = Counter('abbbc') + >>> c -= Counter('bccd') + >>> c + Counter({'b': 2, 'a': 1}) + + ''' + for elem, count in other.items(): + self[elem] -= count + return self._keep_positive() + + def __ior__(self, other): + '''Inplace union is the maximum of value from either counter. + + >>> c = Counter('abbb') + >>> c |= Counter('bcc') + >>> c + Counter({'b': 3, 'c': 2, 'a': 1}) + + ''' + for elem, other_count in other.items(): + count = self[elem] + if other_count > count: + self[elem] = other_count + return self._keep_positive() + + def __iand__(self, other): + '''Inplace intersection is the minimum of corresponding counts. + + >>> c = Counter('abbb') + >>> c &= Counter('bcc') + >>> c + Counter({'b': 1}) + + ''' + for elem, count in self.items(): + other_count = other[elem] + if other_count < count: + self[elem] = other_count + return self._keep_positive() + + +def check_output(*popenargs, **kwargs): + """ + For Python 2.6 compatibility: see + http://stackoverflow.com/questions/4814970/ + """ + + if 'stdout' in kwargs: + raise ValueError('stdout argument not allowed, it will be overridden.') + process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) + output, unused_err = process.communicate() + retcode = process.poll() + if retcode: + cmd = kwargs.get("args") + if cmd is None: + cmd = popenargs[0] + raise subprocess.CalledProcessError(retcode, cmd) + return output + + +def count(start=0, step=1): + """ + ``itertools.count`` in Py 2.6 doesn't accept a step + parameter. This is an enhanced version of ``itertools.count`` + for Py2.6 equivalent to ``itertools.count`` in Python 2.7+. + """ + while True: + yield start + start += step + + +######################################################################## +### ChainMap (helper for configparser and string.Template) +### From the Py3.4 source code. See also: +### https://github.com/kkxue/Py2ChainMap/blob/master/py2chainmap.py +######################################################################## + +class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. + + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. 
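The lookup-versus-write split described in the class docstring is easiest to see in a small sketch (editor's illustration; assumes the ChainMap defined in this module, with arbitrary sample mappings).

defaults = {'colour': 'red', 'user': 'guest'}
overrides = {'user': 'admin'}
cm = ChainMap(overrides, defaults)
print(cm['user'])            # 'admin' -- found in the first mapping
print(cm['colour'])          # 'red'   -- falls through to the second mapping
cm['colour'] = 'blue'        # writes always land in maps[0]
print(defaults['colour'])    # still 'red'
print(cm.maps[0])            # {'user': 'admin', 'colour': 'blue'}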
+ + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + # Py2 compatibility: + __nonzero__ = __bool__ + + @recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self, m=None): # like Django's Context.push() + ''' + New ChainMap with a new map followed by all previous maps. If no + map is provided, an empty dict is used. + ''' + if m is None: + m = {} + return self.__class__(m, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {0!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {0!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + + +# Re-use the same sentinel as in the Python stdlib socket module: +from socket import _GLOBAL_DEFAULT_TIMEOUT +# Was: _GLOBAL_DEFAULT_TIMEOUT = object() + + +def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, + source_address=None): + """Backport of 3-argument create_connection() for Py2.6. + + Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. 
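A usage sketch of this backport (editor's illustration; example.org:80 is an arbitrary endpoint, and the call opens a real TCP connection, so it needs network access to run).

sock = create_connection(('example.org', 80), timeout=5.0)
try:
    sock.sendall(b'HEAD / HTTP/1.0\r\nHost: example.org\r\n\r\n')
    print(sock.recv(200))       # first bytes of the HTTP response
finally:
    sock.close()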
+ """ + + host, port = address + err = None + for res in getaddrinfo(host, port, 0, SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket(af, socktype, proto) + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except error as _: + err = _ + if sock is not None: + sock.close() + + if err is not None: + raise err + else: + raise error("getaddrinfo returns an empty list") + +# Backport from Py2.7 for Py2.6: +def cmp_to_key(mycmp): + """Convert a cmp= function into a key= function""" + class K(object): + __slots__ = ['obj'] + def __init__(self, obj, *args): + self.obj = obj + def __lt__(self, other): + return mycmp(self.obj, other.obj) < 0 + def __gt__(self, other): + return mycmp(self.obj, other.obj) > 0 + def __eq__(self, other): + return mycmp(self.obj, other.obj) == 0 + def __le__(self, other): + return mycmp(self.obj, other.obj) <= 0 + def __ge__(self, other): + return mycmp(self.obj, other.obj) >= 0 + def __ne__(self, other): + return mycmp(self.obj, other.obj) != 0 + def __hash__(self): + raise TypeError('hash not implemented') + return K + +# Back up our definitions above in case they're useful +_OrderedDict = OrderedDict +_Counter = Counter +_check_output = check_output +_count = count +_ceil = ceil +__count_elements = _count_elements +_recursive_repr = recursive_repr +_ChainMap = ChainMap +_create_connection = create_connection +_cmp_to_key = cmp_to_key + +# Overwrite the definitions above with the usual ones +# from the standard library: +if sys.version_info >= (2, 7): + from collections import OrderedDict, Counter + from itertools import count + from functools import cmp_to_key + try: + from subprocess import check_output + except ImportError: + # Not available. This happens with Google App Engine: see issue #231 + pass + from socket import create_connection + +if sys.version_info >= (3, 0): + from math import ceil + from collections import _count_elements + +if sys.version_info >= (3, 3): + from reprlib import recursive_repr + from collections import ChainMap diff --git a/minor_project/lib/python3.6/site-packages/future/backports/socket.py b/minor_project/lib/python3.6/site-packages/future/backports/socket.py new file mode 100644 index 0000000..930e1da --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/socket.py @@ -0,0 +1,454 @@ +# Wrapper module for _socket, providing some additional facilities +# implemented in Python. + +"""\ +This module provides socket operations and some related functions. +On Unix, it supports IP (Internet Protocol) and Unix domain sockets. +On other systems, it only supports IP. Functions specific for a +socket are available as methods of the socket object. + +Functions: + +socket() -- create a new socket object +socketpair() -- create a pair of new socket objects [*] +fromfd() -- create a socket object from an open file descriptor [*] +fromshare() -- create a socket object from data received from socket.share() [*] +gethostname() -- return the current hostname +gethostbyname() -- map a hostname to its IP number +gethostbyaddr() -- map an IP number or hostname to DNS info +getservbyname() -- map a service name and a protocol name to a port number +getprotobyname() -- map a protocol name (e.g. 
'tcp') to a number +ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order +htons(), htonl() -- convert 16, 32 bit int from host to network byte order +inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format +inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89) +socket.getdefaulttimeout() -- get the default timeout value +socket.setdefaulttimeout() -- set the default timeout value +create_connection() -- connects to an address, with an optional timeout and + optional source address. + + [*] not available on all platforms! + +Special objects: + +SocketType -- type object for socket objects +error -- exception raised for I/O errors +has_ipv6 -- boolean value indicating if IPv6 is supported + +Integer constants: + +AF_INET, AF_UNIX -- socket domains (first argument to socket() call) +SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument) + +Many other constants may be defined; these may be used in calls to +the setsockopt() and getsockopt() methods. +""" + +from __future__ import unicode_literals +from __future__ import print_function +from __future__ import division +from __future__ import absolute_import +from future.builtins import super + +import _socket +from _socket import * + +import os, sys, io + +try: + import errno +except ImportError: + errno = None +EBADF = getattr(errno, 'EBADF', 9) +EAGAIN = getattr(errno, 'EAGAIN', 11) +EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11) + +__all__ = ["getfqdn", "create_connection"] +__all__.extend(os._get_exports_list(_socket)) + + +_realsocket = socket + +# WSA error codes +if sys.platform.lower().startswith("win"): + errorTab = {} + errorTab[10004] = "The operation was interrupted." + errorTab[10009] = "A bad file handle was passed." + errorTab[10013] = "Permission denied." + errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT + errorTab[10022] = "An invalid operation was attempted." + errorTab[10035] = "The socket operation would block" + errorTab[10036] = "A blocking operation is already in progress." + errorTab[10048] = "The network address is in use." + errorTab[10054] = "The connection has been reset." + errorTab[10058] = "The network has been shut down." + errorTab[10060] = "The operation timed out." + errorTab[10061] = "Connection refused." + errorTab[10063] = "The name is too long." + errorTab[10064] = "The host is down." + errorTab[10065] = "The host is unreachable." + __all__.append("errorTab") + + +class socket(_socket.socket): + + """A subclass of _socket.socket adding the makefile() method.""" + + __slots__ = ["__weakref__", "_io_refs", "_closed"] + + def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None): + if fileno is None: + _socket.socket.__init__(self, family, type, proto) + else: + _socket.socket.__init__(self, family, type, proto, fileno) + self._io_refs = 0 + self._closed = False + + def __enter__(self): + return self + + def __exit__(self, *args): + if not self._closed: + self.close() + + def __repr__(self): + """Wrap __repr__() to reveal the real class name.""" + s = _socket.socket.__repr__(self) + if s.startswith(" socket object + + Return a new socket object connected to the same system resource. + """ + fd = dup(self.fileno()) + sock = self.__class__(self.family, self.type, self.proto, fileno=fd) + sock.settimeout(self.gettimeout()) + return sock + + def accept(self): + """accept() -> (socket object, address info) + + Wait for an incoming connection. 
Return a new socket + representing the connection, and the address of the client. + For IP sockets, the address info is a pair (hostaddr, port). + """ + fd, addr = self._accept() + sock = socket(self.family, self.type, self.proto, fileno=fd) + # Issue #7995: if no default timeout is set and the listening + # socket had a (non-zero) timeout, force the new socket in blocking + # mode to override platform-specific socket flags inheritance. + if getdefaulttimeout() is None and self.gettimeout(): + sock.setblocking(True) + return sock, addr + + def makefile(self, mode="r", buffering=None, **_3to2kwargs): + """makefile(...) -> an I/O stream connected to the socket + + The arguments are as for io.open() after the filename, + except the only mode characters supported are 'r', 'w' and 'b'. + The semantics are similar too. (XXX refactor to share code?) + """ + if 'newline' in _3to2kwargs: newline = _3to2kwargs['newline']; del _3to2kwargs['newline'] + else: newline = None + if 'errors' in _3to2kwargs: errors = _3to2kwargs['errors']; del _3to2kwargs['errors'] + else: errors = None + if 'encoding' in _3to2kwargs: encoding = _3to2kwargs['encoding']; del _3to2kwargs['encoding'] + else: encoding = None + for c in mode: + if c not in ("r", "w", "b"): + raise ValueError("invalid mode %r (only r, w, b allowed)") + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = SocketIO(self, rawmode) + self._io_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text + + def _decref_socketios(self): + if self._io_refs > 0: + self._io_refs -= 1 + if self._closed: + self.close() + + def _real_close(self, _ss=_socket.socket): + # This function should not reference any globals. See issue #808164. + _ss.close(self) + + def close(self): + # This function should not reference any globals. See issue #808164. + self._closed = True + if self._io_refs <= 0: + self._real_close() + + def detach(self): + """detach() -> file descriptor + + Close the socket object without closing the underlying file descriptor. + The object cannot be used after this call, but the file descriptor + can be reused for other purposes. The file descriptor is returned. + """ + self._closed = True + return super().detach() + +def fromfd(fd, family, type, proto=0): + """ fromfd(fd, family, type[, proto]) -> socket object + + Create a socket object from a duplicate of the given file + descriptor. The remaining arguments are the same as for socket(). + """ + nfd = dup(fd) + return socket(family, type, proto, nfd) + +if hasattr(_socket.socket, "share"): + def fromshare(info): + """ fromshare(info) -> socket object + + Create a socket object from a the bytes object returned by + socket.share(pid). 
+ """ + return socket(0, 0, 0, info) + +if hasattr(_socket, "socketpair"): + + def socketpair(family=None, type=SOCK_STREAM, proto=0): + """socketpair([family[, type[, proto]]]) -> (socket object, socket object) + + Create a pair of socket objects from the sockets returned by the platform + socketpair() function. + The arguments are the same as for socket() except the default family is + AF_UNIX if defined on the platform; otherwise, the default is AF_INET. + """ + if family is None: + try: + family = AF_UNIX + except NameError: + family = AF_INET + a, b = _socket.socketpair(family, type, proto) + a = socket(family, type, proto, a.detach()) + b = socket(family, type, proto, b.detach()) + return a, b + + +_blocking_errnos = set([EAGAIN, EWOULDBLOCK]) + +class SocketIO(io.RawIOBase): + + """Raw I/O implementation for stream sockets. + + This class supports the makefile() method on sockets. It provides + the raw I/O interface on top of a socket object. + """ + + # One might wonder why not let FileIO do the job instead. There are two + # main reasons why FileIO is not adapted: + # - it wouldn't work under Windows (where you can't used read() and + # write() on a socket handle) + # - it wouldn't work with socket timeouts (FileIO would ignore the + # timeout and consider the socket non-blocking) + + # XXX More docs + + def __init__(self, sock, mode): + if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): + raise ValueError("invalid mode: %r" % mode) + io.RawIOBase.__init__(self) + self._sock = sock + if "b" not in mode: + mode += "b" + self._mode = mode + self._reading = "r" in mode + self._writing = "w" in mode + self._timeout_occurred = False + + def readinto(self, b): + """Read up to len(b) bytes into the writable buffer *b* and return + the number of bytes read. If the socket is non-blocking and no bytes + are available, None is returned. + + If *b* is non-empty, a 0 return value indicates that the connection + was shutdown at the other end. + """ + self._checkClosed() + self._checkReadable() + if self._timeout_occurred: + raise IOError("cannot read from timed out object") + while True: + try: + return self._sock.recv_into(b) + except timeout: + self._timeout_occurred = True + raise + # except InterruptedError: + # continue + except error as e: + if e.args[0] in _blocking_errnos: + return None + raise + + def write(self, b): + """Write the given bytes or bytearray object *b* to the socket + and return the number of bytes written. This can be less than + len(b) if not all data could be written. If the socket is + non-blocking and no bytes could be written None is returned. + """ + self._checkClosed() + self._checkWritable() + try: + return self._sock.send(b) + except error as e: + # XXX what about EINTR? + if e.args[0] in _blocking_errnos: + return None + raise + + def readable(self): + """True if the SocketIO is open for reading. + """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return self._reading + + def writable(self): + """True if the SocketIO is open for writing. + """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return self._writing + + def seekable(self): + """True if the SocketIO is open for seeking. + """ + if self.closed: + raise ValueError("I/O operation on closed socket.") + return super().seekable() + + def fileno(self): + """Return the file descriptor of the underlying socket. 
+ """ + self._checkClosed() + return self._sock.fileno() + + @property + def name(self): + if not self.closed: + return self.fileno() + else: + return -1 + + @property + def mode(self): + return self._mode + + def close(self): + """Close the SocketIO object. This doesn't close the underlying + socket, except if all references to it have disappeared. + """ + if self.closed: + return + io.RawIOBase.close(self) + self._sock._decref_socketios() + self._sock = None + + +def getfqdn(name=''): + """Get fully qualified domain name from name. + + An empty argument is interpreted as meaning the local host. + + First the hostname returned by gethostbyaddr() is checked, then + possibly existing aliases. In case no FQDN is available, hostname + from gethostname() is returned. + """ + name = name.strip() + if not name or name == '0.0.0.0': + name = gethostname() + try: + hostname, aliases, ipaddrs = gethostbyaddr(name) + except error: + pass + else: + aliases.insert(0, hostname) + for name in aliases: + if '.' in name: + break + else: + name = hostname + return name + + +# Re-use the same sentinel as in the Python stdlib socket module: +from socket import _GLOBAL_DEFAULT_TIMEOUT +# Was: _GLOBAL_DEFAULT_TIMEOUT = object() + + +def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, + source_address=None): + """Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. + """ + + host, port = address + err = None + for res in getaddrinfo(host, port, 0, SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket(af, socktype, proto) + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except error as _: + err = _ + if sock is not None: + sock.close() + + if err is not None: + raise err + else: + raise error("getaddrinfo returns an empty list") diff --git a/minor_project/lib/python3.6/site-packages/future/backports/socketserver.py b/minor_project/lib/python3.6/site-packages/future/backports/socketserver.py new file mode 100644 index 0000000..d1e24a6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/socketserver.py @@ -0,0 +1,747 @@ +"""Generic socket server classes. + +This module tries to capture the various aspects of defining a server: + +For socket-based servers: + +- address family: + - AF_INET{,6}: IP (Internet Protocol) sockets (default) + - AF_UNIX: Unix domain sockets + - others, e.g. AF_DECNET are conceivable (see +- socket type: + - SOCK_STREAM (reliable stream, e.g. TCP) + - SOCK_DGRAM (datagrams, e.g. UDP) + +For request-based servers (including socket-based): + +- client address verification before further looking at the request + (This is actually a hook for any processing that needs to look + at the request before anything else, e.g. 
logging) +- how to handle multiple requests: + - synchronous (one request is handled at a time) + - forking (each request is handled by a new process) + - threading (each request is handled by a new thread) + +The classes in this module favor the server type that is simplest to +write: a synchronous TCP/IP server. This is bad class design, but +save some typing. (There's also the issue that a deep class hierarchy +slows down method lookups.) + +There are five classes in an inheritance diagram, four of which represent +synchronous servers of four types: + + +------------+ + | BaseServer | + +------------+ + | + v + +-----------+ +------------------+ + | TCPServer |------->| UnixStreamServer | + +-----------+ +------------------+ + | + v + +-----------+ +--------------------+ + | UDPServer |------->| UnixDatagramServer | + +-----------+ +--------------------+ + +Note that UnixDatagramServer derives from UDPServer, not from +UnixStreamServer -- the only difference between an IP and a Unix +stream server is the address family, which is simply repeated in both +unix server classes. + +Forking and threading versions of each type of server can be created +using the ForkingMixIn and ThreadingMixIn mix-in classes. For +instance, a threading UDP server class is created as follows: + + class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass + +The Mix-in class must come first, since it overrides a method defined +in UDPServer! Setting the various member variables also changes +the behavior of the underlying server mechanism. + +To implement a service, you must derive a class from +BaseRequestHandler and redefine its handle() method. You can then run +various versions of the service by combining one of the server classes +with your request handler class. + +The request handler class must be different for datagram or stream +services. This can be hidden by using the request handler +subclasses StreamRequestHandler or DatagramRequestHandler. + +Of course, you still have to use your head! + +For instance, it makes no sense to use a forking server if the service +contains state in memory that can be modified by requests (since the +modifications in the child process would never reach the initial state +kept in the parent process and passed to each child). In this case, +you can use a threading server, but you will probably have to use +locks to avoid two requests that come in nearly simultaneous to apply +conflicting changes to the server state. + +On the other hand, if you are building e.g. an HTTP server, where all +data is stored externally (e.g. in the file system), a synchronous +class will essentially render the service "deaf" while one request is +being handled -- which may be for a very long time if a client is slow +to read all the data it has requested. Here a threading or forking +server is appropriate. + +In some cases, it may be appropriate to process part of a request +synchronously, but to finish processing in a forked child depending on +the request data. This can be implemented by using a synchronous +server and doing an explicit fork in the request handler class +handle() method. + +Another approach to handling multiple simultaneous requests in an +environment that supports neither threads nor fork (or where these are +too expensive or inappropriate for the service) is to maintain an +explicit table of partially finished requests and to use select() to +decide which request to work on next (or whether to handle a new +incoming request). 
This is particularly important for stream services +where each client can potentially be connected for a long time (if +threads or subprocesses cannot be used). + +Future work: +- Standard classes for Sun RPC (which uses either UDP or TCP) +- Standard mix-in classes to implement various authentication + and encryption schemes +- Standard framework for select-based multiplexing + +XXX Open problems: +- What to do with out-of-band data? + +BaseServer: +- split generic "request" functionality out into BaseServer class. + Copyright (C) 2000 Luke Kenneth Casson Leighton + + example: read entries from a SQL database (requires overriding + get_request() to return a table entry from the database). + entry is processed by a RequestHandlerClass. + +""" + +# Author of the BaseServer patch: Luke Kenneth Casson Leighton + +# XXX Warning! +# There is a test suite for this module, but it cannot be run by the +# standard regression test. +# To run it manually, run Lib/test/test_socketserver.py. + +from __future__ import (absolute_import, print_function) + +__version__ = "0.4" + + +import socket +import select +import sys +import os +import errno +try: + import threading +except ImportError: + import dummy_threading as threading + +__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer", + "ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler", + "StreamRequestHandler","DatagramRequestHandler", + "ThreadingMixIn", "ForkingMixIn"] +if hasattr(socket, "AF_UNIX"): + __all__.extend(["UnixStreamServer","UnixDatagramServer", + "ThreadingUnixStreamServer", + "ThreadingUnixDatagramServer"]) + +def _eintr_retry(func, *args): + """restart a system call interrupted by EINTR""" + while True: + try: + return func(*args) + except OSError as e: + if e.errno != errno.EINTR: + raise + +class BaseServer(object): + + """Base class for server classes. + + Methods for the caller: + + - __init__(server_address, RequestHandlerClass) + - serve_forever(poll_interval=0.5) + - shutdown() + - handle_request() # if you do not use serve_forever() + - fileno() -> int # for select() + + Methods that may be overridden: + + - server_bind() + - server_activate() + - get_request() -> request, client_address + - handle_timeout() + - verify_request(request, client_address) + - server_close() + - process_request(request, client_address) + - shutdown_request(request) + - close_request(request) + - service_actions() + - handle_error() + + Methods for derived classes: + + - finish_request(request, client_address) + + Class variables that may be overridden by derived classes or + instances: + + - timeout + - address_family + - socket_type + - allow_reuse_address + + Instance variables: + + - RequestHandlerClass + - socket + + """ + + timeout = None + + def __init__(self, server_address, RequestHandlerClass): + """Constructor. May be extended, do not override.""" + self.server_address = server_address + self.RequestHandlerClass = RequestHandlerClass + self.__is_shut_down = threading.Event() + self.__shutdown_request = False + + def server_activate(self): + """Called by constructor to activate the server. + + May be overridden. + + """ + pass + + def serve_forever(self, poll_interval=0.5): + """Handle one request at a time until shutdown. + + Polls for shutdown every poll_interval seconds. Ignores + self.timeout. If you need to do periodic tasks, do them in + another thread. 
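The serve_forever()/shutdown() pair described here is normally driven from two threads; an editor's sketch of the pattern, assuming the server classes defined later in this module (the loopback address and port 0 are arbitrary).

import threading

srv = TCPServer(('127.0.0.1', 0), BaseRequestHandler)   # port 0 -> ephemeral port
t = threading.Thread(target=srv.serve_forever)
t.start()
# ... the server answers requests here ...
srv.shutdown()       # flips the flag polled by serve_forever() and waits for the loop to exit
t.join()
srv.server_close()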
+ """ + self.__is_shut_down.clear() + try: + while not self.__shutdown_request: + # XXX: Consider using another file descriptor or + # connecting to the socket to wake this up instead of + # polling. Polling reduces our responsiveness to a + # shutdown request and wastes cpu at all other times. + r, w, e = _eintr_retry(select.select, [self], [], [], + poll_interval) + if self in r: + self._handle_request_noblock() + + self.service_actions() + finally: + self.__shutdown_request = False + self.__is_shut_down.set() + + def shutdown(self): + """Stops the serve_forever loop. + + Blocks until the loop has finished. This must be called while + serve_forever() is running in another thread, or it will + deadlock. + """ + self.__shutdown_request = True + self.__is_shut_down.wait() + + def service_actions(self): + """Called by the serve_forever() loop. + + May be overridden by a subclass / Mixin to implement any code that + needs to be run during the loop. + """ + pass + + # The distinction between handling, getting, processing and + # finishing a request is fairly arbitrary. Remember: + # + # - handle_request() is the top-level call. It calls + # select, get_request(), verify_request() and process_request() + # - get_request() is different for stream or datagram sockets + # - process_request() is the place that may fork a new process + # or create a new thread to finish the request + # - finish_request() instantiates the request handler class; + # this constructor will handle the request all by itself + + def handle_request(self): + """Handle one request, possibly blocking. + + Respects self.timeout. + """ + # Support people who used socket.settimeout() to escape + # handle_request before self.timeout was available. + timeout = self.socket.gettimeout() + if timeout is None: + timeout = self.timeout + elif self.timeout is not None: + timeout = min(timeout, self.timeout) + fd_sets = _eintr_retry(select.select, [self], [], [], timeout) + if not fd_sets[0]: + self.handle_timeout() + return + self._handle_request_noblock() + + def _handle_request_noblock(self): + """Handle one request, without blocking. + + I assume that select.select has returned that the socket is + readable before this function was called, so there should be + no risk of blocking in get_request(). + """ + try: + request, client_address = self.get_request() + except socket.error: + return + if self.verify_request(request, client_address): + try: + self.process_request(request, client_address) + except: + self.handle_error(request, client_address) + self.shutdown_request(request) + + def handle_timeout(self): + """Called if no new request arrives within self.timeout. + + Overridden by ForkingMixIn. + """ + pass + + def verify_request(self, request, client_address): + """Verify the request. May be overridden. + + Return True if we should proceed with this request. + + """ + return True + + def process_request(self, request, client_address): + """Call finish_request. + + Overridden by ForkingMixIn and ThreadingMixIn. + + """ + self.finish_request(request, client_address) + self.shutdown_request(request) + + def server_close(self): + """Called to clean-up the server. + + May be overridden. 
+ + """ + pass + + def finish_request(self, request, client_address): + """Finish one request by instantiating RequestHandlerClass.""" + self.RequestHandlerClass(request, client_address, self) + + def shutdown_request(self, request): + """Called to shutdown and close an individual request.""" + self.close_request(request) + + def close_request(self, request): + """Called to clean up an individual request.""" + pass + + def handle_error(self, request, client_address): + """Handle an error gracefully. May be overridden. + + The default is to print a traceback and continue. + + """ + print('-'*40) + print('Exception happened during processing of request from', end=' ') + print(client_address) + import traceback + traceback.print_exc() # XXX But this goes to stderr! + print('-'*40) + + +class TCPServer(BaseServer): + + """Base class for various socket-based server classes. + + Defaults to synchronous IP stream (i.e., TCP). + + Methods for the caller: + + - __init__(server_address, RequestHandlerClass, bind_and_activate=True) + - serve_forever(poll_interval=0.5) + - shutdown() + - handle_request() # if you don't use serve_forever() + - fileno() -> int # for select() + + Methods that may be overridden: + + - server_bind() + - server_activate() + - get_request() -> request, client_address + - handle_timeout() + - verify_request(request, client_address) + - process_request(request, client_address) + - shutdown_request(request) + - close_request(request) + - handle_error() + + Methods for derived classes: + + - finish_request(request, client_address) + + Class variables that may be overridden by derived classes or + instances: + + - timeout + - address_family + - socket_type + - request_queue_size (only for stream sockets) + - allow_reuse_address + + Instance variables: + + - server_address + - RequestHandlerClass + - socket + + """ + + address_family = socket.AF_INET + + socket_type = socket.SOCK_STREAM + + request_queue_size = 5 + + allow_reuse_address = False + + def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True): + """Constructor. May be extended, do not override.""" + BaseServer.__init__(self, server_address, RequestHandlerClass) + self.socket = socket.socket(self.address_family, + self.socket_type) + if bind_and_activate: + self.server_bind() + self.server_activate() + + def server_bind(self): + """Called by constructor to bind the socket. + + May be overridden. + + """ + if self.allow_reuse_address: + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.socket.bind(self.server_address) + self.server_address = self.socket.getsockname() + + def server_activate(self): + """Called by constructor to activate the server. + + May be overridden. + + """ + self.socket.listen(self.request_queue_size) + + def server_close(self): + """Called to clean-up the server. + + May be overridden. + + """ + self.socket.close() + + def fileno(self): + """Return socket file number. + + Interface required by select(). + + """ + return self.socket.fileno() + + def get_request(self): + """Get the request and client address from the socket. + + May be overridden. + + """ + return self.socket.accept() + + def shutdown_request(self, request): + """Called to shutdown and close an individual request.""" + try: + #explicitly shutdown. socket.close() merely releases + #the socket and waits for GC to perform the actual close. 
+ request.shutdown(socket.SHUT_WR) + except socket.error: + pass #some platforms may raise ENOTCONN here + self.close_request(request) + + def close_request(self, request): + """Called to clean up an individual request.""" + request.close() + + +class UDPServer(TCPServer): + + """UDP server class.""" + + allow_reuse_address = False + + socket_type = socket.SOCK_DGRAM + + max_packet_size = 8192 + + def get_request(self): + data, client_addr = self.socket.recvfrom(self.max_packet_size) + return (data, self.socket), client_addr + + def server_activate(self): + # No need to call listen() for UDP. + pass + + def shutdown_request(self, request): + # No need to shutdown anything. + self.close_request(request) + + def close_request(self, request): + # No need to close anything. + pass + +class ForkingMixIn(object): + + """Mix-in class to handle each request in a new process.""" + + timeout = 300 + active_children = None + max_children = 40 + + def collect_children(self): + """Internal routine to wait for children that have exited.""" + if self.active_children is None: return + while len(self.active_children) >= self.max_children: + # XXX: This will wait for any child process, not just ones + # spawned by this library. This could confuse other + # libraries that expect to be able to wait for their own + # children. + try: + pid, status = os.waitpid(0, 0) + except os.error: + pid = None + if pid not in self.active_children: continue + self.active_children.remove(pid) + + # XXX: This loop runs more system calls than it ought + # to. There should be a way to put the active_children into a + # process group and then use os.waitpid(-pgid) to wait for any + # of that set, but I couldn't find a way to allocate pgids + # that couldn't collide. + for child in self.active_children: + try: + pid, status = os.waitpid(child, os.WNOHANG) + except os.error: + pid = None + if not pid: continue + try: + self.active_children.remove(pid) + except ValueError as e: + raise ValueError('%s. x=%d and list=%r' % (e.message, pid, + self.active_children)) + + def handle_timeout(self): + """Wait for zombies after self.timeout seconds of inactivity. + + May be extended, do not override. + """ + self.collect_children() + + def service_actions(self): + """Collect the zombie child processes regularly in the ForkingMixIn. + + service_actions is called in the BaseServer's serve_forver loop. + """ + self.collect_children() + + def process_request(self, request, client_address): + """Fork a new subprocess to process the request.""" + pid = os.fork() + if pid: + # Parent process + if self.active_children is None: + self.active_children = [] + self.active_children.append(pid) + self.close_request(request) + return + else: + # Child process. + # This must never return, hence os._exit()! + try: + self.finish_request(request, client_address) + self.shutdown_request(request) + os._exit(0) + except: + try: + self.handle_error(request, client_address) + self.shutdown_request(request) + finally: + os._exit(1) + + +class ThreadingMixIn(object): + """Mix-in class to handle each request in a new thread.""" + + # Decides how threads will act upon termination of the + # main process + daemon_threads = False + + def process_request_thread(self, request, client_address): + """Same as in BaseServer but as a thread. + + In addition, exception handling is done here. 
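In UDPServer above, get_request() hands the handler a (datagram, listening socket) pair instead of a connected socket, so a handler replies with sendto(). A hedged sketch of that shape, again assuming the stdlib socketserver module with the same API; the handler name is illustrative.

import socketserver

class UDPUpperHandler(socketserver.BaseRequestHandler):   # hypothetical handler
    def handle(self):
        data, sock = self.request                   # (datagram bytes, server socket)
        sock.sendto(data.upper(), self.client_address)

server = socketserver.UDPServer(("127.0.0.1", 0), UDPUpperHandler)
try:
    server.handle_request()                          # wait for and answer one datagram
finally:
    server.server_close()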
+ + """ + try: + self.finish_request(request, client_address) + self.shutdown_request(request) + except: + self.handle_error(request, client_address) + self.shutdown_request(request) + + def process_request(self, request, client_address): + """Start a new thread to process the request.""" + t = threading.Thread(target = self.process_request_thread, + args = (request, client_address)) + t.daemon = self.daemon_threads + t.start() + + +class ForkingUDPServer(ForkingMixIn, UDPServer): pass +class ForkingTCPServer(ForkingMixIn, TCPServer): pass + +class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass +class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass + +if hasattr(socket, 'AF_UNIX'): + + class UnixStreamServer(TCPServer): + address_family = socket.AF_UNIX + + class UnixDatagramServer(UDPServer): + address_family = socket.AF_UNIX + + class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass + + class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass + +class BaseRequestHandler(object): + + """Base class for request handler classes. + + This class is instantiated for each request to be handled. The + constructor sets the instance variables request, client_address + and server, and then calls the handle() method. To implement a + specific service, all you need to do is to derive a class which + defines a handle() method. + + The handle() method can find the request as self.request, the + client address as self.client_address, and the server (in case it + needs access to per-server information) as self.server. Since a + separate instance is created for each request, the handle() method + can define arbitrary other instance variariables. + + """ + + def __init__(self, request, client_address, server): + self.request = request + self.client_address = client_address + self.server = server + self.setup() + try: + self.handle() + finally: + self.finish() + + def setup(self): + pass + + def handle(self): + pass + + def finish(self): + pass + + +# The following two classes make it possible to use the same service +# class for stream or datagram servers. +# Each class sets up these instance variables: +# - rfile: a file object from which receives the request is read +# - wfile: a file object to which the reply is written +# When the handle() method returns, wfile is flushed properly + + +class StreamRequestHandler(BaseRequestHandler): + + """Define self.rfile and self.wfile for stream sockets.""" + + # Default buffer sizes for rfile, wfile. + # We default rfile to buffered because otherwise it could be + # really slow for large data (a getc() call per byte); we make + # wfile unbuffered because (a) often after a write() we want to + # read and we need to flush the line; (b) big writes to unbuffered + # files are typically optimized by stdio even when big reads + # aren't. + rbufsize = -1 + wbufsize = 0 + + # A timeout to apply to the request socket, if not None. + timeout = None + + # Disable nagle algorithm for this socket, if True. + # Use only when wbufsize != 0, to avoid small packets. 
+ disable_nagle_algorithm = False + + def setup(self): + self.connection = self.request + if self.timeout is not None: + self.connection.settimeout(self.timeout) + if self.disable_nagle_algorithm: + self.connection.setsockopt(socket.IPPROTO_TCP, + socket.TCP_NODELAY, True) + self.rfile = self.connection.makefile('rb', self.rbufsize) + self.wfile = self.connection.makefile('wb', self.wbufsize) + + def finish(self): + if not self.wfile.closed: + try: + self.wfile.flush() + except socket.error: + # An final socket error may have occurred here, such as + # the local error ECONNABORTED. + pass + self.wfile.close() + self.rfile.close() + + +class DatagramRequestHandler(BaseRequestHandler): + + # XXX Regrettably, I cannot get this working on Linux; + # s.recvfrom() doesn't return a meaningful client address. + + """Define self.rfile and self.wfile for datagram sockets.""" + + def setup(self): + from io import BytesIO + self.packet, self.socket = self.request + self.rfile = BytesIO(self.packet) + self.wfile = BytesIO() + + def finish(self): + self.socket.sendto(self.wfile.getvalue(), self.client_address) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/__init__.py b/minor_project/lib/python3.6/site-packages/future/backports/test/__init__.py new file mode 100644 index 0000000..0bba5e6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/__init__.py @@ -0,0 +1,9 @@ +""" +test package backported for python-future. + +Its primary purpose is to allow use of "import test.support" for running +the Python standard library unit tests using the new Python 3 stdlib +import location. + +Python 3 renamed test.test_support to test.support. +""" diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..7237219 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/pystone.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/pystone.cpython-36.pyc new file mode 100644 index 0000000..34ce6a4 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/pystone.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/ssl_servers.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/ssl_servers.cpython-36.pyc new file mode 100644 index 0000000..16ea3ff Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/ssl_servers.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/support.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/support.cpython-36.pyc new file mode 100644 index 0000000..739ddd7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/test/__pycache__/support.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/badcert.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/badcert.pem new file mode 100644 index 0000000..c419146 --- /dev/null +++ 
b/minor_project/lib/python3.6/site-packages/future/backports/test/badcert.pem @@ -0,0 +1,36 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L +opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH +fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB +AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU +D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA +IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM +oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 +ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ +loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j +oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA +z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq +ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV +q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +Just bad cert data +-----END CERTIFICATE----- +-----BEGIN RSA PRIVATE KEY----- +MIICXwIBAAKBgQC8ddrhm+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9L +opdJhTvbGfEj0DQs1IE8M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVH +fhi/VwovESJlaBOp+WMnfhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQAB +AoGBAK0FZpaKj6WnJZN0RqhhK+ggtBWwBnc0U/ozgKz2j1s3fsShYeiGtW6CK5nU +D1dZ5wzhbGThI7LiOXDvRucc9n7vUgi0alqPQ/PFodPxAN/eEYkmXQ7W2k7zwsDA +IUK0KUhktQbLu8qF/m8qM86ba9y9/9YkXuQbZ3COl5ahTZrhAkEA301P08RKv3KM +oXnGU2UHTuJ1MAD2hOrPxjD4/wxA/39EWG9bZczbJyggB4RHu0I3NOSFjAm3HQm0 +ANOu5QK9owJBANgOeLfNNcF4pp+UikRFqxk5hULqRAWzVxVrWe85FlPm0VVmHbb/ +loif7mqjU8o1jTd/LM7RD9f2usZyE2psaw8CQQCNLhkpX3KO5kKJmS9N7JMZSc4j +oog58yeYO8BBqKKzpug0LXuQultYv2K4veaIO04iL9VLe5z9S/Q1jaCHBBuXAkEA +z8gjGoi1AOp6PBBLZNsncCvcV/0aC+1se4HxTNo2+duKSDnbq+ljqOM+E7odU+Nq +ewvIWOG//e8fssd0mq3HywJBAJ8l/c8GVmrpFTx8r/nZ2Pyyjt3dH1widooDXYSV +q6Gbf41Llo5sYAtmxdndTLASuHKecacTgZVhy0FryZpLKrU= +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +Just bad cert data +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/badkey.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/badkey.pem new file mode 100644 index 0000000..1c8a955 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/badkey.pem @@ -0,0 +1,40 @@ +-----BEGIN RSA PRIVATE KEY----- +Bad Key, though the cert should be OK +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD +VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x +IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT +U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 +NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl +bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m +dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj +aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh +m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 +M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn +fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC +AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb +08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx +CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ +iHkC6gGdBJhogs4= +-----END CERTIFICATE----- +-----BEGIN RSA PRIVATE KEY----- +Bad Key, though the cert should be OK 
+-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICpzCCAhCgAwIBAgIJAP+qStv1cIGNMA0GCSqGSIb3DQEBBQUAMIGJMQswCQYD +VQQGEwJVUzERMA8GA1UECBMIRGVsYXdhcmUxEzARBgNVBAcTCldpbG1pbmd0b24x +IzAhBgNVBAoTGlB5dGhvbiBTb2Z0d2FyZSBGb3VuZGF0aW9uMQwwCgYDVQQLEwNT +U0wxHzAdBgNVBAMTFnNvbWVtYWNoaW5lLnB5dGhvbi5vcmcwHhcNMDcwODI3MTY1 +NDUwWhcNMTMwMjE2MTY1NDUwWjCBiTELMAkGA1UEBhMCVVMxETAPBgNVBAgTCERl +bGF3YXJlMRMwEQYDVQQHEwpXaWxtaW5ndG9uMSMwIQYDVQQKExpQeXRob24gU29m +dHdhcmUgRm91bmRhdGlvbjEMMAoGA1UECxMDU1NMMR8wHQYDVQQDExZzb21lbWFj +aGluZS5weXRob24ub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC8ddrh +m+LutBvjYcQlnH21PPIseJ1JVG2HMmN2CmZk2YukO+9LopdJhTvbGfEj0DQs1IE8 +M+kTUyOmuKfVrFMKwtVeCJphrAnhoz7TYOuLBSqt7lVHfhi/VwovESJlaBOp+WMn +fhcduPEYHYx/6cnVapIkZnLt30zu2um+DzA9jQIDAQABoxUwEzARBglghkgBhvhC +AQEEBAMCBkAwDQYJKoZIhvcNAQEFBQADgYEAF4Q5BVqmCOLv1n8je/Jw9K669VXb +08hyGzQhkemEBYQd6fzQ9A/1ZzHkJKb1P6yreOLSEh4KcxYPyrLRC1ll8nr5OlCx +CMhKkTnR6qBsdNV0XtdU2+N25hqW+Ma4ZeqsN/iiJVCGNOZGnvQuvCAGWF8+J/f/ +iHkC6gGdBJhogs4= +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/dh512.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/dh512.pem new file mode 100644 index 0000000..200d16c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/dh512.pem @@ -0,0 +1,9 @@ +-----BEGIN DH PARAMETERS----- +MEYCQQD1Kv884bEpQBgRjXyEpwpy1obEAxnIByl6ypUM2Zafq9AKUJsCRtMIPWak +XUGfnHy9iUsiGSa6q6Jew1XpKgVfAgEC +-----END DH PARAMETERS----- + +These are the 512 bit DH parameters from "Assigned Number for SKIP Protocols" +(http://www.skip-vpn.org/spec/numbers.html). +See there for how they were generated. +Note that g is not a generator, but this is not a problem since p is a safe prime. diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/https_svn_python_org_root.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/https_svn_python_org_root.pem new file mode 100644 index 0000000..e7dfc82 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/https_svn_python_org_root.pem @@ -0,0 +1,41 @@ +-----BEGIN CERTIFICATE----- +MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 +IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB +IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA +Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO +BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi +MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ +ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ +8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 +zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y +fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 +w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc +G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k +epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q +laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ +QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU +fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 +YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w +ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY +gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe +MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 
+IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy +dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw +czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 +dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl +aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC +AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg +b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB +ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc +nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg +18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c +gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl +Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY +sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T +SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF +CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum +GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk +zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW +omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/keycert.passwd.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/keycert.passwd.pem new file mode 100644 index 0000000..e905748 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/keycert.passwd.pem @@ -0,0 +1,33 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A + +kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c +u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA +AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr +Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+ +YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P +6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+ +noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1 +94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l +7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo +cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO +zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt +L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo +2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ== +-----END RSA PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw +MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH +Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k +YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 +6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt +pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw +FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd +BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G +lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 +CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/keycert.pem 
b/minor_project/lib/python3.6/site-packages/future/backports/test/keycert.pem new file mode 100644 index 0000000..64318aa --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/keycert.pem @@ -0,0 +1,31 @@ +-----BEGIN PRIVATE KEY----- +MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBANtb0+YrKuxevGpm +LrjaUhZSgz6zFAmuGFmKmUbdjmfv9zSmmdsQIksK++jK0Be9LeZy20j6ahOfuVa0 +ufEmPoP7Fy4hXegKZR9cCWcIe/A6H2xWF1IIJLRTLaU8ol/I7T+um5HD5AwAwNPP +USNU0Eegmvp+xxWu3NX2m1Veot85AgMBAAECgYA3ZdZ673X0oexFlq7AAmrutkHt +CL7LvwrpOiaBjhyTxTeSNWzvtQBkIU8DOI0bIazA4UreAFffwtvEuPmonDb3F+Iq +SMAu42XcGyVZEl+gHlTPU9XRX7nTOXVt+MlRRRxL6t9GkGfUAXI3XxJDXW3c0vBK +UL9xqD8cORXOfE06rQJBAP8mEX1ERkR64Ptsoe4281vjTlNfIbs7NMPkUnrn9N/Y +BLhjNIfQ3HFZG8BTMLfX7kCS9D593DW5tV4Z9BP/c6cCQQDcFzCcVArNh2JSywOQ +ZfTfRbJg/Z5Lt9Fkngv1meeGNPgIMLN8Sg679pAOOWmzdMO3V706rNPzSVMME7E5 +oPIfAkEA8pDddarP5tCvTTgUpmTFbakm0KoTZm2+FzHcnA4jRh+XNTjTOv98Y6Ik +eO5d1ZnKXseWvkZncQgxfdnMqqpj5wJAcNq/RVne1DbYlwWchT2Si65MYmmJ8t+F +0mcsULqjOnEMwf5e+ptq5LzwbyrHZYq5FNk7ocufPv/ZQrcSSC+cFwJBAKvOJByS +x56qyGeZLOQlWS2JS3KJo59XuLFGqcbgN9Om9xFa41Yb4N9NvplFivsvZdw3m1Q/ +SPIXQuT8RMPDVNQ= +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw +MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH +Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k +YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 +6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt +pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw +FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd +BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G +lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 +CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/keycert2.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/keycert2.pem new file mode 100644 index 0000000..e8a9e08 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/keycert2.pem @@ -0,0 +1,31 @@ +-----BEGIN PRIVATE KEY----- +MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAJnsJZVrppL+W5I9 +zGQrrawWwE5QJpBK9nWw17mXrZ03R1cD9BamLGivVISbPlRlAVnZBEyh1ATpsB7d +CUQ+WHEvALquvx4+Yw5l+fXeiYRjrLRBYZuVy8yNtXzU3iWcGObcYRkUdiXdOyP7 +sLF2YZHRvQZpzgDBKkrraeQ81w21AgMBAAECgYBEm7n07FMHWlE+0kT0sXNsLYfy +YE+QKZnJw9WkaDN+zFEEPELkhZVt5BjsMraJr6v2fIEqF0gGGJPkbenffVq2B5dC +lWUOxvJHufMK4sM3Cp6s/gOp3LP+QkzVnvJSfAyZU6l+4PGX5pLdUsXYjPxgzjzL +S36tF7/2Uv1WePyLUQJBAMsPhYzUXOPRgmbhcJiqi9A9c3GO8kvSDYTCKt3VMnqz +HBn6MQ4VQasCD1F+7jWTI0FU/3vdw8non/Fj8hhYqZcCQQDCDRdvmZqDiZnpMqDq +L6ZSrLTVtMvZXZbgwForaAD9uHj51TME7+eYT7EG2YCgJTXJ4YvRJEnPNyskwdKt +vTSTAkEAtaaN/vyemEJ82BIGStwONNw0ILsSr5cZ9tBHzqiA/tipY+e36HRFiXhP +QcU9zXlxyWkDH8iz9DSAmE2jbfoqwwJANlMJ65E543cjIlitGcKLMnvtCCLcKpb7 +xSG0XJB6Lo11OKPJ66jp0gcFTSCY1Lx2CXVd+gfJrfwI1Pp562+bhwJBAJ9IfDPU +R8OpO9v1SGd8x33Owm7uXOpB9d63/T70AD1QOXjKUC4eXYbt0WWfWuny/RNPRuyh +w7DXSfUF+kPKolU= +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIICXTCCAcagAwIBAgIJAIO3upAG445fMA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xFTATBgNVBAMTDGZha2Vob3N0bmFtZTAeFw0x 
+MDEwMDkxNTAxMDBaFw0yMDEwMDYxNTAxMDBaMGIxCzAJBgNVBAYTAlhZMRcwFQYD +VQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZv +dW5kYXRpb24xFTATBgNVBAMTDGZha2Vob3N0bmFtZTCBnzANBgkqhkiG9w0BAQEF +AAOBjQAwgYkCgYEAmewllWumkv5bkj3MZCutrBbATlAmkEr2dbDXuZetnTdHVwP0 +FqYsaK9UhJs+VGUBWdkETKHUBOmwHt0JRD5YcS8Auq6/Hj5jDmX59d6JhGOstEFh +m5XLzI21fNTeJZwY5txhGRR2Jd07I/uwsXZhkdG9BmnOAMEqSutp5DzXDbUCAwEA +AaMbMBkwFwYDVR0RBBAwDoIMZmFrZWhvc3RuYW1lMA0GCSqGSIb3DQEBBQUAA4GB +AH+iMClLLGSaKWgwXsmdVo4FhTZZHo8Uprrtg3N9FxEeE50btpDVQysgRt5ias3K +m+bME9zbKwvbVWD5zZdjus4pDgzwF/iHyccL8JyYhxOvS/9zmvAtFXj/APIIbZFp +IT75d9f88ScIGEtknZQejnrdhB64tYki/EqluiuKBqKD +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/nokia.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/nokia.pem new file mode 100644 index 0000000..0d044df --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/nokia.pem @@ -0,0 +1,31 @@ +# Certificate for projects.developer.nokia.com:443 (see issue 13034) +-----BEGIN CERTIFICATE----- +MIIFLDCCBBSgAwIBAgIQLubqdkCgdc7lAF9NfHlUmjANBgkqhkiG9w0BAQUFADCB +vDELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTswOQYDVQQLEzJUZXJtcyBvZiB1c2Ug +YXQgaHR0cHM6Ly93d3cudmVyaXNpZ24uY29tL3JwYSAoYykxMDE2MDQGA1UEAxMt +VmVyaVNpZ24gQ2xhc3MgMyBJbnRlcm5hdGlvbmFsIFNlcnZlciBDQSAtIEczMB4X +DTExMDkyMTAwMDAwMFoXDTEyMDkyMDIzNTk1OVowcTELMAkGA1UEBhMCRkkxDjAM +BgNVBAgTBUVzcG9vMQ4wDAYDVQQHFAVFc3BvbzEOMAwGA1UEChQFTm9raWExCzAJ +BgNVBAsUAkJJMSUwIwYDVQQDFBxwcm9qZWN0cy5kZXZlbG9wZXIubm9raWEuY29t +MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCr92w1bpHYSYxUEx8N/8Iddda2 +lYi+aXNtQfV/l2Fw9Ykv3Ipw4nLeGTj18FFlAZgMdPRlgrzF/NNXGw/9l3/qKdow +CypkQf8lLaxb9Ze1E/KKmkRJa48QTOqvo6GqKuTI6HCeGlG1RxDb8YSKcQWLiytn +yj3Wp4MgRQO266xmMQIDAQABo4IB9jCCAfIwQQYDVR0RBDowOIIccHJvamVjdHMu +ZGV2ZWxvcGVyLm5va2lhLmNvbYIYcHJvamVjdHMuZm9ydW0ubm9raWEuY29tMAkG +A1UdEwQCMAAwCwYDVR0PBAQDAgWgMEEGA1UdHwQ6MDgwNqA0oDKGMGh0dHA6Ly9T +VlJJbnRsLUczLWNybC52ZXJpc2lnbi5jb20vU1ZSSW50bEczLmNybDBEBgNVHSAE +PTA7MDkGC2CGSAGG+EUBBxcDMCowKAYIKwYBBQUHAgEWHGh0dHBzOi8vd3d3LnZl +cmlzaWduLmNvbS9ycGEwKAYDVR0lBCEwHwYJYIZIAYb4QgQBBggrBgEFBQcDAQYI +KwYBBQUHAwIwcgYIKwYBBQUHAQEEZjBkMCQGCCsGAQUFBzABhhhodHRwOi8vb2Nz +cC52ZXJpc2lnbi5jb20wPAYIKwYBBQUHMAKGMGh0dHA6Ly9TVlJJbnRsLUczLWFp +YS52ZXJpc2lnbi5jb20vU1ZSSW50bEczLmNlcjBuBggrBgEFBQcBDARiMGChXqBc +MFowWDBWFglpbWFnZS9naWYwITAfMAcGBSsOAwIaBBRLa7kolgYMu9BSOJsprEsH +iyEFGDAmFiRodHRwOi8vbG9nby52ZXJpc2lnbi5jb20vdnNsb2dvMS5naWYwDQYJ +KoZIhvcNAQEFBQADggEBACQuPyIJqXwUyFRWw9x5yDXgMW4zYFopQYOw/ItRY522 +O5BsySTh56BWS6mQB07XVfxmYUGAvRQDA5QHpmY8jIlNwSmN3s8RKo+fAtiNRlcL +x/mWSfuMs3D/S6ev3D6+dpEMZtjrhOdctsarMKp8n/hPbwhAbg5hVjpkW5n8vz2y +0KxvvkA1AxpLwpVv7OlK17ttzIHw8bp9HTlHBU5s8bKz4a565V/a5HI0CSEv/+0y +ko4/ghTnZc1CkmUngKKeFMSah/mT/xAh8XnE2l1AazFa8UKuYki1e+ArHaGZc4ix +UYOtiRphwfuYQhRZ7qX9q2MMkCMI65XNK/SaFrAbbG0= +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/nullbytecert.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/nullbytecert.pem new file mode 100644 index 0000000..447186c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/nullbytecert.pem @@ -0,0 +1,90 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 0 (0x0) + Signature Algorithm: sha1WithRSAEncryption + Issuer: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, 
CN=null.python.org\x00example.org/emailAddress=python-dev@python.org + Validity + Not Before: Aug 7 13:11:52 2013 GMT + Not After : Aug 7 13:12:52 2013 GMT + Subject: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:b5:ea:ed:c9:fb:46:7d:6f:3b:76:80:dd:3a:f3: + 03:94:0b:a7:a6:db:ec:1d:df:ff:23:74:08:9d:97: + 16:3f:a3:a4:7b:3e:1b:0e:96:59:25:03:a7:26:e2: + 88:a9:cf:79:cd:f7:04:56:b0:ab:79:32:6e:59:c1: + 32:30:54:eb:58:a8:cb:91:f0:42:a5:64:27:cb:d4: + 56:31:88:52:ad:cf:bd:7f:f0:06:64:1f:cc:27:b8: + a3:8b:8c:f3:d8:29:1f:25:0b:f5:46:06:1b:ca:02: + 45:ad:7b:76:0a:9c:bf:bb:b9:ae:0d:16:ab:60:75: + ae:06:3e:9c:7c:31:dc:92:2f:29:1a:e0:4b:0c:91: + 90:6c:e9:37:c5:90:d7:2a:d7:97:15:a3:80:8f:5d: + 7b:49:8f:54:30:d4:97:2c:1c:5b:37:b5:ab:69:30: + 68:43:d3:33:78:4b:02:60:f5:3c:44:80:a1:8f:e7: + f0:0f:d1:5e:87:9e:46:cf:62:fc:f9:bf:0c:65:12: + f1:93:c8:35:79:3f:c8:ec:ec:47:f5:ef:be:44:d5: + ae:82:1e:2d:9a:9f:98:5a:67:65:e1:74:70:7c:cb: + d3:c2:ce:0e:45:49:27:dc:e3:2d:d4:fb:48:0e:2f: + 9e:77:b8:14:46:c0:c4:36:ca:02:ae:6a:91:8c:da: + 2f:85 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: critical + CA:FALSE + X509v3 Subject Key Identifier: + 88:5A:55:C0:52:FF:61:CD:52:A3:35:0F:EA:5A:9C:24:38:22:F7:5C + X509v3 Key Usage: + Digital Signature, Non Repudiation, Key Encipherment + X509v3 Subject Alternative Name: + ************************************************************* + WARNING: The values for DNS, email and URI are WRONG. OpenSSL + doesn't print the text after a NULL byte. + ************************************************************* + DNS:altnull.python.org, email:null@python.org, URI:http://null.python.org, IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1 + Signature Algorithm: sha1WithRSAEncryption + ac:4f:45:ef:7d:49:a8:21:70:8e:88:59:3e:d4:36:42:70:f5: + a3:bd:8b:d7:a8:d0:58:f6:31:4a:b1:a4:a6:dd:6f:d9:e8:44: + 3c:b6:0a:71:d6:7f:b1:08:61:9d:60:ce:75:cf:77:0c:d2:37: + 86:02:8d:5e:5d:f9:0f:71:b4:16:a8:c1:3d:23:1c:f1:11:b3: + 56:6e:ca:d0:8d:34:94:e6:87:2a:99:f2:ae:ae:cc:c2:e8:86: + de:08:a8:7f:c5:05:fa:6f:81:a7:82:e6:d0:53:9d:34:f4:ac: + 3e:40:fe:89:57:7a:29:a4:91:7e:0b:c6:51:31:e5:10:2f:a4: + 60:76:cd:95:51:1a:be:8b:a1:b0:fd:ad:52:bd:d7:1b:87:60: + d2:31:c7:17:c4:18:4f:2d:08:25:a3:a7:4f:b7:92:ca:e2:f5: + 25:f1:54:75:81:9d:b3:3d:61:a2:f7:da:ed:e1:c6:6f:2c:60: + 1f:d8:6f:c5:92:05:ab:c9:09:62:49:a9:14:ad:55:11:cc:d6: + 4a:19:94:99:97:37:1d:81:5f:8b:cf:a3:a8:96:44:51:08:3d: + 0b:05:65:12:eb:b6:70:80:88:48:72:4f:c6:c2:da:cf:cd:8e: + 5b:ba:97:2f:60:b4:96:56:49:5e:3a:43:76:63:04:be:2a:f6: + c1:ca:a9:94 +-----BEGIN CERTIFICATE----- +MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx +DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ +eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg +RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y +ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw +NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI +DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv +ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt +ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq +hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j 
+pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P +vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv +KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA +oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL +08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV +HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E +BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu +Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 +bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA +AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 +i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j +HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk +kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx +VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW +RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/nullcert.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/nullcert.pem new file mode 100644 index 0000000..e69de29 diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/pystone.py b/minor_project/lib/python3.6/site-packages/future/backports/test/pystone.py new file mode 100644 index 0000000..7652027 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/pystone.py @@ -0,0 +1,272 @@ +#!/usr/bin/env python3 + +""" +"PYSTONE" Benchmark Program + +Version: Python/1.1 (corresponds to C/1.1 plus 2 Pystone fixes) + +Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013. + + Translated from ADA to C by Rick Richardson. + Every method to preserve ADA-likeness has been used, + at the expense of C-ness. + + Translated from C to Python by Guido van Rossum. + +Version History: + + Version 1.1 corrects two bugs in version 1.0: + + First, it leaked memory: in Proc1(), NextRecord ends + up having a pointer to itself. I have corrected this + by zapping NextRecord.PtrComp at the end of Proc1(). + + Second, Proc3() used the operator != to compare a + record to None. This is rather inefficient and not + true to the intention of the original benchmark (where + a pointer comparison to None is intended; the != + operator attempts to find a method __cmp__ to do value + comparison of the record). Version 1.1 runs 5-10 + percent faster than version 1.0, so benchmark figures + of different versions can't be compared directly. 
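A hedged usage sketch of the benchmark's entry point, pystones(), defined below. The import path assumes the vendored location in this tree; note the module times itself with time.clock, which exists on the Python 3.6 interpreter this tree targets but was removed in Python 3.8.

from future.backports.test import pystone   # path as vendored in this tree

benchtime, stones = pystone.pystones(loops=50000)
print("%.2fs for 50000 passes, %.0f pystones/second" % (benchtime, stones))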
+ +""" + +from __future__ import print_function + +from time import clock + +LOOPS = 50000 + +__version__ = "1.1" + +[Ident1, Ident2, Ident3, Ident4, Ident5] = range(1, 6) + +class Record(object): + + def __init__(self, PtrComp = None, Discr = 0, EnumComp = 0, + IntComp = 0, StringComp = 0): + self.PtrComp = PtrComp + self.Discr = Discr + self.EnumComp = EnumComp + self.IntComp = IntComp + self.StringComp = StringComp + + def copy(self): + return Record(self.PtrComp, self.Discr, self.EnumComp, + self.IntComp, self.StringComp) + +TRUE = 1 +FALSE = 0 + +def main(loops=LOOPS): + benchtime, stones = pystones(loops) + print("Pystone(%s) time for %d passes = %g" % \ + (__version__, loops, benchtime)) + print("This machine benchmarks at %g pystones/second" % stones) + + +def pystones(loops=LOOPS): + return Proc0(loops) + +IntGlob = 0 +BoolGlob = FALSE +Char1Glob = '\0' +Char2Glob = '\0' +Array1Glob = [0]*51 +Array2Glob = [x[:] for x in [Array1Glob]*51] +PtrGlb = None +PtrGlbNext = None + +def Proc0(loops=LOOPS): + global IntGlob + global BoolGlob + global Char1Glob + global Char2Glob + global Array1Glob + global Array2Glob + global PtrGlb + global PtrGlbNext + + starttime = clock() + for i in range(loops): + pass + nulltime = clock() - starttime + + PtrGlbNext = Record() + PtrGlb = Record() + PtrGlb.PtrComp = PtrGlbNext + PtrGlb.Discr = Ident1 + PtrGlb.EnumComp = Ident3 + PtrGlb.IntComp = 40 + PtrGlb.StringComp = "DHRYSTONE PROGRAM, SOME STRING" + String1Loc = "DHRYSTONE PROGRAM, 1'ST STRING" + Array2Glob[8][7] = 10 + + starttime = clock() + + for i in range(loops): + Proc5() + Proc4() + IntLoc1 = 2 + IntLoc2 = 3 + String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING" + EnumLoc = Ident2 + BoolGlob = not Func2(String1Loc, String2Loc) + while IntLoc1 < IntLoc2: + IntLoc3 = 5 * IntLoc1 - IntLoc2 + IntLoc3 = Proc7(IntLoc1, IntLoc2) + IntLoc1 = IntLoc1 + 1 + Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3) + PtrGlb = Proc1(PtrGlb) + CharIndex = 'A' + while CharIndex <= Char2Glob: + if EnumLoc == Func1(CharIndex, 'C'): + EnumLoc = Proc6(Ident1) + CharIndex = chr(ord(CharIndex)+1) + IntLoc3 = IntLoc2 * IntLoc1 + IntLoc2 = IntLoc3 / IntLoc1 + IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1 + IntLoc1 = Proc2(IntLoc1) + + benchtime = clock() - starttime - nulltime + if benchtime == 0.0: + loopsPerBenchtime = 0.0 + else: + loopsPerBenchtime = (loops / benchtime) + return benchtime, loopsPerBenchtime + +def Proc1(PtrParIn): + PtrParIn.PtrComp = NextRecord = PtrGlb.copy() + PtrParIn.IntComp = 5 + NextRecord.IntComp = PtrParIn.IntComp + NextRecord.PtrComp = PtrParIn.PtrComp + NextRecord.PtrComp = Proc3(NextRecord.PtrComp) + if NextRecord.Discr == Ident1: + NextRecord.IntComp = 6 + NextRecord.EnumComp = Proc6(PtrParIn.EnumComp) + NextRecord.PtrComp = PtrGlb.PtrComp + NextRecord.IntComp = Proc7(NextRecord.IntComp, 10) + else: + PtrParIn = NextRecord.copy() + NextRecord.PtrComp = None + return PtrParIn + +def Proc2(IntParIO): + IntLoc = IntParIO + 10 + while 1: + if Char1Glob == 'A': + IntLoc = IntLoc - 1 + IntParIO = IntLoc - IntGlob + EnumLoc = Ident1 + if EnumLoc == Ident1: + break + return IntParIO + +def Proc3(PtrParOut): + global IntGlob + + if PtrGlb is not None: + PtrParOut = PtrGlb.PtrComp + else: + IntGlob = 100 + PtrGlb.IntComp = Proc7(10, IntGlob) + return PtrParOut + +def Proc4(): + global Char2Glob + + BoolLoc = Char1Glob == 'A' + BoolLoc = BoolLoc or BoolGlob + Char2Glob = 'B' + +def Proc5(): + global Char1Glob + global BoolGlob + + Char1Glob = 'A' + BoolGlob = FALSE + +def Proc6(EnumParIn): + EnumParOut = 
EnumParIn + if not Func3(EnumParIn): + EnumParOut = Ident4 + if EnumParIn == Ident1: + EnumParOut = Ident1 + elif EnumParIn == Ident2: + if IntGlob > 100: + EnumParOut = Ident1 + else: + EnumParOut = Ident4 + elif EnumParIn == Ident3: + EnumParOut = Ident2 + elif EnumParIn == Ident4: + pass + elif EnumParIn == Ident5: + EnumParOut = Ident3 + return EnumParOut + +def Proc7(IntParI1, IntParI2): + IntLoc = IntParI1 + 2 + IntParOut = IntParI2 + IntLoc + return IntParOut + +def Proc8(Array1Par, Array2Par, IntParI1, IntParI2): + global IntGlob + + IntLoc = IntParI1 + 5 + Array1Par[IntLoc] = IntParI2 + Array1Par[IntLoc+1] = Array1Par[IntLoc] + Array1Par[IntLoc+30] = IntLoc + for IntIndex in range(IntLoc, IntLoc+2): + Array2Par[IntLoc][IntIndex] = IntLoc + Array2Par[IntLoc][IntLoc-1] = Array2Par[IntLoc][IntLoc-1] + 1 + Array2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc] + IntGlob = 5 + +def Func1(CharPar1, CharPar2): + CharLoc1 = CharPar1 + CharLoc2 = CharLoc1 + if CharLoc2 != CharPar2: + return Ident1 + else: + return Ident2 + +def Func2(StrParI1, StrParI2): + IntLoc = 1 + while IntLoc <= 1: + if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1: + CharLoc = 'A' + IntLoc = IntLoc + 1 + if CharLoc >= 'W' and CharLoc <= 'Z': + IntLoc = 7 + if CharLoc == 'X': + return TRUE + else: + if StrParI1 > StrParI2: + IntLoc = IntLoc + 7 + return TRUE + else: + return FALSE + +def Func3(EnumParIn): + EnumLoc = EnumParIn + if EnumLoc == Ident3: return TRUE + return FALSE + +if __name__ == '__main__': + import sys + def error(msg): + print(msg, end=' ', file=sys.stderr) + print("usage: %s [number_of_loops]" % sys.argv[0], file=sys.stderr) + sys.exit(100) + nargs = len(sys.argv) - 1 + if nargs > 1: + error("%d arguments are too many;" % nargs) + elif nargs == 1: + try: loops = int(sys.argv[1]) + except ValueError: + error("Invalid argument %r;" % sys.argv[1]) + else: + loops = LOOPS + main(loops) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/sha256.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/sha256.pem new file mode 100644 index 0000000..d3db4b8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/sha256.pem @@ -0,0 +1,128 @@ +# Certificate chain for https://sha256.tbs-internet.com + 0 s:/C=FR/postalCode=14000/ST=Calvados/L=CAEN/street=22 rue de Bretagne/O=TBS INTERNET/OU=0002 440443810/OU=sha-256 production/CN=sha256.tbs-internet.com + i:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC +-----BEGIN CERTIFICATE----- +MIIGXDCCBUSgAwIBAgIRAKpVmHgg9nfCodAVwcP4siwwDQYJKoZIhvcNAQELBQAw +gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl +bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u +ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv +cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg +Q0EgU0dDMB4XDTEyMDEwNDAwMDAwMFoXDTE0MDIxNzIzNTk1OVowgcsxCzAJBgNV +BAYTAkZSMQ4wDAYDVQQREwUxNDAwMDERMA8GA1UECBMIQ2FsdmFkb3MxDTALBgNV +BAcTBENBRU4xGzAZBgNVBAkTEjIyIHJ1ZSBkZSBCcmV0YWduZTEVMBMGA1UEChMM +VEJTIElOVEVSTkVUMRcwFQYDVQQLEw4wMDAyIDQ0MDQ0MzgxMDEbMBkGA1UECxMS +c2hhLTI1NiBwcm9kdWN0aW9uMSAwHgYDVQQDExdzaGEyNTYudGJzLWludGVybmV0 +LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKQIX/zdJcyxty0m +PM1XQSoSSifueS3AVcgqMsaIKS/u+rYzsv4hQ/qA6vLn5m5/ewUcZDj7zdi6rBVf +PaVNXJ6YinLX0tkaW8TEjeVuZG5yksGZlhCt1CJ1Ho9XLiLaP4uJ7MCoNUntpJ+E +LfrOdgsIj91kPmwjDJeztVcQCvKzhjVJA/KxdInc0JvOATn7rpaSmQI5bvIjufgo 
+qVsTPwVFzuUYULXBk7KxRT7MiEqnd5HvviNh0285QC478zl3v0I0Fb5El4yD3p49 +IthcRnxzMKc0UhU5ogi0SbONyBfm/mzONVfSxpM+MlyvZmJqrbuuLoEDzJD+t8PU +xSuzgbcCAwEAAaOCAj4wggI6MB8GA1UdIwQYMBaAFAdEdoWTKLx/bXjSCuv6TEvf +2YIfMB0GA1UdDgQWBBT/qTGYdaj+f61c2IRFL/B1eEsM8DAOBgNVHQ8BAf8EBAMC +BaAwDAYDVR0TAQH/BAIwADA0BgNVHSUELTArBggrBgEFBQcDAQYIKwYBBQUHAwIG +CisGAQQBgjcKAwMGCWCGSAGG+EIEATBLBgNVHSAERDBCMEAGCisGAQQB5TcCBAEw +MjAwBggrBgEFBQcCARYkaHR0cHM6Ly93d3cudGJzLWludGVybmV0LmNvbS9DQS9D +UFM0MG0GA1UdHwRmMGQwMqAwoC6GLGh0dHA6Ly9jcmwudGJzLWludGVybmV0LmNv +bS9UQlNYNTA5Q0FTR0MuY3JsMC6gLKAqhihodHRwOi8vY3JsLnRicy14NTA5LmNv +bS9UQlNYNTA5Q0FTR0MuY3JsMIGmBggrBgEFBQcBAQSBmTCBljA4BggrBgEFBQcw +AoYsaHR0cDovL2NydC50YnMtaW50ZXJuZXQuY29tL1RCU1g1MDlDQVNHQy5jcnQw +NAYIKwYBBQUHMAKGKGh0dHA6Ly9jcnQudGJzLXg1MDkuY29tL1RCU1g1MDlDQVNH +Qy5jcnQwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLnRicy14NTA5LmNvbTA/BgNV +HREEODA2ghdzaGEyNTYudGJzLWludGVybmV0LmNvbYIbd3d3LnNoYTI1Ni50YnMt +aW50ZXJuZXQuY29tMA0GCSqGSIb3DQEBCwUAA4IBAQA0pOuL8QvAa5yksTbGShzX +ABApagunUGoEydv4YJT1MXy9tTp7DrWaozZSlsqBxrYAXP1d9r2fuKbEniYHxaQ0 +UYaf1VSIlDo1yuC8wE7wxbHDIpQ/E5KAyxiaJ8obtDhFstWAPAH+UoGXq0kj2teN +21sFQ5dXgA95nldvVFsFhrRUNB6xXAcaj0VZFhttI0ZfQZmQwEI/P+N9Jr40OGun +aa+Dn0TMeUH4U20YntfLbu2nDcJcYfyurm+8/0Tr4HznLnedXu9pCPYj0TaddrgT +XO0oFiyy7qGaY6+qKh71yD64Y3ycCJ/HR9Wm39mjZYc9ezYwT4noP6r7Lk8YO7/q +-----END CERTIFICATE----- + 1 s:/C=FR/ST=Calvados/L=Caen/O=TBS INTERNET/OU=Terms and Conditions: http://www.tbs-internet.com/CA/repository/OU=TBS INTERNET CA/CN=TBS X509 CA SGC + i:/C=SE/O=AddTrust AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQXpDZ0ETJMV02WTx3GTnhhTANBgkqhkiG9w0BAQUFADBv +MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFk +ZFRydXN0IEV4dGVybmFsIFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBF +eHRlcm5hbCBDQSBSb290MB4XDTA1MTIwMTAwMDAwMFoXDTE5MDYyNDE5MDYzMFow +gcQxCzAJBgNVBAYTAkZSMREwDwYDVQQIEwhDYWx2YWRvczENMAsGA1UEBxMEQ2Fl +bjEVMBMGA1UEChMMVEJTIElOVEVSTkVUMUgwRgYDVQQLEz9UZXJtcyBhbmQgQ29u +ZGl0aW9uczogaHR0cDovL3d3dy50YnMtaW50ZXJuZXQuY29tL0NBL3JlcG9zaXRv +cnkxGDAWBgNVBAsTD1RCUyBJTlRFUk5FVCBDQTEYMBYGA1UEAxMPVEJTIFg1MDkg +Q0EgU0dDMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsgOkO3f7wzN6 +rOjg45tR5vjBfzK7qmV9IBxb/QW9EEXxG+E7FNhZqQLtwGBKoSsHTnQqV75wWMk0 +9tinWvftBkSpj5sTi/8cbzJfUvTSVYh3Qxv6AVVjMMH/ruLjE6y+4PoaPs8WoYAQ +ts5R4Z1g8c/WnTepLst2x0/Wv7GmuoQi+gXvHU6YrBiu7XkeYhzc95QdviWSJRDk +owhb5K43qhcvjRmBfO/paGlCliDGZp8mHwrI21mwobWpVjTxZRwYO3bd4+TGcI4G +Ie5wmHwE8F7SK1tgSqbBacKjDa93j7txKkfz/Yd2n7TGqOXiHPsJpG655vrKtnXk +9vs1zoDeJQIDAQABo4IBljCCAZIwHQYDVR0OBBYEFAdEdoWTKLx/bXjSCuv6TEvf +2YIfMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMCAGA1UdJQQZ +MBcGCisGAQQBgjcKAwMGCWCGSAGG+EIEATAYBgNVHSAEETAPMA0GCysGAQQBgOU3 +AgQBMHsGA1UdHwR0MHIwOKA2oDSGMmh0dHA6Ly9jcmwuY29tb2RvY2EuY29tL0Fk +ZFRydXN0RXh0ZXJuYWxDQVJvb3QuY3JsMDagNKAyhjBodHRwOi8vY3JsLmNvbW9k +by5uZXQvQWRkVHJ1c3RFeHRlcm5hbENBUm9vdC5jcmwwgYAGCCsGAQUFBwEBBHQw +cjA4BggrBgEFBQcwAoYsaHR0cDovL2NydC5jb21vZG9jYS5jb20vQWRkVHJ1c3RV +VE5TR0NDQS5jcnQwNgYIKwYBBQUHMAKGKmh0dHA6Ly9jcnQuY29tb2RvLm5ldC9B +ZGRUcnVzdFVUTlNHQ0NBLmNydDARBglghkgBhvhCAQEEBAMCAgQwDQYJKoZIhvcN +AQEFBQADggEBAK2zEzs+jcIrVK9oDkdDZNvhuBYTdCfpxfFs+OAujW0bIfJAy232 +euVsnJm6u/+OrqKudD2tad2BbejLLXhMZViaCmK7D9nrXHx4te5EP8rL19SUVqLY +1pTnv5dhNgEgvA7n5lIzDSYs7yRLsr7HJsYPr6SeYSuZizyX1SNz7ooJ32/F3X98 +RB0Mlc/E0OyOrkQ9/y5IrnpnaSora8CnUrV5XNOg+kyCz9edCyx4D5wXYcwZPVWz +8aDqquESrezPyjtfi4WRO4s/VD3HLZvOxzMrWAVYCDG9FxaOhF0QGuuG1F7F3GKV +v6prNyCl016kRl2j1UT+a7gLd8fA25A4C9E= +-----END CERTIFICATE----- + 2 s:/C=SE/O=AddTrust 
AB/OU=AddTrust External TTP Network/CN=AddTrust External CA Root + i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC +-----BEGIN CERTIFICATE----- +MIIEZjCCA06gAwIBAgIQUSYKkxzif5zDpV954HKugjANBgkqhkiG9w0BAQUFADCB +kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw +IFNHQzAeFw0wNTA2MDcwODA5MTBaFw0xOTA2MjQxOTA2MzBaMG8xCzAJBgNVBAYT +AlNFMRQwEgYDVQQKEwtBZGRUcnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0 +ZXJuYWwgVFRQIE5ldHdvcmsxIjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENB +IFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC39xoz5vIABC05 +4E5b7R+8bA/Ntfojts7emxEzl6QpTH2Tn71KvJPtAxrjj8/lbVBa1pcplFqAsEl6 +2y6V/bjKvzc4LR4+kUGtcFbH8E8/6DKedMrIkFTpxl8PeJ2aQDwOrGGqXhSPnoeh +alDc15pOrwWzpnGUnHGzUGAKxxOdOAeGAqjpqGkmGJCrTLBPI6s6T4TY386f4Wlv +u9dC12tE5Met7m1BX3JacQg3s3llpFmglDf3AC8NwpJy2tA4ctsUqEXEXSp9t7TW +xO6szRNEt8kr3UMAJfphuWlqWCMRt6czj1Z1WfXNKddGtworZbbTQm8Vsrh7++/p +XVPVNFonAgMBAAGjgdgwgdUwHwYDVR0jBBgwFoAUUzLRs89/+uDxoF2FTpLSnkUd +tE8wHQYDVR0OBBYEFK29mHo0tCb3+sQmVO8DveAky1QaMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MBEGCWCGSAGG+EIBAQQEAwIBAjAgBgNVHSUEGTAX +BgorBgEEAYI3CgMDBglghkgBhvhCBAEwPQYDVR0fBDYwNDAyoDCgLoYsaHR0cDov +L2NybC51c2VydHJ1c3QuY29tL1VUTi1EQVRBQ29ycFNHQy5jcmwwDQYJKoZIhvcN +AQEFBQADggEBAMbuUxdoFLJRIh6QWA2U/b3xcOWGLcM2MY9USEbnLQg3vGwKYOEO +rVE04BKT6b64q7gmtOmWPSiPrmQH/uAB7MXjkesYoPF1ftsK5p+R26+udd8jkWjd +FwBaS/9kbHDrARrQkNnHptZt9hPk/7XJ0h4qy7ElQyZ42TCbTg0evmnv3+r+LbPM ++bDdtRTKkdSytaX7ARmjR3mfnYyVhzT4HziS2jamEfpr62vp3EV4FTkG101B5CHI +3C+H0be/SGB1pWLLJN47YaApIKa+xWycxOkKaSLvkTr6Jq/RW0GnOuL4OAdCq8Fb ++M5tug8EPzI0rNwEKNdwMBQmBsTkm5jVz3g= +-----END CERTIFICATE----- + 3 s:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC + i:/C=US/ST=UT/L=Salt Lake City/O=The USERTRUST Network/OU=http://www.usertrust.com/CN=UTN - DATACorp SGC +-----BEGIN CERTIFICATE----- +MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB +kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw +IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG +EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD +VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu +dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 +E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ +D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK +4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq +lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW +bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB +o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT +MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js +LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr +BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB +AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft +Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj +j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH +KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv +2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 
+mfnGV/TJVTl4uix5yaaIK/QI +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_cert.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_cert.pem new file mode 100644 index 0000000..47a7d7e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_cert.pem @@ -0,0 +1,15 @@ +-----BEGIN CERTIFICATE----- +MIICVDCCAb2gAwIBAgIJANfHOBkZr8JOMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNV +BAYTAlhZMRcwFQYDVQQHEw5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMDEw +MDgyMzAxNTZaFw0yMDEwMDUyMzAxNTZaMF8xCzAJBgNVBAYTAlhZMRcwFQYDVQQH +Ew5DYXN0bGUgQW50aHJheDEjMCEGA1UEChMaUHl0aG9uIFNvZnR3YXJlIEZvdW5k +YXRpb24xEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAw +gYkCgYEA21vT5isq7F68amYuuNpSFlKDPrMUCa4YWYqZRt2OZ+/3NKaZ2xAiSwr7 +6MrQF70t5nLbSPpqE5+5VrS58SY+g/sXLiFd6AplH1wJZwh78DofbFYXUggktFMt +pTyiX8jtP66bkcPkDADA089RI1TQR6Ca+n7HFa7c1fabVV6i3zkCAwEAAaMYMBYw +FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBBQUAA4GBAHPctQBEQ4wd +BJ6+JcpIraopLn8BGhbjNWj40mmRqWB/NAWF6M5ne7KpGAu7tLeG4hb1zLaldK8G +lxy2GPSRF6LFS48dpEj2HbMv2nvv6xxalDMJ9+DicWgAKTQ6bcX2j3GUkCR0g/T1 +CRlNBAAlvhKzO7Clpf9l0YKBEfraJByX +-----END CERTIFICATE----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_key.passwd.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_key.passwd.pem new file mode 100644 index 0000000..2524672 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_key.passwd.pem @@ -0,0 +1,18 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,1A8D9D2A02EC698A + +kJYbfZ8L0sfe9Oty3gw0aloNnY5E8fegRfQLZlNoxTl6jNt0nIwI8kDJ36CZgR9c +u3FDJm/KqrfUoz8vW+qEnWhSG7QPX2wWGPHd4K94Yz/FgrRzZ0DoK7XxXq9gOtVA +AVGQhnz32p+6WhfGsCr9ArXEwRZrTk/FvzEPaU5fHcoSkrNVAGX8IpSVkSDwEDQr +Gv17+cfk99UV1OCza6yKHoFkTtrC+PZU71LomBabivS2Oc4B9hYuSR2hF01wTHP+ +YlWNagZOOVtNz4oKK9x9eNQpmfQXQvPPTfusexKIbKfZrMvJoxcm1gfcZ0H/wK6P +6wmXSG35qMOOztCZNtperjs1wzEBXznyK8QmLcAJBjkfarABJX9vBEzZV0OUKhy+ +noORFwHTllphbmydLhu6ehLUZMHPhzAS5UN7srtpSN81eerDMy0RMUAwA7/PofX1 +94Me85Q8jP0PC9ETdsJcPqLzAPETEYu0ELewKRcrdyWi+tlLFrpE5KT/s5ecbl9l +7B61U4Kfd1PIXc/siINhU3A3bYK+845YyUArUOnKf1kEox7p1RpD7yFqVT04lRTo +cibNKATBusXSuBrp2G6GNuhWEOSafWCKJQAzgCYIp6ZTV2khhMUGppc/2H3CF6cO +zX0KtlPVZC7hLkB6HT8SxYUwF1zqWY7+/XPPdc37MeEZ87Q3UuZwqORLY+Z0hpgt +L5JXBCoklZhCAaN2GqwFLXtGiRSRFGY7xXIhbDTlE65Wv1WGGgDLMKGE1gOz3yAo +2jjG1+yAHJUdE69XTFHSqSkvaloA1W03LdMXZ9VuQJ/ySXCie6ABAQ== +-----END RSA PRIVATE KEY----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_key.pem b/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_key.pem new file mode 100644 index 0000000..3fd3bbd --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_key.pem @@ -0,0 +1,16 @@ +-----BEGIN PRIVATE KEY----- +MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBANtb0+YrKuxevGpm +LrjaUhZSgz6zFAmuGFmKmUbdjmfv9zSmmdsQIksK++jK0Be9LeZy20j6ahOfuVa0 +ufEmPoP7Fy4hXegKZR9cCWcIe/A6H2xWF1IIJLRTLaU8ol/I7T+um5HD5AwAwNPP +USNU0Eegmvp+xxWu3NX2m1Veot85AgMBAAECgYA3ZdZ673X0oexFlq7AAmrutkHt +CL7LvwrpOiaBjhyTxTeSNWzvtQBkIU8DOI0bIazA4UreAFffwtvEuPmonDb3F+Iq +SMAu42XcGyVZEl+gHlTPU9XRX7nTOXVt+MlRRRxL6t9GkGfUAXI3XxJDXW3c0vBK +UL9xqD8cORXOfE06rQJBAP8mEX1ERkR64Ptsoe4281vjTlNfIbs7NMPkUnrn9N/Y +BLhjNIfQ3HFZG8BTMLfX7kCS9D593DW5tV4Z9BP/c6cCQQDcFzCcVArNh2JSywOQ +ZfTfRbJg/Z5Lt9Fkngv1meeGNPgIMLN8Sg679pAOOWmzdMO3V706rNPzSVMME7E5 
+oPIfAkEA8pDddarP5tCvTTgUpmTFbakm0KoTZm2+FzHcnA4jRh+XNTjTOv98Y6Ik +eO5d1ZnKXseWvkZncQgxfdnMqqpj5wJAcNq/RVne1DbYlwWchT2Si65MYmmJ8t+F +0mcsULqjOnEMwf5e+ptq5LzwbyrHZYq5FNk7ocufPv/ZQrcSSC+cFwJBAKvOJByS +x56qyGeZLOQlWS2JS3KJo59XuLFGqcbgN9Om9xFa41Yb4N9NvplFivsvZdw3m1Q/ +SPIXQuT8RMPDVNQ= +-----END PRIVATE KEY----- diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_servers.py b/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_servers.py new file mode 100644 index 0000000..87a3fb8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/ssl_servers.py @@ -0,0 +1,207 @@ +from __future__ import absolute_import, division, print_function, unicode_literals +from future.builtins import filter, str +from future import utils +import os +import sys +import ssl +import pprint +import socket +from future.backports.urllib import parse as urllib_parse +from future.backports.http.server import (HTTPServer as _HTTPServer, + SimpleHTTPRequestHandler, BaseHTTPRequestHandler) +from future.backports.test import support +threading = support.import_module("threading") + +here = os.path.dirname(__file__) + +HOST = support.HOST +CERTFILE = os.path.join(here, 'keycert.pem') + +# This one's based on HTTPServer, which is based on SocketServer + +class HTTPSServer(_HTTPServer): + + def __init__(self, server_address, handler_class, context): + _HTTPServer.__init__(self, server_address, handler_class) + self.context = context + + def __str__(self): + return ('<%s %s:%s>' % + (self.__class__.__name__, + self.server_name, + self.server_port)) + + def get_request(self): + # override this to wrap socket with SSL + try: + sock, addr = self.socket.accept() + sslconn = self.context.wrap_socket(sock, server_side=True) + except socket.error as e: + # socket errors are silenced by the caller, print them here + if support.verbose: + sys.stderr.write("Got an error:\n%s\n" % e) + raise + return sslconn, addr + +class RootedHTTPRequestHandler(SimpleHTTPRequestHandler): + # need to override translate_path to get a known root, + # instead of using os.curdir, since the test could be + # run from anywhere + + server_version = "TestHTTPS/1.0" + root = here + # Avoid hanging when a request gets interrupted by the client + timeout = 5 + + def translate_path(self, path): + """Translate a /-separated PATH to the local filename syntax. + + Components that mean special things to the local file system + (e.g. drive or directory names) are ignored. (XXX They should + probably be diagnosed.) + + """ + # abandon query parameters + path = urllib.parse.urlparse(path)[2] + path = os.path.normpath(urllib.parse.unquote(path)) + words = path.split('/') + words = filter(None, words) + path = self.root + for word in words: + drive, word = os.path.splitdrive(word) + head, word = os.path.split(word) + path = os.path.join(path, word) + return path + + def log_message(self, format, *args): + # we override this to suppress logging unless "verbose" + if support.verbose: + sys.stdout.write(" server (%s:%d %s):\n [%s] %s\n" % + (self.server.server_address, + self.server.server_port, + self.request.cipher(), + self.log_date_time_string(), + format%args)) + + +class StatsRequestHandler(BaseHTTPRequestHandler): + """Example HTTP request handler which returns SSL statistics on GET + requests. 
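The usual way tests drive the HTTPSServer above is through the make_https_server() helper defined near the end of this module: it builds an SSLContext from keycert.pem, starts an HTTPSServerThread, and registers cleanup on the calling test case. A hedged sketch under the assumption that the module is importable at the vendored path below; the test class is illustrative.

import unittest

from future.backports.test import ssl_servers   # path as vendored in this tree

class HTTPSSmokeTest(unittest.TestCase):         # illustrative test case
    def test_server_starts(self):
        # Starts a TLS server on an ephemeral port and schedules its shutdown
        # via addCleanup(); no request is issued in this sketch.
        server = ssl_servers.make_https_server(self)
        self.assertGreater(server.port, 0)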
+ """ + + server_version = "StatsHTTPS/1.0" + + def do_GET(self, send_body=True): + """Serve a GET request.""" + sock = self.rfile.raw._sock + context = sock.context + stats = { + 'session_cache': context.session_stats(), + 'cipher': sock.cipher(), + 'compression': sock.compression(), + } + body = pprint.pformat(stats) + body = body.encode('utf-8') + self.send_response(200) + self.send_header("Content-type", "text/plain; charset=utf-8") + self.send_header("Content-Length", str(len(body))) + self.end_headers() + if send_body: + self.wfile.write(body) + + def do_HEAD(self): + """Serve a HEAD request.""" + self.do_GET(send_body=False) + + def log_request(self, format, *args): + if support.verbose: + BaseHTTPRequestHandler.log_request(self, format, *args) + + +class HTTPSServerThread(threading.Thread): + + def __init__(self, context, host=HOST, handler_class=None): + self.flag = None + self.server = HTTPSServer((host, 0), + handler_class or RootedHTTPRequestHandler, + context) + self.port = self.server.server_port + threading.Thread.__init__(self) + self.daemon = True + + def __str__(self): + return "<%s %s>" % (self.__class__.__name__, self.server) + + def start(self, flag=None): + self.flag = flag + threading.Thread.start(self) + + def run(self): + if self.flag: + self.flag.set() + try: + self.server.serve_forever(0.05) + finally: + self.server.server_close() + + def stop(self): + self.server.shutdown() + + +def make_https_server(case, certfile=CERTFILE, host=HOST, handler_class=None): + # we assume the certfile contains both private key and certificate + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.load_cert_chain(certfile) + server = HTTPSServerThread(context, host, handler_class) + flag = threading.Event() + server.start(flag) + flag.wait() + def cleanup(): + if support.verbose: + sys.stdout.write('stopping HTTPS server\n') + server.stop() + if support.verbose: + sys.stdout.write('joining HTTPS thread\n') + server.join() + case.addCleanup(cleanup) + return server + + +if __name__ == "__main__": + import argparse + parser = argparse.ArgumentParser( + description='Run a test HTTPS server. 
' + 'By default, the current directory is served.') + parser.add_argument('-p', '--port', type=int, default=4433, + help='port to listen on (default: %(default)s)') + parser.add_argument('-q', '--quiet', dest='verbose', default=True, + action='store_false', help='be less verbose') + parser.add_argument('-s', '--stats', dest='use_stats_handler', default=False, + action='store_true', help='always return stats page') + parser.add_argument('--curve-name', dest='curve_name', type=str, + action='store', + help='curve name for EC-based Diffie-Hellman') + parser.add_argument('--dh', dest='dh_file', type=str, action='store', + help='PEM file containing DH parameters') + args = parser.parse_args() + + support.verbose = args.verbose + if args.use_stats_handler: + handler_class = StatsRequestHandler + else: + handler_class = RootedHTTPRequestHandler + if utils.PY2: + handler_class.root = os.getcwdu() + else: + handler_class.root = os.getcwd() + context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + context.load_cert_chain(CERTFILE) + if args.curve_name: + context.set_ecdh_curve(args.curve_name) + if args.dh_file: + context.load_dh_params(args.dh_file) + + server = HTTPSServer(("", args.port), handler_class, context) + if args.verbose: + print("Listening on https://localhost:{0.port}".format(args)) + server.serve_forever(0.1) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/test/support.py b/minor_project/lib/python3.6/site-packages/future/backports/test/support.py new file mode 100644 index 0000000..1999e20 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/test/support.py @@ -0,0 +1,2048 @@ +# -*- coding: utf-8 -*- +"""Supporting definitions for the Python regression tests. + +Backported for python-future from Python 3.3 test/support.py. +""" + +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future import utils +from future.builtins import str, range, open, int, map, list + +import contextlib +import errno +import functools +import gc +import socket +import sys +import os +import platform +import shutil +import warnings +import unittest +# For Python 2.6 compatibility: +if not hasattr(unittest, 'skip'): + import unittest2 as unittest + +import importlib +# import collections.abc # not present on Py2.7 +import re +import subprocess +import imp +import time +try: + import sysconfig +except ImportError: + # sysconfig is not available on Python 2.6. 
Try using distutils.sysconfig instead: + from distutils import sysconfig +import fnmatch +import logging.handlers +import struct +import tempfile + +try: + if utils.PY3: + import _thread, threading + else: + import thread as _thread, threading +except ImportError: + _thread = None + threading = None +try: + import multiprocessing.process +except ImportError: + multiprocessing = None + +try: + import zlib +except ImportError: + zlib = None + +try: + import gzip +except ImportError: + gzip = None + +try: + import bz2 +except ImportError: + bz2 = None + +try: + import lzma +except ImportError: + lzma = None + +__all__ = [ + "Error", "TestFailed", "ResourceDenied", "import_module", "verbose", + "use_resources", "max_memuse", "record_original_stdout", + "get_original_stdout", "unload", "unlink", "rmtree", "forget", + "is_resource_enabled", "requires", "requires_freebsd_version", + "requires_linux_version", "requires_mac_ver", "find_unused_port", + "bind_port", "IPV6_ENABLED", "is_jython", "TESTFN", "HOST", "SAVEDCWD", + "temp_cwd", "findfile", "create_empty_file", "sortdict", + "check_syntax_error", "open_urlresource", "check_warnings", "CleanImport", + "EnvironmentVarGuard", "TransientResource", "captured_stdout", + "captured_stdin", "captured_stderr", "time_out", "socket_peer_reset", + "ioerror_peer_reset", "run_with_locale", 'temp_umask', + "transient_internet", "set_memlimit", "bigmemtest", "bigaddrspacetest", + "BasicTestRunner", "run_unittest", "run_doctest", "threading_setup", + "threading_cleanup", "reap_children", "cpython_only", "check_impl_detail", + "get_attribute", "swap_item", "swap_attr", "requires_IEEE_754", + "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink", + "skip_unless_xattr", "import_fresh_module", "requires_zlib", + "PIPE_MAX_SIZE", "failfast", "anticipate_failure", "run_with_tz", + "requires_gzip", "requires_bz2", "requires_lzma", "suppress_crash_popup", + ] + +class Error(Exception): + """Base class for regression test exceptions.""" + +class TestFailed(Error): + """Test failed.""" + +class ResourceDenied(unittest.SkipTest): + """Test skipped because it requested a disallowed resource. + + This is raised when a test calls requires() for a resource that + has not be enabled. It is used to distinguish between expected + and unexpected skips. + """ + +@contextlib.contextmanager +def _ignore_deprecated_imports(ignore=True): + """Context manager to suppress package and module deprecation + warnings when importing them. + + If ignore is False, this context manager has no effect.""" + if ignore: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", ".+ (module|package)", + DeprecationWarning) + yield + else: + yield + + +def import_module(name, deprecated=False): + """Import and return the module to be tested, raising SkipTest if + it is not available. + + If deprecated is True, any module or package deprecation messages + will be suppressed.""" + with _ignore_deprecated_imports(deprecated): + try: + return importlib.import_module(name) + except ImportError as msg: + raise unittest.SkipTest(str(msg)) + + +def _save_and_remove_module(name, orig_modules): + """Helper function to save and remove a module from sys.modules + + Raise ImportError if the module can't be imported. 
+ """ + # try to import the module and raise an error if it can't be imported + if name not in sys.modules: + __import__(name) + del sys.modules[name] + for modname in list(sys.modules): + if modname == name or modname.startswith(name + '.'): + orig_modules[modname] = sys.modules[modname] + del sys.modules[modname] + +def _save_and_block_module(name, orig_modules): + """Helper function to save and block a module in sys.modules + + Return True if the module was in sys.modules, False otherwise. + """ + saved = True + try: + orig_modules[name] = sys.modules[name] + except KeyError: + saved = False + sys.modules[name] = None + return saved + + +def anticipate_failure(condition): + """Decorator to mark a test that is known to be broken in some cases + + Any use of this decorator should have a comment identifying the + associated tracker issue. + """ + if condition: + return unittest.expectedFailure + return lambda f: f + + +def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): + """Import and return a module, deliberately bypassing sys.modules. + This function imports and returns a fresh copy of the named Python module + by removing the named module from sys.modules before doing the import. + Note that unlike reload, the original module is not affected by + this operation. + + *fresh* is an iterable of additional module names that are also removed + from the sys.modules cache before doing the import. + + *blocked* is an iterable of module names that are replaced with None + in the module cache during the import to ensure that attempts to import + them raise ImportError. + + The named module and any modules named in the *fresh* and *blocked* + parameters are saved before starting the import and then reinserted into + sys.modules when the fresh import is complete. + + Module and package deprecation messages are suppressed during this import + if *deprecated* is True. + + This function will raise ImportError if the named module cannot be + imported. + + If deprecated is True, any module or package deprecation messages + will be suppressed. + """ + # NOTE: test_heapq, test_json and test_warnings include extra sanity checks + # to make sure that this utility function is working as expected + with _ignore_deprecated_imports(deprecated): + # Keep track of modules saved for later restoration as well + # as those which just need a blocking entry removed + orig_modules = {} + names_to_remove = [] + _save_and_remove_module(name, orig_modules) + try: + for fresh_name in fresh: + _save_and_remove_module(fresh_name, orig_modules) + for blocked_name in blocked: + if not _save_and_block_module(blocked_name, orig_modules): + names_to_remove.append(blocked_name) + fresh_module = importlib.import_module(name) + except ImportError: + fresh_module = None + finally: + for orig_name, module in orig_modules.items(): + sys.modules[orig_name] = module + for name_to_remove in names_to_remove: + del sys.modules[name_to_remove] + return fresh_module + + +def get_attribute(obj, name): + """Get an attribute, raising SkipTest if AttributeError is raised.""" + try: + attribute = getattr(obj, name) + except AttributeError: + raise unittest.SkipTest("object %r has no attribute %r" % (obj, name)) + else: + return attribute + +verbose = 1 # Flag set to 0 by regrtest.py +use_resources = None # Flag set to [] by regrtest.py +max_memuse = 0 # Disable bigmem tests (they will still be run with + # small sizes, to make sure they work.) 
+real_max_memuse = 0 +failfast = False +match_tests = None + +# _original_stdout is meant to hold stdout at the time regrtest began. +# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever. +# The point is to have some flavor of stdout the user can actually see. +_original_stdout = None +def record_original_stdout(stdout): + global _original_stdout + _original_stdout = stdout + +def get_original_stdout(): + return _original_stdout or sys.stdout + +def unload(name): + try: + del sys.modules[name] + except KeyError: + pass + +if sys.platform.startswith("win"): + def _waitfor(func, pathname, waitall=False): + # Perform the operation + func(pathname) + # Now setup the wait loop + if waitall: + dirname = pathname + else: + dirname, name = os.path.split(pathname) + dirname = dirname or '.' + # Check for `pathname` to be removed from the filesystem. + # The exponential backoff of the timeout amounts to a total + # of ~1 second after which the deletion is probably an error + # anyway. + # Testing on a i7@4.3GHz shows that usually only 1 iteration is + # required when contention occurs. + timeout = 0.001 + while timeout < 1.0: + # Note we are only testing for the existence of the file(s) in + # the contents of the directory regardless of any security or + # access rights. If we have made it this far, we have sufficient + # permissions to do that much using Python's equivalent of the + # Windows API FindFirstFile. + # Other Windows APIs can fail or give incorrect results when + # dealing with files that are pending deletion. + L = os.listdir(dirname) + if not (L if waitall else name in L): + return + # Increase the timeout and try again + time.sleep(timeout) + timeout *= 2 + warnings.warn('tests may fail, delete still pending for ' + pathname, + RuntimeWarning, stacklevel=4) + + def _unlink(filename): + _waitfor(os.unlink, filename) + + def _rmdir(dirname): + _waitfor(os.rmdir, dirname) + + def _rmtree(path): + def _rmtree_inner(path): + for name in os.listdir(path): + fullname = os.path.join(path, name) + if os.path.isdir(fullname): + _waitfor(_rmtree_inner, fullname, waitall=True) + os.rmdir(fullname) + else: + os.unlink(fullname) + _waitfor(_rmtree_inner, path, waitall=True) + _waitfor(os.rmdir, path) +else: + _unlink = os.unlink + _rmdir = os.rmdir + _rmtree = shutil.rmtree + +def unlink(filename): + try: + _unlink(filename) + except OSError as error: + # The filename need not exist. + if error.errno not in (errno.ENOENT, errno.ENOTDIR): + raise + +def rmdir(dirname): + try: + _rmdir(dirname) + except OSError as error: + # The directory need not exist. + if error.errno != errno.ENOENT: + raise + +def rmtree(path): + try: + _rmtree(path) + except OSError as error: + if error.errno != errno.ENOENT: + raise + +def make_legacy_pyc(source): + """Move a PEP 3147 pyc/pyo file to its legacy pyc/pyo location. + + The choice of .pyc or .pyo extension is done based on the __debug__ flag + value. + + :param source: The file system path to the source file. The source file + does not need to exist, however the PEP 3147 pyc file must exist. + :return: The file system path to the legacy pyc file. + """ + pyc_file = imp.cache_from_source(source) + up_one = os.path.dirname(os.path.abspath(source)) + legacy_pyc = os.path.join(up_one, source + ('c' if __debug__ else 'o')) + os.rename(pyc_file, legacy_pyc) + return legacy_pyc + +def forget(modname): + """'Forget' a module was ever imported. + + This removes the module from sys.modules and deletes any PEP 3147 or + legacy .pyc and .pyo files. 
+ """ + unload(modname) + for dirname in sys.path: + source = os.path.join(dirname, modname + '.py') + # It doesn't matter if they exist or not, unlink all possible + # combinations of PEP 3147 and legacy pyc and pyo files. + unlink(source + 'c') + unlink(source + 'o') + unlink(imp.cache_from_source(source, debug_override=True)) + unlink(imp.cache_from_source(source, debug_override=False)) + +# On some platforms, should not run gui test even if it is allowed +# in `use_resources'. +if sys.platform.startswith('win'): + import ctypes + import ctypes.wintypes + def _is_gui_available(): + UOI_FLAGS = 1 + WSF_VISIBLE = 0x0001 + class USEROBJECTFLAGS(ctypes.Structure): + _fields_ = [("fInherit", ctypes.wintypes.BOOL), + ("fReserved", ctypes.wintypes.BOOL), + ("dwFlags", ctypes.wintypes.DWORD)] + dll = ctypes.windll.user32 + h = dll.GetProcessWindowStation() + if not h: + raise ctypes.WinError() + uof = USEROBJECTFLAGS() + needed = ctypes.wintypes.DWORD() + res = dll.GetUserObjectInformationW(h, + UOI_FLAGS, + ctypes.byref(uof), + ctypes.sizeof(uof), + ctypes.byref(needed)) + if not res: + raise ctypes.WinError() + return bool(uof.dwFlags & WSF_VISIBLE) +else: + def _is_gui_available(): + return True + +def is_resource_enabled(resource): + """Test whether a resource is enabled. Known resources are set by + regrtest.py.""" + return use_resources is not None and resource in use_resources + +def requires(resource, msg=None): + """Raise ResourceDenied if the specified resource is not available. + + If the caller's module is __main__ then automatically return True. The + possibility of False being returned occurs when regrtest.py is + executing. + """ + if resource == 'gui' and not _is_gui_available(): + raise unittest.SkipTest("Cannot use the 'gui' resource") + # see if the caller's module is __main__ - if so, treat as if + # the resource was set + if sys._getframe(1).f_globals.get("__name__") == "__main__": + return + if not is_resource_enabled(resource): + if msg is None: + msg = "Use of the %r resource not enabled" % resource + raise ResourceDenied(msg) + +def _requires_unix_version(sysname, min_version): + """Decorator raising SkipTest if the OS is `sysname` and the version is less + than `min_version`. + + For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if + the FreeBSD version is less than 7.2. + """ + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kw): + if platform.system() == sysname: + version_txt = platform.release().split('-', 1)[0] + try: + version = tuple(map(int, version_txt.split('.'))) + except ValueError: + pass + else: + if version < min_version: + min_version_txt = '.'.join(map(str, min_version)) + raise unittest.SkipTest( + "%s version %s or higher required, not %s" + % (sysname, min_version_txt, version_txt)) + return func(*args, **kw) + wrapper.min_version = min_version + return wrapper + return decorator + +def requires_freebsd_version(*min_version): + """Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is + less than `min_version`. + + For example, @requires_freebsd_version(7, 2) raises SkipTest if the FreeBSD + version is less than 7.2. + """ + return _requires_unix_version('FreeBSD', min_version) + +def requires_linux_version(*min_version): + """Decorator raising SkipTest if the OS is Linux and the Linux version is + less than `min_version`. + + For example, @requires_linux_version(2, 6, 32) raises SkipTest if the Linux + version is less than 2.6.32. 
+ """ + return _requires_unix_version('Linux', min_version) + +def requires_mac_ver(*min_version): + """Decorator raising SkipTest if the OS is Mac OS X and the OS X + version if less than min_version. + + For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version + is lesser than 10.5. + """ + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kw): + if sys.platform == 'darwin': + version_txt = platform.mac_ver()[0] + try: + version = tuple(map(int, version_txt.split('.'))) + except ValueError: + pass + else: + if version < min_version: + min_version_txt = '.'.join(map(str, min_version)) + raise unittest.SkipTest( + "Mac OS X %s or higher required, not %s" + % (min_version_txt, version_txt)) + return func(*args, **kw) + wrapper.min_version = min_version + return wrapper + return decorator + +# Don't use "localhost", since resolving it uses the DNS under recent +# Windows versions (see issue #18792). +HOST = "127.0.0.1" +HOSTv6 = "::1" + + +def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM): + """Returns an unused port that should be suitable for binding. This is + achieved by creating a temporary socket with the same family and type as + the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to + the specified host address (defaults to 0.0.0.0) with the port set to 0, + eliciting an unused ephemeral port from the OS. The temporary socket is + then closed and deleted, and the ephemeral port is returned. + + Either this method or bind_port() should be used for any tests where a + server socket needs to be bound to a particular port for the duration of + the test. Which one to use depends on whether the calling code is creating + a python socket, or if an unused port needs to be provided in a constructor + or passed to an external program (i.e. the -accept argument to openssl's + s_server mode). Always prefer bind_port() over find_unused_port() where + possible. Hard coded ports should *NEVER* be used. As soon as a server + socket is bound to a hard coded port, the ability to run multiple instances + of the test simultaneously on the same host is compromised, which makes the + test a ticking time bomb in a buildbot environment. On Unix buildbots, this + may simply manifest as a failed test, which can be recovered from without + intervention in most cases, but on Windows, the entire python process can + completely and utterly wedge, requiring someone to log in to the buildbot + and manually kill the affected process. + + (This is easy to reproduce on Windows, unfortunately, and can be traced to + the SO_REUSEADDR socket option having different semantics on Windows versus + Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind, + listen and then accept connections on identical host/ports. An EADDRINUSE + socket.error will be raised at some point (depending on the platform and + the order bind and listen were called on each socket). + + However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE + will ever be raised when attempting to bind two identical host/ports. When + accept() is called on each socket, the second caller's process will steal + the port from the first caller, leaving them both in an awkwardly wedged + state where they'll no longer respond to any signals or graceful kills, and + must be forcibly killed via OpenProcess()/TerminateProcess(). 
+ + The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option + instead of SO_REUSEADDR, which effectively affords the same semantics as + SO_REUSEADDR on Unix. Given the propensity of Unix developers in the Open + Source world compared to Windows ones, this is a common mistake. A quick + look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when + openssl.exe is called with the 's_server' option, for example. See + http://bugs.python.org/issue2550 for more info. The following site also + has a very thorough description about the implications of both REUSEADDR + and EXCLUSIVEADDRUSE on Windows: + http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx) + + XXX: although this approach is a vast improvement on previous attempts to + elicit unused ports, it rests heavily on the assumption that the ephemeral + port returned to us by the OS won't immediately be dished back out to some + other process when we close and delete our temporary socket but before our + calling code has a chance to bind the returned port. We can deal with this + issue if/when we come across it. + """ + + tempsock = socket.socket(family, socktype) + port = bind_port(tempsock) + tempsock.close() + del tempsock + return port + +def bind_port(sock, host=HOST): + """Bind the socket to a free port and return the port number. Relies on + ephemeral ports in order to ensure we are using an unbound port. This is + important as many tests may be running simultaneously, especially in a + buildbot environment. This method raises an exception if the sock.family + is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR + or SO_REUSEPORT set on it. Tests should *never* set these socket options + for TCP/IP sockets. The only case for setting these options is testing + multicasting via multiple UDP sockets. + + Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e. + on Windows), it will be set on the socket. This will prevent anyone else + from bind()'ing to our host/port for the duration of the test. + """ + + if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM: + if hasattr(socket, 'SO_REUSEADDR'): + if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1: + raise TestFailed("tests should never set the SO_REUSEADDR " \ + "socket option on TCP/IP sockets!") + if hasattr(socket, 'SO_REUSEPORT'): + try: + if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1: + raise TestFailed("tests should never set the SO_REUSEPORT " \ + "socket option on TCP/IP sockets!") + except socket.error: + # Python's socket module was compiled using modern headers + # thus defining SO_REUSEPORT but this process is running + # under an older kernel that does not support SO_REUSEPORT. + pass + if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'): + sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + + sock.bind((host, 0)) + port = sock.getsockname()[1] + return port + +def _is_ipv6_enabled(): + """Check whether IPv6 is enabled on this host.""" + if socket.has_ipv6: + sock = None + try: + sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) + sock.bind(('::1', 0)) + return True + except (socket.error, socket.gaierror): + pass + finally: + if sock: + sock.close() + return False + +IPV6_ENABLED = _is_ipv6_enabled() + + +# A constant likely larger than the underlying OS pipe buffer size, to +# make writes blocking. 
+# Windows limit seems to be around 512 B, and many Unix kernels have a +# 64 KiB pipe buffer size or 16 * PAGE_SIZE: take a few megs to be sure. +# (see issue #17835 for a discussion of this number). +PIPE_MAX_SIZE = 4 * 1024 * 1024 + 1 + +# A constant likely larger than the underlying OS socket buffer size, to make +# writes blocking. +# The socket buffer sizes can usually be tuned system-wide (e.g. through sysctl +# on Linux), or on a per-socket basis (SO_SNDBUF/SO_RCVBUF). See issue #18643 +# for a discussion of this number). +SOCK_MAX_SIZE = 16 * 1024 * 1024 + 1 + +# # decorator for skipping tests on non-IEEE 754 platforms +# requires_IEEE_754 = unittest.skipUnless( +# float.__getformat__("double").startswith("IEEE"), +# "test requires IEEE 754 doubles") + +requires_zlib = unittest.skipUnless(zlib, 'requires zlib') + +requires_bz2 = unittest.skipUnless(bz2, 'requires bz2') + +requires_lzma = unittest.skipUnless(lzma, 'requires lzma') + +is_jython = sys.platform.startswith('java') + +# Filename used for testing +if os.name == 'java': + # Jython disallows @ in module names + TESTFN = '$test' +else: + TESTFN = '@test' + +# Disambiguate TESTFN for parallel testing, while letting it remain a valid +# module name. +TESTFN = "{0}_{1}_tmp".format(TESTFN, os.getpid()) + +# # FS_NONASCII: non-ASCII character encodable by os.fsencode(), +# # or None if there is no such character. +# FS_NONASCII = None +# for character in ( +# # First try printable and common characters to have a readable filename. +# # For each character, the encoding list are just example of encodings able +# # to encode the character (the list is not exhaustive). +# +# # U+00E6 (Latin Small Letter Ae): cp1252, iso-8859-1 +# '\u00E6', +# # U+0130 (Latin Capital Letter I With Dot Above): cp1254, iso8859_3 +# '\u0130', +# # U+0141 (Latin Capital Letter L With Stroke): cp1250, cp1257 +# '\u0141', +# # U+03C6 (Greek Small Letter Phi): cp1253 +# '\u03C6', +# # U+041A (Cyrillic Capital Letter Ka): cp1251 +# '\u041A', +# # U+05D0 (Hebrew Letter Alef): Encodable to cp424 +# '\u05D0', +# # U+060C (Arabic Comma): cp864, cp1006, iso8859_6, mac_arabic +# '\u060C', +# # U+062A (Arabic Letter Teh): cp720 +# '\u062A', +# # U+0E01 (Thai Character Ko Kai): cp874 +# '\u0E01', +# +# # Then try more "special" characters. "special" because they may be +# # interpreted or displayed differently depending on the exact locale +# # encoding and the font. +# +# # U+00A0 (No-Break Space) +# '\u00A0', +# # U+20AC (Euro Sign) +# '\u20AC', +# ): +# try: +# os.fsdecode(os.fsencode(character)) +# except UnicodeError: +# pass +# else: +# FS_NONASCII = character +# break +# +# # TESTFN_UNICODE is a non-ascii filename +# TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" +# if sys.platform == 'darwin': +# # In Mac OS X's VFS API file names are, by definition, canonically +# # decomposed Unicode, encoded using UTF-8. See QA1173: +# # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html +# import unicodedata +# TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) +# TESTFN_ENCODING = sys.getfilesystemencoding() +# +# # TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be +# # encoded by the filesystem encoding (in strict mode). It can be None if we +# # cannot generate such filename. 
+# TESTFN_UNENCODABLE = None +# if os.name in ('nt', 'ce'): +# # skip win32s (0) or Windows 9x/ME (1) +# if sys.getwindowsversion().platform >= 2: +# # Different kinds of characters from various languages to minimize the +# # probability that the whole name is encodable to MBCS (issue #9819) +# TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" +# try: +# TESTFN_UNENCODABLE.encode(TESTFN_ENCODING) +# except UnicodeEncodeError: +# pass +# else: +# print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). ' +# 'Unicode filename tests may not be effective' +# % (TESTFN_UNENCODABLE, TESTFN_ENCODING)) +# TESTFN_UNENCODABLE = None +# # Mac OS X denies unencodable filenames (invalid utf-8) +# elif sys.platform != 'darwin': +# try: +# # ascii and utf-8 cannot encode the byte 0xff +# b'\xff'.decode(TESTFN_ENCODING) +# except UnicodeDecodeError: +# # 0xff will be encoded using the surrogate character u+DCFF +# TESTFN_UNENCODABLE = TESTFN \ +# + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape') +# else: +# # File system encoding (eg. ISO-8859-* encodings) can encode +# # the byte 0xff. Skip some unicode filename tests. +# pass +# +# # TESTFN_UNDECODABLE is a filename (bytes type) that should *not* be able to be +# # decoded from the filesystem encoding (in strict mode). It can be None if we +# # cannot generate such filename (ex: the latin1 encoding can decode any byte +# # sequence). On UNIX, TESTFN_UNDECODABLE can be decoded by os.fsdecode() thanks +# # to the surrogateescape error handler (PEP 383), but not from the filesystem +# # encoding in strict mode. +# TESTFN_UNDECODABLE = None +# for name in ( +# # b'\xff' is not decodable by os.fsdecode() with code page 932. Windows +# # accepts it to create a file or a directory, or don't accept to enter to +# # such directory (when the bytes name is used). So test b'\xe7' first: it is +# # not decodable from cp932. +# b'\xe7w\xf0', +# # undecodable from ASCII, UTF-8 +# b'\xff', +# # undecodable from iso8859-3, iso8859-6, iso8859-7, cp424, iso8859-8, cp856 +# # and cp857 +# b'\xae\xd5' +# # undecodable from UTF-8 (UNIX and Mac OS X) +# b'\xed\xb2\x80', b'\xed\xb4\x80', +# # undecodable from shift_jis, cp869, cp874, cp932, cp1250, cp1251, cp1252, +# # cp1253, cp1254, cp1255, cp1257, cp1258 +# b'\x81\x98', +# ): +# try: +# name.decode(TESTFN_ENCODING) +# except UnicodeDecodeError: +# TESTFN_UNDECODABLE = os.fsencode(TESTFN) + name +# break +# +# if FS_NONASCII: +# TESTFN_NONASCII = TESTFN + '-' + FS_NONASCII +# else: +# TESTFN_NONASCII = None + +# Save the initial cwd +SAVEDCWD = os.getcwd() + +@contextlib.contextmanager +def temp_cwd(name='tempcwd', quiet=False, path=None): + """ + Context manager that temporarily changes the CWD. + + An existing path may be provided as *path*, in which case this + function makes no changes to the file system. + + Otherwise, the new CWD is created in the current directory and it's + named *name*. If *quiet* is False (default) and it's not possible to + create or change the CWD, an error is raised. If it's True, only a + warning is raised and the original CWD is used. 
+ """ + saved_dir = os.getcwd() + is_temporary = False + if path is None: + path = name + try: + os.mkdir(name) + is_temporary = True + except OSError: + if not quiet: + raise + warnings.warn('tests may fail, unable to create temp CWD ' + name, + RuntimeWarning, stacklevel=3) + try: + os.chdir(path) + except OSError: + if not quiet: + raise + warnings.warn('tests may fail, unable to change the CWD to ' + path, + RuntimeWarning, stacklevel=3) + try: + yield os.getcwd() + finally: + os.chdir(saved_dir) + if is_temporary: + rmtree(name) + + +if hasattr(os, "umask"): + @contextlib.contextmanager + def temp_umask(umask): + """Context manager that temporarily sets the process umask.""" + oldmask = os.umask(umask) + try: + yield + finally: + os.umask(oldmask) + + +def findfile(file, here=__file__, subdir=None): + """Try to find a file on sys.path and the working directory. If it is not + found the argument passed to the function is returned (this does not + necessarily signal failure; could still be the legitimate path).""" + if os.path.isabs(file): + return file + if subdir is not None: + file = os.path.join(subdir, file) + path = sys.path + path = [os.path.dirname(here)] + path + for dn in path: + fn = os.path.join(dn, file) + if os.path.exists(fn): return fn + return file + +def create_empty_file(filename): + """Create an empty file. If the file already exists, truncate it.""" + fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) + os.close(fd) + +def sortdict(dict): + "Like repr(dict), but in sorted order." + items = sorted(dict.items()) + reprpairs = ["%r: %r" % pair for pair in items] + withcommas = ", ".join(reprpairs) + return "{%s}" % withcommas + +def make_bad_fd(): + """ + Create an invalid file descriptor by opening and closing a file and return + its fd. + """ + file = open(TESTFN, "wb") + try: + return file.fileno() + finally: + file.close() + unlink(TESTFN) + +def check_syntax_error(testcase, statement): + testcase.assertRaises(SyntaxError, compile, statement, + '', 'exec') + +def open_urlresource(url, *args, **kw): + from future.backports.urllib import (request as urllib_request, + parse as urllib_parse) + + check = kw.pop('check', None) + + filename = urllib_parse.urlparse(url)[2].split('/')[-1] # '/': it's URL! + + fn = os.path.join(os.path.dirname(__file__), "data", filename) + + def check_valid_file(fn): + f = open(fn, *args, **kw) + if check is None: + return f + elif check(f): + f.seek(0) + return f + f.close() + + if os.path.exists(fn): + f = check_valid_file(fn) + if f is not None: + return f + unlink(fn) + + # Verify the requirement before downloading the file + requires('urlfetch') + + print('\tfetching %s ...' % url, file=get_original_stdout()) + f = urllib_request.urlopen(url, timeout=15) + try: + with open(fn, "wb") as out: + s = f.read() + while s: + out.write(s) + s = f.read() + finally: + f.close() + + f = check_valid_file(fn) + if f is not None: + return f + raise TestFailed('invalid resource %r' % fn) + + +class WarningsRecorder(object): + """Convenience wrapper for the warnings list returned on + entry to the warnings.catch_warnings() context manager. 
+ """ + def __init__(self, warnings_list): + self._warnings = warnings_list + self._last = 0 + + def __getattr__(self, attr): + if len(self._warnings) > self._last: + return getattr(self._warnings[-1], attr) + elif attr in warnings.WarningMessage._WARNING_DETAILS: + return None + raise AttributeError("%r has no attribute %r" % (self, attr)) + + @property + def warnings(self): + return self._warnings[self._last:] + + def reset(self): + self._last = len(self._warnings) + + +def _filterwarnings(filters, quiet=False): + """Catch the warnings, then check if all the expected + warnings have been raised and re-raise unexpected warnings. + If 'quiet' is True, only re-raise the unexpected warnings. + """ + # Clear the warning registry of the calling module + # in order to re-raise the warnings. + frame = sys._getframe(2) + registry = frame.f_globals.get('__warningregistry__') + if registry: + if utils.PY3: + registry.clear() + else: + # Py2-compatible: + for i in range(len(registry)): + registry.pop() + with warnings.catch_warnings(record=True) as w: + # Set filter "always" to record all warnings. Because + # test_warnings swap the module, we need to look up in + # the sys.modules dictionary. + sys.modules['warnings'].simplefilter("always") + yield WarningsRecorder(w) + # Filter the recorded warnings + reraise = list(w) + missing = [] + for msg, cat in filters: + seen = False + for w in reraise[:]: + warning = w.message + # Filter out the matching messages + if (re.match(msg, str(warning), re.I) and + issubclass(warning.__class__, cat)): + seen = True + reraise.remove(w) + if not seen and not quiet: + # This filter caught nothing + missing.append((msg, cat.__name__)) + if reraise: + raise AssertionError("unhandled warning %s" % reraise[0]) + if missing: + raise AssertionError("filter (%r, %s) did not catch any warning" % + missing[0]) + + +@contextlib.contextmanager +def check_warnings(*filters, **kwargs): + """Context manager to silence warnings. + + Accept 2-tuples as positional arguments: + ("message regexp", WarningCategory) + + Optional argument: + - if 'quiet' is True, it does not fail if a filter catches nothing + (default True without argument, + default False if some filters are defined) + + Without argument, it defaults to: + check_warnings(("", Warning), quiet=True) + """ + quiet = kwargs.get('quiet') + if not filters: + filters = (("", Warning),) + # Preserve backward compatibility + if quiet is None: + quiet = True + return _filterwarnings(filters, quiet) + + +class CleanImport(object): + """Context manager to force import to return a new module reference. + + This is useful for testing module-level behaviours, such as + the emission of a DeprecationWarning on import. + + Use like this: + + with CleanImport("foo"): + importlib.import_module("foo") # new reference + """ + + def __init__(self, *module_names): + self.original_modules = sys.modules.copy() + for module_name in module_names: + if module_name in sys.modules: + module = sys.modules[module_name] + # It is possible that module_name is just an alias for + # another module (e.g. stub for modules renamed in 3.x). + # In that case, we also need delete the real module to clear + # the import cache. 
+ if module.__name__ != module_name: + del sys.modules[module.__name__] + del sys.modules[module_name] + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + sys.modules.update(self.original_modules) + +### Added for python-future: +if utils.PY3: + import collections.abc + mybase = collections.abc.MutableMapping +else: + import UserDict + mybase = UserDict.DictMixin +### + +class EnvironmentVarGuard(mybase): + + """Class to help protect the environment variable properly. Can be used as + a context manager.""" + + def __init__(self): + self._environ = os.environ + self._changed = {} + + def __getitem__(self, envvar): + return self._environ[envvar] + + def __setitem__(self, envvar, value): + # Remember the initial value on the first access + if envvar not in self._changed: + self._changed[envvar] = self._environ.get(envvar) + self._environ[envvar] = value + + def __delitem__(self, envvar): + # Remember the initial value on the first access + if envvar not in self._changed: + self._changed[envvar] = self._environ.get(envvar) + if envvar in self._environ: + del self._environ[envvar] + + def keys(self): + return self._environ.keys() + + def __iter__(self): + return iter(self._environ) + + def __len__(self): + return len(self._environ) + + def set(self, envvar, value): + self[envvar] = value + + def unset(self, envvar): + del self[envvar] + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + for (k, v) in self._changed.items(): + if v is None: + if k in self._environ: + del self._environ[k] + else: + self._environ[k] = v + os.environ = self._environ + + +class DirsOnSysPath(object): + """Context manager to temporarily add directories to sys.path. + + This makes a copy of sys.path, appends any directories given + as positional arguments, then reverts sys.path to the copied + settings when the context ends. + + Note that *all* sys.path modifications in the body of the + context manager, including replacement of the object, + will be reverted at the end of the block. + """ + + def __init__(self, *paths): + self.original_value = sys.path[:] + self.original_object = sys.path + sys.path.extend(paths) + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + sys.path = self.original_object + sys.path[:] = self.original_value + + +class TransientResource(object): + + """Raise ResourceDenied if an exception is raised while the context manager + is in effect that matches the specified exception and attributes.""" + + def __init__(self, exc, **kwargs): + self.exc = exc + self.attrs = kwargs + + def __enter__(self): + return self + + def __exit__(self, type_=None, value=None, traceback=None): + """If type_ is a subclass of self.exc and value has attributes matching + self.attrs, raise ResourceDenied. Otherwise let the exception + propagate (if any).""" + if type_ is not None and issubclass(self.exc, type_): + for attr, attr_value in self.attrs.items(): + if not hasattr(value, attr): + break + if getattr(value, attr) != attr_value: + break + else: + raise ResourceDenied("an optional resource is not available") + +# Context managers that raise ResourceDenied when various issues +# with the Internet connection manifest themselves as exceptions. 
+# XXX deprecate these and use transient_internet() instead +time_out = TransientResource(IOError, errno=errno.ETIMEDOUT) +socket_peer_reset = TransientResource(socket.error, errno=errno.ECONNRESET) +ioerror_peer_reset = TransientResource(IOError, errno=errno.ECONNRESET) + + +@contextlib.contextmanager +def transient_internet(resource_name, timeout=30.0, errnos=()): + """Return a context manager that raises ResourceDenied when various issues + with the Internet connection manifest themselves as exceptions.""" + default_errnos = [ + ('ECONNREFUSED', 111), + ('ECONNRESET', 104), + ('EHOSTUNREACH', 113), + ('ENETUNREACH', 101), + ('ETIMEDOUT', 110), + ] + default_gai_errnos = [ + ('EAI_AGAIN', -3), + ('EAI_FAIL', -4), + ('EAI_NONAME', -2), + ('EAI_NODATA', -5), + # Encountered when trying to resolve IPv6-only hostnames + ('WSANO_DATA', 11004), + ] + + denied = ResourceDenied("Resource %r is not available" % resource_name) + captured_errnos = errnos + gai_errnos = [] + if not captured_errnos: + captured_errnos = [getattr(errno, name, num) + for (name, num) in default_errnos] + gai_errnos = [getattr(socket, name, num) + for (name, num) in default_gai_errnos] + + def filter_error(err): + n = getattr(err, 'errno', None) + if (isinstance(err, socket.timeout) or + (isinstance(err, socket.gaierror) and n in gai_errnos) or + n in captured_errnos): + if not verbose: + sys.stderr.write(denied.args[0] + "\n") + # Was: raise denied from err + # For Python-Future: + exc = denied + exc.__cause__ = err + raise exc + + old_timeout = socket.getdefaulttimeout() + try: + if timeout is not None: + socket.setdefaulttimeout(timeout) + yield + except IOError as err: + # urllib can wrap original socket errors multiple times (!), we must + # unwrap to get at the original error. + while True: + a = err.args + if len(a) >= 1 and isinstance(a[0], IOError): + err = a[0] + # The error can also be wrapped as args[1]: + # except socket.error as msg: + # raise IOError('socket error', msg).with_traceback(sys.exc_info()[2]) + elif len(a) >= 2 and isinstance(a[1], IOError): + err = a[1] + else: + break + filter_error(err) + raise + # XXX should we catch generic exceptions and look for their + # __cause__ or __context__? + finally: + socket.setdefaulttimeout(old_timeout) + + +@contextlib.contextmanager +def captured_output(stream_name): + """Return a context manager used by captured_stdout/stdin/stderr + that temporarily replaces the sys stream *stream_name* with a StringIO.""" + import io + orig_stdout = getattr(sys, stream_name) + setattr(sys, stream_name, io.StringIO()) + try: + yield getattr(sys, stream_name) + finally: + setattr(sys, stream_name, orig_stdout) + +def captured_stdout(): + """Capture the output of sys.stdout: + + with captured_stdout() as s: + print("hello") + self.assertEqual(s.getvalue(), "hello") + """ + return captured_output("stdout") + +def captured_stderr(): + return captured_output("stderr") + +def captured_stdin(): + return captured_output("stdin") + + +def gc_collect(): + """Force as many objects as possible to be collected. + + In non-CPython implementations of Python, this is needed because timely + deallocation is not guaranteed by the garbage collector. (Even in CPython + this can be the case in case of reference cycles.) This means that __del__ + methods may be called later than expected and weakrefs may remain alive for + longer than expected. This function tries its best to force all garbage + objects to disappear. 
+ """ + gc.collect() + if is_jython: + time.sleep(0.1) + gc.collect() + gc.collect() + +@contextlib.contextmanager +def disable_gc(): + have_gc = gc.isenabled() + gc.disable() + try: + yield + finally: + if have_gc: + gc.enable() + + +def python_is_optimized(): + """Find if Python was built with optimizations.""" + # We don't have sysconfig on Py2.6: + import sysconfig + cflags = sysconfig.get_config_var('PY_CFLAGS') or '' + final_opt = "" + for opt in cflags.split(): + if opt.startswith('-O'): + final_opt = opt + return final_opt != '' and final_opt != '-O0' + + +_header = 'nP' +_align = '0n' +if hasattr(sys, "gettotalrefcount"): + _header = '2P' + _header + _align = '0P' +_vheader = _header + 'n' + +def calcobjsize(fmt): + return struct.calcsize(_header + fmt + _align) + +def calcvobjsize(fmt): + return struct.calcsize(_vheader + fmt + _align) + + +_TPFLAGS_HAVE_GC = 1<<14 +_TPFLAGS_HEAPTYPE = 1<<9 + +def check_sizeof(test, o, size): + result = sys.getsizeof(o) + # add GC header size + if ((type(o) == type) and (o.__flags__ & _TPFLAGS_HEAPTYPE) or\ + ((type(o) != type) and (type(o).__flags__ & _TPFLAGS_HAVE_GC))): + size += _testcapi.SIZEOF_PYGC_HEAD + msg = 'wrong size for %s: got %d, expected %d' \ + % (type(o), result, size) + test.assertEqual(result, size, msg) + +#======================================================================= +# Decorator for running a function in a different locale, correctly resetting +# it afterwards. + +def run_with_locale(catstr, *locales): + def decorator(func): + def inner(*args, **kwds): + try: + import locale + category = getattr(locale, catstr) + orig_locale = locale.setlocale(category) + except AttributeError: + # if the test author gives us an invalid category string + raise + except: + # cannot retrieve original locale, so do nothing + locale = orig_locale = None + else: + for loc in locales: + try: + locale.setlocale(category, loc) + break + except: + pass + + # now run the function, resetting the locale on exceptions + try: + return func(*args, **kwds) + finally: + if locale and orig_locale: + locale.setlocale(category, orig_locale) + inner.__name__ = func.__name__ + inner.__doc__ = func.__doc__ + return inner + return decorator + +#======================================================================= +# Decorator for running a function in a specific timezone, correctly +# resetting it afterwards. + +def run_with_tz(tz): + def decorator(func): + def inner(*args, **kwds): + try: + tzset = time.tzset + except AttributeError: + raise unittest.SkipTest("tzset required") + if 'TZ' in os.environ: + orig_tz = os.environ['TZ'] + else: + orig_tz = None + os.environ['TZ'] = tz + tzset() + + # now run the function, resetting the tz on exceptions + try: + return func(*args, **kwds) + finally: + if orig_tz is None: + del os.environ['TZ'] + else: + os.environ['TZ'] = orig_tz + time.tzset() + + inner.__name__ = func.__name__ + inner.__doc__ = func.__doc__ + return inner + return decorator + +#======================================================================= +# Big-memory-test support. Separate from 'resources' because memory use +# should be configurable. + +# Some handy shorthands. Note that these are used for byte-limits as well +# as size-limits, in the various bigmem tests +_1M = 1024*1024 +_1G = 1024 * _1M +_2G = 2 * _1G +_4G = 4 * _1G + +MAX_Py_ssize_t = sys.maxsize + +def set_memlimit(limit): + global max_memuse + global real_max_memuse + sizes = { + 'k': 1024, + 'm': _1M, + 'g': _1G, + 't': 1024*_1G, + } + m = re.match(r'(\d+(\.\d+)?) 
(K|M|G|T)b?$', limit, + re.IGNORECASE | re.VERBOSE) + if m is None: + raise ValueError('Invalid memory limit %r' % (limit,)) + memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()]) + real_max_memuse = memlimit + if memlimit > MAX_Py_ssize_t: + memlimit = MAX_Py_ssize_t + if memlimit < _2G - 1: + raise ValueError('Memory limit %r too low to be useful' % (limit,)) + max_memuse = memlimit + +class _MemoryWatchdog(object): + """An object which periodically watches the process' memory consumption + and prints it out. + """ + + def __init__(self): + self.procfile = '/proc/{pid}/statm'.format(pid=os.getpid()) + self.started = False + + def start(self): + try: + f = open(self.procfile, 'r') + except OSError as e: + warnings.warn('/proc not available for stats: {0}'.format(e), + RuntimeWarning) + sys.stderr.flush() + return + + watchdog_script = findfile("memory_watchdog.py") + self.mem_watchdog = subprocess.Popen([sys.executable, watchdog_script], + stdin=f, stderr=subprocess.DEVNULL) + f.close() + self.started = True + + def stop(self): + if self.started: + self.mem_watchdog.terminate() + self.mem_watchdog.wait() + + +def bigmemtest(size, memuse, dry_run=True): + """Decorator for bigmem tests. + + 'minsize' is the minimum useful size for the test (in arbitrary, + test-interpreted units.) 'memuse' is the number of 'bytes per size' for + the test, or a good estimate of it. + + if 'dry_run' is False, it means the test doesn't support dummy runs + when -M is not specified. + """ + def decorator(f): + def wrapper(self): + size = wrapper.size + memuse = wrapper.memuse + if not real_max_memuse: + maxsize = 5147 + else: + maxsize = size + + if ((real_max_memuse or not dry_run) + and real_max_memuse < maxsize * memuse): + raise unittest.SkipTest( + "not enough memory: %.1fG minimum needed" + % (size * memuse / (1024 ** 3))) + + if real_max_memuse and verbose: + print() + print(" ... expected peak memory use: {peak:.1f}G" + .format(peak=size * memuse / (1024 ** 3))) + watchdog = _MemoryWatchdog() + watchdog.start() + else: + watchdog = None + + try: + return f(self, maxsize) + finally: + if watchdog: + watchdog.stop() + + wrapper.size = size + wrapper.memuse = memuse + return wrapper + return decorator + +def bigaddrspacetest(f): + """Decorator for tests that fill the address space.""" + def wrapper(self): + if max_memuse < MAX_Py_ssize_t: + if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31: + raise unittest.SkipTest( + "not enough memory: try a 32-bit build instead") + else: + raise unittest.SkipTest( + "not enough memory: %.1fG minimum needed" + % (MAX_Py_ssize_t / (1024 ** 3))) + else: + return f(self) + return wrapper + +#======================================================================= +# unittest integration. + +class BasicTestRunner(object): + def run(self, test): + result = unittest.TestResult() + test(result) + return result + +def _id(obj): + return obj + +def requires_resource(resource): + if resource == 'gui' and not _is_gui_available(): + return unittest.skip("resource 'gui' is not available") + if is_resource_enabled(resource): + return _id + else: + return unittest.skip("resource {0!r} is not enabled".format(resource)) + +def cpython_only(test): + """ + Decorator for tests only applicable on CPython. 
+ """ + return impl_detail(cpython=True)(test) + +def impl_detail(msg=None, **guards): + if check_impl_detail(**guards): + return _id + if msg is None: + guardnames, default = _parse_guards(guards) + if default: + msg = "implementation detail not available on {0}" + else: + msg = "implementation detail specific to {0}" + guardnames = sorted(guardnames.keys()) + msg = msg.format(' or '.join(guardnames)) + return unittest.skip(msg) + +def _parse_guards(guards): + # Returns a tuple ({platform_name: run_me}, default_value) + if not guards: + return ({'cpython': True}, False) + is_true = list(guards.values())[0] + assert list(guards.values()) == [is_true] * len(guards) # all True or all False + return (guards, not is_true) + +# Use the following check to guard CPython's implementation-specific tests -- +# or to run them only on the implementation(s) guarded by the arguments. +def check_impl_detail(**guards): + """This function returns True or False depending on the host platform. + Examples: + if check_impl_detail(): # only on CPython (default) + if check_impl_detail(jython=True): # only on Jython + if check_impl_detail(cpython=False): # everywhere except on CPython + """ + guards, default = _parse_guards(guards) + return guards.get(platform.python_implementation().lower(), default) + + +def no_tracing(func): + """Decorator to temporarily turn off tracing for the duration of a test.""" + if not hasattr(sys, 'gettrace'): + return func + else: + @functools.wraps(func) + def wrapper(*args, **kwargs): + original_trace = sys.gettrace() + try: + sys.settrace(None) + return func(*args, **kwargs) + finally: + sys.settrace(original_trace) + return wrapper + + +def refcount_test(test): + """Decorator for tests which involve reference counting. + + To start, the decorator does not run the test if is not run by CPython. + After that, any trace function is unset during the test to prevent + unexpected refcounts caused by the trace function. 
+ + """ + return no_tracing(cpython_only(test)) + + +def _filter_suite(suite, pred): + """Recursively filter test cases in a suite based on a predicate.""" + newtests = [] + for test in suite._tests: + if isinstance(test, unittest.TestSuite): + _filter_suite(test, pred) + newtests.append(test) + else: + if pred(test): + newtests.append(test) + suite._tests = newtests + +def _run_suite(suite): + """Run tests from a unittest.TestSuite-derived class.""" + if verbose: + runner = unittest.TextTestRunner(sys.stdout, verbosity=2, + failfast=failfast) + else: + runner = BasicTestRunner() + + result = runner.run(suite) + if not result.wasSuccessful(): + if len(result.errors) == 1 and not result.failures: + err = result.errors[0][1] + elif len(result.failures) == 1 and not result.errors: + err = result.failures[0][1] + else: + err = "multiple errors occurred" + if not verbose: err += "; run in verbose mode for details" + raise TestFailed(err) + + +def run_unittest(*classes): + """Run tests from unittest.TestCase-derived classes.""" + valid_types = (unittest.TestSuite, unittest.TestCase) + suite = unittest.TestSuite() + for cls in classes: + if isinstance(cls, str): + if cls in sys.modules: + suite.addTest(unittest.findTestCases(sys.modules[cls])) + else: + raise ValueError("str arguments must be keys in sys.modules") + elif isinstance(cls, valid_types): + suite.addTest(cls) + else: + suite.addTest(unittest.makeSuite(cls)) + def case_pred(test): + if match_tests is None: + return True + for name in test.id().split("."): + if fnmatch.fnmatchcase(name, match_tests): + return True + return False + _filter_suite(suite, case_pred) + _run_suite(suite) + +# We don't have sysconfig on Py2.6: +# #======================================================================= +# # Check for the presence of docstrings. +# +# HAVE_DOCSTRINGS = (check_impl_detail(cpython=False) or +# sys.platform == 'win32' or +# sysconfig.get_config_var('WITH_DOC_STRINGS')) +# +# requires_docstrings = unittest.skipUnless(HAVE_DOCSTRINGS, +# "test requires docstrings") +# +# +# #======================================================================= +# doctest driver. + +def run_doctest(module, verbosity=None, optionflags=0): + """Run doctest on the given module. Return (#failures, #tests). + + If optional argument verbosity is not specified (or is None), pass + support's belief about verbosity on to doctest. Else doctest's + usual behavior is used (it searches sys.argv for -v). + """ + + import doctest + + if verbosity is None: + verbosity = verbose + else: + verbosity = None + + f, t = doctest.testmod(module, verbose=verbosity, optionflags=optionflags) + if f: + raise TestFailed("%d of %d doctests failed" % (f, t)) + if verbose: + print('doctest (%s) ... %d tests with zero failures' % + (module.__name__, t)) + return f, t + + +#======================================================================= +# Support for saving and restoring the imported modules. + +def modules_setup(): + return sys.modules.copy(), + +def modules_cleanup(oldmodules): + # Encoders/decoders are registered permanently within the internal + # codec cache. If we destroy the corresponding modules their + # globals will be set to None which will trip up the cached functions. + encodings = [(k, v) for k, v in sys.modules.items() + if k.startswith('encodings.')] + # Was: + # sys.modules.clear() + # Py2-compatible: + for i in range(len(sys.modules)): + sys.modules.pop() + + sys.modules.update(encodings) + # XXX: This kind of problem can affect more than just encodings. 
In particular + # extension modules (such as _ssl) don't cope with reloading properly. + # Really, test modules should be cleaning out the test specific modules they + # know they added (ala test_runpy) rather than relying on this function (as + # test_importhooks and test_pkg do currently). + # Implicitly imported *real* modules should be left alone (see issue 10556). + sys.modules.update(oldmodules) + +#======================================================================= +# Backported versions of threading_setup() and threading_cleanup() which don't refer +# to threading._dangling (not available on Py2.7). + +# Threading support to prevent reporting refleaks when running regrtest.py -R + +# NOTE: we use thread._count() rather than threading.enumerate() (or the +# moral equivalent thereof) because a threading.Thread object is still alive +# until its __bootstrap() method has returned, even after it has been +# unregistered from the threading module. +# thread._count(), on the other hand, only gets decremented *after* the +# __bootstrap() method has returned, which gives us reliable reference counts +# at the end of a test run. + +def threading_setup(): + if _thread: + return _thread._count(), + else: + return 1, + +def threading_cleanup(nb_threads): + if not _thread: + return + + _MAX_COUNT = 10 + for count in range(_MAX_COUNT): + n = _thread._count() + if n == nb_threads: + break + time.sleep(0.1) + # XXX print a warning in case of failure? + +def reap_threads(func): + """Use this function when threads are being used. This will + ensure that the threads are cleaned up even when the test fails. + If threading is unavailable this function does nothing. + """ + if not _thread: + return func + + @functools.wraps(func) + def decorator(*args): + key = threading_setup() + try: + return func(*args) + finally: + threading_cleanup(*key) + return decorator + +def reap_children(): + """Use this function at the end of test_main() whenever sub-processes + are started. This will help ensure that no extra children (zombies) + stick around to hog resources and create problems when looking + for refleaks. + """ + + # Reap all our dead child processes so we don't leave zombies around. + # These hog resources and might be causing some of the buildbots to die. + if hasattr(os, 'waitpid'): + any_process = -1 + while True: + try: + # This will raise an exception on Windows. That's ok. + pid, status = os.waitpid(any_process, os.WNOHANG) + if pid == 0: + break + except: + break + +@contextlib.contextmanager +def swap_attr(obj, attr, new_val): + """Temporary swap out an attribute with a new object. + + Usage: + with swap_attr(obj, "attr", 5): + ... + + This will set obj.attr to 5 for the duration of the with: block, + restoring the old value at the end of the block. If `attr` doesn't + exist on `obj`, it will be created and then deleted at the end of the + block. + """ + if hasattr(obj, attr): + real_val = getattr(obj, attr) + setattr(obj, attr, new_val) + try: + yield + finally: + setattr(obj, attr, real_val) + else: + setattr(obj, attr, new_val) + try: + yield + finally: + delattr(obj, attr) + +@contextlib.contextmanager +def swap_item(obj, item, new_val): + """Temporary swap out an item with a new object. + + Usage: + with swap_item(obj, "item", 5): + ... + + This will set obj["item"] to 5 for the duration of the with: block, + restoring the old value at the end of the block. If `item` doesn't + exist on `obj`, it will be created and then deleted at the end of the + block. 
+ """ + if item in obj: + real_val = obj[item] + obj[item] = new_val + try: + yield + finally: + obj[item] = real_val + else: + obj[item] = new_val + try: + yield + finally: + del obj[item] + +def strip_python_stderr(stderr): + """Strip the stderr of a Python process from potential debug output + emitted by the interpreter. + + This will typically be run on the result of the communicate() method + of a subprocess.Popen object. + """ + stderr = re.sub(br"\[\d+ refs\]\r?\n?", b"", stderr).strip() + return stderr + +def args_from_interpreter_flags(): + """Return a list of command-line arguments reproducing the current + settings in sys.flags and sys.warnoptions.""" + return subprocess._args_from_interpreter_flags() + +#============================================================ +# Support for assertions about logging. +#============================================================ + +class TestHandler(logging.handlers.BufferingHandler): + def __init__(self, matcher): + # BufferingHandler takes a "capacity" argument + # so as to know when to flush. As we're overriding + # shouldFlush anyway, we can set a capacity of zero. + # You can call flush() manually to clear out the + # buffer. + logging.handlers.BufferingHandler.__init__(self, 0) + self.matcher = matcher + + def shouldFlush(self): + return False + + def emit(self, record): + self.format(record) + self.buffer.append(record.__dict__) + + def matches(self, **kwargs): + """ + Look for a saved dict whose keys/values match the supplied arguments. + """ + result = False + for d in self.buffer: + if self.matcher.matches(d, **kwargs): + result = True + break + return result + +class Matcher(object): + + _partial_matches = ('msg', 'message') + + def matches(self, d, **kwargs): + """ + Try to match a single dict with the supplied arguments. + + Keys whose values are strings and which are in self._partial_matches + will be checked for partial (i.e. substring) matches. You can extend + this scheme to (for example) do regular expression matching, etc. + """ + result = True + for k in kwargs: + v = kwargs[k] + dv = d.get(k) + if not self.match_value(k, dv, v): + result = False + break + return result + + def match_value(self, k, dv, v): + """ + Try to match a single stored value (dv) with a supplied value (v). + """ + if type(v) != type(dv): + result = False + elif type(dv) is not str or k not in self._partial_matches: + result = (v == dv) + else: + result = dv.find(v) >= 0 + return result + + +_can_symlink = None +def can_symlink(): + global _can_symlink + if _can_symlink is not None: + return _can_symlink + symlink_path = TESTFN + "can_symlink" + try: + os.symlink(TESTFN, symlink_path) + can = True + except (OSError, NotImplementedError, AttributeError): + can = False + else: + os.remove(symlink_path) + _can_symlink = can + return can + +def skip_unless_symlink(test): + """Skip decorator for tests that require functional symlink""" + ok = can_symlink() + msg = "Requires functional symlink implementation" + return test if ok else unittest.skip(msg)(test) + +_can_xattr = None +def can_xattr(): + global _can_xattr + if _can_xattr is not None: + return _can_xattr + if not hasattr(os, "setxattr"): + can = False + else: + tmp_fp, tmp_name = tempfile.mkstemp() + try: + with open(TESTFN, "wb") as fp: + try: + # TESTFN & tempfile may use different file systems with + # different capabilities + os.setxattr(tmp_fp, b"user.test", b"") + os.setxattr(fp.fileno(), b"user.test", b"") + # Kernels < 2.6.39 don't respect setxattr flags. 
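An illustrative sketch of asserting on log output with the TestHandler/Matcher pair above; the logger name is made up.

import logging

handler = TestHandler(Matcher())
logger = logging.getLogger("example.app")     # hypothetical logger
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.warning("disk %s is nearly full", "/dev/sda1")
# Matcher applies substring matching to the 'msg' key, exact matching elsewhere.
assert handler.matches(levelno=logging.WARNING, msg="nearly full")
logger.removeHandler(handler)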
+ kernel_version = platform.release() + m = re.match("2.6.(\d{1,2})", kernel_version) + can = m is None or int(m.group(1)) >= 39 + except OSError: + can = False + finally: + unlink(TESTFN) + unlink(tmp_name) + _can_xattr = can + return can + +def skip_unless_xattr(test): + """Skip decorator for tests that require functional extended attributes""" + ok = can_xattr() + msg = "no non-broken extended attribute support" + return test if ok else unittest.skip(msg)(test) + + +if sys.platform.startswith('win'): + @contextlib.contextmanager + def suppress_crash_popup(): + """Disable Windows Error Reporting dialogs using SetErrorMode.""" + # see http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621%28v=vs.85%29.aspx + # GetErrorMode is not available on Windows XP and Windows Server 2003, + # but SetErrorMode returns the previous value, so we can use that + import ctypes + k32 = ctypes.windll.kernel32 + SEM_NOGPFAULTERRORBOX = 0x02 + old_error_mode = k32.SetErrorMode(SEM_NOGPFAULTERRORBOX) + k32.SetErrorMode(old_error_mode | SEM_NOGPFAULTERRORBOX) + try: + yield + finally: + k32.SetErrorMode(old_error_mode) +else: + # this is a no-op for other platforms + @contextlib.contextmanager + def suppress_crash_popup(): + yield + + +def patch(test_instance, object_to_patch, attr_name, new_value): + """Override 'object_to_patch'.'attr_name' with 'new_value'. + + Also, add a cleanup procedure to 'test_instance' to restore + 'object_to_patch' value for 'attr_name'. + The 'attr_name' should be a valid attribute for 'object_to_patch'. + + """ + # check that 'attr_name' is a real attribute for 'object_to_patch' + # will raise AttributeError if it does not exist + getattr(object_to_patch, attr_name) + + # keep a copy of the old value + attr_is_local = False + try: + old_value = object_to_patch.__dict__[attr_name] + except (AttributeError, KeyError): + old_value = getattr(object_to_patch, attr_name, None) + else: + attr_is_local = True + + # restore the value when the test is done + def cleanup(): + if attr_is_local: + setattr(object_to_patch, attr_name, old_value) + else: + delattr(object_to_patch, attr_name) + + test_instance.addCleanup(cleanup) + + # actually override the attribute + setattr(object_to_patch, attr_name, new_value) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/total_ordering.py b/minor_project/lib/python3.6/site-packages/future/backports/total_ordering.py new file mode 100644 index 0000000..760f06d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/total_ordering.py @@ -0,0 +1,38 @@ +""" +For Python < 2.7.2. total_ordering in versions prior to 2.7.2 is buggy. +See http://bugs.python.org/issue10042 for details. For these versions use +code borrowed from Python 2.7.3. + +From django.utils. 
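A brief sketch of the decorator this backport provides: given __eq__ and one rich comparison, the remaining methods are filled in. The Version class is hypothetical.

@total_ordering
class Version(object):
    def __init__(self, number):
        self.number = number
    def __eq__(self, other):
        return self.number == other.number
    def __lt__(self, other):
        return self.number < other.number

assert Version(1) <= Version(2)   # __le__ supplied by the decorator
assert Version(3) > Version(2)    # __gt__ likewise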
+""" + +import sys +if sys.version_info >= (2, 7, 2): + from functools import total_ordering +else: + def total_ordering(cls): + """Class decorator that fills in missing ordering methods""" + convert = { + '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)), + ('__le__', lambda self, other: self < other or self == other), + ('__ge__', lambda self, other: not self < other)], + '__le__': [('__ge__', lambda self, other: not self <= other or self == other), + ('__lt__', lambda self, other: self <= other and not self == other), + ('__gt__', lambda self, other: not self <= other)], + '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)), + ('__ge__', lambda self, other: self > other or self == other), + ('__le__', lambda self, other: not self > other)], + '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other), + ('__gt__', lambda self, other: self >= other and not self == other), + ('__lt__', lambda self, other: not self >= other)] + } + roots = set(dir(cls)) & set(convert) + if not roots: + raise ValueError('must define at least one ordering operation: < > <= >=') + root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__ + for opname, opfunc in convert[root]: + if opname not in roots: + opfunc.__name__ = opname + opfunc.__doc__ = getattr(int, opname).__doc__ + setattr(cls, opname, opfunc) + return cls diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/__init__.py b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..83ac5e3 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/error.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/error.cpython-36.pyc new file mode 100644 index 0000000..d95062e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/error.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/parse.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/parse.cpython-36.pyc new file mode 100644 index 0000000..2566fbe Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/parse.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/request.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/request.cpython-36.pyc new file mode 100644 index 0000000..29f61d7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/request.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/response.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/response.cpython-36.pyc new file mode 100644 index 0000000..a438ba5 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/response.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/robotparser.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/robotparser.cpython-36.pyc new file mode 100644 index 0000000..66f901e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/urllib/__pycache__/robotparser.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/error.py b/minor_project/lib/python3.6/site-packages/future/backports/urllib/error.py new file mode 100644 index 0000000..a473e44 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/urllib/error.py @@ -0,0 +1,75 @@ +"""Exception classes raised by urllib. + +The base exception class is URLError, which inherits from IOError. It +doesn't define any behavior of its own, but is the base class for all +exceptions defined in this package. + +HTTPError is an exception class that is also a valid HTTP response +instance. It behaves this way because HTTP protocol errors are valid +responses, with a status code, headers, and a body. In some contexts, +an application may want to handle an exception like a regular +response. +""" +from __future__ import absolute_import, division, unicode_literals +from future import standard_library + +from future.backports.urllib import response as urllib_response + + +__all__ = ['URLError', 'HTTPError', 'ContentTooShortError'] + + +# do these error classes make sense? +# make sure all of the IOError stuff is overridden. we just want to be +# subtypes. + +class URLError(IOError): + # URLError is a sub-type of IOError, but it doesn't share any of + # the implementation. need to override __init__ and __str__. + # It sets self.args for compatibility with other EnvironmentError + # subclasses, but args doesn't have the typical format with errno in + # slot 0 and strerror in slot 1. This may be better than nothing. + def __init__(self, reason, filename=None): + self.args = reason, + self.reason = reason + if filename is not None: + self.filename = filename + + def __str__(self): + return '' % self.reason + +class HTTPError(URLError, urllib_response.addinfourl): + """Raised when HTTP error occurs, but also acts like non-error return""" + __super_init = urllib_response.addinfourl.__init__ + + def __init__(self, url, code, msg, hdrs, fp): + self.code = code + self.msg = msg + self.hdrs = hdrs + self.fp = fp + self.filename = url + # The addinfourl classes depend on fp being a valid file + # object. In some cases, the HTTPError may not have a valid + # file object. If this happens, the simplest workaround is to + # not initialize the base classes. + if fp is not None: + self.__super_init(fp, hdrs, url, code) + + def __str__(self): + return 'HTTP Error %s: %s' % (self.code, self.msg) + + # since URLError specifies a .reason attribute, HTTPError should also + # provide this attribute. See issue13211 for discussion. 
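A hedged usage sketch for the exception classes above; the URL is made up, and urlopen() is assumed to come from the companion request module in this backport.

from future.backports.urllib.request import urlopen

try:
    resp = urlopen("http://example.com/missing")   # hypothetical URL
except HTTPError as e:
    # HTTPError doubles as a response: status, headers and body are readable.
    print(e.code, e.reason)
    error_page = e.read()
except URLError as e:
    print("failed to reach server:", e.reason)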
+ @property + def reason(self): + return self.msg + + def info(self): + return self.hdrs + + +# exception raised when downloaded size does not match content-length +class ContentTooShortError(URLError): + def __init__(self, message, content): + URLError.__init__(self, message) + self.content = content diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/parse.py b/minor_project/lib/python3.6/site-packages/future/backports/urllib/parse.py new file mode 100644 index 0000000..04e52d4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/urllib/parse.py @@ -0,0 +1,991 @@ +""" +Ported using Python-Future from the Python 3.3 standard library. + +Parse (absolute and relative) URLs. + +urlparse module is based upon the following RFC specifications. + +RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding +and L. Masinter, January 2005. + +RFC 2732 : "Format for Literal IPv6 Addresses in URL's by R.Hinden, B.Carpenter +and L.Masinter, December 1999. + +RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T. +Berners-Lee, R. Fielding, and L. Masinter, August 1998. + +RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zawinski, July 1998. + +RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June +1995. + +RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M. +McCahill, December 1994 + +RFC 3986 is considered the current standard and any future changes to +urlparse module should conform with it. The urlparse module is +currently not entirely compliant with this RFC due to defacto +scenarios for parsing, and for backward compatibility purposes, some +parsing quirks from older RFCs are retained. The testcases in +test_urlparse.py provides a good indicator of parsing behavior. +""" +from __future__ import absolute_import, division, unicode_literals +from future.builtins import bytes, chr, dict, int, range, str +from future.utils import raise_with_traceback + +import re +import sys +import collections + +__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag", + "urlsplit", "urlunsplit", "urlencode", "parse_qs", + "parse_qsl", "quote", "quote_plus", "quote_from_bytes", + "unquote", "unquote_plus", "unquote_to_bytes"] + +# A classification of schemes ('' means apply by default) +uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap', + 'wais', 'file', 'https', 'shttp', 'mms', + 'prospero', 'rtsp', 'rtspu', '', 'sftp', + 'svn', 'svn+ssh'] +uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet', + 'imap', 'wais', 'file', 'mms', 'https', 'shttp', + 'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '', + 'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh'] +uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap', + 'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips', + 'mms', '', 'sftp', 'tel'] + +# These are not actually used anymore, but should stay for backwards +# compatibility. (They are undocumented, but have a public-looking name.) 
+non_hierarchical = ['gopher', 'hdl', 'mailto', 'news', + 'telnet', 'wais', 'imap', 'snews', 'sip', 'sips'] +uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms', + 'gopher', 'rtsp', 'rtspu', 'sip', 'sips', ''] +uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news', + 'nntp', 'wais', 'https', 'shttp', 'snews', + 'file', 'prospero', ''] + +# Characters valid in scheme names +scheme_chars = ('abcdefghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789' + '+-.') + +# XXX: Consider replacing with functools.lru_cache +MAX_CACHE_SIZE = 20 +_parse_cache = {} + +def clear_cache(): + """Clear the parse cache and the quoters cache.""" + _parse_cache.clear() + _safe_quoters.clear() + + +# Helpers for bytes handling +# For 3.2, we deliberately require applications that +# handle improperly quoted URLs to do their own +# decoding and encoding. If valid use cases are +# presented, we may relax this by using latin-1 +# decoding internally for 3.3 +_implicit_encoding = 'ascii' +_implicit_errors = 'strict' + +def _noop(obj): + return obj + +def _encode_result(obj, encoding=_implicit_encoding, + errors=_implicit_errors): + return obj.encode(encoding, errors) + +def _decode_args(args, encoding=_implicit_encoding, + errors=_implicit_errors): + return tuple(x.decode(encoding, errors) if x else '' for x in args) + +def _coerce_args(*args): + # Invokes decode if necessary to create str args + # and returns the coerced inputs along with + # an appropriate result coercion function + # - noop for str inputs + # - encoding function otherwise + str_input = isinstance(args[0], str) + for arg in args[1:]: + # We special-case the empty string to support the + # "scheme=''" default argument to some functions + if arg and isinstance(arg, str) != str_input: + raise TypeError("Cannot mix str and non-str arguments") + if str_input: + return args + (_noop,) + return _decode_args(args) + (_encode_result,) + +# Result objects are more helpful than simple tuples +class _ResultMixinStr(object): + """Standard approach to encoding parsed results from str to bytes""" + __slots__ = () + + def encode(self, encoding='ascii', errors='strict'): + return self._encoded_counterpart(*(x.encode(encoding, errors) for x in self)) + + +class _ResultMixinBytes(object): + """Standard approach to decoding parsed results from bytes to str""" + __slots__ = () + + def decode(self, encoding='ascii', errors='strict'): + return self._decoded_counterpart(*(x.decode(encoding, errors) for x in self)) + + +class _NetlocResultMixinBase(object): + """Shared methods for the parsed result objects containing a netloc element""" + __slots__ = () + + @property + def username(self): + return self._userinfo[0] + + @property + def password(self): + return self._userinfo[1] + + @property + def hostname(self): + hostname = self._hostinfo[0] + if not hostname: + hostname = None + elif hostname is not None: + hostname = hostname.lower() + return hostname + + @property + def port(self): + port = self._hostinfo[1] + if port is not None: + port = int(port, 10) + # Return None on an illegal port + if not ( 0 <= port <= 65535): + return None + return port + + +class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr): + __slots__ = () + + @property + def _userinfo(self): + netloc = self.netloc + userinfo, have_info, hostinfo = netloc.rpartition('@') + if have_info: + username, have_password, password = userinfo.partition(':') + if not have_password: + password = None + else: + username = password = None + return username, password + + 
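To illustrate the properties this mixin provides (using urlsplit(), defined further down in this module; the URL is made up):

parts = urlsplit("https://alice:secret@example.com:8443/index.html")
assert parts.username == "alice" and parts.password == "secret"
assert parts.hostname == "example.com"    # lowercased by the property
assert parts.port == 8443                 # converted to int, None if invalid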
@property + def _hostinfo(self): + netloc = self.netloc + _, _, hostinfo = netloc.rpartition('@') + _, have_open_br, bracketed = hostinfo.partition('[') + if have_open_br: + hostname, _, port = bracketed.partition(']') + _, have_port, port = port.partition(':') + else: + hostname, have_port, port = hostinfo.partition(':') + if not have_port: + port = None + return hostname, port + + +class _NetlocResultMixinBytes(_NetlocResultMixinBase, _ResultMixinBytes): + __slots__ = () + + @property + def _userinfo(self): + netloc = self.netloc + userinfo, have_info, hostinfo = netloc.rpartition(b'@') + if have_info: + username, have_password, password = userinfo.partition(b':') + if not have_password: + password = None + else: + username = password = None + return username, password + + @property + def _hostinfo(self): + netloc = self.netloc + _, _, hostinfo = netloc.rpartition(b'@') + _, have_open_br, bracketed = hostinfo.partition(b'[') + if have_open_br: + hostname, _, port = bracketed.partition(b']') + _, have_port, port = port.partition(b':') + else: + hostname, have_port, port = hostinfo.partition(b':') + if not have_port: + port = None + return hostname, port + + +from collections import namedtuple + +_DefragResultBase = namedtuple('DefragResult', 'url fragment') +_SplitResultBase = namedtuple('SplitResult', 'scheme netloc path query fragment') +_ParseResultBase = namedtuple('ParseResult', 'scheme netloc path params query fragment') + +# For backwards compatibility, alias _NetlocResultMixinStr +# ResultBase is no longer part of the documented API, but it is +# retained since deprecating it isn't worth the hassle +ResultBase = _NetlocResultMixinStr + +# Structured result objects for string data +class DefragResult(_DefragResultBase, _ResultMixinStr): + __slots__ = () + def geturl(self): + if self.fragment: + return self.url + '#' + self.fragment + else: + return self.url + +class SplitResult(_SplitResultBase, _NetlocResultMixinStr): + __slots__ = () + def geturl(self): + return urlunsplit(self) + +class ParseResult(_ParseResultBase, _NetlocResultMixinStr): + __slots__ = () + def geturl(self): + return urlunparse(self) + +# Structured result objects for bytes data +class DefragResultBytes(_DefragResultBase, _ResultMixinBytes): + __slots__ = () + def geturl(self): + if self.fragment: + return self.url + b'#' + self.fragment + else: + return self.url + +class SplitResultBytes(_SplitResultBase, _NetlocResultMixinBytes): + __slots__ = () + def geturl(self): + return urlunsplit(self) + +class ParseResultBytes(_ParseResultBase, _NetlocResultMixinBytes): + __slots__ = () + def geturl(self): + return urlunparse(self) + +# Set up the encode/decode result pairs +def _fix_result_transcoding(): + _result_pairs = ( + (DefragResult, DefragResultBytes), + (SplitResult, SplitResultBytes), + (ParseResult, ParseResultBytes), + ) + for _decoded, _encoded in _result_pairs: + _decoded._encoded_counterpart = _encoded + _encoded._decoded_counterpart = _decoded + +_fix_result_transcoding() +del _fix_result_transcoding + +def urlparse(url, scheme='', allow_fragments=True): + """Parse a URL into 6 components: + :///;?# + Return a 6-tuple: (scheme, netloc, path, params, query, fragment). + Note that we don't break the components up in smaller bits + (e.g. 
netloc is a single string) and we don't expand % escapes.""" + url, scheme, _coerce_result = _coerce_args(url, scheme) + splitresult = urlsplit(url, scheme, allow_fragments) + scheme, netloc, url, query, fragment = splitresult + if scheme in uses_params and ';' in url: + url, params = _splitparams(url) + else: + params = '' + result = ParseResult(scheme, netloc, url, params, query, fragment) + return _coerce_result(result) + +def _splitparams(url): + if '/' in url: + i = url.find(';', url.rfind('/')) + if i < 0: + return url, '' + else: + i = url.find(';') + return url[:i], url[i+1:] + +def _splitnetloc(url, start=0): + delim = len(url) # position of end of domain part of url, default is end + for c in '/?#': # look for delimiters; the order is NOT important + wdelim = url.find(c, start) # find first of this delim + if wdelim >= 0: # if found + delim = min(delim, wdelim) # use earliest delim position + return url[start:delim], url[delim:] # return (domain, rest) + +def urlsplit(url, scheme='', allow_fragments=True): + """Parse a URL into 5 components: + :///?# + Return a 5-tuple: (scheme, netloc, path, query, fragment). + Note that we don't break the components up in smaller bits + (e.g. netloc is a single string) and we don't expand % escapes.""" + url, scheme, _coerce_result = _coerce_args(url, scheme) + allow_fragments = bool(allow_fragments) + key = url, scheme, allow_fragments, type(url), type(scheme) + cached = _parse_cache.get(key, None) + if cached: + return _coerce_result(cached) + if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth + clear_cache() + netloc = query = fragment = '' + i = url.find(':') + if i > 0: + if url[:i] == 'http': # optimize the common case + scheme = url[:i].lower() + url = url[i+1:] + if url[:2] == '//': + netloc, url = _splitnetloc(url, 2) + if (('[' in netloc and ']' not in netloc) or + (']' in netloc and '[' not in netloc)): + raise ValueError("Invalid IPv6 URL") + if allow_fragments and '#' in url: + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return _coerce_result(v) + for c in url[:i]: + if c not in scheme_chars: + break + else: + # make sure "url" is not actually a port number (in which case + # "scheme" is really part of the path) + rest = url[i+1:] + if not rest or any(c not in '0123456789' for c in rest): + # not a port number + scheme, url = url[:i].lower(), rest + + if url[:2] == '//': + netloc, url = _splitnetloc(url, 2) + if (('[' in netloc and ']' not in netloc) or + (']' in netloc and '[' not in netloc)): + raise ValueError("Invalid IPv6 URL") + if allow_fragments and '#' in url: + url, fragment = url.split('#', 1) + if '?' in url: + url, query = url.split('?', 1) + v = SplitResult(scheme, netloc, url, query, fragment) + _parse_cache[key] = v + return _coerce_result(v) + +def urlunparse(components): + """Put a parsed URL back together again. This may result in a + slightly different, but equivalent URL, if the URL that was parsed + originally had redundant delimiters, e.g. a ? with an empty query + (the draft states that these are equivalent).""" + scheme, netloc, url, params, query, fragment, _coerce_result = ( + _coerce_args(*components)) + if params: + url = "%s;%s" % (url, params) + return _coerce_result(urlunsplit((scheme, netloc, url, query, fragment))) + +def urlunsplit(components): + """Combine the elements of a tuple as returned by urlsplit() into a + complete URL as a string. 
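A quick round-trip sketch for urlparse()/urlunparse() above; the URL is illustrative.

parts = urlparse("http://user@example.com:8080/path;v=1?q=2#frag")
# ParseResult(scheme='http', netloc='user@example.com:8080', path='/path',
#             params='v=1', query='q=2', fragment='frag')
assert parts.hostname == "example.com" and parts.port == 8080
assert urlunparse(parts) == "http://user@example.com:8080/path;v=1?q=2#frag"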
The data argument can be any five-item iterable. + This may result in a slightly different, but equivalent URL, if the URL that + was parsed originally had unnecessary delimiters (for example, a ? with an + empty query; the RFC states that these are equivalent).""" + scheme, netloc, url, query, fragment, _coerce_result = ( + _coerce_args(*components)) + if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'): + if url and url[:1] != '/': url = '/' + url + url = '//' + (netloc or '') + url + if scheme: + url = scheme + ':' + url + if query: + url = url + '?' + query + if fragment: + url = url + '#' + fragment + return _coerce_result(url) + +def urljoin(base, url, allow_fragments=True): + """Join a base URL and a possibly relative URL to form an absolute + interpretation of the latter.""" + if not base: + return url + if not url: + return base + base, url, _coerce_result = _coerce_args(base, url) + bscheme, bnetloc, bpath, bparams, bquery, bfragment = \ + urlparse(base, '', allow_fragments) + scheme, netloc, path, params, query, fragment = \ + urlparse(url, bscheme, allow_fragments) + if scheme != bscheme or scheme not in uses_relative: + return _coerce_result(url) + if scheme in uses_netloc: + if netloc: + return _coerce_result(urlunparse((scheme, netloc, path, + params, query, fragment))) + netloc = bnetloc + if path[:1] == '/': + return _coerce_result(urlunparse((scheme, netloc, path, + params, query, fragment))) + if not path and not params: + path = bpath + params = bparams + if not query: + query = bquery + return _coerce_result(urlunparse((scheme, netloc, path, + params, query, fragment))) + segments = bpath.split('/')[:-1] + path.split('/') + # XXX The stuff below is bogus in various ways... + if segments[-1] == '.': + segments[-1] = '' + while '.' in segments: + segments.remove('.') + while 1: + i = 1 + n = len(segments) - 1 + while i < n: + if (segments[i] == '..' + and segments[i-1] not in ('', '..')): + del segments[i-1:i+1] + break + i = i+1 + else: + break + if segments == ['', '..']: + segments[-1] = '' + elif len(segments) >= 2 and segments[-1] == '..': + segments[-2:] = [''] + return _coerce_result(urlunparse((scheme, netloc, '/'.join(segments), + params, query, fragment))) + +def urldefrag(url): + """Removes any existing fragment from URL. + + Returns a tuple of the defragmented URL and the fragment. If + the URL contained no fragments, the second element is the + empty string. + """ + url, _coerce_result = _coerce_args(url) + if '#' in url: + s, n, p, a, q, frag = urlparse(url) + defrag = urlunparse((s, n, p, a, q, '')) + else: + frag = '' + defrag = url + return _coerce_result(DefragResult(defrag, frag)) + +_hexdig = '0123456789ABCDEFabcdef' +_hextobyte = dict(((a + b).encode(), bytes([int(a + b, 16)])) + for a in _hexdig for b in _hexdig) + +def unquote_to_bytes(string): + """unquote_to_bytes('abc%20def') -> b'abc def'.""" + # Note: strings are encoded as UTF-8. This is only an issue if it contains + # unescaped non-ASCII characters, which URIs should not. + if not string: + # Is it a string-like object? 
+ string.split + return bytes(b'') + if isinstance(string, str): + string = string.encode('utf-8') + ### For Python-Future: + # It is already a byte-string object, but force it to be newbytes here on + # Py2: + string = bytes(string) + ### + bits = string.split(b'%') + if len(bits) == 1: + return string + res = [bits[0]] + append = res.append + for item in bits[1:]: + try: + append(_hextobyte[item[:2]]) + append(item[2:]) + except KeyError: + append(b'%') + append(item) + return bytes(b'').join(res) + +_asciire = re.compile('([\x00-\x7f]+)') + +def unquote(string, encoding='utf-8', errors='replace'): + """Replace %xx escapes by their single-character equivalent. The optional + encoding and errors parameters specify how to decode percent-encoded + sequences into Unicode characters, as accepted by the bytes.decode() + method. + By default, percent-encoded sequences are decoded with UTF-8, and invalid + sequences are replaced by a placeholder character. + + unquote('abc%20def') -> 'abc def'. + """ + if '%' not in string: + string.split + return string + if encoding is None: + encoding = 'utf-8' + if errors is None: + errors = 'replace' + bits = _asciire.split(string) + res = [bits[0]] + append = res.append + for i in range(1, len(bits), 2): + append(unquote_to_bytes(bits[i]).decode(encoding, errors)) + append(bits[i + 1]) + return ''.join(res) + +def parse_qs(qs, keep_blank_values=False, strict_parsing=False, + encoding='utf-8', errors='replace'): + """Parse a query given as a string argument. + + Arguments: + + qs: percent-encoded query string to be parsed + + keep_blank_values: flag indicating whether blank values in + percent-encoded queries should be treated as blank strings. + A true value indicates that blanks should be retained as + blank strings. The default false value indicates that + blank values are to be ignored and treated as if they were + not included. + + strict_parsing: flag indicating what to do with parsing errors. + If false (the default), errors are silently ignored. + If true, errors raise a ValueError exception. + + encoding and errors: specify how to decode percent-encoded sequences + into Unicode characters, as accepted by the bytes.decode() method. + """ + parsed_result = {} + pairs = parse_qsl(qs, keep_blank_values, strict_parsing, + encoding=encoding, errors=errors) + for name, value in pairs: + if name in parsed_result: + parsed_result[name].append(value) + else: + parsed_result[name] = [value] + return parsed_result + +def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, + encoding='utf-8', errors='replace'): + """Parse a query given as a string argument. + + Arguments: + + qs: percent-encoded query string to be parsed + + keep_blank_values: flag indicating whether blank values in + percent-encoded queries should be treated as blank strings. A + true value indicates that blanks should be retained as blank + strings. The default false value indicates that blank values + are to be ignored and treated as if they were not included. + + strict_parsing: flag indicating what to do with parsing errors. If + false (the default), errors are silently ignored. If true, + errors raise a ValueError exception. + + encoding and errors: specify how to decode percent-encoded sequences + into Unicode characters, as accepted by the bytes.decode() method. + + Returns a list, as G-d intended. 
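Illustrative inputs and outputs for the two query-string parsers above:

assert parse_qsl("a=1&a=2&b=spam+eggs") == [("a", "1"), ("a", "2"), ("b", "spam eggs")]
assert parse_qs("a=1&a=2&b=spam+eggs") == {"a": ["1", "2"], "b": ["spam eggs"]}
# Blank values are dropped unless keep_blank_values=True:
assert parse_qsl("a=&b=1", keep_blank_values=True) == [("a", ""), ("b", "1")]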
+ """ + qs, _coerce_result = _coerce_args(qs) + pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')] + r = [] + for name_value in pairs: + if not name_value and not strict_parsing: + continue + nv = name_value.split('=', 1) + if len(nv) != 2: + if strict_parsing: + raise ValueError("bad query field: %r" % (name_value,)) + # Handle case of a control-name with no equal sign + if keep_blank_values: + nv.append('') + else: + continue + if len(nv[1]) or keep_blank_values: + name = nv[0].replace('+', ' ') + name = unquote(name, encoding=encoding, errors=errors) + name = _coerce_result(name) + value = nv[1].replace('+', ' ') + value = unquote(value, encoding=encoding, errors=errors) + value = _coerce_result(value) + r.append((name, value)) + return r + +def unquote_plus(string, encoding='utf-8', errors='replace'): + """Like unquote(), but also replace plus signs by spaces, as required for + unquoting HTML form values. + + unquote_plus('%7e/abc+def') -> '~/abc def' + """ + string = string.replace('+', ' ') + return unquote(string, encoding, errors) + +_ALWAYS_SAFE = frozenset(bytes(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + b'abcdefghijklmnopqrstuvwxyz' + b'0123456789' + b'_.-')) +_ALWAYS_SAFE_BYTES = bytes(_ALWAYS_SAFE) +_safe_quoters = {} + +class Quoter(collections.defaultdict): + """A mapping from bytes (in range(0,256)) to strings. + + String values are percent-encoded byte values, unless the key < 128, and + in the "safe" set (either the specified safe set, or default set). + """ + # Keeps a cache internally, using defaultdict, for efficiency (lookups + # of cached keys don't call Python code at all). + def __init__(self, safe): + """safe: bytes object.""" + self.safe = _ALWAYS_SAFE.union(bytes(safe)) + + def __repr__(self): + # Without this, will just display as a defaultdict + return "" % dict(self) + + def __missing__(self, b): + # Handle a cache miss. Store quoted string in cache and return. + res = chr(b) if b in self.safe else '%{0:02X}'.format(b) + self[b] = res + return res + +def quote(string, safe='/', encoding=None, errors=None): + """quote('abc def') -> 'abc%20def' + + Each part of a URL, e.g. the path info, the query, etc., has a + different set of reserved characters that must be quoted. + + RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists + the following reserved characters. + + reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | + "$" | "," + + Each of these characters is reserved in some component of a URL, + but not necessarily in all of them. + + By default, the quote function is intended for quoting the path + section of a URL. Thus, it will not encode '/'. This character + is reserved, but in typical usage the quote function is being + called on a path where the existing slash characters are used as + reserved characters. + + string and safe may be either str or bytes objects. encoding must + not be specified if string is a str. + + The optional encoding and errors parameters specify how to deal with + non-ASCII characters, as accepted by the str.encode method. + By default, encoding='utf-8' (characters are encoded with UTF-8), and + errors='strict' (unsupported characters raise a UnicodeEncodeError). 
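A few representative inputs for the quoting helpers defined here; the strings are arbitrary.

assert quote("abc def/ghi") == "abc%20def/ghi"          # '/' is safe by default
assert quote("abc def/ghi", safe="") == "abc%20def%2Fghi"
assert quote_plus("abc def") == "abc+def"               # form-style encoding
assert unquote("abc%20def") == "abc def"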
+ """ + if isinstance(string, str): + if not string: + return string + if encoding is None: + encoding = 'utf-8' + if errors is None: + errors = 'strict' + string = string.encode(encoding, errors) + else: + if encoding is not None: + raise TypeError("quote() doesn't support 'encoding' for bytes") + if errors is not None: + raise TypeError("quote() doesn't support 'errors' for bytes") + return quote_from_bytes(string, safe) + +def quote_plus(string, safe='', encoding=None, errors=None): + """Like quote(), but also replace ' ' with '+', as required for quoting + HTML form values. Plus signs in the original string are escaped unless + they are included in safe. It also does not have safe default to '/'. + """ + # Check if ' ' in string, where string may either be a str or bytes. If + # there are no spaces, the regular quote will produce the right answer. + if ((isinstance(string, str) and ' ' not in string) or + (isinstance(string, bytes) and b' ' not in string)): + return quote(string, safe, encoding, errors) + if isinstance(safe, str): + space = str(' ') + else: + space = bytes(b' ') + string = quote(string, safe + space, encoding, errors) + return string.replace(' ', '+') + +def quote_from_bytes(bs, safe='/'): + """Like quote(), but accepts a bytes object rather than a str, and does + not perform string-to-bytes encoding. It always returns an ASCII string. + quote_from_bytes(b'abc def\x3f') -> 'abc%20def%3f' + """ + if not isinstance(bs, (bytes, bytearray)): + raise TypeError("quote_from_bytes() expected bytes") + if not bs: + return str('') + ### For Python-Future: + bs = bytes(bs) + ### + if isinstance(safe, str): + # Normalize 'safe' by converting to bytes and removing non-ASCII chars + safe = str(safe).encode('ascii', 'ignore') + else: + ### For Python-Future: + safe = bytes(safe) + ### + safe = bytes([c for c in safe if c < 128]) + if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe): + return bs.decode() + try: + quoter = _safe_quoters[safe] + except KeyError: + _safe_quoters[safe] = quoter = Quoter(safe).__getitem__ + return str('').join([quoter(char) for char in bs]) + +def urlencode(query, doseq=False, safe='', encoding=None, errors=None): + """Encode a sequence of two-element tuples or dictionary into a URL query string. + + If any values in the query arg are sequences and doseq is true, each + sequence element is converted to a separate parameter. + + If the query arg is a sequence of two-element tuples, the order of the + parameters in the output will match the order of parameters in the + input. + + The query arg may be either a string or a bytes type. When query arg is a + string, the safe, encoding and error parameters are sent the quote_plus for + encoding. + """ + + if hasattr(query, "items"): + query = query.items() + else: + # It's a bother at times that strings and string-like objects are + # sequences. + try: + # non-sequence items should not work with len() + # non-empty strings will fail this + if len(query) and not isinstance(query[0], tuple): + raise TypeError + # Zero-length sequences of all types will get here and succeed, + # but that's a minor nit. 
Since the original implementation + # allowed empty dicts that type of behavior probably should be + # preserved for consistency + except TypeError: + ty, va, tb = sys.exc_info() + raise_with_traceback(TypeError("not a valid non-string sequence " + "or mapping object"), tb) + + l = [] + if not doseq: + for k, v in query: + if isinstance(k, bytes): + k = quote_plus(k, safe) + else: + k = quote_plus(str(k), safe, encoding, errors) + + if isinstance(v, bytes): + v = quote_plus(v, safe) + else: + v = quote_plus(str(v), safe, encoding, errors) + l.append(k + '=' + v) + else: + for k, v in query: + if isinstance(k, bytes): + k = quote_plus(k, safe) + else: + k = quote_plus(str(k), safe, encoding, errors) + + if isinstance(v, bytes): + v = quote_plus(v, safe) + l.append(k + '=' + v) + elif isinstance(v, str): + v = quote_plus(v, safe, encoding, errors) + l.append(k + '=' + v) + else: + try: + # Is this a sufficient test for sequence-ness? + x = len(v) + except TypeError: + # not a sequence + v = quote_plus(str(v), safe, encoding, errors) + l.append(k + '=' + v) + else: + # loop over the sequence + for elt in v: + if isinstance(elt, bytes): + elt = quote_plus(elt, safe) + else: + elt = quote_plus(str(elt), safe, encoding, errors) + l.append(k + '=' + elt) + return str('&').join(l) + +# Utilities to parse URLs (most of these return None for missing parts): +# unwrap('') --> 'type://host/path' +# splittype('type:opaquestring') --> 'type', 'opaquestring' +# splithost('//host[:port]/path') --> 'host[:port]', '/path' +# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]' +# splitpasswd('user:passwd') -> 'user', 'passwd' +# splitport('host:port') --> 'host', 'port' +# splitquery('/path?query') --> '/path', 'query' +# splittag('/path#tag') --> '/path', 'tag' +# splitattr('/path;attr1=value1;attr2=value2;...') -> +# '/path', ['attr1=value1', 'attr2=value2', ...] +# splitvalue('attr=value') --> 'attr', 'value' +# urllib.parse.unquote('abc%20def') -> 'abc def' +# quote('abc def') -> 'abc%20def') + +def to_bytes(url): + """to_bytes(u"URL") --> 'URL'.""" + # Most URL schemes require ASCII. If that changes, the conversion + # can be relaxed. 
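Representative urlencode() calls (the data is illustrative); a list of pairs keeps the output order deterministic.

assert urlencode([("q", "python urllib"), ("page", 2)]) == "q=python+urllib&page=2"
# With doseq=True, sequence values become repeated parameters:
assert urlencode([("tag", ["a", "b"])], doseq=True) == "tag=a&tag=b"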
+ # XXX get rid of to_bytes() + if isinstance(url, str): + try: + url = url.encode("ASCII").decode() + except UnicodeError: + raise UnicodeError("URL " + repr(url) + + " contains non-ASCII characters") + return url + +def unwrap(url): + """unwrap('') --> 'type://host/path'.""" + url = str(url).strip() + if url[:1] == '<' and url[-1:] == '>': + url = url[1:-1].strip() + if url[:4] == 'URL:': url = url[4:].strip() + return url + +_typeprog = None +def splittype(url): + """splittype('type:opaquestring') --> 'type', 'opaquestring'.""" + global _typeprog + if _typeprog is None: + import re + _typeprog = re.compile('^([^/:]+):') + + match = _typeprog.match(url) + if match: + scheme = match.group(1) + return scheme.lower(), url[len(scheme) + 1:] + return None, url + +_hostprog = None +def splithost(url): + """splithost('//host[:port]/path') --> 'host[:port]', '/path'.""" + global _hostprog + if _hostprog is None: + import re + _hostprog = re.compile('^//([^/?]*)(.*)$') + + match = _hostprog.match(url) + if match: + host_port = match.group(1) + path = match.group(2) + if path and not path.startswith('/'): + path = '/' + path + return host_port, path + return None, url + +_userprog = None +def splituser(host): + """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + global _userprog + if _userprog is None: + import re + _userprog = re.compile('^(.*)@(.*)$') + + match = _userprog.match(host) + if match: return match.group(1, 2) + return None, host + +_passwdprog = None +def splitpasswd(user): + """splitpasswd('user:passwd') -> 'user', 'passwd'.""" + global _passwdprog + if _passwdprog is None: + import re + _passwdprog = re.compile('^([^:]*):(.*)$',re.S) + + match = _passwdprog.match(user) + if match: return match.group(1, 2) + return user, None + +# splittag('/path#tag') --> '/path', 'tag' +_portprog = None +def splitport(host): + """splitport('host:port') --> 'host', 'port'.""" + global _portprog + if _portprog is None: + import re + _portprog = re.compile('^(.*):([0-9]+)$') + + match = _portprog.match(host) + if match: return match.group(1, 2) + return host, None + +_nportprog = None +def splitnport(host, defport=-1): + """Split host and port, returning numeric port. + Return given default port if no ':' found; defaults to -1. + Return numerical port if a valid number are found after ':'. 
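For reference, what the legacy split helpers above return for a made-up URL:

assert splittype("http://example.com:8080/x?q=1") == ("http", "//example.com:8080/x?q=1")
assert splithost("//example.com:8080/x?q=1") == ("example.com:8080", "/x?q=1")
assert splitport("example.com:8080") == ("example.com", "8080")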
+ Return None if ':' but not a valid number.""" + global _nportprog + if _nportprog is None: + import re + _nportprog = re.compile('^(.*):(.*)$') + + match = _nportprog.match(host) + if match: + host, port = match.group(1, 2) + try: + if not port: raise ValueError("no digits") + nport = int(port) + except ValueError: + nport = None + return host, nport + return host, defport + +_queryprog = None +def splitquery(url): + """splitquery('/path?query') --> '/path', 'query'.""" + global _queryprog + if _queryprog is None: + import re + _queryprog = re.compile('^(.*)\?([^?]*)$') + + match = _queryprog.match(url) + if match: return match.group(1, 2) + return url, None + +_tagprog = None +def splittag(url): + """splittag('/path#tag') --> '/path', 'tag'.""" + global _tagprog + if _tagprog is None: + import re + _tagprog = re.compile('^(.*)#([^#]*)$') + + match = _tagprog.match(url) + if match: return match.group(1, 2) + return url, None + +def splitattr(url): + """splitattr('/path;attr1=value1;attr2=value2;...') -> + '/path', ['attr1=value1', 'attr2=value2', ...].""" + words = url.split(';') + return words[0], words[1:] + +_valueprog = None +def splitvalue(attr): + """splitvalue('attr=value') --> 'attr', 'value'.""" + global _valueprog + if _valueprog is None: + import re + _valueprog = re.compile('^([^=]*)=(.*)$') + + match = _valueprog.match(attr) + if match: return match.group(1, 2) + return attr, None diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/request.py b/minor_project/lib/python3.6/site-packages/future/backports/urllib/request.py new file mode 100644 index 0000000..baee540 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/urllib/request.py @@ -0,0 +1,2647 @@ +""" +Ported using Python-Future from the Python 3.3 standard library. + +An extensible library for opening URLs using a variety of protocols + +The simplest way to use this module is to call the urlopen function, +which accepts a string containing a URL or a Request object (described +below). It opens the URL and returns the results as file-like +object; the returned object has some extra methods described below. + +The OpenerDirector manages a collection of Handler objects that do +all the actual work. Each Handler implements a particular protocol or +option. The OpenerDirector is a composite object that invokes the +Handlers needed to open the requested URL. For example, the +HTTPHandler performs HTTP GET and POST requests and deals with +non-error returns. The HTTPRedirectHandler automatically deals with +HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler +deals with digest authentication. + +urlopen(url, data=None) -- Basic usage is the same as original +urllib. pass the url and optionally data to post to an HTTP URL, and +get a file-like object back. One difference is that you can also pass +a Request instance instead of URL. Raises a URLError (subclass of +IOError); for HTTP errors, raises an HTTPError, which can also be +treated as a valid response. + +build_opener -- Function that creates a new OpenerDirector instance. +Will install the default handlers. Accepts one or more Handlers as +arguments, either instances or Handler classes that it will +instantiate. If one of the argument is a subclass of the default +handler, the argument will be installed instead of the default. + +install_opener -- Installs a new opener as the default opener. 
+ +objects of interest: + +OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages +the Handler classes, while dealing with requests and responses. + +Request -- An object that encapsulates the state of a request. The +state can be as simple as the URL. It can also include extra HTTP +headers, e.g. a User-Agent. + +BaseHandler -- + +internals: +BaseHandler and parent +_call_chain conventions + +Example usage: + +import urllib.request + +# set up authentication info +authinfo = urllib.request.HTTPBasicAuthHandler() +authinfo.add_password(realm='PDQ Application', + uri='https://mahler:8092/site-updates.py', + user='klem', + passwd='geheim$parole') + +proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"}) + +# build a new opener that adds authentication and caching FTP handlers +opener = urllib.request.build_opener(proxy_support, authinfo, + urllib.request.CacheFTPHandler) + +# install it +urllib.request.install_opener(opener) + +f = urllib.request.urlopen('http://www.python.org/') +""" + +# XXX issues: +# If an authentication error handler that tries to perform +# authentication for some reason but fails, how should the error be +# signalled? The client needs to know the HTTP error code. But if +# the handler knows that the problem was, e.g., that it didn't know +# that hash algo that requested in the challenge, it would be good to +# pass that information along to the client, too. +# ftp errors aren't handled cleanly +# check digest against correct (i.e. non-apache) implementation + +# Possible extensions: +# complex proxies XXX not sure what exactly was meant by this +# abstract factory for opener + +from __future__ import absolute_import, division, print_function, unicode_literals +from future.builtins import bytes, dict, filter, input, int, map, open, str +from future.utils import PY2, PY3, raise_with_traceback + +import base64 +import bisect +import hashlib +import array + +from future.backports import email +from future.backports.http import client as http_client +from .error import URLError, HTTPError, ContentTooShortError +from .parse import ( + urlparse, urlsplit, urljoin, unwrap, quote, unquote, + splittype, splithost, splitport, splituser, splitpasswd, + splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse) +from .response import addinfourl, addclosehook + +import io +import os +import posixpath +import re +import socket +import sys +import time +import tempfile +import contextlib +import warnings + +from future.utils import PY2 + +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + +# check for SSL +try: + import ssl + # Not available in the SSL module in Py2: + from ssl import SSLContext +except ImportError: + _have_ssl = False +else: + _have_ssl = True + +__all__ = [ + # Classes + 'Request', 'OpenerDirector', 'BaseHandler', 'HTTPDefaultErrorHandler', + 'HTTPRedirectHandler', 'HTTPCookieProcessor', 'ProxyHandler', + 'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm', + 'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler', + 'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler', + 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', + 'UnknownHandler', 'HTTPErrorProcessor', + # Functions + 'urlopen', 'install_opener', 'build_opener', + 'pathname2url', 'url2pathname', 'getproxies', + # Legacy interface + 'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener', +] + +# used in User-Agent header sent +__version__ = 
sys.version[:3] + +_opener = None +def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **_3to2kwargs): + if 'cadefault' in _3to2kwargs: cadefault = _3to2kwargs['cadefault']; del _3to2kwargs['cadefault'] + else: cadefault = False + if 'capath' in _3to2kwargs: capath = _3to2kwargs['capath']; del _3to2kwargs['capath'] + else: capath = None + if 'cafile' in _3to2kwargs: cafile = _3to2kwargs['cafile']; del _3to2kwargs['cafile'] + else: cafile = None + global _opener + if cafile or capath or cadefault: + if not _have_ssl: + raise ValueError('SSL support not available') + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.options |= ssl.OP_NO_SSLv2 + context.verify_mode = ssl.CERT_REQUIRED + if cafile or capath: + context.load_verify_locations(cafile, capath) + else: + context.set_default_verify_paths() + https_handler = HTTPSHandler(context=context, check_hostname=True) + opener = build_opener(https_handler) + elif _opener is None: + _opener = opener = build_opener() + else: + opener = _opener + return opener.open(url, data, timeout) + +def install_opener(opener): + global _opener + _opener = opener + +_url_tempfiles = [] +def urlretrieve(url, filename=None, reporthook=None, data=None): + """ + Retrieve a URL into a temporary location on disk. + + Requires a URL argument. If a filename is passed, it is used as + the temporary file location. The reporthook argument should be + a callable that accepts a block number, a read size, and the + total file size of the URL target. The data argument should be + valid URL encoded data. + + If a filename is passed and the URL points to a local resource, + the result is a copy from local file to new file. + + Returns a tuple containing the path to the newly created + data file as well as the resulting HTTPMessage object. + """ + url_type, path = splittype(url) + + with contextlib.closing(urlopen(url, data)) as fp: + headers = fp.info() + + # Just return the local path and the "headers" for file:// + # URLs. No sense in performing a copy unless requested. + if url_type == "file" and not filename: + return os.path.normpath(path), headers + + # Handle temporary file setup. + if filename: + tfp = open(filename, 'wb') + else: + tfp = tempfile.NamedTemporaryFile(delete=False) + filename = tfp.name + _url_tempfiles.append(filename) + + with tfp: + result = filename, headers + bs = 1024*8 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + + if reporthook: + reporthook(blocknum, bs, size) + + while True: + block = fp.read(bs) + if not block: + break + read += len(block) + tfp.write(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, bs, size) + + if size >= 0 and read < size: + raise ContentTooShortError( + "retrieval incomplete: got only %i out of %i bytes" + % (read, size), result) + + return result + +def urlcleanup(): + for temp_file in _url_tempfiles: + try: + os.unlink(temp_file) + except EnvironmentError: + pass + + del _url_tempfiles[:] + global _opener + if _opener: + _opener = None + +if PY3: + _cut_port_re = re.compile(r":\d+$", re.ASCII) +else: + _cut_port_re = re.compile(r":\d+$") + +def request_host(request): + + """Return request-host, as defined by RFC 2965. + + Variation from RFC: returned value is lowercased, for convenient + comparison. 
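A hedged sketch of the high-level entry points defined above; the URLs are made up.

import contextlib

with contextlib.closing(urlopen("http://example.com/")) as resp:
    first_kb = resp.read(1024)

filename, headers = urlretrieve("http://example.com/data.csv")
print(filename)      # path of the temporary file holding the download
urlcleanup()         # removes temporary files created by urlretrieve()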
+ + """ + url = request.full_url + host = urlparse(url)[1] + if host == "": + host = request.get_header("Host", "") + + # remove port, if present + host = _cut_port_re.sub("", host, 1) + return host.lower() + +class Request(object): + + def __init__(self, url, data=None, headers={}, + origin_req_host=None, unverifiable=False, + method=None): + # unwrap('') --> 'type://host/path' + self.full_url = unwrap(url) + self.full_url, self.fragment = splittag(self.full_url) + self.data = data + self.headers = {} + self._tunnel_host = None + for key, value in headers.items(): + self.add_header(key, value) + self.unredirected_hdrs = {} + if origin_req_host is None: + origin_req_host = request_host(self) + self.origin_req_host = origin_req_host + self.unverifiable = unverifiable + self.method = method + self._parse() + + def _parse(self): + self.type, rest = splittype(self.full_url) + if self.type is None: + raise ValueError("unknown url type: %r" % self.full_url) + self.host, self.selector = splithost(rest) + if self.host: + self.host = unquote(self.host) + + def get_method(self): + """Return a string indicating the HTTP request method.""" + if self.method is not None: + return self.method + elif self.data is not None: + return "POST" + else: + return "GET" + + def get_full_url(self): + if self.fragment: + return '%s#%s' % (self.full_url, self.fragment) + else: + return self.full_url + + # Begin deprecated methods + + def add_data(self, data): + msg = "Request.add_data method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + self.data = data + + def has_data(self): + msg = "Request.has_data method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.data is not None + + def get_data(self): + msg = "Request.get_data method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.data + + def get_type(self): + msg = "Request.get_type method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.type + + def get_host(self): + msg = "Request.get_host method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.host + + def get_selector(self): + msg = "Request.get_selector method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.selector + + def is_unverifiable(self): + msg = "Request.is_unverifiable method is deprecated." + warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.unverifiable + + def get_origin_req_host(self): + msg = "Request.get_origin_req_host method is deprecated." 
+ warnings.warn(msg, DeprecationWarning, stacklevel=1) + return self.origin_req_host + + # End deprecated methods + + def set_proxy(self, host, type): + if self.type == 'https' and not self._tunnel_host: + self._tunnel_host = self.host + else: + self.type= type + self.selector = self.full_url + self.host = host + + def has_proxy(self): + return self.selector == self.full_url + + def add_header(self, key, val): + # useful for something like authentication + self.headers[key.capitalize()] = val + + def add_unredirected_header(self, key, val): + # will not be added to a redirected request + self.unredirected_hdrs[key.capitalize()] = val + + def has_header(self, header_name): + return (header_name in self.headers or + header_name in self.unredirected_hdrs) + + def get_header(self, header_name, default=None): + return self.headers.get( + header_name, + self.unredirected_hdrs.get(header_name, default)) + + def header_items(self): + hdrs = self.unredirected_hdrs.copy() + hdrs.update(self.headers) + return list(hdrs.items()) + +class OpenerDirector(object): + def __init__(self): + client_version = "Python-urllib/%s" % __version__ + self.addheaders = [('User-agent', client_version)] + # self.handlers is retained only for backward compatibility + self.handlers = [] + # manage the individual handlers + self.handle_open = {} + self.handle_error = {} + self.process_response = {} + self.process_request = {} + + def add_handler(self, handler): + if not hasattr(handler, "add_parent"): + raise TypeError("expected BaseHandler instance, got %r" % + type(handler)) + + added = False + for meth in dir(handler): + if meth in ["redirect_request", "do_open", "proxy_open"]: + # oops, coincidental match + continue + + i = meth.find("_") + protocol = meth[:i] + condition = meth[i+1:] + + if condition.startswith("error"): + j = condition.find("_") + i + 1 + kind = meth[j+1:] + try: + kind = int(kind) + except ValueError: + pass + lookup = self.handle_error.get(protocol, {}) + self.handle_error[protocol] = lookup + elif condition == "open": + kind = protocol + lookup = self.handle_open + elif condition == "response": + kind = protocol + lookup = self.process_response + elif condition == "request": + kind = protocol + lookup = self.process_request + else: + continue + + handlers = lookup.setdefault(kind, []) + if handlers: + bisect.insort(handlers, handler) + else: + handlers.append(handler) + added = True + + if added: + bisect.insort(self.handlers, handler) + handler.add_parent(self) + + def close(self): + # Only exists for backwards compatibility. + pass + + def _call_chain(self, chain, kind, meth_name, *args): + # Handlers raise an exception if no one else should try to handle + # the request, or return None if they can't but another handler + # could. Otherwise, they return the response. + handlers = chain.get(kind, ()) + for handler in handlers: + func = getattr(handler, meth_name) + result = func(*args) + if result is not None: + return result + + def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): + """ + Accept a URL or a Request object + + Python-Future: if the URL is passed as a byte-string, decode it first. 
+ """ + if isinstance(fullurl, bytes): + fullurl = fullurl.decode() + if isinstance(fullurl, str): + req = Request(fullurl, data) + else: + req = fullurl + if data is not None: + req.data = data + + req.timeout = timeout + protocol = req.type + + # pre-process request + meth_name = protocol+"_request" + for processor in self.process_request.get(protocol, []): + meth = getattr(processor, meth_name) + req = meth(req) + + response = self._open(req, data) + + # post-process response + meth_name = protocol+"_response" + for processor in self.process_response.get(protocol, []): + meth = getattr(processor, meth_name) + response = meth(req, response) + + return response + + def _open(self, req, data=None): + result = self._call_chain(self.handle_open, 'default', + 'default_open', req) + if result: + return result + + protocol = req.type + result = self._call_chain(self.handle_open, protocol, protocol + + '_open', req) + if result: + return result + + return self._call_chain(self.handle_open, 'unknown', + 'unknown_open', req) + + def error(self, proto, *args): + if proto in ('http', 'https'): + # XXX http[s] protocols are special-cased + dict = self.handle_error['http'] # https is not different than http + proto = args[2] # YUCK! + meth_name = 'http_error_%s' % proto + http_err = 1 + orig_args = args + else: + dict = self.handle_error + meth_name = proto + '_error' + http_err = 0 + args = (dict, proto, meth_name) + args + result = self._call_chain(*args) + if result: + return result + + if http_err: + args = (dict, 'default', 'http_error_default') + orig_args + return self._call_chain(*args) + +# XXX probably also want an abstract factory that knows when it makes +# sense to skip a superclass in favor of a subclass and when it might +# make sense to include both + +def build_opener(*handlers): + """Create an opener object from a list of handlers. + + The opener will use several default handlers, including support + for HTTP, FTP and when applicable HTTPS. + + If any of the handlers passed as arguments are subclasses of the + default handlers, the default handlers will not be used. + """ + def isclass(obj): + return isinstance(obj, type) or hasattr(obj, "__bases__") + + opener = OpenerDirector() + default_classes = [ProxyHandler, UnknownHandler, HTTPHandler, + HTTPDefaultErrorHandler, HTTPRedirectHandler, + FTPHandler, FileHandler, HTTPErrorProcessor] + if hasattr(http_client, "HTTPSConnection"): + default_classes.append(HTTPSHandler) + skip = set() + for klass in default_classes: + for check in handlers: + if isclass(check): + if issubclass(check, klass): + skip.add(klass) + elif isinstance(check, klass): + skip.add(klass) + for klass in skip: + default_classes.remove(klass) + + for klass in default_classes: + opener.add_handler(klass()) + + for h in handlers: + if isclass(h): + h = h() + opener.add_handler(h) + return opener + +class BaseHandler(object): + handler_order = 500 + + def add_parent(self, parent): + self.parent = parent + + def close(self): + # Only exists for backwards compatibility + pass + + def __lt__(self, other): + if not hasattr(other, "handler_order"): + # Try to preserve the old behavior of having custom classes + # inserted after default ones (works only for custom user + # classes which are not aware of handler_order). 
+ return True + return self.handler_order < other.handler_order + + +class HTTPErrorProcessor(BaseHandler): + """Process HTTP error responses.""" + handler_order = 1000 # after all other processing + + def http_response(self, request, response): + code, msg, hdrs = response.code, response.msg, response.info() + + # According to RFC 2616, "2xx" code indicates that the client's + # request was successfully received, understood, and accepted. + if not (200 <= code < 300): + response = self.parent.error( + 'http', request, response, code, msg, hdrs) + + return response + + https_response = http_response + +class HTTPDefaultErrorHandler(BaseHandler): + def http_error_default(self, req, fp, code, msg, hdrs): + raise HTTPError(req.full_url, code, msg, hdrs, fp) + +class HTTPRedirectHandler(BaseHandler): + # maximum number of redirections to any single URL + # this is needed because of the state that cookies introduce + max_repeats = 4 + # maximum total number of redirections (regardless of URL) before + # assuming we're in a loop + max_redirections = 10 + + def redirect_request(self, req, fp, code, msg, headers, newurl): + """Return a Request or None in response to a redirect. + + This is called by the http_error_30x methods when a + redirection response is received. If a redirection should + take place, return a new Request to allow http_error_30x to + perform the redirect. Otherwise, raise HTTPError if no-one + else should try to handle this url. Return None if you can't + but another Handler might. + """ + m = req.get_method() + if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD") + or code in (301, 302, 303) and m == "POST")): + raise HTTPError(req.full_url, code, msg, headers, fp) + + # Strictly (according to RFC 2616), 301 or 302 in response to + # a POST MUST NOT cause a redirection without confirmation + # from the user (of urllib.request, in this case). In practice, + # essentially all clients do redirect in this case, so we do + # the same. + # be conciliant with URIs containing a space + newurl = newurl.replace(' ', '%20') + CONTENT_HEADERS = ("content-length", "content-type") + newheaders = dict((k, v) for k, v in req.headers.items() + if k.lower() not in CONTENT_HEADERS) + return Request(newurl, + headers=newheaders, + origin_req_host=req.origin_req_host, + unverifiable=True) + + # Implementation note: To avoid the server sending us into an + # infinite loop, the request object needs to track what URLs we + # have already seen. Do this by adding a handler-specific + # attribute to the Request object. + def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). Use first header. + if "location" in headers: + newurl = headers["location"] + elif "uri" in headers: + newurl = headers["uri"] + else: + return + + # fix a possible malformed URL + urlparts = urlparse(newurl) + + # For security reasons we don't allow redirection to anything other + # than http, https or ftp. 
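+        # (For example, a Location header pointing at 'file:///etc/passwd'
+        # or a 'javascript:' URL is rejected with an HTTPError below.)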
+ + if urlparts.scheme not in ('http', 'https', 'ftp', ''): + raise HTTPError( + newurl, code, + "%s - Redirection to url '%s' is not allowed" % (msg, newurl), + headers, fp) + + if not urlparts.path: + urlparts = list(urlparts) + urlparts[2] = "/" + newurl = urlunparse(urlparts) + + newurl = urljoin(req.full_url, newurl) + + # XXX Probably want to forget about the state of the current + # request, although that might interact poorly with other + # handlers that also use handler-specific request attributes + new = self.redirect_request(req, fp, code, msg, headers, newurl) + if new is None: + return + + # loop detection + # .redirect_dict has a key url if url was previously visited. + if hasattr(req, 'redirect_dict'): + visited = new.redirect_dict = req.redirect_dict + if (visited.get(newurl, 0) >= self.max_repeats or + len(visited) >= self.max_redirections): + raise HTTPError(req.full_url, code, + self.inf_msg + msg, headers, fp) + else: + visited = new.redirect_dict = req.redirect_dict = {} + visited[newurl] = visited.get(newurl, 0) + 1 + + # Don't close the fp until we are sure that we won't use it + # with HTTPError. + fp.read() + fp.close() + + return self.parent.open(new, timeout=req.timeout) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + + inf_msg = "The HTTP server returned a redirect error that would " \ + "lead to an infinite loop.\n" \ + "The last 30x error message was:\n" + + +def _parse_proxy(proxy): + """Return (scheme, user, password, host/port) given a URL or an authority. + + If a URL is supplied, it must have an authority (host:port) component. + According to RFC 3986, having an authority component means the URL must + have two slashes after the scheme: + + >>> _parse_proxy('file:/ftp.example.com/') + Traceback (most recent call last): + ValueError: proxy URL with no authority: 'file:/ftp.example.com/' + + The first three items of the returned tuple may be None. 
+ + Examples of authority parsing: + + >>> _parse_proxy('proxy.example.com') + (None, None, None, 'proxy.example.com') + >>> _parse_proxy('proxy.example.com:3128') + (None, None, None, 'proxy.example.com:3128') + + The authority component may optionally include userinfo (assumed to be + username:password): + + >>> _parse_proxy('joe:password@proxy.example.com') + (None, 'joe', 'password', 'proxy.example.com') + >>> _parse_proxy('joe:password@proxy.example.com:3128') + (None, 'joe', 'password', 'proxy.example.com:3128') + + Same examples, but with URLs instead: + + >>> _parse_proxy('http://proxy.example.com/') + ('http', None, None, 'proxy.example.com') + >>> _parse_proxy('http://proxy.example.com:3128/') + ('http', None, None, 'proxy.example.com:3128') + >>> _parse_proxy('http://joe:password@proxy.example.com/') + ('http', 'joe', 'password', 'proxy.example.com') + >>> _parse_proxy('http://joe:password@proxy.example.com:3128') + ('http', 'joe', 'password', 'proxy.example.com:3128') + + Everything after the authority is ignored: + + >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128') + ('ftp', 'joe', 'password', 'proxy.example.com') + + Test for no trailing '/' case: + + >>> _parse_proxy('http://joe:password@proxy.example.com') + ('http', 'joe', 'password', 'proxy.example.com') + + """ + scheme, r_scheme = splittype(proxy) + if not r_scheme.startswith("/"): + # authority + scheme = None + authority = proxy + else: + # URL + if not r_scheme.startswith("//"): + raise ValueError("proxy URL with no authority: %r" % proxy) + # We have an authority, so for RFC 3986-compliant URLs (by ss 3. + # and 3.3.), path is empty or starts with '/' + end = r_scheme.find("/", 2) + if end == -1: + end = None + authority = r_scheme[2:end] + userinfo, hostport = splituser(authority) + if userinfo is not None: + user, password = splitpasswd(userinfo) + else: + user = password = None + return scheme, user, password, hostport + +class ProxyHandler(BaseHandler): + # Proxies must be in front + handler_order = 100 + + def __init__(self, proxies=None): + if proxies is None: + proxies = getproxies() + assert hasattr(proxies, 'keys'), "proxies must be a mapping" + self.proxies = proxies + for type, url in proxies.items(): + setattr(self, '%s_open' % type, + lambda r, proxy=url, type=type, meth=self.proxy_open: + meth(r, proxy, type)) + + def proxy_open(self, req, proxy, type): + orig_type = req.type + proxy_type, user, password, hostport = _parse_proxy(proxy) + if proxy_type is None: + proxy_type = orig_type + + if req.host and proxy_bypass(req.host): + return None + + if user and password: + user_pass = '%s:%s' % (unquote(user), + unquote(password)) + creds = base64.b64encode(user_pass.encode()).decode("ascii") + req.add_header('Proxy-authorization', 'Basic ' + creds) + hostport = unquote(hostport) + req.set_proxy(hostport, proxy_type) + if orig_type == proxy_type or orig_type == 'https': + # let other handlers take care of it + return None + else: + # need to start over, because the other handlers don't + # grok the proxy's URL type + # e.g. 
if we have a constructor arg proxies like so: + # {'http': 'ftp://proxy.example.com'}, we may end up turning + # a request for http://acme.example.com/a into one for + # ftp://proxy.example.com/a + return self.parent.open(req, timeout=req.timeout) + +class HTTPPasswordMgr(object): + + def __init__(self): + self.passwd = {} + + def add_password(self, realm, uri, user, passwd): + # uri could be a single URI or a sequence + if isinstance(uri, str): + uri = [uri] + if realm not in self.passwd: + self.passwd[realm] = {} + for default_port in True, False: + reduced_uri = tuple( + [self.reduce_uri(u, default_port) for u in uri]) + self.passwd[realm][reduced_uri] = (user, passwd) + + def find_user_password(self, realm, authuri): + domains = self.passwd.get(realm, {}) + for default_port in True, False: + reduced_authuri = self.reduce_uri(authuri, default_port) + for uris, authinfo in domains.items(): + for uri in uris: + if self.is_suburi(uri, reduced_authuri): + return authinfo + return None, None + + def reduce_uri(self, uri, default_port=True): + """Accept authority or URI and extract only the authority and path.""" + # note HTTP URLs do not have a userinfo component + parts = urlsplit(uri) + if parts[1]: + # URI + scheme = parts[0] + authority = parts[1] + path = parts[2] or '/' + else: + # host or host:port + scheme = None + authority = uri + path = '/' + host, port = splitport(authority) + if default_port and port is None and scheme is not None: + dport = {"http": 80, + "https": 443, + }.get(scheme) + if dport is not None: + authority = "%s:%d" % (host, dport) + return authority, path + + def is_suburi(self, base, test): + """Check if test is below base in a URI tree + + Both args must be URIs in reduced form. + """ + if base == test: + return True + if base[0] != test[0]: + return False + common = posixpath.commonprefix((base[1], test[1])) + if len(common) == len(base[1]): + return True + return False + + +class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): + + def find_user_password(self, realm, authuri): + user, password = HTTPPasswordMgr.find_user_password(self, realm, + authuri) + if user is not None: + return user, password + return HTTPPasswordMgr.find_user_password(self, None, authuri) + + +class AbstractBasicAuthHandler(object): + + # XXX this allows for multiple auth-schemes, but will stupidly pick + # the last one with a realm specified. + + # allow for double- and single-quoted realm values + # (single quotes are a violation of the RFC, but appear in the wild) + rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+' + 'realm=(["\']?)([^"\']*)\\2', re.I) + + # XXX could pre-emptively send auth info already accepted (RFC 2617, + # end of section 2, and section 1.2 immediately after "credentials" + # production). + + def __init__(self, password_mgr=None): + if password_mgr is None: + password_mgr = HTTPPasswordMgr() + self.passwd = password_mgr + self.add_password = self.passwd.add_password + self.retried = 0 + + def reset_retry_count(self): + self.retried = 0 + + def http_error_auth_reqed(self, authreq, host, req, headers): + # host may be an authority (without userinfo) or a URL with an + # authority + # XXX could be multiple headers + authreq = headers.get(authreq, None) + + if self.retried > 5: + # retry sending the username:password 5 times before failing. 
+ raise HTTPError(req.get_full_url(), 401, "basic auth failed", + headers, None) + else: + self.retried += 1 + + if authreq: + scheme = authreq.split()[0] + if scheme.lower() != 'basic': + raise ValueError("AbstractBasicAuthHandler does not" + " support the following scheme: '%s'" % + scheme) + else: + mo = AbstractBasicAuthHandler.rx.search(authreq) + if mo: + scheme, quote, realm = mo.groups() + if quote not in ['"',"'"]: + warnings.warn("Basic Auth Realm was unquoted", + UserWarning, 2) + if scheme.lower() == 'basic': + response = self.retry_http_basic_auth(host, req, realm) + if response and response.code != 401: + self.retried = 0 + return response + + def retry_http_basic_auth(self, host, req, realm): + user, pw = self.passwd.find_user_password(realm, host) + if pw is not None: + raw = "%s:%s" % (user, pw) + auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii") + if req.headers.get(self.auth_header, None) == auth: + return None + req.add_unredirected_header(self.auth_header, auth) + return self.parent.open(req, timeout=req.timeout) + else: + return None + + +class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): + + auth_header = 'Authorization' + + def http_error_401(self, req, fp, code, msg, headers): + url = req.full_url + response = self.http_error_auth_reqed('www-authenticate', + url, req, headers) + self.reset_retry_count() + return response + + +class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): + + auth_header = 'Proxy-authorization' + + def http_error_407(self, req, fp, code, msg, headers): + # http_error_auth_reqed requires that there is no userinfo component in + # authority. Assume there isn't one, since urllib.request does not (and + # should not, RFC 3986 s. 3.2.1) support requests for URLs containing + # userinfo. + authority = req.host + response = self.http_error_auth_reqed('proxy-authenticate', + authority, req, headers) + self.reset_retry_count() + return response + + +# Return n random bytes. +_randombytes = os.urandom + + +class AbstractDigestAuthHandler(object): + # Digest authentication is specified in RFC 2617. + + # XXX The client does not inspect the Authentication-Info header + # in a successful response. + + # XXX It should be possible to test this implementation against + # a mock server that just generates a static set of challenges. + + # XXX qop="auth-int" supports is shaky + + def __init__(self, passwd=None): + if passwd is None: + passwd = HTTPPasswordMgr() + self.passwd = passwd + self.add_password = self.passwd.add_password + self.retried = 0 + self.nonce_count = 0 + self.last_nonce = None + + def reset_retry_count(self): + self.retried = 0 + + def http_error_auth_reqed(self, auth_header, host, req, headers): + authreq = headers.get(auth_header, None) + if self.retried > 5: + # Don't fail endlessly - if we failed once, we'll probably + # fail a second time. Hm. Unless the Password Manager is + # prompting for the information. Crap. 
This isn't great + # but it's better than the current 'repeat until recursion + # depth exceeded' approach + raise HTTPError(req.full_url, 401, "digest auth failed", + headers, None) + else: + self.retried += 1 + if authreq: + scheme = authreq.split()[0] + if scheme.lower() == 'digest': + return self.retry_http_digest_auth(req, authreq) + elif scheme.lower() != 'basic': + raise ValueError("AbstractDigestAuthHandler does not support" + " the following scheme: '%s'" % scheme) + + def retry_http_digest_auth(self, req, auth): + token, challenge = auth.split(' ', 1) + chal = parse_keqv_list(filter(None, parse_http_list(challenge))) + auth = self.get_authorization(req, chal) + if auth: + auth_val = 'Digest %s' % auth + if req.headers.get(self.auth_header, None) == auth_val: + return None + req.add_unredirected_header(self.auth_header, auth_val) + resp = self.parent.open(req, timeout=req.timeout) + return resp + + def get_cnonce(self, nonce): + # The cnonce-value is an opaque + # quoted string value provided by the client and used by both client + # and server to avoid chosen plaintext attacks, to provide mutual + # authentication, and to provide some message integrity protection. + # This isn't a fabulous effort, but it's probably Good Enough. + s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime()) + b = s.encode("ascii") + _randombytes(8) + dig = hashlib.sha1(b).hexdigest() + return dig[:16] + + def get_authorization(self, req, chal): + try: + realm = chal['realm'] + nonce = chal['nonce'] + qop = chal.get('qop') + algorithm = chal.get('algorithm', 'MD5') + # mod_digest doesn't send an opaque, even though it isn't + # supposed to be optional + opaque = chal.get('opaque', None) + except KeyError: + return None + + H, KD = self.get_algorithm_impls(algorithm) + if H is None: + return None + + user, pw = self.passwd.find_user_password(realm, req.full_url) + if user is None: + return None + + # XXX not implemented yet + if req.data is not None: + entdig = self.get_entity_digest(req.data, chal) + else: + entdig = None + + A1 = "%s:%s:%s" % (user, realm, pw) + A2 = "%s:%s" % (req.get_method(), + # XXX selector: what about proxies and full urls + req.selector) + if qop == 'auth': + if nonce == self.last_nonce: + self.nonce_count += 1 + else: + self.nonce_count = 1 + self.last_nonce = nonce + ncvalue = '%08x' % self.nonce_count + cnonce = self.get_cnonce(nonce) + noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2)) + respdig = KD(H(A1), noncebit) + elif qop is None: + respdig = KD(H(A1), "%s:%s" % (nonce, H(A2))) + else: + # XXX handle auth-int. + raise URLError("qop '%s' is not supported." % qop) + + # XXX should the partial digests be encoded too? 
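+        # The credentials assembled below end up looking roughly like this
+        # (all values illustrative):
+        #   username="joe", realm="example", nonce="...", uri="/index.html",
+        #   response="<hex digest>", algorithm="MD5", qop=auth,
+        #   nc=00000001, cnonce="..."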
+ + base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ + 'response="%s"' % (user, realm, nonce, req.selector, + respdig) + if opaque: + base += ', opaque="%s"' % opaque + if entdig: + base += ', digest="%s"' % entdig + base += ', algorithm="%s"' % algorithm + if qop: + base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce) + return base + + def get_algorithm_impls(self, algorithm): + # lambdas assume digest modules are imported at the top level + if algorithm == 'MD5': + H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest() + elif algorithm == 'SHA': + H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest() + # XXX MD5-sess + KD = lambda s, d: H("%s:%s" % (s, d)) + return H, KD + + def get_entity_digest(self, data, chal): + # XXX not implemented yet + return None + + +class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + """An authentication protocol defined by RFC 2069 + + Digest authentication improves on basic authentication because it + does not transmit passwords in the clear. + """ + + auth_header = 'Authorization' + handler_order = 490 # before Basic auth + + def http_error_401(self, req, fp, code, msg, headers): + host = urlparse(req.full_url)[1] + retry = self.http_error_auth_reqed('www-authenticate', + host, req, headers) + self.reset_retry_count() + return retry + + +class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + + auth_header = 'Proxy-Authorization' + handler_order = 490 # before Basic auth + + def http_error_407(self, req, fp, code, msg, headers): + host = req.host + retry = self.http_error_auth_reqed('proxy-authenticate', + host, req, headers) + self.reset_retry_count() + return retry + +class AbstractHTTPHandler(BaseHandler): + + def __init__(self, debuglevel=0): + self._debuglevel = debuglevel + + def set_http_debuglevel(self, level): + self._debuglevel = level + + def do_request_(self, request): + host = request.host + if not host: + raise URLError('no host given') + + if request.data is not None: # POST + data = request.data + if isinstance(data, str): + msg = "POST data should be bytes or an iterable of bytes. " \ + "It cannot be of type str." + raise TypeError(msg) + if not request.has_header('Content-type'): + request.add_unredirected_header( + 'Content-type', + 'application/x-www-form-urlencoded') + if not request.has_header('Content-length'): + size = None + try: + ### For Python-Future: + if PY2 and isinstance(data, array.array): + # memoryviews of arrays aren't supported + # in Py2.7. (e.g. memoryview(array.array('I', + # [1, 2, 3, 4])) raises a TypeError.) + # So we calculate the size manually instead: + size = len(data) * data.itemsize + ### + else: + mv = memoryview(data) + size = len(mv) * mv.itemsize + except TypeError: + if isinstance(data, Iterable): + raise ValueError("Content-Length should be specified " + "for iterable data of type %r %r" % (type(data), + data)) + else: + request.add_unredirected_header( + 'Content-length', '%d' % size) + + sel_host = host + if request.has_proxy(): + scheme, sel = splittype(request.selector) + sel_host, sel_path = splithost(sel) + if not request.has_header('Host'): + request.add_unredirected_header('Host', sel_host) + for name, value in self.parent.addheaders: + name = name.capitalize() + if not request.has_header(name): + request.add_unredirected_header(name, value) + + return request + + def do_open(self, http_class, req, **http_conn_args): + """Return an HTTPResponse object for the request, using http_class. 
+ + http_class must implement the HTTPConnection API from http.client. + """ + host = req.host + if not host: + raise URLError('no host given') + + # will parse host:port + h = http_class(host, timeout=req.timeout, **http_conn_args) + + headers = dict(req.unredirected_hdrs) + headers.update(dict((k, v) for k, v in req.headers.items() + if k not in headers)) + + # TODO(jhylton): Should this be redesigned to handle + # persistent connections? + + # We want to make an HTTP/1.1 request, but the addinfourl + # class isn't prepared to deal with a persistent connection. + # It will try to read all remaining data from the socket, + # which will block while the server waits for the next request. + # So make sure the connection gets closed after the (only) + # request. + headers["Connection"] = "close" + headers = dict((name.title(), val) for name, val in headers.items()) + + if req._tunnel_host: + tunnel_headers = {} + proxy_auth_hdr = "Proxy-Authorization" + if proxy_auth_hdr in headers: + tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] + # Proxy-Authorization should not be sent to origin + # server. + del headers[proxy_auth_hdr] + h.set_tunnel(req._tunnel_host, headers=tunnel_headers) + + try: + h.request(req.get_method(), req.selector, req.data, headers) + except socket.error as err: # timeout error + h.close() + raise URLError(err) + else: + r = h.getresponse() + # If the server does not send us a 'Connection: close' header, + # HTTPConnection assumes the socket should be left open. Manually + # mark the socket to be closed when this response object goes away. + if h.sock: + h.sock.close() + h.sock = None + + + r.url = req.get_full_url() + # This line replaces the .msg attribute of the HTTPResponse + # with .headers, because urllib clients expect the response to + # have the reason in .msg. It would be good to mark this + # attribute is deprecated and get then to use info() or + # .headers. 
+ r.msg = r.reason + return r + + +class HTTPHandler(AbstractHTTPHandler): + + def http_open(self, req): + return self.do_open(http_client.HTTPConnection, req) + + http_request = AbstractHTTPHandler.do_request_ + +if hasattr(http_client, 'HTTPSConnection'): + + class HTTPSHandler(AbstractHTTPHandler): + + def __init__(self, debuglevel=0, context=None, check_hostname=None): + AbstractHTTPHandler.__init__(self, debuglevel) + self._context = context + self._check_hostname = check_hostname + + def https_open(self, req): + return self.do_open(http_client.HTTPSConnection, req, + context=self._context, check_hostname=self._check_hostname) + + https_request = AbstractHTTPHandler.do_request_ + + __all__.append('HTTPSHandler') + +class HTTPCookieProcessor(BaseHandler): + def __init__(self, cookiejar=None): + import future.backports.http.cookiejar as http_cookiejar + if cookiejar is None: + cookiejar = http_cookiejar.CookieJar() + self.cookiejar = cookiejar + + def http_request(self, request): + self.cookiejar.add_cookie_header(request) + return request + + def http_response(self, request, response): + self.cookiejar.extract_cookies(response, request) + return response + + https_request = http_request + https_response = http_response + +class UnknownHandler(BaseHandler): + def unknown_open(self, req): + type = req.type + raise URLError('unknown url type: %s' % type) + +def parse_keqv_list(l): + """Parse list of key=value strings where keys are not duplicated.""" + parsed = {} + for elt in l: + k, v = elt.split('=', 1) + if v[0] == '"' and v[-1] == '"': + v = v[1:-1] + parsed[k] = v + return parsed + +def parse_http_list(s): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Neither commas nor quotes count if they are escaped. + Only double-quotes count, not single-quotes. 
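+
+    For example, a comma inside a quoted string does not split the list:
+
+        >>> parse_http_list('a, "b, c", d')
+        ['a', '"b, c"', 'd']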
+ """ + res = [] + part = '' + + escape = quote = False + for cur in s: + if escape: + part += cur + escape = False + continue + if quote: + if cur == '\\': + escape = True + continue + elif cur == '"': + quote = False + part += cur + continue + + if cur == ',': + res.append(part) + part = '' + continue + + if cur == '"': + quote = True + + part += cur + + # append last part + if part: + res.append(part) + + return [part.strip() for part in res] + +class FileHandler(BaseHandler): + # Use local file or FTP depending on form of URL + def file_open(self, req): + url = req.selector + if url[:2] == '//' and url[2:3] != '/' and (req.host and + req.host != 'localhost'): + if not req.host is self.get_names(): + raise URLError("file:// scheme is supported only on localhost") + else: + return self.open_local_file(req) + + # names for the localhost + names = None + def get_names(self): + if FileHandler.names is None: + try: + FileHandler.names = tuple( + socket.gethostbyname_ex('localhost')[2] + + socket.gethostbyname_ex(socket.gethostname())[2]) + except socket.gaierror: + FileHandler.names = (socket.gethostbyname('localhost'),) + return FileHandler.names + + # not entirely sure what the rules are here + def open_local_file(self, req): + import future.backports.email.utils as email_utils + import mimetypes + host = req.host + filename = req.selector + localfile = url2pathname(filename) + try: + stats = os.stat(localfile) + size = stats.st_size + modified = email_utils.formatdate(stats.st_mtime, usegmt=True) + mtype = mimetypes.guess_type(filename)[0] + headers = email.message_from_string( + 'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' % + (mtype or 'text/plain', size, modified)) + if host: + host, port = splitport(host) + if not host or \ + (not port and _safe_gethostbyname(host) in self.get_names()): + if host: + origurl = 'file://' + host + filename + else: + origurl = 'file://' + filename + return addinfourl(open(localfile, 'rb'), headers, origurl) + except OSError as exp: + # users shouldn't expect OSErrors coming from urlopen() + raise URLError(exp) + raise URLError('file not on local host') + +def _safe_gethostbyname(host): + try: + return socket.gethostbyname(host) + except socket.gaierror: + return None + +class FTPHandler(BaseHandler): + def ftp_open(self, req): + import ftplib + import mimetypes + host = req.host + if not host: + raise URLError('ftp error: no host given') + host, port = splitport(host) + if port is None: + port = ftplib.FTP_PORT + else: + port = int(port) + + # username/password handling + user, host = splituser(host) + if user: + user, passwd = splitpasswd(user) + else: + passwd = None + host = unquote(host) + user = user or '' + passwd = passwd or '' + + try: + host = socket.gethostbyname(host) + except socket.error as msg: + raise URLError(msg) + path, attrs = splitattr(req.selector) + dirs = path.split('/') + dirs = list(map(unquote, dirs)) + dirs, file = dirs[:-1], dirs[-1] + if dirs and not dirs[0]: + dirs = dirs[1:] + try: + fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout) + type = file and 'I' or 'D' + for attr in attrs: + attr, value = splitvalue(attr) + if attr.lower() == 'type' and \ + value in ('a', 'A', 'i', 'I', 'd', 'D'): + type = value.upper() + fp, retrlen = fw.retrfile(file, type) + headers = "" + mtype = mimetypes.guess_type(req.full_url)[0] + if mtype: + headers += "Content-type: %s\n" % mtype + if retrlen is not None and retrlen >= 0: + headers += "Content-length: %d\n" % retrlen + headers = 
email.message_from_string(headers) + return addinfourl(fp, headers, req.full_url) + except ftplib.all_errors as exp: + exc = URLError('ftp error: %r' % exp) + raise_with_traceback(exc) + + def connect_ftp(self, user, passwd, host, port, dirs, timeout): + return ftpwrapper(user, passwd, host, port, dirs, timeout, + persistent=False) + +class CacheFTPHandler(FTPHandler): + # XXX would be nice to have pluggable cache strategies + # XXX this stuff is definitely not thread safe + def __init__(self): + self.cache = {} + self.timeout = {} + self.soonest = 0 + self.delay = 60 + self.max_conns = 16 + + def setTimeout(self, t): + self.delay = t + + def setMaxConns(self, m): + self.max_conns = m + + def connect_ftp(self, user, passwd, host, port, dirs, timeout): + key = user, host, port, '/'.join(dirs), timeout + if key in self.cache: + self.timeout[key] = time.time() + self.delay + else: + self.cache[key] = ftpwrapper(user, passwd, host, port, + dirs, timeout) + self.timeout[key] = time.time() + self.delay + self.check_cache() + return self.cache[key] + + def check_cache(self): + # first check for old ones + t = time.time() + if self.soonest <= t: + for k, v in list(self.timeout.items()): + if v < t: + self.cache[k].close() + del self.cache[k] + del self.timeout[k] + self.soonest = min(list(self.timeout.values())) + + # then check the size + if len(self.cache) == self.max_conns: + for k, v in list(self.timeout.items()): + if v == self.soonest: + del self.cache[k] + del self.timeout[k] + break + self.soonest = min(list(self.timeout.values())) + + def clear_cache(self): + for conn in self.cache.values(): + conn.close() + self.cache.clear() + self.timeout.clear() + + +# Code move from the old urllib module + +MAXFTPCACHE = 10 # Trim the ftp cache beyond this size + +# Helper for non-unix systems +if os.name == 'nt': + from nturl2path import url2pathname, pathname2url +else: + def url2pathname(pathname): + """OS-specific conversion from a relative URL of the 'file' scheme + to a file system path; not recommended for general use.""" + return unquote(pathname) + + def pathname2url(pathname): + """OS-specific conversion from a file system path to a relative URL + of the 'file' scheme; not recommended for general use.""" + return quote(pathname) + +# This really consists of two pieces: +# (1) a class which handles opening of all sorts of URLs +# (plus assorted utilities etc.) +# (2) a set of functions for parsing URLs +# XXX Should these be separated out into different modules? + + +ftpcache = {} +class URLopener(object): + """Class to open URLs. + This is a class rather than just a subroutine because we may need + more than one set of global protocol-specific options. + Note -- this is a base class for those who don't want the + automatic handling of errors type 302 (relocated) and 401 + (authorization needed).""" + + __tempfiles = None + + version = "Python-urllib/%s" % __version__ + + # Constructor + def __init__(self, proxies=None, **x509): + msg = "%(class)s style of invoking requests is deprecated. 
" \ + "Use newer urlopen functions/methods" % {'class': self.__class__.__name__} + warnings.warn(msg, DeprecationWarning, stacklevel=3) + if proxies is None: + proxies = getproxies() + assert hasattr(proxies, 'keys'), "proxies must be a mapping" + self.proxies = proxies + self.key_file = x509.get('key_file') + self.cert_file = x509.get('cert_file') + self.addheaders = [('User-Agent', self.version)] + self.__tempfiles = [] + self.__unlink = os.unlink # See cleanup() + self.tempcache = None + # Undocumented feature: if you assign {} to tempcache, + # it is used to cache files retrieved with + # self.retrieve(). This is not enabled by default + # since it does not work for changing documents (and I + # haven't got the logic to check expiration headers + # yet). + self.ftpcache = ftpcache + # Undocumented feature: you can use a different + # ftp cache by assigning to the .ftpcache member; + # in case you want logically independent URL openers + # XXX This is not threadsafe. Bah. + + def __del__(self): + self.close() + + def close(self): + self.cleanup() + + def cleanup(self): + # This code sometimes runs when the rest of this module + # has already been deleted, so it can't use any globals + # or import anything. + if self.__tempfiles: + for file in self.__tempfiles: + try: + self.__unlink(file) + except OSError: + pass + del self.__tempfiles[:] + if self.tempcache: + self.tempcache.clear() + + def addheader(self, *args): + """Add a header to be used by the HTTP interface only + e.g. u.addheader('Accept', 'sound/basic')""" + self.addheaders.append(args) + + # External interface + def open(self, fullurl, data=None): + """Use URLopener().open(file) instead of open(file, 'r').""" + fullurl = unwrap(to_bytes(fullurl)) + fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|") + if self.tempcache and fullurl in self.tempcache: + filename, headers = self.tempcache[fullurl] + fp = open(filename, 'rb') + return addinfourl(fp, headers, fullurl) + urltype, url = splittype(fullurl) + if not urltype: + urltype = 'file' + if urltype in self.proxies: + proxy = self.proxies[urltype] + urltype, proxyhost = splittype(proxy) + host, selector = splithost(proxyhost) + url = (host, fullurl) # Signal special case to open_*() + else: + proxy = None + name = 'open_' + urltype + self.type = urltype + name = name.replace('-', '_') + if not hasattr(self, name): + if proxy: + return self.open_unknown_proxy(proxy, fullurl, data) + else: + return self.open_unknown(fullurl, data) + try: + if data is None: + return getattr(self, name)(url) + else: + return getattr(self, name)(url, data) + except HTTPError: + raise + except socket.error as msg: + raise_with_traceback(IOError('socket error', msg)) + + def open_unknown(self, fullurl, data=None): + """Overridable interface to open unknown URL type.""" + type, url = splittype(fullurl) + raise IOError('url error', 'unknown url type', type) + + def open_unknown_proxy(self, proxy, fullurl, data=None): + """Overridable interface to open unknown URL type.""" + type, url = splittype(fullurl) + raise IOError('url error', 'invalid proxy for %s' % type, proxy) + + # External interface + def retrieve(self, url, filename=None, reporthook=None, data=None): + """retrieve(url) returns (filename, headers) for a local object + or (tempfilename, headers) for a remote object.""" + url = unwrap(to_bytes(url)) + if self.tempcache and url in self.tempcache: + return self.tempcache[url] + type, url1 = splittype(url) + if filename is None and (not type or type == 'file'): + try: + fp = 
self.open_local_file(url1) + hdrs = fp.info() + fp.close() + return url2pathname(splithost(url1)[1]), hdrs + except IOError as msg: + pass + fp = self.open(url, data) + try: + headers = fp.info() + if filename: + tfp = open(filename, 'wb') + else: + import tempfile + garbage, path = splittype(url) + garbage, path = splithost(path or "") + path, garbage = splitquery(path or "") + path, garbage = splitattr(path or "") + suffix = os.path.splitext(path)[1] + (fd, filename) = tempfile.mkstemp(suffix) + self.__tempfiles.append(filename) + tfp = os.fdopen(fd, 'wb') + try: + result = filename, headers + if self.tempcache is not None: + self.tempcache[url] = result + bs = 1024*8 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, bs, size) + while 1: + block = fp.read(bs) + if not block: + break + read += len(block) + tfp.write(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, bs, size) + finally: + tfp.close() + finally: + fp.close() + + # raise exception if actual size does not match content-length header + if size >= 0 and read < size: + raise ContentTooShortError( + "retrieval incomplete: got only %i out of %i bytes" + % (read, size), result) + + return result + + # Each method named open_ knows how to open that type of URL + + def _open_generic_http(self, connection_factory, url, data): + """Make an HTTP connection using connection_class. + + This is an internal method that should be called from + open_http() or open_https(). + + Arguments: + - connection_factory should take a host name and return an + HTTPConnection instance. + - url is the url to retrieval or a host, relative-path pair. + - data is payload for a POST request or None. + """ + + user_passwd = None + proxy_passwd= None + if isinstance(url, str): + host, selector = splithost(url) + if host: + user_passwd, host = splituser(host) + host = unquote(host) + realhost = host + else: + host, selector = url + # check whether the proxy contains authorization information + proxy_passwd, host = splituser(host) + # now we proceed with the url we want to obtain + urltype, rest = splittype(selector) + url = rest + user_passwd = None + if urltype.lower() != 'http': + realhost = None + else: + realhost, rest = splithost(rest) + if realhost: + user_passwd, realhost = splituser(realhost) + if user_passwd: + selector = "%s://%s%s" % (urltype, realhost, rest) + if proxy_bypass(realhost): + host = realhost + + if not host: raise IOError('http error', 'no host given') + + if proxy_passwd: + proxy_passwd = unquote(proxy_passwd) + proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii') + else: + proxy_auth = None + + if user_passwd: + user_passwd = unquote(user_passwd) + auth = base64.b64encode(user_passwd.encode()).decode('ascii') + else: + auth = None + http_conn = connection_factory(host) + headers = {} + if proxy_auth: + headers["Proxy-Authorization"] = "Basic %s" % proxy_auth + if auth: + headers["Authorization"] = "Basic %s" % auth + if realhost: + headers["Host"] = realhost + + # Add Connection:close as we don't support persistent connections yet. 
+ # This helps in closing the socket and avoiding ResourceWarning + + headers["Connection"] = "close" + + for header, value in self.addheaders: + headers[header] = value + + if data is not None: + headers["Content-Type"] = "application/x-www-form-urlencoded" + http_conn.request("POST", selector, data, headers) + else: + http_conn.request("GET", selector, headers=headers) + + try: + response = http_conn.getresponse() + except http_client.BadStatusLine: + # something went wrong with the HTTP status line + raise URLError("http protocol error: bad status line") + + # According to RFC 2616, "2xx" code indicates that the client's + # request was successfully received, understood, and accepted. + if 200 <= response.status < 300: + return addinfourl(response, response.msg, "http:" + url, + response.status) + else: + return self.http_error( + url, response.fp, + response.status, response.reason, response.msg, data) + + def open_http(self, url, data=None): + """Use HTTP protocol.""" + return self._open_generic_http(http_client.HTTPConnection, url, data) + + def http_error(self, url, fp, errcode, errmsg, headers, data=None): + """Handle http errors. + + Derived class can override this, or provide specific handlers + named http_error_DDD where DDD is the 3-digit error code.""" + # First check if there's a specific handler for this error + name = 'http_error_%d' % errcode + if hasattr(self, name): + method = getattr(self, name) + if data is None: + result = method(url, fp, errcode, errmsg, headers) + else: + result = method(url, fp, errcode, errmsg, headers, data) + if result: return result + return self.http_error_default(url, fp, errcode, errmsg, headers) + + def http_error_default(self, url, fp, errcode, errmsg, headers): + """Default error handler: close the connection and raise IOError.""" + fp.close() + raise HTTPError(url, errcode, errmsg, headers, None) + + if _have_ssl: + def _https_connection(self, host): + return http_client.HTTPSConnection(host, + key_file=self.key_file, + cert_file=self.cert_file) + + def open_https(self, url, data=None): + """Use HTTPS protocol.""" + return self._open_generic_http(self._https_connection, url, data) + + def open_file(self, url): + """Use local file or FTP depending on form of URL.""" + if not isinstance(url, str): + raise URLError('file error: proxy support for file protocol currently not implemented') + if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/': + raise ValueError("file:// scheme is supported only on localhost") + else: + return self.open_local_file(url) + + def open_local_file(self, url): + """Use local file.""" + import future.backports.email.utils as email_utils + import mimetypes + host, file = splithost(url) + localname = url2pathname(file) + try: + stats = os.stat(localname) + except OSError as e: + raise URLError(e.strerror, e.filename) + size = stats.st_size + modified = email_utils.formatdate(stats.st_mtime, usegmt=True) + mtype = mimetypes.guess_type(url)[0] + headers = email.message_from_string( + 'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' % + (mtype or 'text/plain', size, modified)) + if not host: + urlfile = file + if file[:1] == '/': + urlfile = 'file://' + file + return addinfourl(open(localname, 'rb'), headers, urlfile) + host, port = splitport(host) + if (not port + and socket.gethostbyname(host) in ((localhost(),) + thishost())): + urlfile = file + if file[:1] == '/': + urlfile = 'file://' + file + elif file[:2] == './': + raise ValueError("local file url may start with / or file:. 
Unknown url of type: %s" % url) + return addinfourl(open(localname, 'rb'), headers, urlfile) + raise URLError('local file error: not on local host') + + def open_ftp(self, url): + """Use FTP protocol.""" + if not isinstance(url, str): + raise URLError('ftp error: proxy support for ftp protocol currently not implemented') + import mimetypes + host, path = splithost(url) + if not host: raise URLError('ftp error: no host given') + host, port = splitport(host) + user, host = splituser(host) + if user: user, passwd = splitpasswd(user) + else: passwd = None + host = unquote(host) + user = unquote(user or '') + passwd = unquote(passwd or '') + host = socket.gethostbyname(host) + if not port: + import ftplib + port = ftplib.FTP_PORT + else: + port = int(port) + path, attrs = splitattr(path) + path = unquote(path) + dirs = path.split('/') + dirs, file = dirs[:-1], dirs[-1] + if dirs and not dirs[0]: dirs = dirs[1:] + if dirs and not dirs[0]: dirs[0] = '/' + key = user, host, port, '/'.join(dirs) + # XXX thread unsafe! + if len(self.ftpcache) > MAXFTPCACHE: + # Prune the cache, rather arbitrarily + for k in self.ftpcache.keys(): + if k != key: + v = self.ftpcache[k] + del self.ftpcache[k] + v.close() + try: + if key not in self.ftpcache: + self.ftpcache[key] = \ + ftpwrapper(user, passwd, host, port, dirs) + if not file: type = 'D' + else: type = 'I' + for attr in attrs: + attr, value = splitvalue(attr) + if attr.lower() == 'type' and \ + value in ('a', 'A', 'i', 'I', 'd', 'D'): + type = value.upper() + (fp, retrlen) = self.ftpcache[key].retrfile(file, type) + mtype = mimetypes.guess_type("ftp:" + url)[0] + headers = "" + if mtype: + headers += "Content-Type: %s\n" % mtype + if retrlen is not None and retrlen >= 0: + headers += "Content-Length: %d\n" % retrlen + headers = email.message_from_string(headers) + return addinfourl(fp, headers, "ftp:" + url) + except ftperrors() as exp: + raise_with_traceback(URLError('ftp error %r' % exp)) + + def open_data(self, url, data=None): + """Use "data" URL.""" + if not isinstance(url, str): + raise URLError('data error: proxy support for data protocol currently not implemented') + # ignore POSTed data + # + # syntax of data URLs: + # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data + # mediatype := [ type "/" subtype ] *( ";" parameter ) + # data := *urlchar + # parameter := attribute "=" value + try: + [type, data] = url.split(',', 1) + except ValueError: + raise IOError('data error', 'bad data URL') + if not type: + type = 'text/plain;charset=US-ASCII' + semi = type.rfind(';') + if semi >= 0 and '=' not in type[semi:]: + encoding = type[semi+1:] + type = type[:semi] + else: + encoding = '' + msg = [] + msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT', + time.gmtime(time.time()))) + msg.append('Content-type: %s' % type) + if encoding == 'base64': + # XXX is this encoding/decoding ok? 
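+            # For example, with 'data:text/plain;base64,SGVsbG8=' the
+            # remaining payload 'SGVsbG8=' decodes to 'Hello' here.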
+ data = base64.decodebytes(data.encode('ascii')).decode('latin-1') + else: + data = unquote(data) + msg.append('Content-Length: %d' % len(data)) + msg.append('') + msg.append(data) + msg = '\n'.join(msg) + headers = email.message_from_string(msg) + f = io.StringIO(msg) + #f.fileno = None # needed for addinfourl + return addinfourl(f, headers, url) + + +class FancyURLopener(URLopener): + """Derived class with handlers for errors we can handle (perhaps).""" + + def __init__(self, *args, **kwargs): + URLopener.__init__(self, *args, **kwargs) + self.auth_cache = {} + self.tries = 0 + self.maxtries = 10 + + def http_error_default(self, url, fp, errcode, errmsg, headers): + """Default error handling -- don't raise an exception.""" + return addinfourl(fp, headers, "http:" + url, errcode) + + def http_error_302(self, url, fp, errcode, errmsg, headers, data=None): + """Error 302 -- relocated (temporarily).""" + self.tries += 1 + if self.maxtries and self.tries >= self.maxtries: + if hasattr(self, "http_error_500"): + meth = self.http_error_500 + else: + meth = self.http_error_default + self.tries = 0 + return meth(url, fp, 500, + "Internal Server Error: Redirect Recursion", headers) + result = self.redirect_internal(url, fp, errcode, errmsg, headers, + data) + self.tries = 0 + return result + + def redirect_internal(self, url, fp, errcode, errmsg, headers, data): + if 'location' in headers: + newurl = headers['location'] + elif 'uri' in headers: + newurl = headers['uri'] + else: + return + fp.close() + + # In case the server sent a relative URL, join with original: + newurl = urljoin(self.type + ":" + url, newurl) + + urlparts = urlparse(newurl) + + # For security reasons, we don't allow redirection to anything other + # than http, https and ftp. + + # We are using newer HTTPError with older redirect_internal method + # This older method will get deprecated in 3.3 + + if urlparts.scheme not in ('http', 'https', 'ftp', ''): + raise HTTPError(newurl, errcode, + errmsg + + " Redirection to url '%s' is not allowed." % newurl, + headers, fp) + + return self.open(newurl) + + def http_error_301(self, url, fp, errcode, errmsg, headers, data=None): + """Error 301 -- also relocated (permanently).""" + return self.http_error_302(url, fp, errcode, errmsg, headers, data) + + def http_error_303(self, url, fp, errcode, errmsg, headers, data=None): + """Error 303 -- also relocated (essentially identical to 302).""" + return self.http_error_302(url, fp, errcode, errmsg, headers, data) + + def http_error_307(self, url, fp, errcode, errmsg, headers, data=None): + """Error 307 -- relocated, but turn POST into error.""" + if data is None: + return self.http_error_302(url, fp, errcode, errmsg, headers, data) + else: + return self.http_error_default(url, fp, errcode, errmsg, headers) + + def http_error_401(self, url, fp, errcode, errmsg, headers, data=None, + retry=False): + """Error 401 -- authentication required. 
+ This function supports Basic authentication only.""" + if 'www-authenticate' not in headers: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + stuff = headers['www-authenticate'] + match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) + if not match: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + scheme, realm = match.groups() + if scheme.lower() != 'basic': + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + if not retry: + URLopener.http_error_default(self, url, fp, errcode, errmsg, + headers) + name = 'retry_' + self.type + '_basic_auth' + if data is None: + return getattr(self,name)(url, realm) + else: + return getattr(self,name)(url, realm, data) + + def http_error_407(self, url, fp, errcode, errmsg, headers, data=None, + retry=False): + """Error 407 -- proxy authentication required. + This function supports Basic authentication only.""" + if 'proxy-authenticate' not in headers: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + stuff = headers['proxy-authenticate'] + match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) + if not match: + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + scheme, realm = match.groups() + if scheme.lower() != 'basic': + URLopener.http_error_default(self, url, fp, + errcode, errmsg, headers) + if not retry: + URLopener.http_error_default(self, url, fp, errcode, errmsg, + headers) + name = 'retry_proxy_' + self.type + '_basic_auth' + if data is None: + return getattr(self,name)(url, realm) + else: + return getattr(self,name)(url, realm, data) + + def retry_proxy_http_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + newurl = 'http://' + host + selector + proxy = self.proxies['http'] + urltype, proxyhost = splittype(proxy) + proxyhost, proxyselector = splithost(proxyhost) + i = proxyhost.find('@') + 1 + proxyhost = proxyhost[i:] + user, passwd = self.get_user_passwd(proxyhost, realm, i) + if not (user or passwd): return None + proxyhost = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), proxyhost) + self.proxies['http'] = 'http://' + proxyhost + proxyselector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def retry_proxy_https_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + newurl = 'https://' + host + selector + proxy = self.proxies['https'] + urltype, proxyhost = splittype(proxy) + proxyhost, proxyselector = splithost(proxyhost) + i = proxyhost.find('@') + 1 + proxyhost = proxyhost[i:] + user, passwd = self.get_user_passwd(proxyhost, realm, i) + if not (user or passwd): return None + proxyhost = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), proxyhost) + self.proxies['https'] = 'https://' + proxyhost + proxyselector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def retry_http_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + i = host.find('@') + 1 + host = host[i:] + user, passwd = self.get_user_passwd(host, realm, i) + if not (user or passwd): return None + host = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), host) + newurl = 'http://' + host + selector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def retry_https_basic_auth(self, url, realm, data=None): + host, selector = splithost(url) + i = host.find('@') + 1 + host = host[i:] + user, passwd = 
self.get_user_passwd(host, realm, i) + if not (user or passwd): return None + host = "%s:%s@%s" % (quote(user, safe=''), + quote(passwd, safe=''), host) + newurl = 'https://' + host + selector + if data is None: + return self.open(newurl) + else: + return self.open(newurl, data) + + def get_user_passwd(self, host, realm, clear_cache=0): + key = realm + '@' + host.lower() + if key in self.auth_cache: + if clear_cache: + del self.auth_cache[key] + else: + return self.auth_cache[key] + user, passwd = self.prompt_user_passwd(host, realm) + if user or passwd: self.auth_cache[key] = (user, passwd) + return user, passwd + + def prompt_user_passwd(self, host, realm): + """Override this in a GUI environment!""" + import getpass + try: + user = input("Enter username for %s at %s: " % (realm, host)) + passwd = getpass.getpass("Enter password for %s in %s at %s: " % + (user, realm, host)) + return user, passwd + except KeyboardInterrupt: + print() + return None, None + + +# Utility functions + +_localhost = None +def localhost(): + """Return the IP address of the magic hostname 'localhost'.""" + global _localhost + if _localhost is None: + _localhost = socket.gethostbyname('localhost') + return _localhost + +_thishost = None +def thishost(): + """Return the IP addresses of the current host.""" + global _thishost + if _thishost is None: + try: + _thishost = tuple(socket.gethostbyname_ex(socket.gethostname())[2]) + except socket.gaierror: + _thishost = tuple(socket.gethostbyname_ex('localhost')[2]) + return _thishost + +_ftperrors = None +def ftperrors(): + """Return the set of errors raised by the FTP class.""" + global _ftperrors + if _ftperrors is None: + import ftplib + _ftperrors = ftplib.all_errors + return _ftperrors + +_noheaders = None +def noheaders(): + """Return an empty email Message object.""" + global _noheaders + if _noheaders is None: + _noheaders = email.message_from_string("") + return _noheaders + + +# Utility classes + +class ftpwrapper(object): + """Class used by open_ftp() for cache of open FTP connections.""" + + def __init__(self, user, passwd, host, port, dirs, timeout=None, + persistent=True): + self.user = user + self.passwd = passwd + self.host = host + self.port = port + self.dirs = dirs + self.timeout = timeout + self.refcount = 0 + self.keepalive = persistent + self.init() + + def init(self): + import ftplib + self.busy = 0 + self.ftp = ftplib.FTP() + self.ftp.connect(self.host, self.port, self.timeout) + self.ftp.login(self.user, self.passwd) + _target = '/'.join(self.dirs) + self.ftp.cwd(_target) + + def retrfile(self, file, type): + import ftplib + self.endtransfer() + if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1 + else: cmd = 'TYPE ' + type; isdir = 0 + try: + self.ftp.voidcmd(cmd) + except ftplib.all_errors: + self.init() + self.ftp.voidcmd(cmd) + conn = None + if file and not isdir: + # Try to retrieve as a file + try: + cmd = 'RETR ' + file + conn, retrlen = self.ftp.ntransfercmd(cmd) + except ftplib.error_perm as reason: + if str(reason)[:3] != '550': + raise_with_traceback(URLError('ftp error: %r' % reason)) + if not conn: + # Set transfer mode to ASCII! + self.ftp.voidcmd('TYPE A') + # Try a directory listing. Verify that directory exists. 
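+            # (A RETR on a directory name typically fails with a 550 reply,
+            # which is tolerated above, so we fall back to LIST here.)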
+ if file: + pwd = self.ftp.pwd() + try: + try: + self.ftp.cwd(file) + except ftplib.error_perm as reason: + ### Was: + # raise URLError('ftp error: %r' % reason) from reason + exc = URLError('ftp error: %r' % reason) + exc.__cause__ = reason + raise exc + finally: + self.ftp.cwd(pwd) + cmd = 'LIST ' + file + else: + cmd = 'LIST' + conn, retrlen = self.ftp.ntransfercmd(cmd) + self.busy = 1 + + ftpobj = addclosehook(conn.makefile('rb'), self.file_close) + self.refcount += 1 + conn.close() + # Pass back both a suitably decorated object and a retrieval length + return (ftpobj, retrlen) + + def endtransfer(self): + self.busy = 0 + + def close(self): + self.keepalive = False + if self.refcount <= 0: + self.real_close() + + def file_close(self): + self.endtransfer() + self.refcount -= 1 + if self.refcount <= 0 and not self.keepalive: + self.real_close() + + def real_close(self): + self.endtransfer() + try: + self.ftp.close() + except ftperrors(): + pass + +# Proxy handling +def getproxies_environment(): + """Return a dictionary of scheme -> proxy server URL mappings. + + Scan the environment for variables named _proxy; + this seems to be the standard convention. If you need a + different way, you can pass a proxies dictionary to the + [Fancy]URLopener constructor. + + """ + proxies = {} + for name, value in os.environ.items(): + name = name.lower() + if value and name[-6:] == '_proxy': + proxies[name[:-6]] = value + return proxies + +def proxy_bypass_environment(host): + """Test if proxies should not be used for a particular host. + + Checks the environment for a variable named no_proxy, which should + be a list of DNS suffixes separated by commas, or '*' for all hosts. + """ + no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '') + # '*' is special case for always bypass + if no_proxy == '*': + return 1 + # strip port off host + hostonly, port = splitport(host) + # check if the host ends with any of the DNS suffixes + no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')] + for name in no_proxy_list: + if name and (hostonly.endswith(name) or host.endswith(name)): + return 1 + # otherwise, don't bypass + return 0 + + +# This code tests an OSX specific data structure but is testable on all +# platforms +def _proxy_bypass_macosx_sysconf(host, proxy_settings): + """ + Return True iff this host shouldn't be accessed using a proxy + + This function uses the MacOSX framework SystemConfiguration + to fetch the proxy information. + + proxy_settings come from _scproxy._get_proxy_settings or get mocked ie: + { 'exclude_simple': bool, + 'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16'] + } + """ + from fnmatch import fnmatch + + hostonly, port = splitport(host) + + def ip2num(ipAddr): + parts = ipAddr.split('.') + parts = list(map(int, parts)) + if len(parts) != 4: + parts = (parts + [0, 0, 0, 0])[:4] + return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3] + + # Check for simple host names: + if '.' 
not in host: + if proxy_settings['exclude_simple']: + return True + + hostIP = None + + for value in proxy_settings.get('exceptions', ()): + # Items in the list are strings like these: *.local, 169.254/16 + if not value: continue + + m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value) + if m is not None: + if hostIP is None: + try: + hostIP = socket.gethostbyname(hostonly) + hostIP = ip2num(hostIP) + except socket.error: + continue + + base = ip2num(m.group(1)) + mask = m.group(2) + if mask is None: + mask = 8 * (m.group(1).count('.') + 1) + else: + mask = int(mask[1:]) + mask = 32 - mask + + if (hostIP >> mask) == (base >> mask): + return True + + elif fnmatch(host, value): + return True + + return False + + +if sys.platform == 'darwin': + from _scproxy import _get_proxy_settings, _get_proxies + + def proxy_bypass_macosx_sysconf(host): + proxy_settings = _get_proxy_settings() + return _proxy_bypass_macosx_sysconf(host, proxy_settings) + + def getproxies_macosx_sysconf(): + """Return a dictionary of scheme -> proxy server URL mappings. + + This function uses the MacOSX framework SystemConfiguration + to fetch the proxy information. + """ + return _get_proxies() + + + + def proxy_bypass(host): + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_macosx_sysconf(host) + + def getproxies(): + return getproxies_environment() or getproxies_macosx_sysconf() + + +elif os.name == 'nt': + def getproxies_registry(): + """Return a dictionary of scheme -> proxy server URL mappings. + + Win32 uses the registry to store proxies. + + """ + proxies = {} + try: + import winreg + except ImportError: + # Std module, so should be around - but you never know! + return proxies + try: + internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') + proxyEnable = winreg.QueryValueEx(internetSettings, + 'ProxyEnable')[0] + if proxyEnable: + # Returned as Unicode but problems if not converted to ASCII + proxyServer = str(winreg.QueryValueEx(internetSettings, + 'ProxyServer')[0]) + if '=' in proxyServer: + # Per-protocol settings + for p in proxyServer.split(';'): + protocol, address = p.split('=', 1) + # See if address has a type:// prefix + if not re.match('^([^/:]+)://', address): + address = '%s://%s' % (protocol, address) + proxies[protocol] = address + else: + # Use one setting for all protocols + if proxyServer[:5] == 'http:': + proxies['http'] = proxyServer + else: + proxies['http'] = 'http://%s' % proxyServer + proxies['https'] = 'https://%s' % proxyServer + proxies['ftp'] = 'ftp://%s' % proxyServer + internetSettings.Close() + except (WindowsError, ValueError, TypeError): + # Either registry key not found etc, or the value in an + # unexpected format. + # proxies already set up to be empty so nothing to do + pass + return proxies + + def getproxies(): + """Return a dictionary of scheme -> proxy server URL mappings. + + Returns settings gathered from the environment, if specified, + or the registry. + + """ + return getproxies_environment() or getproxies_registry() + + def proxy_bypass_registry(host): + try: + import winreg + except ImportError: + # Std modules, so should be around - but you never know! 
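getproxies_environment and proxy_bypass_environment above are driven entirely by *_proxy / no_proxy environment variables, so they can be exercised in isolation. A small sketch against the same backported module (the proxy URL and host names are made up):

import os
from future.backports.urllib.request import (getproxies_environment,
                                              proxy_bypass_environment)

os.environ["http_proxy"] = "http://proxy.internal:3128"
# Every *_proxy variable is picked up, keyed by scheme.
print(getproxies_environment().get("http"))   # http://proxy.internal:3128

os.environ["no_proxy"] = "localhost,.example.com"
print(proxy_bypass_environment("api.example.com:8080"))  # 1 (suffix match)
print(proxy_bypass_environment("example.org"))           # 0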
+ return 0 + try: + internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') + proxyEnable = winreg.QueryValueEx(internetSettings, + 'ProxyEnable')[0] + proxyOverride = str(winreg.QueryValueEx(internetSettings, + 'ProxyOverride')[0]) + # ^^^^ Returned as Unicode but problems if not converted to ASCII + except WindowsError: + return 0 + if not proxyEnable or not proxyOverride: + return 0 + # try to make a host list from name and IP address. + rawHost, port = splitport(host) + host = [rawHost] + try: + addr = socket.gethostbyname(rawHost) + if addr != rawHost: + host.append(addr) + except socket.error: + pass + try: + fqdn = socket.getfqdn(rawHost) + if fqdn != rawHost: + host.append(fqdn) + except socket.error: + pass + # make a check value list from the registry entry: replace the + # '' string by the localhost entry and the corresponding + # canonical entry. + proxyOverride = proxyOverride.split(';') + # now check if we match one of the registry values. + for test in proxyOverride: + if test == '': + if '.' not in rawHost: + return 1 + test = test.replace(".", r"\.") # mask dots + test = test.replace("*", r".*") # change glob sequence + test = test.replace("?", r".") # change glob char + for val in host: + if re.match(test, val, re.I): + return 1 + return 0 + + def proxy_bypass(host): + """Return a dictionary of scheme -> proxy server URL mappings. + + Returns settings gathered from the environment, if specified, + or the registry. + + """ + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_registry(host) + +else: + # By default use environment variables + getproxies = getproxies_environment + proxy_bypass = proxy_bypass_environment diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/response.py b/minor_project/lib/python3.6/site-packages/future/backports/urllib/response.py new file mode 100644 index 0000000..adbf6e5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/urllib/response.py @@ -0,0 +1,103 @@ +"""Response classes used by urllib. + +The base class, addbase, defines a minimal file-like interface, +including read() and readline(). The typical response object is an +addinfourl instance, which defines an info() method that returns +headers and a geturl() method that returns the url. +""" +from __future__ import absolute_import, division, unicode_literals +from future.builtins import object + +class addbase(object): + """Base class for addinfo and addclosehook.""" + + # XXX Add a method to expose the timeout on the underlying socket? + + def __init__(self, fp): + # TODO(jhylton): Is there a better way to delegate using io? + self.fp = fp + self.read = self.fp.read + self.readline = self.fp.readline + # TODO(jhylton): Make sure an object with readlines() is also iterable + if hasattr(self.fp, "readlines"): + self.readlines = self.fp.readlines + if hasattr(self.fp, "fileno"): + self.fileno = self.fp.fileno + else: + self.fileno = lambda: None + + def __iter__(self): + # Assigning `__iter__` to the instance doesn't work as intended + # because the iter builtin does something like `cls.__iter__(obj)` + # and thus fails to find the _bound_ method `obj.__iter__`. + # Returning just `self.fp` works for built-in file objects but + # might not work for general file-like objects. 
+ return iter(self.fp) + + def __repr__(self): + return '<%s at %r whose fp = %r>' % (self.__class__.__name__, + id(self), self.fp) + + def close(self): + if self.fp: + self.fp.close() + self.fp = None + self.read = None + self.readline = None + self.readlines = None + self.fileno = None + self.__iter__ = None + self.__next__ = None + + def __enter__(self): + if self.fp is None: + raise ValueError("I/O operation on closed file") + return self + + def __exit__(self, type, value, traceback): + self.close() + +class addclosehook(addbase): + """Class to add a close hook to an open file.""" + + def __init__(self, fp, closehook, *hookargs): + addbase.__init__(self, fp) + self.closehook = closehook + self.hookargs = hookargs + + def close(self): + if self.closehook: + self.closehook(*self.hookargs) + self.closehook = None + self.hookargs = None + addbase.close(self) + +class addinfo(addbase): + """class to add an info() method to an open file.""" + + def __init__(self, fp, headers): + addbase.__init__(self, fp) + self.headers = headers + + def info(self): + return self.headers + +class addinfourl(addbase): + """class to add info() and geturl() methods to an open file.""" + + def __init__(self, fp, headers, url, code=None): + addbase.__init__(self, fp) + self.headers = headers + self.url = url + self.code = code + + def info(self): + return self.headers + + def getcode(self): + return self.code + + def geturl(self): + return self.url + +del absolute_import, division, unicode_literals, object diff --git a/minor_project/lib/python3.6/site-packages/future/backports/urllib/robotparser.py b/minor_project/lib/python3.6/site-packages/future/backports/urllib/robotparser.py new file mode 100644 index 0000000..a0f3651 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/urllib/robotparser.py @@ -0,0 +1,211 @@ +from __future__ import absolute_import, division, unicode_literals +from future.builtins import str +""" robotparser.py + + Copyright (C) 2000 Bastian Kleineidam + + You can choose between two licenses when using this package: + 1) GNU GPLv2 + 2) PSF license for Python 2.2 + + The robots.txt Exclusion Protocol is implemented as specified in + http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html +""" + +# Was: import urllib.parse, urllib.request +from future.backports import urllib +from future.backports.urllib import parse as _parse, request as _request +urllib.parse = _parse +urllib.request = _request + + +__all__ = ["RobotFileParser"] + +class RobotFileParser(object): + """ This class provides a set of methods to read, parse and answer + questions about a single robots.txt file. + + """ + + def __init__(self, url=''): + self.entries = [] + self.default_entry = None + self.disallow_all = False + self.allow_all = False + self.set_url(url) + self.last_checked = 0 + + def mtime(self): + """Returns the time the robots.txt file was last fetched. + + This is useful for long-running web spiders that need to + check for new robots.txt files periodically. + + """ + return self.last_checked + + def modified(self): + """Sets the time the robots.txt file was last fetched to the + current time. 
+ + """ + import time + self.last_checked = time.time() + + def set_url(self, url): + """Sets the URL referring to a robots.txt file.""" + self.url = url + self.host, self.path = urllib.parse.urlparse(url)[1:3] + + def read(self): + """Reads the robots.txt URL and feeds it to the parser.""" + try: + f = urllib.request.urlopen(self.url) + except urllib.error.HTTPError as err: + if err.code in (401, 403): + self.disallow_all = True + elif err.code >= 400: + self.allow_all = True + else: + raw = f.read() + self.parse(raw.decode("utf-8").splitlines()) + + def _add_entry(self, entry): + if "*" in entry.useragents: + # the default entry is considered last + if self.default_entry is None: + # the first default entry wins + self.default_entry = entry + else: + self.entries.append(entry) + + def parse(self, lines): + """Parse the input lines from a robots.txt file. + + We allow that a user-agent: line is not preceded by + one or more blank lines. + """ + # states: + # 0: start state + # 1: saw user-agent line + # 2: saw an allow or disallow line + state = 0 + entry = Entry() + + for line in lines: + if not line: + if state == 1: + entry = Entry() + state = 0 + elif state == 2: + self._add_entry(entry) + entry = Entry() + state = 0 + # remove optional comment and strip line + i = line.find('#') + if i >= 0: + line = line[:i] + line = line.strip() + if not line: + continue + line = line.split(':', 1) + if len(line) == 2: + line[0] = line[0].strip().lower() + line[1] = urllib.parse.unquote(line[1].strip()) + if line[0] == "user-agent": + if state == 2: + self._add_entry(entry) + entry = Entry() + entry.useragents.append(line[1]) + state = 1 + elif line[0] == "disallow": + if state != 0: + entry.rulelines.append(RuleLine(line[1], False)) + state = 2 + elif line[0] == "allow": + if state != 0: + entry.rulelines.append(RuleLine(line[1], True)) + state = 2 + if state == 2: + self._add_entry(entry) + + + def can_fetch(self, useragent, url): + """using the parsed robots.txt decide if useragent can fetch url""" + if self.disallow_all: + return False + if self.allow_all: + return True + # search for given user agent matches + # the first match counts + parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url)) + url = urllib.parse.urlunparse(('','',parsed_url.path, + parsed_url.params,parsed_url.query, parsed_url.fragment)) + url = urllib.parse.quote(url) + if not url: + url = "/" + for entry in self.entries: + if entry.applies_to(useragent): + return entry.allowance(url) + # try the default entry last + if self.default_entry: + return self.default_entry.allowance(url) + # agent not found ==> access granted + return True + + def __str__(self): + return ''.join([str(entry) + "\n" for entry in self.entries]) + + +class RuleLine(object): + """A rule line is a single "Allow:" (allowance==True) or "Disallow:" + (allowance==False) followed by a path.""" + def __init__(self, path, allowance): + if path == '' and not allowance: + # an empty value means allow all + allowance = True + self.path = urllib.parse.quote(path) + self.allowance = allowance + + def applies_to(self, filename): + return self.path == "*" or filename.startswith(self.path) + + def __str__(self): + return (self.allowance and "Allow" or "Disallow") + ": " + self.path + + +class Entry(object): + """An entry has one or more user-agents and zero or more rulelines""" + def __init__(self): + self.useragents = [] + self.rulelines = [] + + def __str__(self): + ret = [] + for agent in self.useragents: + ret.extend(["User-agent: ", agent, "\n"]) + for line 
in self.rulelines: + ret.extend([str(line), "\n"]) + return ''.join(ret) + + def applies_to(self, useragent): + """check if this entry applies to the specified agent""" + # split the name token and make it lower case + useragent = useragent.split("/")[0].lower() + for agent in self.useragents: + if agent == '*': + # we have the catch-all agent + return True + agent = agent.lower() + if agent in useragent: + return True + return False + + def allowance(self, filename): + """Preconditions: + - our agent applies to this entry + - filename is URL decoded""" + for line in self.rulelines: + if line.applies_to(filename): + return line.allowance + return True diff --git a/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__init__.py b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__init__.py new file mode 100644 index 0000000..196d378 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__init__.py @@ -0,0 +1 @@ +# This directory is a Python package. diff --git a/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..42bcad9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__pycache__/client.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__pycache__/client.cpython-36.pyc new file mode 100644 index 0000000..f7dc194 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__pycache__/client.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__pycache__/server.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__pycache__/server.cpython-36.pyc new file mode 100644 index 0000000..cde0132 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/__pycache__/server.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/client.py b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/client.py new file mode 100644 index 0000000..b78e5ba --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/client.py @@ -0,0 +1,1496 @@ +# +# XML-RPC CLIENT LIBRARY +# $Id$ +# +# an XML-RPC client interface for Python. +# +# the marshalling and response parser code can also be used to +# implement XML-RPC servers. +# +# Notes: +# this version is designed to work with Python 2.1 or newer. +# +# History: +# 1999-01-14 fl Created +# 1999-01-15 fl Changed dateTime to use localtime +# 1999-01-16 fl Added Binary/base64 element, default to RPC2 service +# 1999-01-19 fl Fixed array data element (from Skip Montanaro) +# 1999-01-21 fl Fixed dateTime constructor, etc. +# 1999-02-02 fl Added fault handling, handle empty sequences, etc. 
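Stepping back to the robotparser hunk above: RobotFileParser.parse builds Entry/RuleLine objects with a small state machine, and can_fetch answers first-match-wins against the entry whose user-agent applies, falling back to the "*" entry. A short sketch that feeds robots.txt lines directly to parse so no HTTP fetch is needed (the rules and user-agent string are invented):

from future.backports.urllib.robotparser import RobotFileParser

rp = RobotFileParser()
rp.parse([
    "User-agent: *",
    "Allow: /private/public-note.html",   # listed first: rules match in file order
    "Disallow: /private/",
])

print(rp.can_fetch("MyCrawler/1.0", "/private/public-note.html"))  # True
print(rp.can_fetch("MyCrawler/1.0", "/private/secret.html"))       # False
print(rp.can_fetch("MyCrawler/1.0", "/index.html"))                # True (no rule matches)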
+# 1999-02-10 fl Fixed problem with empty responses (from Skip Montanaro) +# 1999-06-20 fl Speed improvements, pluggable parsers/transports (0.9.8) +# 2000-11-28 fl Changed boolean to check the truth value of its argument +# 2001-02-24 fl Added encoding/Unicode/SafeTransport patches +# 2001-02-26 fl Added compare support to wrappers (0.9.9/1.0b1) +# 2001-03-28 fl Make sure response tuple is a singleton +# 2001-03-29 fl Don't require empty params element (from Nicholas Riley) +# 2001-06-10 fl Folded in _xmlrpclib accelerator support (1.0b2) +# 2001-08-20 fl Base xmlrpclib.Error on built-in Exception (from Paul Prescod) +# 2001-09-03 fl Allow Transport subclass to override getparser +# 2001-09-10 fl Lazy import of urllib, cgi, xmllib (20x import speedup) +# 2001-10-01 fl Remove containers from memo cache when done with them +# 2001-10-01 fl Use faster escape method (80% dumps speedup) +# 2001-10-02 fl More dumps microtuning +# 2001-10-04 fl Make sure import expat gets a parser (from Guido van Rossum) +# 2001-10-10 sm Allow long ints to be passed as ints if they don't overflow +# 2001-10-17 sm Test for int and long overflow (allows use on 64-bit systems) +# 2001-11-12 fl Use repr() to marshal doubles (from Paul Felix) +# 2002-03-17 fl Avoid buffered read when possible (from James Rucker) +# 2002-04-07 fl Added pythondoc comments +# 2002-04-16 fl Added __str__ methods to datetime/binary wrappers +# 2002-05-15 fl Added error constants (from Andrew Kuchling) +# 2002-06-27 fl Merged with Python CVS version +# 2002-10-22 fl Added basic authentication (based on code from Phillip Eby) +# 2003-01-22 sm Add support for the bool type +# 2003-02-27 gvr Remove apply calls +# 2003-04-24 sm Use cStringIO if available +# 2003-04-25 ak Add support for nil +# 2003-06-15 gn Add support for time.struct_time +# 2003-07-12 gp Correct marshalling of Faults +# 2003-10-31 mvl Add multicall support +# 2004-08-20 mvl Bump minimum supported Python version to 2.1 +# +# Copyright (c) 1999-2002 by Secret Labs AB. +# Copyright (c) 1999-2002 by Fredrik Lundh. +# +# info@pythonware.com +# http://www.pythonware.com +# +# -------------------------------------------------------------------- +# The XML-RPC client interface is +# +# Copyright (c) 1999-2002 by Secret Labs AB +# Copyright (c) 1999-2002 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its +# associated documentation, you agree that you have read, understood, +# and will comply with the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and +# its associated documentation for any purpose and without fee is +# hereby granted, provided that the above copyright notice appears in +# all copies, and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Secret Labs AB or the author not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD +# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- +# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR +# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY +# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +# OF THIS SOFTWARE. 
+# -------------------------------------------------------------------- + +""" +Ported using Python-Future from the Python 3.3 standard library. + +An XML-RPC client interface for Python. + +The marshalling and response parser code can also be used to +implement XML-RPC servers. + +Exported exceptions: + + Error Base class for client errors + ProtocolError Indicates an HTTP protocol error + ResponseError Indicates a broken response package + Fault Indicates an XML-RPC fault package + +Exported classes: + + ServerProxy Represents a logical connection to an XML-RPC server + + MultiCall Executor of boxcared xmlrpc requests + DateTime dateTime wrapper for an ISO 8601 string or time tuple or + localtime integer value to generate a "dateTime.iso8601" + XML-RPC value + Binary binary data wrapper + + Marshaller Generate an XML-RPC params chunk from a Python data structure + Unmarshaller Unmarshal an XML-RPC response from incoming XML event message + Transport Handles an HTTP transaction to an XML-RPC server + SafeTransport Handles an HTTPS transaction to an XML-RPC server + +Exported constants: + + (none) + +Exported functions: + + getparser Create instance of the fastest available parser & attach + to an unmarshalling object + dumps Convert an argument tuple or a Fault instance to an XML-RPC + request (or response, if the methodresponse option is used). + loads Convert an XML-RPC packet to unmarshalled data plus a method + name (None if not present). +""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) +from future.builtins import bytes, dict, int, range, str + +import base64 +# Py2.7 compatibility hack +base64.encodebytes = base64.encodestring +base64.decodebytes = base64.decodestring +import sys +import time +from datetime import datetime +from future.backports.http import client as http_client +from future.backports.urllib import parse as urllib_parse +from future.utils import ensure_new_type +from xml.parsers import expat +import socket +import errno +from io import BytesIO +try: + import gzip +except ImportError: + gzip = None #python can be built without zlib/gzip support + +# -------------------------------------------------------------------- +# Internal stuff + +def escape(s): + s = s.replace("&", "&") + s = s.replace("<", "<") + return s.replace(">", ">",) + +# used in User-Agent header sent +__version__ = sys.version[:3] + +# xmlrpc integer limits +MAXINT = 2**31-1 +MININT = -2**31 + +# -------------------------------------------------------------------- +# Error constants (from Dan Libby's specification at +# http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php) + +# Ranges of errors +PARSE_ERROR = -32700 +SERVER_ERROR = -32600 +APPLICATION_ERROR = -32500 +SYSTEM_ERROR = -32400 +TRANSPORT_ERROR = -32300 + +# Specific errors +NOT_WELLFORMED_ERROR = -32700 +UNSUPPORTED_ENCODING = -32701 +INVALID_ENCODING_CHAR = -32702 +INVALID_XMLRPC = -32600 +METHOD_NOT_FOUND = -32601 +INVALID_METHOD_PARAMS = -32602 +INTERNAL_ERROR = -32603 + +# -------------------------------------------------------------------- +# Exceptions + +## +# Base class for all kinds of client-side errors. + +class Error(Exception): + """Base class for client errors.""" + def __str__(self): + return repr(self) + +## +# Indicates an HTTP-level protocol error. This is raised by the HTTP +# transport layer, if the server returns an error code other than 200 +# (OK). +# +# @param url The target URL. +# @param errcode The HTTP error code. +# @param errmsg The HTTP error message. 
+# @param headers The HTTP header dictionary. + +class ProtocolError(Error): + """Indicates an HTTP protocol error.""" + def __init__(self, url, errcode, errmsg, headers): + Error.__init__(self) + self.url = url + self.errcode = errcode + self.errmsg = errmsg + self.headers = headers + def __repr__(self): + return ( + "" % + (self.url, self.errcode, self.errmsg) + ) + +## +# Indicates a broken XML-RPC response package. This exception is +# raised by the unmarshalling layer, if the XML-RPC response is +# malformed. + +class ResponseError(Error): + """Indicates a broken response package.""" + pass + +## +# Indicates an XML-RPC fault response package. This exception is +# raised by the unmarshalling layer, if the XML-RPC response contains +# a fault string. This exception can also be used as a class, to +# generate a fault XML-RPC message. +# +# @param faultCode The XML-RPC fault code. +# @param faultString The XML-RPC fault string. + +class Fault(Error): + """Indicates an XML-RPC fault package.""" + def __init__(self, faultCode, faultString, **extra): + Error.__init__(self) + self.faultCode = faultCode + self.faultString = faultString + def __repr__(self): + return "" % (ensure_new_type(self.faultCode), + ensure_new_type(self.faultString)) + +# -------------------------------------------------------------------- +# Special values + +## +# Backwards compatibility + +boolean = Boolean = bool + +## +# Wrapper for XML-RPC DateTime values. This converts a time value to +# the format used by XML-RPC. +#

+# The value can be given as a datetime object, as a string in the +# format "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by +# time.localtime()), or an integer value (as returned by time.time()). +# The wrapper uses time.localtime() to convert an integer to a time +# tuple. +# +# @param value The time, given as a datetime object, an ISO 8601 string, +# a time tuple, or an integer time value. + + +### For Python-Future: +def _iso8601_format(value): + return "%04d%02d%02dT%02d:%02d:%02d" % ( + value.year, value.month, value.day, + value.hour, value.minute, value.second) +### +# Issue #13305: different format codes across platforms +# _day0 = datetime(1, 1, 1) +# if _day0.strftime('%Y') == '0001': # Mac OS X +# def _iso8601_format(value): +# return value.strftime("%Y%m%dT%H:%M:%S") +# elif _day0.strftime('%4Y') == '0001': # Linux +# def _iso8601_format(value): +# return value.strftime("%4Y%m%dT%H:%M:%S") +# else: +# def _iso8601_format(value): +# return value.strftime("%Y%m%dT%H:%M:%S").zfill(17) +# del _day0 + + +def _strftime(value): + if isinstance(value, datetime): + return _iso8601_format(value) + + if not isinstance(value, (tuple, time.struct_time)): + if value == 0: + value = time.time() + value = time.localtime(value) + + return "%04d%02d%02dT%02d:%02d:%02d" % value[:6] + +class DateTime(object): + """DateTime wrapper for an ISO 8601 string or time tuple or + localtime integer value to generate 'dateTime.iso8601' XML-RPC + value. + """ + + def __init__(self, value=0): + if isinstance(value, str): + self.value = value + else: + self.value = _strftime(value) + + def make_comparable(self, other): + if isinstance(other, DateTime): + s = self.value + o = other.value + elif isinstance(other, datetime): + s = self.value + o = _iso8601_format(other) + elif isinstance(other, str): + s = self.value + o = other + elif hasattr(other, "timetuple"): + s = self.timetuple() + o = other.timetuple() + else: + otype = (hasattr(other, "__class__") + and other.__class__.__name__ + or type(other)) + raise TypeError("Can't compare %s and %s" % + (self.__class__.__name__, otype)) + return s, o + + def __lt__(self, other): + s, o = self.make_comparable(other) + return s < o + + def __le__(self, other): + s, o = self.make_comparable(other) + return s <= o + + def __gt__(self, other): + s, o = self.make_comparable(other) + return s > o + + def __ge__(self, other): + s, o = self.make_comparable(other) + return s >= o + + def __eq__(self, other): + s, o = self.make_comparable(other) + return s == o + + def __ne__(self, other): + s, o = self.make_comparable(other) + return s != o + + def timetuple(self): + return time.strptime(self.value, "%Y%m%dT%H:%M:%S") + + ## + # Get date/time value. + # + # @return Date/time value, as an ISO 8601 string. + + def __str__(self): + return self.value + + def __repr__(self): + return "" % (ensure_new_type(self.value), id(self)) + + def decode(self, data): + self.value = str(data).strip() + + def encode(self, out): + out.write("") + out.write(self.value) + out.write("\n") + +def _datetime(data): + # decode xml element contents into a DateTime structure. + value = DateTime() + value.decode(data) + return value + +def _datetime_type(data): + return datetime.strptime(data, "%Y%m%dT%H:%M:%S") + +## +# Wrapper for binary data. This can be used to transport any kind +# of binary data over XML-RPC, using BASE64 encoding. +# +# @param data An 8-bit string containing arbitrary data. 
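DateTime above normalises whatever it is given - a datetime, a 9-item time tuple, an integer or float timestamp (via time.localtime), or an already formatted string - into the "yyyymmddThh:mm:ss" form, and its rich comparisons work against strings and datetimes alike. A quick sketch, assuming the future package providing this backport is installed:

from datetime import datetime
from future.backports.xmlrpc.client import DateTime

stamp = DateTime(datetime(2020, 5, 17, 9, 30, 0))
print(str(stamp))                    # 20200517T09:30:00
print(stamp == "20200517T09:30:00")  # True: compared as strings
print(stamp > datetime(2020, 1, 1))  # True: the datetime is formatted first
print(stamp.timetuple().tm_year)     # 2020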
+ +class Binary(object): + """Wrapper for binary data.""" + + def __init__(self, data=None): + if data is None: + data = b"" + else: + if not isinstance(data, (bytes, bytearray)): + raise TypeError("expected bytes or bytearray, not %s" % + data.__class__.__name__) + data = bytes(data) # Make a copy of the bytes! + self.data = data + + ## + # Get buffer contents. + # + # @return Buffer contents, as an 8-bit string. + + def __str__(self): + return str(self.data, "latin-1") # XXX encoding?! + + def __eq__(self, other): + if isinstance(other, Binary): + other = other.data + return self.data == other + + def __ne__(self, other): + if isinstance(other, Binary): + other = other.data + return self.data != other + + def decode(self, data): + self.data = base64.decodebytes(data) + + def encode(self, out): + out.write("\n") + encoded = base64.encodebytes(self.data) + out.write(encoded.decode('ascii')) + out.write("\n") + +def _binary(data): + # decode xml element contents into a Binary structure + value = Binary() + value.decode(data) + return value + +WRAPPERS = (DateTime, Binary) + +# -------------------------------------------------------------------- +# XML parsers + +class ExpatParser(object): + # fast expat parser for Python 2.0 and later. + def __init__(self, target): + self._parser = parser = expat.ParserCreate(None, None) + self._target = target + parser.StartElementHandler = target.start + parser.EndElementHandler = target.end + parser.CharacterDataHandler = target.data + encoding = None + target.xml(encoding, None) + + def feed(self, data): + self._parser.Parse(data, 0) + + def close(self): + self._parser.Parse("", 1) # end of data + del self._target, self._parser # get rid of circular references + +# -------------------------------------------------------------------- +# XML-RPC marshalling and unmarshalling code + +## +# XML-RPC marshaller. +# +# @param encoding Default encoding for 8-bit strings. The default +# value is None (interpreted as UTF-8). +# @see dumps + +class Marshaller(object): + """Generate an XML-RPC params chunk from a Python data structure. + + Create a Marshaller instance for each set of parameters, and use + the "dumps" method to convert your data (represented as a tuple) + to an XML-RPC params chunk. To write a fault response, pass a + Fault instance instead. You may prefer to use the "dumps" module + function for this purpose. + """ + + # by the way, if you don't understand what's going on in here, + # that's perfectly ok. + + def __init__(self, encoding=None, allow_none=False): + self.memo = {} + self.data = None + self.encoding = encoding + self.allow_none = allow_none + + dispatch = {} + + def dumps(self, values): + out = [] + write = out.append + dump = self.__dump + if isinstance(values, Fault): + # fault instance + write("\n") + dump({'faultCode': values.faultCode, + 'faultString': values.faultString}, + write) + write("\n") + else: + # parameter block + # FIXME: the xml-rpc specification allows us to leave out + # the entire block if there are no parameters. + # however, changing this may break older code (including + # old versions of xmlrpclib.py), so this is better left as + # is for now. See @XMLRPC3 for more information. 
/F + write("\n") + for v in values: + write("\n") + dump(v, write) + write("\n") + write("\n") + result = "".join(out) + return str(result) + + def __dump(self, value, write): + try: + f = self.dispatch[type(ensure_new_type(value))] + except KeyError: + # check if this object can be marshalled as a structure + if not hasattr(value, '__dict__'): + raise TypeError("cannot marshal %s objects" % type(value)) + # check if this class is a sub-class of a basic type, + # because we don't know how to marshal these types + # (e.g. a string sub-class) + for type_ in type(value).__mro__: + if type_ in self.dispatch.keys(): + raise TypeError("cannot marshal %s objects" % type(value)) + # XXX(twouters): using "_arbitrary_instance" as key as a quick-fix + # for the p3yk merge, this should probably be fixed more neatly. + f = self.dispatch["_arbitrary_instance"] + f(self, value, write) + + def dump_nil (self, value, write): + if not self.allow_none: + raise TypeError("cannot marshal None unless allow_none is enabled") + write("") + dispatch[type(None)] = dump_nil + + def dump_bool(self, value, write): + write("") + write(value and "1" or "0") + write("\n") + dispatch[bool] = dump_bool + + def dump_long(self, value, write): + if value > MAXINT or value < MININT: + raise OverflowError("long int exceeds XML-RPC limits") + write("") + write(str(int(value))) + write("\n") + dispatch[int] = dump_long + + # backward compatible + dump_int = dump_long + + def dump_double(self, value, write): + write("") + write(repr(ensure_new_type(value))) + write("\n") + dispatch[float] = dump_double + + def dump_unicode(self, value, write, escape=escape): + write("") + write(escape(value)) + write("\n") + dispatch[str] = dump_unicode + + def dump_bytes(self, value, write): + write("\n") + encoded = base64.encodebytes(value) + write(encoded.decode('ascii')) + write("\n") + dispatch[bytes] = dump_bytes + dispatch[bytearray] = dump_bytes + + def dump_array(self, value, write): + i = id(value) + if i in self.memo: + raise TypeError("cannot marshal recursive sequences") + self.memo[i] = None + dump = self.__dump + write("\n") + for v in value: + dump(v, write) + write("\n") + del self.memo[i] + dispatch[tuple] = dump_array + dispatch[list] = dump_array + + def dump_struct(self, value, write, escape=escape): + i = id(value) + if i in self.memo: + raise TypeError("cannot marshal recursive dictionaries") + self.memo[i] = None + dump = self.__dump + write("\n") + for k, v in value.items(): + write("\n") + if not isinstance(k, str): + raise TypeError("dictionary key must be string") + write("%s\n" % escape(k)) + dump(v, write) + write("\n") + write("\n") + del self.memo[i] + dispatch[dict] = dump_struct + + def dump_datetime(self, value, write): + write("") + write(_strftime(value)) + write("\n") + dispatch[datetime] = dump_datetime + + def dump_instance(self, value, write): + # check for special wrappers + if value.__class__ in WRAPPERS: + self.write = write + value.encode(self) + del self.write + else: + # store instance attributes as a struct (really?) + self.dump_struct(value.__dict__, write) + dispatch[DateTime] = dump_instance + dispatch[Binary] = dump_instance + # XXX(twouters): using "_arbitrary_instance" as key as a quick-fix + # for the p3yk merge, this should probably be fixed more neatly. + dispatch["_arbitrary_instance"] = dump_instance + +## +# XML-RPC unmarshaller. +# +# @see loads + +class Unmarshaller(object): + """Unmarshal an XML-RPC response, based on incoming XML event + messages (start, data, end). 
Call close() to get the resulting + data structure. + + Note that this reader is fairly tolerant, and gladly accepts bogus + XML-RPC data without complaining (but not bogus XML). + """ + + # and again, if you don't understand what's going on in here, + # that's perfectly ok. + + def __init__(self, use_datetime=False, use_builtin_types=False): + self._type = None + self._stack = [] + self._marks = [] + self._data = [] + self._methodname = None + self._encoding = "utf-8" + self.append = self._stack.append + self._use_datetime = use_builtin_types or use_datetime + self._use_bytes = use_builtin_types + + def close(self): + # return response tuple and target method + if self._type is None or self._marks: + raise ResponseError() + if self._type == "fault": + raise Fault(**self._stack[0]) + return tuple(self._stack) + + def getmethodname(self): + return self._methodname + + # + # event handlers + + def xml(self, encoding, standalone): + self._encoding = encoding + # FIXME: assert standalone == 1 ??? + + def start(self, tag, attrs): + # prepare to handle this element + if tag == "array" or tag == "struct": + self._marks.append(len(self._stack)) + self._data = [] + self._value = (tag == "value") + + def data(self, text): + self._data.append(text) + + def end(self, tag): + # call the appropriate end tag handler + try: + f = self.dispatch[tag] + except KeyError: + pass # unknown tag ? + else: + return f(self, "".join(self._data)) + + # + # accelerator support + + def end_dispatch(self, tag, data): + # dispatch data + try: + f = self.dispatch[tag] + except KeyError: + pass # unknown tag ? + else: + return f(self, data) + + # + # element decoders + + dispatch = {} + + def end_nil (self, data): + self.append(None) + self._value = 0 + dispatch["nil"] = end_nil + + def end_boolean(self, data): + if data == "0": + self.append(False) + elif data == "1": + self.append(True) + else: + raise TypeError("bad boolean value") + self._value = 0 + dispatch["boolean"] = end_boolean + + def end_int(self, data): + self.append(int(data)) + self._value = 0 + dispatch["i4"] = end_int + dispatch["i8"] = end_int + dispatch["int"] = end_int + + def end_double(self, data): + self.append(float(data)) + self._value = 0 + dispatch["double"] = end_double + + def end_string(self, data): + if self._encoding: + data = data.decode(self._encoding) + self.append(data) + self._value = 0 + dispatch["string"] = end_string + dispatch["name"] = end_string # struct keys are always strings + + def end_array(self, data): + mark = self._marks.pop() + # map arrays to Python lists + self._stack[mark:] = [self._stack[mark:]] + self._value = 0 + dispatch["array"] = end_array + + def end_struct(self, data): + mark = self._marks.pop() + # map structs to Python dictionaries + dict = {} + items = self._stack[mark:] + for i in range(0, len(items), 2): + dict[items[i]] = items[i+1] + self._stack[mark:] = [dict] + self._value = 0 + dispatch["struct"] = end_struct + + def end_base64(self, data): + value = Binary() + value.decode(data.encode("ascii")) + if self._use_bytes: + value = value.data + self.append(value) + self._value = 0 + dispatch["base64"] = end_base64 + + def end_dateTime(self, data): + value = DateTime() + value.decode(data) + if self._use_datetime: + value = _datetime_type(data) + self.append(value) + dispatch["dateTime.iso8601"] = end_dateTime + + def end_value(self, data): + # if we stumble upon a value element with no internal + # elements, treat it as a string element + if self._value: + self.end_string(data) + dispatch["value"] = 
end_value + + def end_params(self, data): + self._type = "params" + dispatch["params"] = end_params + + def end_fault(self, data): + self._type = "fault" + dispatch["fault"] = end_fault + + def end_methodName(self, data): + if self._encoding: + data = data.decode(self._encoding) + self._methodname = data + self._type = "methodName" # no params + dispatch["methodName"] = end_methodName + +## Multicall support +# + +class _MultiCallMethod(object): + # some lesser magic to store calls made to a MultiCall object + # for batch execution + def __init__(self, call_list, name): + self.__call_list = call_list + self.__name = name + def __getattr__(self, name): + return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name)) + def __call__(self, *args): + self.__call_list.append((self.__name, args)) + +class MultiCallIterator(object): + """Iterates over the results of a multicall. Exceptions are + raised in response to xmlrpc faults.""" + + def __init__(self, results): + self.results = results + + def __getitem__(self, i): + item = self.results[i] + if isinstance(type(item), dict): + raise Fault(item['faultCode'], item['faultString']) + elif type(item) == type([]): + return item[0] + else: + raise ValueError("unexpected type in multicall result") + +class MultiCall(object): + """server -> a object used to boxcar method calls + + server should be a ServerProxy object. + + Methods can be added to the MultiCall using normal + method call syntax e.g.: + + multicall = MultiCall(server_proxy) + multicall.add(2,3) + multicall.get_address("Guido") + + To execute the multicall, call the MultiCall object e.g.: + + add_result, address = multicall() + """ + + def __init__(self, server): + self.__server = server + self.__call_list = [] + + def __repr__(self): + return "" % id(self) + + __str__ = __repr__ + + def __getattr__(self, name): + return _MultiCallMethod(self.__call_list, name) + + def __call__(self): + marshalled_list = [] + for name, args in self.__call_list: + marshalled_list.append({'methodName' : name, 'params' : args}) + + return MultiCallIterator(self.__server.system.multicall(marshalled_list)) + +# -------------------------------------------------------------------- +# convenience functions + +FastMarshaller = FastParser = FastUnmarshaller = None + +## +# Create a parser object, and connect it to an unmarshalling instance. +# This function picks the fastest available XML parser. +# +# return A (parser, unmarshaller) tuple. + +def getparser(use_datetime=False, use_builtin_types=False): + """getparser() -> parser, unmarshaller + + Create an instance of the fastest available parser, and attach it + to an unmarshalling object. Return both objects. + """ + if FastParser and FastUnmarshaller: + if use_builtin_types: + mkdatetime = _datetime_type + mkbytes = base64.decodebytes + elif use_datetime: + mkdatetime = _datetime_type + mkbytes = _binary + else: + mkdatetime = _datetime + mkbytes = _binary + target = FastUnmarshaller(True, False, mkbytes, mkdatetime, Fault) + parser = FastParser(target) + else: + target = Unmarshaller(use_datetime=use_datetime, use_builtin_types=use_builtin_types) + if FastParser: + parser = FastParser(target) + else: + parser = ExpatParser(target) + return parser, target + +## +# Convert a Python tuple or a Fault instance to an XML-RPC packet. +# +# @def dumps(params, **options) +# @param params A tuple or Fault instance. +# @keyparam methodname If given, create a methodCall request for +# this method name. 
+# @keyparam methodresponse If given, create a methodResponse packet. +# If used with a tuple, the tuple must be a singleton (that is, +# it must contain exactly one element). +# @keyparam encoding The packet encoding. +# @return A string containing marshalled data. + +def dumps(params, methodname=None, methodresponse=None, encoding=None, + allow_none=False): + """data [,options] -> marshalled data + + Convert an argument tuple or a Fault instance to an XML-RPC + request (or response, if the methodresponse option is used). + + In addition to the data object, the following options can be given + as keyword arguments: + + methodname: the method name for a methodCall packet + + methodresponse: true to create a methodResponse packet. + If this option is used with a tuple, the tuple must be + a singleton (i.e. it can contain only one element). + + encoding: the packet encoding (default is UTF-8) + + All byte strings in the data structure are assumed to use the + packet encoding. Unicode strings are automatically converted, + where necessary. + """ + + assert isinstance(params, (tuple, Fault)), "argument must be tuple or Fault instance" + if isinstance(params, Fault): + methodresponse = 1 + elif methodresponse and isinstance(params, tuple): + assert len(params) == 1, "response tuple must be a singleton" + + if not encoding: + encoding = "utf-8" + + if FastMarshaller: + m = FastMarshaller(encoding) + else: + m = Marshaller(encoding, allow_none) + + data = m.dumps(params) + + if encoding != "utf-8": + xmlheader = "\n" % str(encoding) + else: + xmlheader = "\n" # utf-8 is default + + # standard XML-RPC wrappings + if methodname: + # a method call + if not isinstance(methodname, str): + methodname = methodname.encode(encoding) + data = ( + xmlheader, + "\n" + "", methodname, "\n", + data, + "\n" + ) + elif methodresponse: + # a method response, or a fault structure + data = ( + xmlheader, + "\n", + data, + "\n" + ) + else: + return data # return as is + return str("").join(data) + +## +# Convert an XML-RPC packet to a Python object. If the XML-RPC packet +# represents a fault condition, this function raises a Fault exception. +# +# @param data An XML-RPC packet, given as an 8-bit string. +# @return A tuple containing the unpacked data, and the method name +# (None if not present). +# @see Fault + +def loads(data, use_datetime=False, use_builtin_types=False): + """data -> unmarshalled data, method name + + Convert an XML-RPC packet to unmarshalled data plus a method + name (None if not present). + + If the XML-RPC packet represents a fault condition, this function + raises a Fault exception. 
+ """ + p, u = getparser(use_datetime=use_datetime, use_builtin_types=use_builtin_types) + p.feed(data) + p.close() + return u.close(), u.getmethodname() + +## +# Encode a string using the gzip content encoding such as specified by the +# Content-Encoding: gzip +# in the HTTP header, as described in RFC 1952 +# +# @param data the unencoded data +# @return the encoded data + +def gzip_encode(data): + """data -> gzip encoded data + + Encode data using the gzip content encoding as described in RFC 1952 + """ + if not gzip: + raise NotImplementedError + f = BytesIO() + gzf = gzip.GzipFile(mode="wb", fileobj=f, compresslevel=1) + gzf.write(data) + gzf.close() + encoded = f.getvalue() + f.close() + return encoded + +## +# Decode a string using the gzip content encoding such as specified by the +# Content-Encoding: gzip +# in the HTTP header, as described in RFC 1952 +# +# @param data The encoded data +# @return the unencoded data +# @raises ValueError if data is not correctly coded. + +def gzip_decode(data): + """gzip encoded data -> unencoded data + + Decode data using the gzip content encoding as described in RFC 1952 + """ + if not gzip: + raise NotImplementedError + f = BytesIO(data) + gzf = gzip.GzipFile(mode="rb", fileobj=f) + try: + decoded = gzf.read() + except IOError: + raise ValueError("invalid data") + f.close() + gzf.close() + return decoded + +## +# Return a decoded file-like object for the gzip encoding +# as described in RFC 1952. +# +# @param response A stream supporting a read() method +# @return a file-like object that the decoded data can be read() from + +class GzipDecodedResponse(gzip.GzipFile if gzip else object): + """a file-like object to decode a response encoded with the gzip + method, as described in RFC 1952. + """ + def __init__(self, response): + #response doesn't support tell() and read(), required by + #GzipFile + if not gzip: + raise NotImplementedError + self.io = BytesIO(response.read()) + gzip.GzipFile.__init__(self, mode="rb", fileobj=self.io) + + def close(self): + gzip.GzipFile.close(self) + self.io.close() + + +# -------------------------------------------------------------------- +# request dispatcher + +class _Method(object): + # some magic to bind an XML-RPC method to an RPC server. + # supports "nested" methods (e.g. examples.getStateName) + def __init__(self, send, name): + self.__send = send + self.__name = name + def __getattr__(self, name): + return _Method(self.__send, "%s.%s" % (self.__name, name)) + def __call__(self, *args): + return self.__send(self.__name, args) + +## +# Standard transport class for XML-RPC over HTTP. +#

+# You can create custom transports by subclassing this method, and +# overriding selected methods. + +class Transport(object): + """Handles an HTTP transaction to an XML-RPC server.""" + + # client identifier (may be overridden) + user_agent = "Python-xmlrpc/%s" % __version__ + + #if true, we'll request gzip encoding + accept_gzip_encoding = True + + # if positive, encode request using gzip if it exceeds this threshold + # note that many server will get confused, so only use it if you know + # that they can decode such a request + encode_threshold = None #None = don't encode + + def __init__(self, use_datetime=False, use_builtin_types=False): + self._use_datetime = use_datetime + self._use_builtin_types = use_builtin_types + self._connection = (None, None) + self._extra_headers = [] + + ## + # Send a complete request, and parse the response. + # Retry request if a cached connection has disconnected. + # + # @param host Target host. + # @param handler Target PRC handler. + # @param request_body XML-RPC request body. + # @param verbose Debugging flag. + # @return Parsed response. + + def request(self, host, handler, request_body, verbose=False): + #retry request once if cached connection has gone cold + for i in (0, 1): + try: + return self.single_request(host, handler, request_body, verbose) + except socket.error as e: + if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE): + raise + except http_client.BadStatusLine: #close after we sent request + if i: + raise + + def single_request(self, host, handler, request_body, verbose=False): + # issue XML-RPC request + try: + http_conn = self.send_request(host, handler, request_body, verbose) + resp = http_conn.getresponse() + if resp.status == 200: + self.verbose = verbose + return self.parse_response(resp) + + except Fault: + raise + except Exception: + #All unexpected errors leave connection in + # a strange state, so we clear it. + self.close() + raise + + #We got an error response. + #Discard any response data and raise exception + if resp.getheader("content-length", ""): + resp.read() + raise ProtocolError( + host + handler, + resp.status, resp.reason, + dict(resp.getheaders()) + ) + + + ## + # Create parser. + # + # @return A 2-tuple containing a parser and a unmarshaller. + + def getparser(self): + # get parser and unmarshaller + return getparser(use_datetime=self._use_datetime, + use_builtin_types=self._use_builtin_types) + + ## + # Get authorization info from host parameter + # Host may be a string, or a (host, x509-dict) tuple; if a string, + # it is checked for a "user:pw@host" format, and a "Basic + # Authentication" header is added if appropriate. + # + # @param host Host descriptor (URL or (URL, x509 info) tuple). + # @return A 3-tuple containing (actual host, extra headers, + # x509 info). The header and x509 fields may be None. + + def get_host_info(self, host): + + x509 = {} + if isinstance(host, tuple): + host, x509 = host + + auth, host = urllib_parse.splituser(host) + + if auth: + auth = urllib_parse.unquote_to_bytes(auth) + auth = base64.encodebytes(auth).decode("utf-8") + auth = "".join(auth.split()) # get rid of whitespace + extra_headers = [ + ("Authorization", "Basic " + auth) + ] + else: + extra_headers = [] + + return host, extra_headers, x509 + + ## + # Connect to server. + # + # @param host Target host. + # @return An HTTPConnection object + + def make_connection(self, host): + #return an existing connection if possible. This allows + #HTTP/1.1 keep-alive. 
+ if self._connection and host == self._connection[0]: + return self._connection[1] + # create a HTTP connection object from a host descriptor + chost, self._extra_headers, x509 = self.get_host_info(host) + self._connection = host, http_client.HTTPConnection(chost) + return self._connection[1] + + ## + # Clear any cached connection object. + # Used in the event of socket errors. + # + def close(self): + if self._connection[1]: + self._connection[1].close() + self._connection = (None, None) + + ## + # Send HTTP request. + # + # @param host Host descriptor (URL or (URL, x509 info) tuple). + # @param handler Targer RPC handler (a path relative to host) + # @param request_body The XML-RPC request body + # @param debug Enable debugging if debug is true. + # @return An HTTPConnection. + + def send_request(self, host, handler, request_body, debug): + connection = self.make_connection(host) + headers = self._extra_headers[:] + if debug: + connection.set_debuglevel(1) + if self.accept_gzip_encoding and gzip: + connection.putrequest("POST", handler, skip_accept_encoding=True) + headers.append(("Accept-Encoding", "gzip")) + else: + connection.putrequest("POST", handler) + headers.append(("Content-Type", "text/xml")) + headers.append(("User-Agent", self.user_agent)) + self.send_headers(connection, headers) + self.send_content(connection, request_body) + return connection + + ## + # Send request headers. + # This function provides a useful hook for subclassing + # + # @param connection httpConnection. + # @param headers list of key,value pairs for HTTP headers + + def send_headers(self, connection, headers): + for key, val in headers: + connection.putheader(key, val) + + ## + # Send request body. + # This function provides a useful hook for subclassing + # + # @param connection httpConnection. + # @param request_body XML-RPC request body. + + def send_content(self, connection, request_body): + #optionally encode the request + if (self.encode_threshold is not None and + self.encode_threshold < len(request_body) and + gzip): + connection.putheader("Content-Encoding", "gzip") + request_body = gzip_encode(request_body) + + connection.putheader("Content-Length", str(len(request_body))) + connection.endheaders(request_body) + + ## + # Parse response. + # + # @param file Stream. + # @return Response tuple and target method. + + def parse_response(self, response): + # read response data from httpresponse, and parse it + # Check for new http response object, otherwise it is a file object. + if hasattr(response, 'getheader'): + if response.getheader("Content-Encoding", "") == "gzip": + stream = GzipDecodedResponse(response) + else: + stream = response + else: + stream = response + + p, u = self.getparser() + + while 1: + data = stream.read(1024) + if not data: + break + if self.verbose: + print("body:", repr(data)) + p.feed(data) + + if stream is not response: + stream.close() + p.close() + + return u.close() + +## +# Standard transport class for XML-RPC over HTTPS. 
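send_request above builds the header list and hands it to send_headers, which the comments explicitly call out as a subclassing hook. A sketch of a Transport that tags every request with an extra header before delegating (the TracingTransport name, header, and endpoint are made up):

from future.backports.xmlrpc.client import ServerProxy, Transport

class TracingTransport(Transport):
    """Add a custom header to every XML-RPC POST via the send_headers hook."""
    def send_headers(self, connection, headers):
        headers.append(("X-Request-Source", "batch-job"))  # illustrative header
        Transport.send_headers(self, connection, headers)

# proxy = ServerProxy("http://localhost:8000/RPC2", transport=TracingTransport())
# proxy.some_method()  # issues the call with the extra header attached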
+ +class SafeTransport(Transport): + """Handles an HTTPS transaction to an XML-RPC server.""" + + # FIXME: mostly untested + + def make_connection(self, host): + if self._connection and host == self._connection[0]: + return self._connection[1] + + if not hasattr(http_client, "HTTPSConnection"): + raise NotImplementedError( + "your version of http.client doesn't support HTTPS") + # create a HTTPS connection object from a host descriptor + # host may be a string, or a (host, x509-dict) tuple + chost, self._extra_headers, x509 = self.get_host_info(host) + self._connection = host, http_client.HTTPSConnection(chost, + None, **(x509 or {})) + return self._connection[1] + +## +# Standard server proxy. This class establishes a virtual connection +# to an XML-RPC server. +#

+# This class is available as ServerProxy and Server. New code should +# use ServerProxy, to avoid confusion. +# +# @def ServerProxy(uri, **options) +# @param uri The connection point on the server. +# @keyparam transport A transport factory, compatible with the +# standard transport class. +# @keyparam encoding The default encoding used for 8-bit strings +# (default is UTF-8). +# @keyparam verbose Use a true value to enable debugging output. +# (printed to standard output). +# @see Transport + +class ServerProxy(object): + """uri [,options] -> a logical connection to an XML-RPC server + + uri is the connection point on the server, given as + scheme://host/target. + + The standard implementation always supports the "http" scheme. If + SSL socket support is available (Python 2.0), it also supports + "https". + + If the target part and the slash preceding it are both omitted, + "/RPC2" is assumed. + + The following options can be given as keyword arguments: + + transport: a transport factory + encoding: the request encoding (default is UTF-8) + + All 8-bit strings passed to the server proxy are assumed to use + the given encoding. + """ + + def __init__(self, uri, transport=None, encoding=None, verbose=False, + allow_none=False, use_datetime=False, use_builtin_types=False): + # establish a "logical" server connection + + # get the url + type, uri = urllib_parse.splittype(uri) + if type not in ("http", "https"): + raise IOError("unsupported XML-RPC protocol") + self.__host, self.__handler = urllib_parse.splithost(uri) + if not self.__handler: + self.__handler = "/RPC2" + + if transport is None: + if type == "https": + handler = SafeTransport + else: + handler = Transport + transport = handler(use_datetime=use_datetime, + use_builtin_types=use_builtin_types) + self.__transport = transport + + self.__encoding = encoding or 'utf-8' + self.__verbose = verbose + self.__allow_none = allow_none + + def __close(self): + self.__transport.close() + + def __request(self, methodname, params): + # call a method on the remote server + + request = dumps(params, methodname, encoding=self.__encoding, + allow_none=self.__allow_none).encode(self.__encoding) + + response = self.__transport.request( + self.__host, + self.__handler, + request, + verbose=self.__verbose + ) + + if len(response) == 1: + response = response[0] + + return response + + def __repr__(self): + return ( + "" % + (self.__host, self.__handler) + ) + + __str__ = __repr__ + + def __getattr__(self, name): + # magic method dispatcher + return _Method(self.__request, name) + + # note: to call a remote object with an non-standard name, use + # result getattr(server, "strange-python-name")(args) + + def __call__(self, attr): + """A workaround to get special attributes on the ServerProxy + without interfering with the magic __getattr__ + """ + if attr == "close": + return self.__close + elif attr == "transport": + return self.__transport + raise AttributeError("Attribute %r not found" % (attr,)) + +# compatibility + +Server = ServerProxy + +# -------------------------------------------------------------------- +# test code + +if __name__ == "__main__": + + # simple test program (from the XML-RPC specification) + + # local server, available from Lib/xmlrpc/server.py + server = ServerProxy("http://localhost:8000") + + try: + print(server.currentTime.getCurrentTime()) + except Error as v: + print("ERROR", v) + + multi = MultiCall(server) + multi.getData() + multi.pow(2,9) + multi.add(1,2) + try: + for response in multi(): + print(response) + except 
Error as v: + print("ERROR", v) diff --git a/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/server.py b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/server.py new file mode 100644 index 0000000..28072bf --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/backports/xmlrpc/server.py @@ -0,0 +1,999 @@ +r""" +Ported using Python-Future from the Python 3.3 standard library. + +XML-RPC Servers. + +This module can be used to create simple XML-RPC servers +by creating a server and either installing functions, a +class instance, or by extending the SimpleXMLRPCServer +class. + +It can also be used to handle XML-RPC requests in a CGI +environment using CGIXMLRPCRequestHandler. + +The Doc* classes can be used to create XML-RPC servers that +serve pydoc-style documentation in response to HTTP +GET requests. This documentation is dynamically generated +based on the functions and methods registered with the +server. + +A list of possible usage patterns follows: + +1. Install functions: + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_function(pow) +server.register_function(lambda x,y: x+y, 'add') +server.serve_forever() + +2. Install an instance: + +class MyFuncs: + def __init__(self): + # make all of the sys functions available through sys.func_name + import sys + self.sys = sys + def _listMethods(self): + # implement this method so that system.listMethods + # knows to advertise the sys methods + return list_public_methods(self) + \ + ['sys.' + method for method in list_public_methods(self.sys)] + def pow(self, x, y): return pow(x, y) + def add(self, x, y) : return x + y + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_introspection_functions() +server.register_instance(MyFuncs()) +server.serve_forever() + +3. Install an instance with custom dispatch method: + +class Math: + def _listMethods(self): + # this method must be present for system.listMethods + # to work + return ['add', 'pow'] + def _methodHelp(self, method): + # this method must be present for system.methodHelp + # to work + if method == 'add': + return "add(2,3) => 5" + elif method == 'pow': + return "pow(x, y[, z]) => number" + else: + # By convention, return empty + # string if no help is available + return "" + def _dispatch(self, method, params): + if method == 'pow': + return pow(*params) + elif method == 'add': + return params[0] + params[1] + else: + raise ValueError('bad method') + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_introspection_functions() +server.register_instance(Math()) +server.serve_forever() + +4. Subclass SimpleXMLRPCServer: + +class MathServer(SimpleXMLRPCServer): + def _dispatch(self, method, params): + try: + # We are forcing the 'export_' prefix on methods that are + # callable through XML-RPC to prevent potential security + # problems + func = getattr(self, 'export_' + method) + except AttributeError: + raise Exception('method "%s" is not supported' % method) + else: + return func(*params) + + def export_add(self, x, y): + return x + y + +server = MathServer(("localhost", 8000)) +server.serve_forever() + +5. CGI script: + +server = CGIXMLRPCRequestHandler() +server.register_function(pow) +server.handle_request() +""" + +from __future__ import absolute_import, division, print_function, unicode_literals +from future.builtins import int, str + +# Written by Brian Quinlan (brian@sweetapp.com). +# Based on code written by Fredrik Lundh. 
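[Editor's note] The module docstring above walks through the server-side setup patterns; the short sketch below is an editorial addition, not part of the vendored file, showing how a client could call the functions registered in usage pattern 1 through the backported ServerProxy from the client module earlier in this diff. The host, port, and method names (pow, add) are assumptions carried over from that pattern.

from future.backports.xmlrpc.client import ServerProxy, Fault

# Connect to the example server from usage pattern 1 (assumed to be running
# on localhost:8000 with pow() and the 'add' lambda registered).
proxy = ServerProxy("http://localhost:8000")

print(proxy.pow(2, 10))   # dispatched to the registered builtin pow -> 1024
print(proxy.add(3, 4))    # dispatched to the lambda registered as 'add' -> 7

# Unknown method names come back as XML-RPC Fault responses rather than
# local AttributeErrors, because dispatch happens on the server.
try:
    proxy.no_such_method()
except Fault as fault:
    print("Fault %s: %s" % (fault.faultCode, fault.faultString))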
+ +from future.backports.xmlrpc.client import Fault, dumps, loads, gzip_encode, gzip_decode +from future.backports.http.server import BaseHTTPRequestHandler +import future.backports.http.server as http_server +from future.backports import socketserver +import sys +import os +import re +import pydoc +import inspect +import traceback +try: + import fcntl +except ImportError: + fcntl = None + +def resolve_dotted_attribute(obj, attr, allow_dotted_names=True): + """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d + + Resolves a dotted attribute name to an object. Raises + an AttributeError if any attribute in the chain starts with a '_'. + + If the optional allow_dotted_names argument is false, dots are not + supported and this function operates similar to getattr(obj, attr). + """ + + if allow_dotted_names: + attrs = attr.split('.') + else: + attrs = [attr] + + for i in attrs: + if i.startswith('_'): + raise AttributeError( + 'attempt to access private attribute "%s"' % i + ) + else: + obj = getattr(obj,i) + return obj + +def list_public_methods(obj): + """Returns a list of attribute strings, found in the specified + object, which represent callable attributes""" + + return [member for member in dir(obj) + if not member.startswith('_') and + callable(getattr(obj, member))] + +class SimpleXMLRPCDispatcher(object): + """Mix-in class that dispatches XML-RPC requests. + + This class is used to register XML-RPC method handlers + and then to dispatch them. This class doesn't need to be + instanced directly when used by SimpleXMLRPCServer but it + can be instanced when used by the MultiPathXMLRPCServer + """ + + def __init__(self, allow_none=False, encoding=None, + use_builtin_types=False): + self.funcs = {} + self.instance = None + self.allow_none = allow_none + self.encoding = encoding or 'utf-8' + self.use_builtin_types = use_builtin_types + + def register_instance(self, instance, allow_dotted_names=False): + """Registers an instance to respond to XML-RPC requests. + + Only one instance can be installed at a time. + + If the registered instance has a _dispatch method then that + method will be called with the name of the XML-RPC method and + its parameters as a tuple + e.g. instance._dispatch('add',(2,3)) + + If the registered instance does not have a _dispatch method + then the instance will be searched to find a matching method + and, if found, will be called. Methods beginning with an '_' + are considered private and will not be called by + SimpleXMLRPCServer. + + If a registered function matches a XML-RPC request, then it + will be called instead of the registered instance. + + If the optional allow_dotted_names argument is true and the + instance does not have a _dispatch method, method names + containing dots are supported and resolved, as long as none of + the name segments start with an '_'. + + *** SECURITY WARNING: *** + + Enabling the allow_dotted_names options allows intruders + to access your module's global variables and may allow + intruders to execute arbitrary code on your machine. Only + use this option on a secure, closed network. + + """ + + self.instance = instance + self.allow_dotted_names = allow_dotted_names + + def register_function(self, function, name=None): + """Registers a function to respond to XML-RPC requests. + + The optional name argument can be used to set a Unicode name + for the function. 
+ """ + + if name is None: + name = function.__name__ + self.funcs[name] = function + + def register_introspection_functions(self): + """Registers the XML-RPC introspection methods in the system + namespace. + + see http://xmlrpc.usefulinc.com/doc/reserved.html + """ + + self.funcs.update({'system.listMethods' : self.system_listMethods, + 'system.methodSignature' : self.system_methodSignature, + 'system.methodHelp' : self.system_methodHelp}) + + def register_multicall_functions(self): + """Registers the XML-RPC multicall method in the system + namespace. + + see http://www.xmlrpc.com/discuss/msgReader$1208""" + + self.funcs.update({'system.multicall' : self.system_multicall}) + + def _marshaled_dispatch(self, data, dispatch_method = None, path = None): + """Dispatches an XML-RPC method from marshalled (XML) data. + + XML-RPC methods are dispatched from the marshalled (XML) data + using the _dispatch method and the result is returned as + marshalled data. For backwards compatibility, a dispatch + function can be provided as an argument (see comment in + SimpleXMLRPCRequestHandler.do_POST) but overriding the + existing method through subclassing is the preferred means + of changing method dispatch behavior. + """ + + try: + params, method = loads(data, use_builtin_types=self.use_builtin_types) + + # generate response + if dispatch_method is not None: + response = dispatch_method(method, params) + else: + response = self._dispatch(method, params) + # wrap response in a singleton tuple + response = (response,) + response = dumps(response, methodresponse=1, + allow_none=self.allow_none, encoding=self.encoding) + except Fault as fault: + response = dumps(fault, allow_none=self.allow_none, + encoding=self.encoding) + except: + # report exception back to server + exc_type, exc_value, exc_tb = sys.exc_info() + response = dumps( + Fault(1, "%s:%s" % (exc_type, exc_value)), + encoding=self.encoding, allow_none=self.allow_none, + ) + + return response.encode(self.encoding) + + def system_listMethods(self): + """system.listMethods() => ['add', 'subtract', 'multiple'] + + Returns a list of the methods supported by the server.""" + + methods = set(self.funcs.keys()) + if self.instance is not None: + # Instance can implement _listMethod to return a list of + # methods + if hasattr(self.instance, '_listMethods'): + methods |= set(self.instance._listMethods()) + # if the instance has a _dispatch method then we + # don't have enough information to provide a list + # of methods + elif not hasattr(self.instance, '_dispatch'): + methods |= set(list_public_methods(self.instance)) + return sorted(methods) + + def system_methodSignature(self, method_name): + """system.methodSignature('add') => [double, int, int] + + Returns a list describing the signature of the method. In the + above example, the add method takes two integers as arguments + and returns a double result. 
+ + This server does NOT support system.methodSignature.""" + + # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html + + return 'signatures not supported' + + def system_methodHelp(self, method_name): + """system.methodHelp('add') => "Adds two integers together" + + Returns a string containing documentation for the specified method.""" + + method = None + if method_name in self.funcs: + method = self.funcs[method_name] + elif self.instance is not None: + # Instance can implement _methodHelp to return help for a method + if hasattr(self.instance, '_methodHelp'): + return self.instance._methodHelp(method_name) + # if the instance has a _dispatch method then we + # don't have enough information to provide help + elif not hasattr(self.instance, '_dispatch'): + try: + method = resolve_dotted_attribute( + self.instance, + method_name, + self.allow_dotted_names + ) + except AttributeError: + pass + + # Note that we aren't checking that the method actually + # be a callable object of some kind + if method is None: + return "" + else: + return pydoc.getdoc(method) + + def system_multicall(self, call_list): + """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \ +[[4], ...] + + Allows the caller to package multiple XML-RPC calls into a single + request. + + See http://www.xmlrpc.com/discuss/msgReader$1208 + """ + + results = [] + for call in call_list: + method_name = call['methodName'] + params = call['params'] + + try: + # XXX A marshalling error in any response will fail the entire + # multicall. If someone cares they should fix this. + results.append([self._dispatch(method_name, params)]) + except Fault as fault: + results.append( + {'faultCode' : fault.faultCode, + 'faultString' : fault.faultString} + ) + except: + exc_type, exc_value, exc_tb = sys.exc_info() + results.append( + {'faultCode' : 1, + 'faultString' : "%s:%s" % (exc_type, exc_value)} + ) + return results + + def _dispatch(self, method, params): + """Dispatches the XML-RPC method. + + XML-RPC calls are forwarded to a registered function that + matches the called XML-RPC method name. If no such function + exists then the call is forwarded to the registered instance, + if available. + + If the registered instance has a _dispatch method then that + method will be called with the name of the XML-RPC method and + its parameters as a tuple + e.g. instance._dispatch('add',(2,3)) + + If the registered instance does not have a _dispatch method + then the instance will be searched to find a matching method + and, if found, will be called. + + Methods beginning with an '_' are considered private and will + not be called. + """ + + func = None + try: + # check to see if a matching function has been registered + func = self.funcs[method] + except KeyError: + if self.instance is not None: + # check for a _dispatch method + if hasattr(self.instance, '_dispatch'): + return self.instance._dispatch(method, params) + else: + # call instance method directly + try: + func = resolve_dotted_attribute( + self.instance, + method, + self.allow_dotted_names + ) + except AttributeError: + pass + + if func is not None: + return func(*params) + else: + raise Exception('method "%s" is not supported' % method) + +class SimpleXMLRPCRequestHandler(BaseHTTPRequestHandler): + """Simple XML-RPC request handler class. + + Handles all HTTP POST requests and attempts to decode them as + XML-RPC requests. + """ + + # Class attribute listing the accessible path components; + # paths not on this list will result in a 404 error. 
+ rpc_paths = ('/', '/RPC2') + + #if not None, encode responses larger than this, if possible + encode_threshold = 1400 #a common MTU + + #Override form StreamRequestHandler: full buffering of output + #and no Nagle. + wbufsize = -1 + disable_nagle_algorithm = True + + # a re to match a gzip Accept-Encoding + aepattern = re.compile(r""" + \s* ([^\s;]+) \s* #content-coding + (;\s* q \s*=\s* ([0-9\.]+))? #q + """, re.VERBOSE | re.IGNORECASE) + + def accept_encodings(self): + r = {} + ae = self.headers.get("Accept-Encoding", "") + for e in ae.split(","): + match = self.aepattern.match(e) + if match: + v = match.group(3) + v = float(v) if v else 1.0 + r[match.group(1)] = v + return r + + def is_rpc_path_valid(self): + if self.rpc_paths: + return self.path in self.rpc_paths + else: + # If .rpc_paths is empty, just assume all paths are legal + return True + + def do_POST(self): + """Handles the HTTP POST request. + + Attempts to interpret all HTTP POST requests as XML-RPC calls, + which are forwarded to the server's _dispatch method for handling. + """ + + # Check that the path is legal + if not self.is_rpc_path_valid(): + self.report_404() + return + + try: + # Get arguments by reading body of request. + # We read this in chunks to avoid straining + # socket.read(); around the 10 or 15Mb mark, some platforms + # begin to have problems (bug #792570). + max_chunk_size = 10*1024*1024 + size_remaining = int(self.headers["content-length"]) + L = [] + while size_remaining: + chunk_size = min(size_remaining, max_chunk_size) + chunk = self.rfile.read(chunk_size) + if not chunk: + break + L.append(chunk) + size_remaining -= len(L[-1]) + data = b''.join(L) + + data = self.decode_request_content(data) + if data is None: + return #response has been sent + + # In previous versions of SimpleXMLRPCServer, _dispatch + # could be overridden in this class, instead of in + # SimpleXMLRPCDispatcher. To maintain backwards compatibility, + # check to see if a subclass implements _dispatch and dispatch + # using that method if present. 
+ response = self.server._marshaled_dispatch( + data, getattr(self, '_dispatch', None), self.path + ) + except Exception as e: # This should only happen if the module is buggy + # internal error, report as HTTP server error + self.send_response(500) + + # Send information about the exception if requested + if hasattr(self.server, '_send_traceback_header') and \ + self.server._send_traceback_header: + self.send_header("X-exception", str(e)) + trace = traceback.format_exc() + trace = str(trace.encode('ASCII', 'backslashreplace'), 'ASCII') + self.send_header("X-traceback", trace) + + self.send_header("Content-length", "0") + self.end_headers() + else: + self.send_response(200) + self.send_header("Content-type", "text/xml") + if self.encode_threshold is not None: + if len(response) > self.encode_threshold: + q = self.accept_encodings().get("gzip", 0) + if q: + try: + response = gzip_encode(response) + self.send_header("Content-Encoding", "gzip") + except NotImplementedError: + pass + self.send_header("Content-length", str(len(response))) + self.end_headers() + self.wfile.write(response) + + def decode_request_content(self, data): + #support gzip encoding of request + encoding = self.headers.get("content-encoding", "identity").lower() + if encoding == "identity": + return data + if encoding == "gzip": + try: + return gzip_decode(data) + except NotImplementedError: + self.send_response(501, "encoding %r not supported" % encoding) + except ValueError: + self.send_response(400, "error decoding gzip content") + else: + self.send_response(501, "encoding %r not supported" % encoding) + self.send_header("Content-length", "0") + self.end_headers() + + def report_404 (self): + # Report a 404 error + self.send_response(404) + response = b'No such page' + self.send_header("Content-type", "text/plain") + self.send_header("Content-length", str(len(response))) + self.end_headers() + self.wfile.write(response) + + def log_request(self, code='-', size='-'): + """Selectively log an accepted request.""" + + if self.server.logRequests: + BaseHTTPRequestHandler.log_request(self, code, size) + +class SimpleXMLRPCServer(socketserver.TCPServer, + SimpleXMLRPCDispatcher): + """Simple XML-RPC server. + + Simple XML-RPC server that allows functions and a single instance + to be installed to handle requests. The default implementation + attempts to dispatch XML-RPC calls to the functions or instance + installed in the server. Override the _dispatch method inherited + from SimpleXMLRPCDispatcher to change this behavior. + """ + + allow_reuse_address = True + + # Warning: this is for debugging purposes only! Never set this to True in + # production code, as will be sending out sensitive information (exception + # and stack trace details) when exceptions are raised inside + # SimpleXMLRPCRequestHandler.do_POST + _send_traceback_header = False + + def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, + logRequests=True, allow_none=False, encoding=None, + bind_and_activate=True, use_builtin_types=False): + self.logRequests = logRequests + + SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types) + socketserver.TCPServer.__init__(self, addr, requestHandler, bind_and_activate) + + # [Bug #1222790] If possible, set close-on-exec flag; if a + # method spawns a subprocess, the subprocess shouldn't have + # the listening socket open. 
+ if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'): + flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD) + flags |= fcntl.FD_CLOEXEC + fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags) + +class MultiPathXMLRPCServer(SimpleXMLRPCServer): + """Multipath XML-RPC Server + This specialization of SimpleXMLRPCServer allows the user to create + multiple Dispatcher instances and assign them to different + HTTP request paths. This makes it possible to run two or more + 'virtual XML-RPC servers' at the same port. + Make sure that the requestHandler accepts the paths in question. + """ + def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, + logRequests=True, allow_none=False, encoding=None, + bind_and_activate=True, use_builtin_types=False): + + SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, allow_none, + encoding, bind_and_activate, use_builtin_types) + self.dispatchers = {} + self.allow_none = allow_none + self.encoding = encoding or 'utf-8' + + def add_dispatcher(self, path, dispatcher): + self.dispatchers[path] = dispatcher + return dispatcher + + def get_dispatcher(self, path): + return self.dispatchers[path] + + def _marshaled_dispatch(self, data, dispatch_method = None, path = None): + try: + response = self.dispatchers[path]._marshaled_dispatch( + data, dispatch_method, path) + except: + # report low level exception back to server + # (each dispatcher should have handled their own + # exceptions) + exc_type, exc_value = sys.exc_info()[:2] + response = dumps( + Fault(1, "%s:%s" % (exc_type, exc_value)), + encoding=self.encoding, allow_none=self.allow_none) + response = response.encode(self.encoding) + return response + +class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): + """Simple handler for XML-RPC data passed through CGI.""" + + def __init__(self, allow_none=False, encoding=None, use_builtin_types=False): + SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types) + + def handle_xmlrpc(self, request_text): + """Handle a single XML-RPC request""" + + response = self._marshaled_dispatch(request_text) + + print('Content-Type: text/xml') + print('Content-Length: %d' % len(response)) + print() + sys.stdout.flush() + sys.stdout.buffer.write(response) + sys.stdout.buffer.flush() + + def handle_get(self): + """Handle a single HTTP GET request. + + Default implementation indicates an error because + XML-RPC uses the POST method. + """ + + code = 400 + message, explain = BaseHTTPRequestHandler.responses[code] + + response = http_server.DEFAULT_ERROR_MESSAGE % \ + { + 'code' : code, + 'message' : message, + 'explain' : explain + } + response = response.encode('utf-8') + print('Status: %d %s' % (code, message)) + print('Content-Type: %s' % http_server.DEFAULT_ERROR_CONTENT_TYPE) + print('Content-Length: %d' % len(response)) + print() + sys.stdout.flush() + sys.stdout.buffer.write(response) + sys.stdout.buffer.flush() + + def handle_request(self, request_text=None): + """Handle a single XML-RPC request passed through a CGI post method. + + If no XML data is given then it is read from stdin. The resulting + XML-RPC response is printed to stdout along with the correct HTTP + headers. 
+ """ + + if request_text is None and \ + os.environ.get('REQUEST_METHOD', None) == 'GET': + self.handle_get() + else: + # POST data is normally available through stdin + try: + length = int(os.environ.get('CONTENT_LENGTH', None)) + except (ValueError, TypeError): + length = -1 + if request_text is None: + request_text = sys.stdin.read(length) + + self.handle_xmlrpc(request_text) + + +# ----------------------------------------------------------------------------- +# Self documenting XML-RPC Server. + +class ServerHTMLDoc(pydoc.HTMLDoc): + """Class used to generate pydoc HTML document for a server""" + + def markup(self, text, escape=None, funcs={}, classes={}, methods={}): + """Mark up some plain text, given a context of symbols to look for. + Each context dictionary maps object names to anchor names.""" + escape = escape or self.escape + results = [] + here = 0 + + # XXX Note that this regular expression does not allow for the + # hyperlinking of arbitrary strings being used as method + # names. Only methods with names consisting of word characters + # and '.'s are hyperlinked. + pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|' + r'RFC[- ]?(\d+)|' + r'PEP[- ]?(\d+)|' + r'(self\.)?((?:\w|\.)+))\b') + while 1: + match = pattern.search(text, here) + if not match: break + start, end = match.span() + results.append(escape(text[here:start])) + + all, scheme, rfc, pep, selfdot, name = match.groups() + if scheme: + url = escape(all).replace('"', '"') + results.append('%s' % (url, url)) + elif rfc: + url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc) + results.append('%s' % (url, escape(all))) + elif pep: + url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep) + results.append('%s' % (url, escape(all))) + elif text[end:end+1] == '(': + results.append(self.namelink(name, methods, funcs, classes)) + elif selfdot: + results.append('self.%s' % name) + else: + results.append(self.namelink(name, classes)) + here = end + results.append(escape(text[here:])) + return ''.join(results) + + def docroutine(self, object, name, mod=None, + funcs={}, classes={}, methods={}, cl=None): + """Produce HTML documentation for a function or method object.""" + + anchor = (cl and cl.__name__ or '') + '-' + name + note = '' + + title = '%s' % ( + self.escape(anchor), self.escape(name)) + + if inspect.ismethod(object): + args = inspect.getfullargspec(object) + # exclude the argument bound to the instance, it will be + # confusing to the non-Python user + argspec = inspect.formatargspec ( + args.args[1:], + args.varargs, + args.varkw, + args.defaults, + annotations=args.annotations, + formatvalue=self.formatvalue + ) + elif inspect.isfunction(object): + args = inspect.getfullargspec(object) + argspec = inspect.formatargspec( + args.args, args.varargs, args.varkw, args.defaults, + annotations=args.annotations, + formatvalue=self.formatvalue) + else: + argspec = '(...)' + + if isinstance(object, tuple): + argspec = object[0] or argspec + docstring = object[1] or "" + else: + docstring = pydoc.getdoc(object) + + decl = title + argspec + (note and self.grey( + '%s' % note)) + + doc = self.markup( + docstring, self.preformat, funcs, classes, methods) + doc = doc and '
<dd><tt>%s</tt></dd>' % doc + return '<dl><dt>%s</dt>%s</dl>
\n' % (decl, doc) + + def docserver(self, server_name, package_documentation, methods): + """Produce HTML documentation for an XML-RPC server.""" + + fdict = {} + for key, value in methods.items(): + fdict[key] = '#-' + key + fdict[value] = fdict[key] + + server_name = self.escape(server_name) + head = '%s' % server_name + result = self.heading(head, '#ffffff', '#7799ee') + + doc = self.markup(package_documentation, self.preformat, fdict) + doc = doc and '%s' % doc + result = result + '
<p>%s</p>
\n' % doc + + contents = [] + method_items = sorted(methods.items()) + for key, value in method_items: + contents.append(self.docroutine(value, key, funcs=fdict)) + result = result + self.bigsection( + 'Methods', '#ffffff', '#eeaa77', ''.join(contents)) + + return result + +class XMLRPCDocGenerator(object): + """Generates documentation for an XML-RPC server. + + This class is designed as mix-in and should not + be constructed directly. + """ + + def __init__(self): + # setup variables used for HTML documentation + self.server_name = 'XML-RPC Server Documentation' + self.server_documentation = \ + "This server exports the following methods through the XML-RPC "\ + "protocol." + self.server_title = 'XML-RPC Server Documentation' + + def set_server_title(self, server_title): + """Set the HTML title of the generated server documentation""" + + self.server_title = server_title + + def set_server_name(self, server_name): + """Set the name of the generated HTML server documentation""" + + self.server_name = server_name + + def set_server_documentation(self, server_documentation): + """Set the documentation string for the entire server.""" + + self.server_documentation = server_documentation + + def generate_html_documentation(self): + """generate_html_documentation() => html documentation for the server + + Generates HTML documentation for the server using introspection for + installed functions and instances that do not implement the + _dispatch method. Alternatively, instances can choose to implement + the _get_method_argstring(method_name) method to provide the + argument string used in the documentation and the + _methodHelp(method_name) method to provide the help text used + in the documentation.""" + + methods = {} + + for method_name in self.system_listMethods(): + if method_name in self.funcs: + method = self.funcs[method_name] + elif self.instance is not None: + method_info = [None, None] # argspec, documentation + if hasattr(self.instance, '_get_method_argstring'): + method_info[0] = self.instance._get_method_argstring(method_name) + if hasattr(self.instance, '_methodHelp'): + method_info[1] = self.instance._methodHelp(method_name) + + method_info = tuple(method_info) + if method_info != (None, None): + method = method_info + elif not hasattr(self.instance, '_dispatch'): + try: + method = resolve_dotted_attribute( + self.instance, + method_name + ) + except AttributeError: + method = method_info + else: + method = method_info + else: + assert 0, "Could not find method in self.functions and no "\ + "instance installed" + + methods[method_name] = method + + documenter = ServerHTMLDoc() + documentation = documenter.docserver( + self.server_name, + self.server_documentation, + methods + ) + + return documenter.page(self.server_title, documentation) + +class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): + """XML-RPC and documentation request handler class. + + Handles all HTTP POST requests and attempts to decode them as + XML-RPC requests. + + Handles all HTTP GET requests and interprets them as requests + for documentation. + """ + + def do_GET(self): + """Handles the HTTP GET request. + + Interpret all HTTP GET requests as requests for server + documentation. 
+ """ + # Check that the path is legal + if not self.is_rpc_path_valid(): + self.report_404() + return + + response = self.server.generate_html_documentation().encode('utf-8') + self.send_response(200) + self.send_header("Content-type", "text/html") + self.send_header("Content-length", str(len(response))) + self.end_headers() + self.wfile.write(response) + +class DocXMLRPCServer( SimpleXMLRPCServer, + XMLRPCDocGenerator): + """XML-RPC and HTML documentation server. + + Adds the ability to serve server documentation to the capabilities + of SimpleXMLRPCServer. + """ + + def __init__(self, addr, requestHandler=DocXMLRPCRequestHandler, + logRequests=True, allow_none=False, encoding=None, + bind_and_activate=True, use_builtin_types=False): + SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, + allow_none, encoding, bind_and_activate, + use_builtin_types) + XMLRPCDocGenerator.__init__(self) + +class DocCGIXMLRPCRequestHandler( CGIXMLRPCRequestHandler, + XMLRPCDocGenerator): + """Handler for XML-RPC data and documentation requests passed through + CGI""" + + def handle_get(self): + """Handles the HTTP GET request. + + Interpret all HTTP GET requests as requests for server + documentation. + """ + + response = self.generate_html_documentation().encode('utf-8') + + print('Content-Type: text/html') + print('Content-Length: %d' % len(response)) + print() + sys.stdout.flush() + sys.stdout.buffer.write(response) + sys.stdout.buffer.flush() + + def __init__(self): + CGIXMLRPCRequestHandler.__init__(self) + XMLRPCDocGenerator.__init__(self) + + +if __name__ == '__main__': + import datetime + + class ExampleService: + def getData(self): + return '42' + + class currentTime: + @staticmethod + def getCurrentTime(): + return datetime.datetime.now() + + server = SimpleXMLRPCServer(("localhost", 8000)) + server.register_function(pow) + server.register_function(lambda x,y: x+y, 'add') + server.register_instance(ExampleService(), allow_dotted_names=True) + server.register_multicall_functions() + print('Serving XML-RPC on localhost port 8000') + print('It is advisable to run this example server within a secure, closed network.') + try: + server.serve_forever() + except KeyboardInterrupt: + print("\nKeyboard interrupt received, exiting.") + server.server_close() + sys.exit(0) diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/__init__.py b/minor_project/lib/python3.6/site-packages/future/builtins/__init__.py new file mode 100644 index 0000000..8bc1649 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/builtins/__init__.py @@ -0,0 +1,51 @@ +""" +A module that brings in equivalents of the new and modified Python 3 +builtins into Py2. Has no effect on Py3. + +See the docs `here `_ +(``docs/what-else.rst``) for more information. + +""" + +from future.builtins.iterators import (filter, map, zip) +# The isinstance import is no longer needed. We provide it only for +# backward-compatibility with future v0.8.2. It will be removed in future v1.0. 
+from future.builtins.misc import (ascii, chr, hex, input, isinstance, next, + oct, open, pow, round, super, max, min) +from future.utils import PY3 + +if PY3: + import builtins + bytes = builtins.bytes + dict = builtins.dict + int = builtins.int + list = builtins.list + object = builtins.object + range = builtins.range + str = builtins.str + __all__ = [] +else: + from future.types import (newbytes as bytes, + newdict as dict, + newint as int, + newlist as list, + newobject as object, + newrange as range, + newstr as str) +from future import utils + + +if not utils.PY3: + # We only import names that shadow the builtins on Py2. No other namespace + # pollution on Py2. + + # Only shadow builtins on Py2; no new names + __all__ = ['filter', 'map', 'zip', + 'ascii', 'chr', 'hex', 'input', 'next', 'oct', 'open', 'pow', + 'round', 'super', + 'bytes', 'dict', 'int', 'list', 'object', 'range', 'str', 'max', 'min' + ] + +else: + # No namespace pollution on Py3 + __all__ = [] diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..6aa63d5 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/disabled.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/disabled.cpython-36.pyc new file mode 100644 index 0000000..3fc61af Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/disabled.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/iterators.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/iterators.cpython-36.pyc new file mode 100644 index 0000000..251d234 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/iterators.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/misc.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/misc.cpython-36.pyc new file mode 100644 index 0000000..2e7ed46 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/misc.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/new_min_max.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/new_min_max.cpython-36.pyc new file mode 100644 index 0000000..5747139 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/new_min_max.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/newnext.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/newnext.cpython-36.pyc new file mode 100644 index 0000000..f1caa44 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/newnext.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/newround.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/newround.cpython-36.pyc new file mode 100644 index 0000000..002c0cf Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/newround.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/newsuper.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/newsuper.cpython-36.pyc new file mode 100644 index 0000000..aba6eb0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/builtins/__pycache__/newsuper.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/disabled.py b/minor_project/lib/python3.6/site-packages/future/builtins/disabled.py new file mode 100644 index 0000000..f6d6ea9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/builtins/disabled.py @@ -0,0 +1,66 @@ +""" +This disables builtin functions (and one exception class) which are +removed from Python 3.3. + +This module is designed to be used like this:: + + from future.builtins.disabled import * + +This disables the following obsolete Py2 builtin functions:: + + apply, cmp, coerce, execfile, file, input, long, + raw_input, reduce, reload, unicode, xrange + +We don't hack __builtin__, which is very fragile because it contaminates +imported modules too. Instead, we just create new functions with +the same names as the obsolete builtins from Python 2 which raise +NameError exceptions when called. + +Note that both ``input()`` and ``raw_input()`` are among the disabled +functions (in this module). Although ``input()`` exists as a builtin in +Python 3, the Python 2 ``input()`` builtin is unsafe to use because it +can lead to shell injection. Therefore we shadow it by default upon ``from +future.builtins.disabled import *``, in case someone forgets to import our +replacement ``input()`` somehow and expects Python 3 semantics. + +See the ``future.builtins.misc`` module for a working version of +``input`` with Python 3 semantics. + +(Note that callable() is not among the functions disabled; this was +reintroduced into Python 3.2.) + +This exception class is also disabled: + + StandardError + +""" + +from __future__ import division, absolute_import, print_function + +from future import utils + + +OBSOLETE_BUILTINS = ['apply', 'chr', 'cmp', 'coerce', 'execfile', 'file', + 'input', 'long', 'raw_input', 'reduce', 'reload', + 'unicode', 'xrange', 'StandardError'] + + +def disabled_function(name): + ''' + Returns a function that cannot be called + ''' + def disabled(*args, **kwargs): + ''' + A function disabled by the ``future`` module. This function is + no longer a builtin in Python 3. + ''' + raise NameError('obsolete Python 2 builtin {0} is disabled'.format(name)) + return disabled + + +if not utils.PY3: + for fname in OBSOLETE_BUILTINS: + locals()[fname] = disabled_function(fname) + __all__ = OBSOLETE_BUILTINS +else: + __all__ = [] diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/iterators.py b/minor_project/lib/python3.6/site-packages/future/builtins/iterators.py new file mode 100644 index 0000000..dff651e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/builtins/iterators.py @@ -0,0 +1,52 @@ +""" +This module is designed to be used as follows:: + + from future.builtins.iterators import * + +And then, for example:: + + for i in range(10**15): + pass + + for (a, b) in zip(range(10**15), range(-10**15, 0)): + pass + +Note that this is standard Python 3 code, plus some imports that do +nothing on Python 3. 
+ +The iterators this brings in are:: + +- ``range`` +- ``filter`` +- ``map`` +- ``zip`` + +On Python 2, ``range`` is a pure-Python backport of Python 3's ``range`` +iterator with slicing support. The other iterators (``filter``, ``map``, +``zip``) are from the ``itertools`` module on Python 2. On Python 3 these +are available in the module namespace but not exported for * imports via +__all__ (zero no namespace pollution). + +Note that these are also available in the standard library +``future_builtins`` module on Python 2 -- but not Python 3, so using +the standard library version is not portable, nor anywhere near complete. +""" + +from __future__ import division, absolute_import, print_function + +import itertools +from future import utils + +if not utils.PY3: + filter = itertools.ifilter + map = itertools.imap + from future.types import newrange as range + zip = itertools.izip + __all__ = ['filter', 'map', 'range', 'zip'] +else: + import builtins + filter = builtins.filter + map = builtins.map + range = builtins.range + zip = builtins.zip + __all__ = [] diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/misc.py b/minor_project/lib/python3.6/site-packages/future/builtins/misc.py new file mode 100644 index 0000000..f86ce5f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/builtins/misc.py @@ -0,0 +1,135 @@ +""" +A module that brings in equivalents of various modified Python 3 builtins +into Py2. Has no effect on Py3. + +The builtin functions are: + +- ``ascii`` (from Py2's future_builtins module) +- ``hex`` (from Py2's future_builtins module) +- ``oct`` (from Py2's future_builtins module) +- ``chr`` (equivalent to ``unichr`` on Py2) +- ``input`` (equivalent to ``raw_input`` on Py2) +- ``next`` (calls ``__next__`` if it exists, else ``next`` method) +- ``open`` (equivalent to io.open on Py2) +- ``super`` (backport of Py3's magic zero-argument super() function +- ``round`` (new "Banker's Rounding" behaviour from Py3) +- ``max`` (new default option from Py3.4) +- ``min`` (new default option from Py3.4) + +``isinstance`` is also currently exported for backwards compatibility +with v0.8.2, although this has been deprecated since v0.9. + + +input() +------- +Like the new ``input()`` function from Python 3 (without eval()), except +that it returns bytes. Equivalent to Python 2's ``raw_input()``. + +Warning: By default, importing this module *removes* the old Python 2 +input() function entirely from ``__builtin__`` for safety. This is +because forgetting to import the new ``input`` from ``future`` might +otherwise lead to a security vulnerability (shell injection) on Python 2. + +To restore it, you can retrieve it yourself from +``__builtin__._old_input``. + +Fortunately, ``input()`` seems to be seldom used in the wild in Python +2... + +""" + +from future import utils + + +if utils.PY2: + from io import open + from future_builtins import ascii, oct, hex + from __builtin__ import unichr as chr, pow as _builtin_pow + import __builtin__ + + # Only for backward compatibility with future v0.8.2: + isinstance = __builtin__.isinstance + + # Warning: Python 2's input() is unsafe and MUST not be able to be used + # accidentally by someone who expects Python 3 semantics but forgets + # to import it on Python 2. Versions of ``future`` prior to 0.11 + # deleted it from __builtin__. Now we keep in __builtin__ but shadow + # the name like all others. Just be sure to import ``input``. 
+ + input = raw_input + + from future.builtins.newnext import newnext as next + from future.builtins.newround import newround as round + from future.builtins.newsuper import newsuper as super + from future.builtins.new_min_max import newmax as max + from future.builtins.new_min_max import newmin as min + from future.types.newint import newint + + _SENTINEL = object() + + def pow(x, y, z=_SENTINEL): + """ + pow(x, y[, z]) -> number + + With two arguments, equivalent to x**y. With three arguments, + equivalent to (x**y) % z, but may be more efficient (e.g. for ints). + """ + # Handle newints + if isinstance(x, newint): + x = long(x) + if isinstance(y, newint): + y = long(y) + if isinstance(z, newint): + z = long(z) + + try: + if z == _SENTINEL: + return _builtin_pow(x, y) + else: + return _builtin_pow(x, y, z) + except ValueError: + if z == _SENTINEL: + return _builtin_pow(x+0j, y) + else: + return _builtin_pow(x+0j, y, z) + + + # ``future`` doesn't support Py3.0/3.1. If we ever did, we'd add this: + # callable = __builtin__.callable + + __all__ = ['ascii', 'chr', 'hex', 'input', 'isinstance', 'next', 'oct', + 'open', 'pow', 'round', 'super', 'max', 'min'] + +else: + import builtins + ascii = builtins.ascii + chr = builtins.chr + hex = builtins.hex + input = builtins.input + next = builtins.next + # Only for backward compatibility with future v0.8.2: + isinstance = builtins.isinstance + oct = builtins.oct + open = builtins.open + pow = builtins.pow + round = builtins.round + super = builtins.super + if utils.PY34_PLUS: + max = builtins.max + min = builtins.min + __all__ = [] + else: + from future.builtins.new_min_max import newmax as max + from future.builtins.new_min_max import newmin as min + __all__ = ['min', 'max'] + + # The callable() function was removed from Py3.0 and 3.1 and + # reintroduced into Py3.2+. ``future`` doesn't support Py3.0/3.1. 
If we ever + # did, we'd add this: + # try: + # callable = builtins.callable + # except AttributeError: + # # Definition from Pandas + # def callable(obj): + # return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + # __all__.append('callable') diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/new_min_max.py b/minor_project/lib/python3.6/site-packages/future/builtins/new_min_max.py new file mode 100644 index 0000000..6f0c2a8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/builtins/new_min_max.py @@ -0,0 +1,59 @@ +import itertools + +from future import utils +if utils.PY2: + from __builtin__ import max as _builtin_max, min as _builtin_min +else: + from builtins import max as _builtin_max, min as _builtin_min + +_SENTINEL = object() + + +def newmin(*args, **kwargs): + return new_min_max(_builtin_min, *args, **kwargs) + + +def newmax(*args, **kwargs): + return new_min_max(_builtin_max, *args, **kwargs) + + +def new_min_max(_builtin_func, *args, **kwargs): + """ + To support the argument "default" introduced in python 3.4 for min and max + :param _builtin_func: builtin min or builtin max + :param args: + :param kwargs: + :return: returns the min or max based on the arguments passed + """ + + for key, _ in kwargs.items(): + if key not in set(['key', 'default']): + raise TypeError('Illegal argument %s', key) + + if len(args) == 0: + raise TypeError + + if len(args) != 1 and kwargs.get('default', _SENTINEL) is not _SENTINEL: + raise TypeError + + if len(args) == 1: + iterator = iter(args[0]) + try: + first = next(iterator) + except StopIteration: + if kwargs.get('default', _SENTINEL) is not _SENTINEL: + return kwargs.get('default') + else: + raise ValueError('{}() arg is an empty sequence'.format(_builtin_func.__name__)) + else: + iterator = itertools.chain([first], iterator) + if kwargs.get('key') is not None: + return _builtin_func(iterator, key=kwargs.get('key')) + else: + return _builtin_func(iterator) + + if len(args) > 1: + if kwargs.get('key') is not None: + return _builtin_func(args, key=kwargs.get('key')) + else: + return _builtin_func(args) diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/newnext.py b/minor_project/lib/python3.6/site-packages/future/builtins/newnext.py new file mode 100644 index 0000000..097638a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/builtins/newnext.py @@ -0,0 +1,70 @@ +''' +This module provides a newnext() function in Python 2 that mimics the +behaviour of ``next()`` in Python 3, falling back to Python 2's behaviour for +compatibility if this fails. + +``newnext(iterator)`` calls the iterator's ``__next__()`` method if it exists. If this +doesn't exist, it falls back to calling a ``next()`` method. + +For example: + + >>> class Odds(object): + ... def __init__(self, start=1): + ... self.value = start - 2 + ... def __next__(self): # note the Py3 interface + ... self.value += 2 + ... return self.value + ... def __iter__(self): + ... return self + ... + >>> iterator = Odds() + >>> next(iterator) + 1 + >>> next(iterator) + 3 + +If you are defining your own custom iterator class as above, it is preferable +to explicitly decorate the class with the @implements_iterator decorator from +``future.utils`` as follows: + + >>> @implements_iterator + ... class Odds(object): + ... # etc + ... pass + +This next() function is primarily for consuming iterators defined in Python 3 +code elsewhere that we would like to run on Python 2 or 3. 
+''' + +_builtin_next = next + +_SENTINEL = object() + +def newnext(iterator, default=_SENTINEL): + """ + next(iterator[, default]) + + Return the next item from the iterator. If default is given and the iterator + is exhausted, it is returned instead of raising StopIteration. + """ + + # args = [] + # if default is not _SENTINEL: + # args.append(default) + try: + try: + return iterator.__next__() + except AttributeError: + try: + return iterator.next() + except AttributeError: + raise TypeError("'{0}' object is not an iterator".format( + iterator.__class__.__name__)) + except StopIteration as e: + if default is _SENTINEL: + raise e + else: + return default + + +__all__ = ['newnext'] diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/newround.py b/minor_project/lib/python3.6/site-packages/future/builtins/newround.py new file mode 100644 index 0000000..394a2c6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/builtins/newround.py @@ -0,0 +1,102 @@ +""" +``python-future``: pure Python implementation of Python 3 round(). +""" + +from future.utils import PYPY, PY26, bind_method + +# Use the decimal module for simplicity of implementation (and +# hopefully correctness). +from decimal import Decimal, ROUND_HALF_EVEN + + +def newround(number, ndigits=None): + """ + See Python 3 documentation: uses Banker's Rounding. + + Delegates to the __round__ method if for some reason this exists. + + If not, rounds a number to a given precision in decimal digits (default + 0 digits). This returns an int when called with one argument, + otherwise the same type as the number. ndigits may be negative. + + See the test_round method in future/tests/test_builtins.py for + examples. + """ + return_int = False + if ndigits is None: + return_int = True + ndigits = 0 + if hasattr(number, '__round__'): + return number.__round__(ndigits) + + if ndigits < 0: + raise NotImplementedError('negative ndigits not supported yet') + exponent = Decimal('10') ** (-ndigits) + + if PYPY: + # Work around issue #24: round() breaks on PyPy with NumPy's types + if 'numpy' in repr(type(number)): + number = float(number) + + if isinstance(number, Decimal): + d = number + else: + if not PY26: + d = Decimal.from_float(number).quantize(exponent, + rounding=ROUND_HALF_EVEN) + else: + d = from_float_26(number).quantize(exponent, rounding=ROUND_HALF_EVEN) + + if return_int: + return int(d) + else: + return float(d) + + +### From Python 2.7's decimal.py. Only needed to support Py2.6: + +def from_float_26(f): + """Converts a float to a decimal number, exactly. + + Note that Decimal.from_float(0.1) is not the same as Decimal('0.1'). + Since 0.1 is not exactly representable in binary floating point, the + value is stored as the nearest representable value which is + 0x1.999999999999ap-4. The exact equivalent of the value in decimal + is 0.1000000000000000055511151231257827021181583404541015625. 
+ + >>> Decimal.from_float(0.1) + Decimal('0.1000000000000000055511151231257827021181583404541015625') + >>> Decimal.from_float(float('nan')) + Decimal('NaN') + >>> Decimal.from_float(float('inf')) + Decimal('Infinity') + >>> Decimal.from_float(-float('inf')) + Decimal('-Infinity') + >>> Decimal.from_float(-0.0) + Decimal('-0') + + """ + import math as _math + from decimal import _dec_from_triple # only available on Py2.6 and Py2.7 (not 3.3) + + if isinstance(f, (int, long)): # handle integer inputs + return Decimal(f) + if _math.isinf(f) or _math.isnan(f): # raises TypeError if not a float + return Decimal(repr(f)) + if _math.copysign(1.0, f) == 1.0: + sign = 0 + else: + sign = 1 + n, d = abs(f).as_integer_ratio() + # int.bit_length() method doesn't exist on Py2.6: + def bit_length(d): + if d != 0: + return len(bin(abs(d))) - 2 + else: + return 0 + k = bit_length(d) - 1 + result = _dec_from_triple(sign, str(n*5**k), -k) + return result + + +__all__ = ['newround'] diff --git a/minor_project/lib/python3.6/site-packages/future/builtins/newsuper.py b/minor_project/lib/python3.6/site-packages/future/builtins/newsuper.py new file mode 100644 index 0000000..5d3402b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/builtins/newsuper.py @@ -0,0 +1,114 @@ +''' +This module provides a newsuper() function in Python 2 that mimics the +behaviour of super() in Python 3. It is designed to be used as follows: + + from __future__ import division, absolute_import, print_function + from future.builtins import super + +And then, for example: + + class VerboseList(list): + def append(self, item): + print('Adding an item') + super().append(item) # new simpler super() function + +Importing this module on Python 3 has no effect. + +This is based on (i.e. almost identical to) Ryan Kelly's magicsuper +module here: + + https://github.com/rfk/magicsuper.git + +Excerpts from Ryan's docstring: + + "Of course, you can still explicitly pass in the arguments if you want + to do something strange. Sometimes you really do want that, e.g. to + skip over some classes in the method resolution order. + + "How does it work? By inspecting the calling frame to determine the + function object being executed and the object on which it's being + called, and then walking the object's __mro__ chain to find out where + that function was defined. Yuck, but it seems to work..." +''' + +from __future__ import absolute_import +import sys +from types import FunctionType + +from future.utils import PY3, PY26 + + +_builtin_super = super + +_SENTINEL = object() + +def newsuper(typ=_SENTINEL, type_or_obj=_SENTINEL, framedepth=1): + '''Like builtin super(), but capable of magic. + + This acts just like the builtin super() function, but if called + without any arguments it attempts to infer them at runtime. + ''' + # Infer the correct call if used without arguments. + if typ is _SENTINEL: + # We'll need to do some frame hacking. + f = sys._getframe(framedepth) + + try: + # Get the function's first positional argument. + type_or_obj = f.f_locals[f.f_code.co_varnames[0]] + except (IndexError, KeyError,): + raise RuntimeError('super() used in a function with no args') + + try: + # Get the MRO so we can crawl it. + mro = type_or_obj.__mro__ + except (AttributeError, RuntimeError): # see issue #160 + try: + mro = type_or_obj.__class__.__mro__ + except AttributeError: + raise RuntimeError('super() used with a non-newstyle class') + + # A ``for...else`` block? Yes! It's odd, but useful. 
+ # If unfamiliar with for...else, see: + # + # http://psung.blogspot.com/2007/12/for-else-in-python.html + for typ in mro: + # Find the class that owns the currently-executing method. + for meth in typ.__dict__.values(): + # Drill down through any wrappers to the underlying func. + # This handles e.g. classmethod() and staticmethod(). + try: + while not isinstance(meth,FunctionType): + if isinstance(meth, property): + # Calling __get__ on the property will invoke + # user code which might throw exceptions or have + # side effects + meth = meth.fget + else: + try: + meth = meth.__func__ + except AttributeError: + meth = meth.__get__(type_or_obj, typ) + except (AttributeError, TypeError): + continue + if meth.func_code is f.f_code: + break # Aha! Found you. + else: + continue # Not found! Move onto the next class in MRO. + break # Found! Break out of the search loop. + else: + raise RuntimeError('super() called outside a method') + + # Dispatch to builtin super(). + if type_or_obj is not _SENTINEL: + return _builtin_super(typ, type_or_obj) + return _builtin_super(typ) + + +def superm(*args, **kwds): + f = sys._getframe(1) + nm = f.f_code.co_name + return getattr(newsuper(framedepth=2),nm)(*args, **kwds) + + +__all__ = ['newsuper'] diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__init__.py b/minor_project/lib/python3.6/site-packages/future/moves/__init__.py new file mode 100644 index 0000000..0cd60d3 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/__init__.py @@ -0,0 +1,8 @@ +# future.moves package +from __future__ import absolute_import +import sys +__future_module__ = True +from future.standard_library import import_top_level_modules + +if sys.version_info[0] >= 3: + import_top_level_modules() diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..b5389e8 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/_dummy_thread.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/_dummy_thread.cpython-36.pyc new file mode 100644 index 0000000..3d69552 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/_dummy_thread.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/_markupbase.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/_markupbase.cpython-36.pyc new file mode 100644 index 0000000..9f5f1ee Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/_markupbase.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/_thread.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/_thread.cpython-36.pyc new file mode 100644 index 0000000..c97693a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/_thread.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/builtins.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/builtins.cpython-36.pyc new file mode 100644 index 0000000..1db81d8 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/builtins.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/collections.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/collections.cpython-36.pyc new file mode 100644 index 0000000..3a8c471 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/collections.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/configparser.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/configparser.cpython-36.pyc new file mode 100644 index 0000000..fc9388a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/configparser.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/copyreg.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/copyreg.cpython-36.pyc new file mode 100644 index 0000000..f70dae3 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/copyreg.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/itertools.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/itertools.cpython-36.pyc new file mode 100644 index 0000000..07c0d7f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/itertools.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/pickle.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/pickle.cpython-36.pyc new file mode 100644 index 0000000..4549877 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/pickle.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/queue.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/queue.cpython-36.pyc new file mode 100644 index 0000000..ee8b87d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/queue.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/reprlib.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/reprlib.cpython-36.pyc new file mode 100644 index 0000000..a97715f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/reprlib.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/socketserver.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/socketserver.cpython-36.pyc new file mode 100644 index 0000000..5885cd7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/socketserver.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/subprocess.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/subprocess.cpython-36.pyc new file mode 100644 index 0000000..d8b5c60 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/subprocess.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/sys.cpython-36.pyc 
b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/sys.cpython-36.pyc new file mode 100644 index 0000000..2d44dd5 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/sys.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/winreg.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/winreg.cpython-36.pyc new file mode 100644 index 0000000..e20f607 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/__pycache__/winreg.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/_dummy_thread.py b/minor_project/lib/python3.6/site-packages/future/moves/_dummy_thread.py new file mode 100644 index 0000000..688d249 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/_dummy_thread.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from _dummy_thread import * +else: + __future_module__ = True + from dummy_thread import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/_markupbase.py b/minor_project/lib/python3.6/site-packages/future/moves/_markupbase.py new file mode 100644 index 0000000..f9fb4bb --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/_markupbase.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from _markupbase import * +else: + __future_module__ = True + from markupbase import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/_thread.py b/minor_project/lib/python3.6/site-packages/future/moves/_thread.py new file mode 100644 index 0000000..c68018b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/_thread.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from _thread import * +else: + __future_module__ = True + from thread import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/builtins.py b/minor_project/lib/python3.6/site-packages/future/moves/builtins.py new file mode 100644 index 0000000..e4b6221 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/builtins.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from builtins import * +else: + __future_module__ = True + from __builtin__ import * + # Overwrite any old definitions with the equivalent future.builtins ones: + from future.builtins import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/collections.py b/minor_project/lib/python3.6/site-packages/future/moves/collections.py new file mode 100644 index 0000000..664ee6a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/collections.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import +import sys + +from future.utils import PY2, PY26 +__future_module__ = True + +from collections import * + +if PY2: + from UserDict import UserDict + from UserList import UserList + from UserString import UserString + +if PY26: + from future.backports.misc import OrderedDict, Counter + +if sys.version_info < (3, 3): + from future.backports.misc import ChainMap, _count_elements diff --git a/minor_project/lib/python3.6/site-packages/future/moves/configparser.py b/minor_project/lib/python3.6/site-packages/future/moves/configparser.py new file mode 100644 index 0000000..33d9cf9 --- /dev/null +++ 
b/minor_project/lib/python3.6/site-packages/future/moves/configparser.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +from future.utils import PY2 + +if PY2: + from ConfigParser import * +else: + from configparser import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/copyreg.py b/minor_project/lib/python3.6/site-packages/future/moves/copyreg.py new file mode 100644 index 0000000..9d08cdc --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/copyreg.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + import copyreg, sys + # A "*" import uses Python 3's copyreg.__all__ which does not include + # all public names in the API surface for copyreg, this avoids that + # problem by just making our module _be_ a reference to the actual module. + sys.modules['future.moves.copyreg'] = copyreg +else: + __future_module__ = True + from copy_reg import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/dbm/__init__.py b/minor_project/lib/python3.6/site-packages/future/moves/dbm/__init__.py new file mode 100644 index 0000000..626b406 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/dbm/__init__.py @@ -0,0 +1,20 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from dbm import * +else: + __future_module__ = True + from whichdb import * + from anydbm import * + +# Py3.3's dbm/__init__.py imports ndbm but doesn't expose it via __all__. +# In case some (badly written) code depends on dbm.ndbm after import dbm, +# we simulate this: +if PY3: + from dbm import ndbm +else: + try: + from future.moves.dbm import ndbm + except ImportError: + ndbm = None diff --git a/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..2061677 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/dumb.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/dumb.cpython-36.pyc new file mode 100644 index 0000000..fedc4be Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/dumb.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/gnu.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/gnu.cpython-36.pyc new file mode 100644 index 0000000..385ccc5 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/gnu.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/ndbm.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/ndbm.cpython-36.pyc new file mode 100644 index 0000000..526e29a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/dbm/__pycache__/ndbm.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/dbm/dumb.py b/minor_project/lib/python3.6/site-packages/future/moves/dbm/dumb.py new file mode 100644 index 0000000..528383f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/dbm/dumb.py @@ -0,0 +1,9 @@ +from __future__ import 
absolute_import + +from future.utils import PY3 + +if PY3: + from dbm.dumb import * +else: + __future_module__ = True + from dumbdbm import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/dbm/gnu.py b/minor_project/lib/python3.6/site-packages/future/moves/dbm/gnu.py new file mode 100644 index 0000000..68ccf67 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/dbm/gnu.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from dbm.gnu import * +else: + __future_module__ = True + from gdbm import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/dbm/ndbm.py b/minor_project/lib/python3.6/site-packages/future/moves/dbm/ndbm.py new file mode 100644 index 0000000..8c6fff8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/dbm/ndbm.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from dbm.ndbm import * +else: + __future_module__ = True + from dbm import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/html/__init__.py b/minor_project/lib/python3.6/site-packages/future/moves/html/__init__.py new file mode 100644 index 0000000..22ed6e7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/html/__init__.py @@ -0,0 +1,31 @@ +from __future__ import absolute_import +from future.utils import PY3 +__future_module__ = True + +if PY3: + from html import * +else: + # cgi.escape isn't good enough for the single Py3.3 html test to pass. + # Define it inline here instead. From the Py3.4 stdlib. Note that the + # html.escape() function from the Py3.3 stdlib is not suitable for use on + # Py2.x. + """ + General functions for HTML manipulation. + """ + + def escape(s, quote=True): + """ + Replace special characters "&", "<" and ">" to HTML-safe sequences. + If the optional flag quote is true (the default), the quotation mark + characters, both double quote (") and single quote (') characters are also + translated. + """ + s = s.replace("&", "&amp;") # Must be done first!
+ s = s.replace("<", "&lt;") + s = s.replace(">", "&gt;") + if quote: + s = s.replace('"', "&quot;") + s = s.replace('\'', "&#x27;") + return s + + __all__ = ['escape'] diff --git a/minor_project/lib/python3.6/site-packages/future/moves/html/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/html/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..4e2967c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/html/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/html/__pycache__/entities.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/html/__pycache__/entities.cpython-36.pyc new file mode 100644 index 0000000..f319ccc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/html/__pycache__/entities.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/html/__pycache__/parser.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/html/__pycache__/parser.cpython-36.pyc new file mode 100644 index 0000000..0280758 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/html/__pycache__/parser.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/html/entities.py b/minor_project/lib/python3.6/site-packages/future/moves/html/entities.py new file mode 100644 index 0000000..56a8860 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/html/entities.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from html.entities import * +else: + __future_module__ = True + from htmlentitydefs import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/html/parser.py b/minor_project/lib/python3.6/site-packages/future/moves/html/parser.py new file mode 100644 index 0000000..a6115b5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/html/parser.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 +__future_module__ = True + +if PY3: + from html.parser import * +else: + from HTMLParser import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/http/__init__.py b/minor_project/lib/python3.6/site-packages/future/moves/http/__init__.py new file mode 100644 index 0000000..917b3d7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/http/__init__.py @@ -0,0 +1,4 @@ +from future.utils import PY3 + +if not PY3: + __future_module__ = True diff --git a/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..ea8539b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/client.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/client.cpython-36.pyc new file mode 100644 index 0000000..96b7e17 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/client.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/cookiejar.cpython-36.pyc
b/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/cookiejar.cpython-36.pyc new file mode 100644 index 0000000..a810f08 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/cookiejar.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/cookies.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/cookies.cpython-36.pyc new file mode 100644 index 0000000..7e98ff5 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/cookies.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/server.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/server.cpython-36.pyc new file mode 100644 index 0000000..186befa Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/http/__pycache__/server.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/http/client.py b/minor_project/lib/python3.6/site-packages/future/moves/http/client.py new file mode 100644 index 0000000..55f9c9c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/http/client.py @@ -0,0 +1,8 @@ +from future.utils import PY3 + +if PY3: + from http.client import * +else: + from httplib import * + from httplib import HTTPMessage + __future_module__ = True diff --git a/minor_project/lib/python3.6/site-packages/future/moves/http/cookiejar.py b/minor_project/lib/python3.6/site-packages/future/moves/http/cookiejar.py new file mode 100644 index 0000000..ea00df7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/http/cookiejar.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from http.cookiejar import * +else: + __future_module__ = True + from cookielib import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/http/cookies.py b/minor_project/lib/python3.6/site-packages/future/moves/http/cookies.py new file mode 100644 index 0000000..1b74fe2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/http/cookies.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from http.cookies import * +else: + __future_module__ = True + from Cookie import * + from Cookie import Morsel # left out of __all__ on Py2.7! diff --git a/minor_project/lib/python3.6/site-packages/future/moves/http/server.py b/minor_project/lib/python3.6/site-packages/future/moves/http/server.py new file mode 100644 index 0000000..4e75cc1 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/http/server.py @@ -0,0 +1,20 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from http.server import * +else: + __future_module__ = True + from BaseHTTPServer import * + from CGIHTTPServer import * + from SimpleHTTPServer import * + try: + from CGIHTTPServer import _url_collapse_path # needed for a test + except ImportError: + try: + # Python 2.7.0 to 2.7.3 + from CGIHTTPServer import ( + _url_collapse_path_split as _url_collapse_path) + except ImportError: + # Doesn't exist on Python 2.6.x. Ignore it. 
+ pass diff --git a/minor_project/lib/python3.6/site-packages/future/moves/itertools.py b/minor_project/lib/python3.6/site-packages/future/moves/itertools.py new file mode 100644 index 0000000..e5eb20d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/itertools.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +from itertools import * +try: + zip_longest = izip_longest + filterfalse = ifilterfalse +except NameError: + pass diff --git a/minor_project/lib/python3.6/site-packages/future/moves/pickle.py b/minor_project/lib/python3.6/site-packages/future/moves/pickle.py new file mode 100644 index 0000000..c53d693 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/pickle.py @@ -0,0 +1,11 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from pickle import * +else: + __future_module__ = True + try: + from cPickle import * + except ImportError: + from pickle import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/queue.py b/minor_project/lib/python3.6/site-packages/future/moves/queue.py new file mode 100644 index 0000000..1cb1437 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/queue.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from queue import * +else: + __future_module__ = True + from Queue import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/reprlib.py b/minor_project/lib/python3.6/site-packages/future/moves/reprlib.py new file mode 100644 index 0000000..a313a13 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/reprlib.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from reprlib import * +else: + __future_module__ = True + from repr import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/socketserver.py b/minor_project/lib/python3.6/site-packages/future/moves/socketserver.py new file mode 100644 index 0000000..062e084 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/socketserver.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from socketserver import * +else: + __future_module__ = True + from SocketServer import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/subprocess.py b/minor_project/lib/python3.6/site-packages/future/moves/subprocess.py new file mode 100644 index 0000000..43ffd2a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/subprocess.py @@ -0,0 +1,11 @@ +from __future__ import absolute_import +from future.utils import PY2, PY26 + +from subprocess import * + +if PY2: + __future_module__ = True + from commands import getoutput, getstatusoutput + +if PY26: + from future.backports.misc import check_output diff --git a/minor_project/lib/python3.6/site-packages/future/moves/sys.py b/minor_project/lib/python3.6/site-packages/future/moves/sys.py new file mode 100644 index 0000000..1293bcb --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/sys.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +from future.utils import PY2 + +from sys import * + +if PY2: + from __builtin__ import intern diff --git a/minor_project/lib/python3.6/site-packages/future/moves/test/__init__.py b/minor_project/lib/python3.6/site-packages/future/moves/test/__init__.py new file mode 100644 index 0000000..5cf428b --- /dev/null +++ 
b/minor_project/lib/python3.6/site-packages/future/moves/test/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if not PY3: + __future_module__ = True diff --git a/minor_project/lib/python3.6/site-packages/future/moves/test/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/test/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..c1a5ea1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/test/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/test/__pycache__/support.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/test/__pycache__/support.cpython-36.pyc new file mode 100644 index 0000000..348574b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/test/__pycache__/support.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/test/support.py b/minor_project/lib/python3.6/site-packages/future/moves/test/support.py new file mode 100644 index 0000000..e9aa0f4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/test/support.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +from future.standard_library import suspend_hooks +from future.utils import PY3 + +if PY3: + from test.support import * +else: + __future_module__ = True + with suspend_hooks(): + from test.test_support import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__init__.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__init__.py new file mode 100644 index 0000000..e408296 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__init__.py @@ -0,0 +1,27 @@ +from __future__ import absolute_import +from future.utils import PY3 +__future_module__ = True + +if not PY3: + from Tkinter import * + from Tkinter import (_cnfmerge, _default_root, _flatten, + _support_default_root, _test, + _tkinter, _setit) + + try: # >= 2.7.4 + from Tkinter import (_join) + except ImportError: + pass + + try: # >= 2.7.4 + from Tkinter import (_stringify) + except ImportError: + pass + + try: # >= 2.7.9 + from Tkinter import (_splitdict) + except ImportError: + pass + +else: + from tkinter import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..9de98dd Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/colorchooser.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/colorchooser.cpython-36.pyc new file mode 100644 index 0000000..4535164 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/colorchooser.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/commondialog.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/commondialog.cpython-36.pyc new file mode 100644 index 0000000..2332e9e Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/commondialog.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/constants.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/constants.cpython-36.pyc new file mode 100644 index 0000000..cafa4f0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/constants.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/dialog.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/dialog.cpython-36.pyc new file mode 100644 index 0000000..f432b1e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/dialog.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/dnd.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/dnd.cpython-36.pyc new file mode 100644 index 0000000..c98ec73 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/dnd.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/filedialog.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/filedialog.cpython-36.pyc new file mode 100644 index 0000000..198663b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/filedialog.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/font.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/font.cpython-36.pyc new file mode 100644 index 0000000..4573adc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/font.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/messagebox.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/messagebox.cpython-36.pyc new file mode 100644 index 0000000..f92afeb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/messagebox.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/scrolledtext.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/scrolledtext.cpython-36.pyc new file mode 100644 index 0000000..3ce686c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/scrolledtext.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/simpledialog.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/simpledialog.cpython-36.pyc new file mode 100644 index 0000000..1770318 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/simpledialog.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/tix.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/tix.cpython-36.pyc new file mode 100644 index 0000000..cd53e24 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/tix.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/ttk.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/ttk.cpython-36.pyc new file mode 100644 index 0000000..a3b71bc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/__pycache__/ttk.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/colorchooser.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/colorchooser.py new file mode 100644 index 0000000..6dde6e8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/colorchooser.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.colorchooser import * +else: + try: + from tkColorChooser import * + except ImportError: + raise ImportError('The tkColorChooser module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/commondialog.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/commondialog.py new file mode 100644 index 0000000..eb7ae8d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/commondialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.commondialog import * +else: + try: + from tkCommonDialog import * + except ImportError: + raise ImportError('The tkCommonDialog module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/constants.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/constants.py new file mode 100644 index 0000000..ffe0981 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/constants.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.constants import * +else: + try: + from Tkconstants import * + except ImportError: + raise ImportError('The Tkconstants module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/dialog.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/dialog.py new file mode 100644 index 0000000..113370c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/dialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.dialog import * +else: + try: + from Dialog import * + except ImportError: + raise ImportError('The Dialog module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/dnd.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/dnd.py new file mode 100644 index 0000000..1ab4379 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/dnd.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.dnd import * +else: + try: + from Tkdnd import * + except ImportError: + raise ImportError('The Tkdnd module is missing. 
Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/filedialog.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/filedialog.py new file mode 100644 index 0000000..973923e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/filedialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.filedialog import * +else: + try: + from FileDialog import * + except ImportError: + raise ImportError('The FileDialog module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/font.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/font.py new file mode 100644 index 0000000..628f399 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/font.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.font import * +else: + try: + from tkFont import * + except ImportError: + raise ImportError('The tkFont module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/messagebox.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/messagebox.py new file mode 100644 index 0000000..b43d870 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/messagebox.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.messagebox import * +else: + try: + from tkMessageBox import * + except ImportError: + raise ImportError('The tkMessageBox module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/scrolledtext.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/scrolledtext.py new file mode 100644 index 0000000..1c69db6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/scrolledtext.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.scrolledtext import * +else: + try: + from ScrolledText import * + except ImportError: + raise ImportError('The ScrolledText module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/simpledialog.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/simpledialog.py new file mode 100644 index 0000000..dba93fb --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/simpledialog.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.simpledialog import * +else: + try: + from SimpleDialog import * + except ImportError: + raise ImportError('The SimpleDialog module is missing. 
Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/tix.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/tix.py new file mode 100644 index 0000000..8d1718a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/tix.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.tix import * +else: + try: + from Tix import * + except ImportError: + raise ImportError('The Tix module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/tkinter/ttk.py b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/ttk.py new file mode 100644 index 0000000..081c1b4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/tkinter/ttk.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import + +from future.utils import PY3 + +if PY3: + from tkinter.ttk import * +else: + try: + from ttk import * + except ImportError: + raise ImportError('The ttk module is missing. Does your Py2 ' + 'installation include tkinter?') diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/__init__.py b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__init__.py new file mode 100644 index 0000000..5cf428b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if not PY3: + __future_module__ = True diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..6ee0dd7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/error.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/error.cpython-36.pyc new file mode 100644 index 0000000..cb6b7d4 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/error.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/parse.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/parse.cpython-36.pyc new file mode 100644 index 0000000..b706431 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/parse.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/request.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/request.cpython-36.pyc new file mode 100644 index 0000000..d2e5474 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/request.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/response.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/response.cpython-36.pyc new file mode 100644 index 0000000..a5b85d4 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/response.cpython-36.pyc differ diff 
--git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/robotparser.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/robotparser.cpython-36.pyc new file mode 100644 index 0000000..38b27e7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/urllib/__pycache__/robotparser.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/error.py b/minor_project/lib/python3.6/site-packages/future/moves/urllib/error.py new file mode 100644 index 0000000..7d8ada7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/urllib/error.py @@ -0,0 +1,16 @@ +from __future__ import absolute_import +from future.standard_library import suspend_hooks + +from future.utils import PY3 + +if PY3: + from urllib.error import * +else: + __future_module__ = True + + # We use this method to get at the original Py2 urllib before any renaming magic + # ContentTooShortError = sys.py2_modules['urllib'].ContentTooShortError + + with suspend_hooks(): + from urllib import ContentTooShortError + from urllib2 import URLError, HTTPError diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/parse.py b/minor_project/lib/python3.6/site-packages/future/moves/urllib/parse.py new file mode 100644 index 0000000..9074b81 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/urllib/parse.py @@ -0,0 +1,28 @@ +from __future__ import absolute_import +from future.standard_library import suspend_hooks + +from future.utils import PY3 + +if PY3: + from urllib.parse import * +else: + __future_module__ = True + from urlparse import (ParseResult, SplitResult, parse_qs, parse_qsl, + urldefrag, urljoin, urlparse, urlsplit, + urlunparse, urlunsplit) + + # we use this method to get at the original py2 urllib before any renaming + # quote = sys.py2_modules['urllib'].quote + # quote_plus = sys.py2_modules['urllib'].quote_plus + # unquote = sys.py2_modules['urllib'].unquote + # unquote_plus = sys.py2_modules['urllib'].unquote_plus + # urlencode = sys.py2_modules['urllib'].urlencode + # splitquery = sys.py2_modules['urllib'].splitquery + + with suspend_hooks(): + from urllib import (quote, + quote_plus, + unquote, + unquote_plus, + urlencode, + splitquery) diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/request.py b/minor_project/lib/python3.6/site-packages/future/moves/urllib/request.py new file mode 100644 index 0000000..972aa4a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/urllib/request.py @@ -0,0 +1,94 @@ +from __future__ import absolute_import + +from future.standard_library import suspend_hooks +from future.utils import PY3 + +if PY3: + from urllib.request import * + # This aren't in __all__: + from urllib.request import (getproxies, + pathname2url, + proxy_bypass, + quote, + request_host, + thishost, + unquote, + url2pathname, + urlcleanup, + urljoin, + urlopen, + urlparse, + urlretrieve, + urlsplit, + urlunparse) + + from urllib.parse import (splitattr, + splithost, + splitpasswd, + splitport, + splitquery, + splittag, + splittype, + splituser, + splitvalue, + to_bytes, + unwrap) +else: + __future_module__ = True + with suspend_hooks(): + from urllib import * + from urllib2 import * + from urlparse import * + + # Rename: + from urllib import toBytes # missing from __all__ on Py2.6 + to_bytes = toBytes + + # from urllib import (pathname2url, + # url2pathname, + # getproxies, + # urlretrieve, + # 
urlcleanup, + # URLopener, + # FancyURLopener, + # proxy_bypass) + + # from urllib2 import ( + # AbstractBasicAuthHandler, + # AbstractDigestAuthHandler, + # BaseHandler, + # CacheFTPHandler, + # FileHandler, + # FTPHandler, + # HTTPBasicAuthHandler, + # HTTPCookieProcessor, + # HTTPDefaultErrorHandler, + # HTTPDigestAuthHandler, + # HTTPErrorProcessor, + # HTTPHandler, + # HTTPPasswordMgr, + # HTTPPasswordMgrWithDefaultRealm, + # HTTPRedirectHandler, + # HTTPSHandler, + # URLError, + # build_opener, + # install_opener, + # OpenerDirector, + # ProxyBasicAuthHandler, + # ProxyDigestAuthHandler, + # ProxyHandler, + # Request, + # UnknownHandler, + # urlopen, + # ) + + # from urlparse import ( + # urldefrag + # urljoin, + # urlparse, + # urlunparse, + # urlsplit, + # urlunsplit, + # parse_qs, + # parse_q" + # ) diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/response.py b/minor_project/lib/python3.6/site-packages/future/moves/urllib/response.py new file mode 100644 index 0000000..a287ae2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/urllib/response.py @@ -0,0 +1,12 @@ +from future import standard_library +from future.utils import PY3 + +if PY3: + from urllib.response import * +else: + __future_module__ = True + with standard_library.suspend_hooks(): + from urllib import (addbase, + addclosehook, + addinfo, + addinfourl) diff --git a/minor_project/lib/python3.6/site-packages/future/moves/urllib/robotparser.py b/minor_project/lib/python3.6/site-packages/future/moves/urllib/robotparser.py new file mode 100644 index 0000000..0dc8f57 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/urllib/robotparser.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from urllib.robotparser import * +else: + __future_module__ = True + from robotparser import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/winreg.py b/minor_project/lib/python3.6/site-packages/future/moves/winreg.py new file mode 100644 index 0000000..c8b1475 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/winreg.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from winreg import * +else: + __future_module__ = True + from _winreg import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__init__.py b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..f8e4b81 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__pycache__/client.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__pycache__/client.cpython-36.pyc new file mode 100644 index 0000000..07ec7f9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__pycache__/client.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__pycache__/server.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__pycache__/server.cpython-36.pyc new file mode 
100644 index 0000000..80219d7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/__pycache__/server.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/client.py b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/client.py new file mode 100644 index 0000000..4708cf8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/client.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from xmlrpc.client import * +else: + from xmlrpclib import * diff --git a/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/server.py b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/server.py new file mode 100644 index 0000000..1a8af34 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/moves/xmlrpc/server.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import +from future.utils import PY3 + +if PY3: + from xmlrpc.server import * +else: + from xmlrpclib import * diff --git a/minor_project/lib/python3.6/site-packages/future/standard_library/__init__.py b/minor_project/lib/python3.6/site-packages/future/standard_library/__init__.py new file mode 100644 index 0000000..cff02f9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/standard_library/__init__.py @@ -0,0 +1,815 @@ +""" +Python 3 reorganized the standard library (PEP 3108). This module exposes +several standard library modules to Python 2 under their new Python 3 +names. + +It is designed to be used as follows:: + + from future import standard_library + standard_library.install_aliases() + +And then these normal Py3 imports work on both Py3 and Py2:: + + import builtins + import copyreg + import queue + import reprlib + import socketserver + import winreg # on Windows only + import test.support + import html, html.parser, html.entites + import http, http.client, http.server + import http.cookies, http.cookiejar + import urllib.parse, urllib.request, urllib.response, urllib.error, urllib.robotparser + import xmlrpc.client, xmlrpc.server + + import _thread + import _dummy_thread + import _markupbase + + from itertools import filterfalse, zip_longest + from sys import intern + from collections import UserDict, UserList, UserString + from collections import OrderedDict, Counter, ChainMap # even on Py2.6 + from subprocess import getoutput, getstatusoutput + from subprocess import check_output # even on Py2.6 + +(The renamed modules and functions are still available under their old +names on Python 2.) + +This is a cleaner alternative to this idiom (see +http://docs.pythonsprints.com/python3_porting/py-porting.html):: + + try: + import queue + except ImportError: + import Queue as queue + + +Limitations +----------- +We don't currently support these modules, but would like to:: + + import dbm + import dbm.dumb + import dbm.gnu + import collections.abc # on Py33 + import pickle # should (optionally) bring in cPickle on Python 2 + +""" + +from __future__ import absolute_import, division, print_function + +import sys +import logging +import imp +import contextlib +import types +import copy +import os + +# Make a dedicated logger; leave the root logger to be configured +# by the application. 
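# A compact sketch of the usage documented in the docstring above -- illustrative only,
# assuming this vendored `future` package is on sys.path; the aliased names are taken
# from the docstring's own list:
#
#     from __future__ import print_function
#     from future import standard_library
#     standard_library.install_aliases()
#
#     import queue                         # on Py2, served by the old Queue module
#     from urllib.parse import urlparse    # on Py2, served by urlparse/urllib
#     from itertools import zip_longest    # on Py2, served by izip_longest
#
#     q = queue.Queue()
#     q.put(urlparse('http://example.com/path').netloc)
#     print(q.get(), list(zip_longest('ab', 'xyz')))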
+flog = logging.getLogger('future_stdlib') +_formatter = logging.Formatter(logging.BASIC_FORMAT) +_handler = logging.StreamHandler() +_handler.setFormatter(_formatter) +flog.addHandler(_handler) +flog.setLevel(logging.WARN) + +from future.utils import PY2, PY3 + +# The modules that are defined under the same names on Py3 but with +# different contents in a significant way (e.g. submodules) are: +# pickle (fast one) +# dbm +# urllib +# test +# email + +REPLACED_MODULES = set(['test', 'urllib', 'pickle', 'dbm']) # add email and dbm when we support it + +# The following module names are not present in Python 2.x, so they cause no +# potential clashes between the old and new names: +# http +# html +# tkinter +# xmlrpc +# Keys: Py2 / real module names +# Values: Py3 / simulated module names +RENAMES = { + # 'cStringIO': 'io', # there's a new io module in Python 2.6 + # that provides StringIO and BytesIO + # 'StringIO': 'io', # ditto + # 'cPickle': 'pickle', + '__builtin__': 'builtins', + 'copy_reg': 'copyreg', + 'Queue': 'queue', + 'future.moves.socketserver': 'socketserver', + 'ConfigParser': 'configparser', + 'repr': 'reprlib', + # 'FileDialog': 'tkinter.filedialog', + # 'tkFileDialog': 'tkinter.filedialog', + # 'SimpleDialog': 'tkinter.simpledialog', + # 'tkSimpleDialog': 'tkinter.simpledialog', + # 'tkColorChooser': 'tkinter.colorchooser', + # 'tkCommonDialog': 'tkinter.commondialog', + # 'Dialog': 'tkinter.dialog', + # 'Tkdnd': 'tkinter.dnd', + # 'tkFont': 'tkinter.font', + # 'tkMessageBox': 'tkinter.messagebox', + # 'ScrolledText': 'tkinter.scrolledtext', + # 'Tkconstants': 'tkinter.constants', + # 'Tix': 'tkinter.tix', + # 'ttk': 'tkinter.ttk', + # 'Tkinter': 'tkinter', + '_winreg': 'winreg', + 'thread': '_thread', + 'dummy_thread': '_dummy_thread', + # 'anydbm': 'dbm', # causes infinite import loop + # 'whichdb': 'dbm', # causes infinite import loop + # anydbm and whichdb are handled by fix_imports2 + # 'dbhash': 'dbm.bsd', + # 'dumbdbm': 'dbm.dumb', + # 'dbm': 'dbm.ndbm', + # 'gdbm': 'dbm.gnu', + 'future.moves.xmlrpc': 'xmlrpc', + # 'future.backports.email': 'email', # for use by urllib + # 'DocXMLRPCServer': 'xmlrpc.server', + # 'SimpleXMLRPCServer': 'xmlrpc.server', + # 'httplib': 'http.client', + # 'htmlentitydefs' : 'html.entities', + # 'HTMLParser' : 'html.parser', + # 'Cookie': 'http.cookies', + # 'cookielib': 'http.cookiejar', + # 'BaseHTTPServer': 'http.server', + # 'SimpleHTTPServer': 'http.server', + # 'CGIHTTPServer': 'http.server', + # 'future.backports.test': 'test', # primarily for renaming test_support to support + # 'commands': 'subprocess', + # 'urlparse' : 'urllib.parse', + # 'robotparser' : 'urllib.robotparser', + # 'abc': 'collections.abc', # for Py33 + # 'future.utils.six.moves.html': 'html', + # 'future.utils.six.moves.http': 'http', + 'future.moves.html': 'html', + 'future.moves.http': 'http', + # 'future.backports.urllib': 'urllib', + # 'future.utils.six.moves.urllib': 'urllib', + 'future.moves._markupbase': '_markupbase', + } + + +# It is complicated and apparently brittle to mess around with the +# ``sys.modules`` cache in order to support "import urllib" meaning two +# different things (Py2.7 urllib and backported Py3.3-like urllib) in different +# contexts. So we require explicit imports for these modules. +assert len(set(RENAMES.values()) & set(REPLACED_MODULES)) == 0 + + +# Harmless renames that we can insert. 
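# (For example, once the RENAMES table above is active on Py2 -- see the RenameImport
#  hook and the install_hooks()/remove_hooks() helpers used later in this module --
#  importing a module under its Py3 name is served by the old Py2 module.
#  Illustrative sketch only:
#
#      from future import standard_library
#      standard_library.install_hooks()     # registers the rename-aware import hook
#      import configparser                  # on Py2, resolved to ConfigParser
#      import socketserver                  # on Py2, resolved to future.moves.socketserver
#      standard_library.remove_hooks()
# )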
+# These modules need names from elsewhere being added to them: +# subprocess: should provide getoutput and other fns from commands +# module but these fns are missing: getstatus, mk2arg, +# mkarg +# re: needs an ASCII constant that works compatibly with Py3 + +# etc: see lib2to3/fixes/fix_imports.py + +# (New module name, new object name, old module name, old object name) +MOVES = [('collections', 'UserList', 'UserList', 'UserList'), + ('collections', 'UserDict', 'UserDict', 'UserDict'), + ('collections', 'UserString','UserString', 'UserString'), + ('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'), + ('itertools', 'filterfalse','itertools', 'ifilterfalse'), + ('itertools', 'zip_longest','itertools', 'izip_longest'), + ('sys', 'intern','__builtin__', 'intern'), + # The re module has no ASCII flag in Py2, but this is the default. + # Set re.ASCII to a zero constant. stat.ST_MODE just happens to be one + # (and it exists on Py2.6+). + ('re', 'ASCII','stat', 'ST_MODE'), + ('base64', 'encodebytes','base64', 'encodestring'), + ('base64', 'decodebytes','base64', 'decodestring'), + ('subprocess', 'getoutput', 'commands', 'getoutput'), + ('subprocess', 'getstatusoutput', 'commands', 'getstatusoutput'), + ('subprocess', 'check_output', 'future.backports.misc', 'check_output'), + ('math', 'ceil', 'future.backports.misc', 'ceil'), + ('collections', 'OrderedDict', 'future.backports.misc', 'OrderedDict'), + ('collections', 'Counter', 'future.backports.misc', 'Counter'), + ('collections', 'ChainMap', 'future.backports.misc', 'ChainMap'), + ('itertools', 'count', 'future.backports.misc', 'count'), + ('reprlib', 'recursive_repr', 'future.backports.misc', 'recursive_repr'), + ('functools', 'cmp_to_key', 'future.backports.misc', 'cmp_to_key'), + +# This is no use, since "import urllib.request" etc. still fails: +# ('urllib', 'error', 'future.moves.urllib', 'error'), +# ('urllib', 'parse', 'future.moves.urllib', 'parse'), +# ('urllib', 'request', 'future.moves.urllib', 'request'), +# ('urllib', 'response', 'future.moves.urllib', 'response'), +# ('urllib', 'robotparser', 'future.moves.urllib', 'robotparser'), + ] + + +# A minimal example of an import hook: +# class WarnOnImport(object): +# def __init__(self, *args): +# self.module_names = args +# +# def find_module(self, fullname, path=None): +# if fullname in self.module_names: +# self.path = path +# return self +# return None +# +# def load_module(self, name): +# if name in sys.modules: +# return sys.modules[name] +# module_info = imp.find_module(name, self.path) +# module = imp.load_module(name, *module_info) +# sys.modules[name] = module +# flog.warning("Imported deprecated module %s", name) +# return module + + +class RenameImport(object): + """ + A class for import hooks mapping Py3 module names etc. to the Py2 equivalents. + """ + # Different RenameImport classes are created when importing this module from + # different source files. This causes isinstance(hook, RenameImport) checks + # to produce inconsistent results. We add this RENAMER attribute here so + # remove_hooks() and install_hooks() can find instances of these classes + # easily: + RENAMER = True + + def __init__(self, old_to_new): + ''' + Pass in a dictionary-like object mapping from old names to new + names. E.g. 
{'ConfigParser': 'configparser', 'cPickle': 'pickle'} + ''' + self.old_to_new = old_to_new + both = set(old_to_new.keys()) & set(old_to_new.values()) + assert (len(both) == 0 and + len(set(old_to_new.values())) == len(old_to_new.values())), \ + 'Ambiguity in renaming (handler not implemented)' + self.new_to_old = dict((new, old) for (old, new) in old_to_new.items()) + + def find_module(self, fullname, path=None): + # Handles hierarchical importing: package.module.module2 + new_base_names = set([s.split('.')[0] for s in self.new_to_old]) + # Before v0.12: Was: if fullname in set(self.old_to_new) | new_base_names: + if fullname in new_base_names: + return self + return None + + def load_module(self, name): + path = None + if name in sys.modules: + return sys.modules[name] + elif name in self.new_to_old: + # New name. Look up the corresponding old (Py2) name: + oldname = self.new_to_old[name] + module = self._find_and_load_module(oldname) + # module.__future_module__ = True + else: + module = self._find_and_load_module(name) + # In any case, make it available under the requested (Py3) name + sys.modules[name] = module + return module + + def _find_and_load_module(self, name, path=None): + """ + Finds and loads it. But if there's a . in the name, handles it + properly. + """ + bits = name.split('.') + while len(bits) > 1: + # Treat the first bit as a package + packagename = bits.pop(0) + package = self._find_and_load_module(packagename, path) + try: + path = package.__path__ + except AttributeError: + # This could be e.g. moves. + flog.debug('Package {0} has no __path__.'.format(package)) + if name in sys.modules: + return sys.modules[name] + flog.debug('What to do here?') + + name = bits[0] + module_info = imp.find_module(name, path) + return imp.load_module(name, *module_info) + + +class hooks(object): + """ + Acts as a context manager. Saves the state of sys.modules and restores it + after the 'with' block. + + Use like this: + + >>> from future import standard_library + >>> with standard_library.hooks(): + ... import http.client + >>> import requests + + For this to work, http.client will be scrubbed from sys.modules after the + 'with' block. That way the modules imported in the 'with' block will + continue to be accessible in the current namespace but not from any + imported modules (like requests). + """ + def __enter__(self): + # flog.debug('Entering hooks context manager') + self.old_sys_modules = copy.copy(sys.modules) + self.hooks_were_installed = detect_hooks() + # self.scrubbed = scrub_py2_sys_modules() + install_hooks() + return self + + def __exit__(self, *args): + # flog.debug('Exiting hooks context manager') + # restore_sys_modules(self.scrubbed) + if not self.hooks_were_installed: + remove_hooks() + # scrub_future_sys_modules() + +# Sanity check for is_py2_stdlib_module(): We aren't replacing any +# builtin modules names: +if PY2: + assert len(set(RENAMES.values()) & set(sys.builtin_module_names)) == 0 + + +def is_py2_stdlib_module(m): + """ + Tries to infer whether the module m is from the Python 2 standard library. + This may not be reliable on all systems. + """ + if PY3: + return False + if not 'stdlib_path' in is_py2_stdlib_module.__dict__: + stdlib_files = [contextlib.__file__, os.__file__, copy.__file__] + stdlib_paths = [os.path.split(f)[0] for f in stdlib_files] + if not len(set(stdlib_paths)) == 1: + # This seems to happen on travis-ci.org. Very strange. We'll try to + # ignore it. 
+ flog.warn('Multiple locations found for the Python standard ' + 'library: %s' % stdlib_paths) + # Choose the first one arbitrarily + is_py2_stdlib_module.stdlib_path = stdlib_paths[0] + + if m.__name__ in sys.builtin_module_names: + return True + + if hasattr(m, '__file__'): + modpath = os.path.split(m.__file__) + if (modpath[0].startswith(is_py2_stdlib_module.stdlib_path) and + 'site-packages' not in modpath[0]): + return True + + return False + + +def scrub_py2_sys_modules(): + """ + Removes any Python 2 standard library modules from ``sys.modules`` that + would interfere with Py3-style imports using import hooks. Examples are + modules with the same names (like urllib or email). + + (Note that currently import hooks are disabled for modules like these + with ambiguous names anyway ...) + """ + if PY3: + return {} + scrubbed = {} + for modulename in REPLACED_MODULES & set(RENAMES.keys()): + if not modulename in sys.modules: + continue + + module = sys.modules[modulename] + + if is_py2_stdlib_module(module): + flog.debug('Deleting (Py2) {} from sys.modules'.format(modulename)) + scrubbed[modulename] = sys.modules[modulename] + del sys.modules[modulename] + return scrubbed + + +def scrub_future_sys_modules(): + """ + Deprecated. + """ + return {} + +class suspend_hooks(object): + """ + Acts as a context manager. Use like this: + + >>> from future import standard_library + >>> standard_library.install_hooks() + >>> import http.client + >>> # ... + >>> with standard_library.suspend_hooks(): + >>> import requests # incompatible with ``future``'s standard library hooks + + If the hooks were disabled before the context, they are not installed when + the context is left. + """ + def __enter__(self): + self.hooks_were_installed = detect_hooks() + remove_hooks() + # self.scrubbed = scrub_future_sys_modules() + return self + + def __exit__(self, *args): + if self.hooks_were_installed: + install_hooks() + # restore_sys_modules(self.scrubbed) + + +def restore_sys_modules(scrubbed): + """ + Add any previously scrubbed modules back to the sys.modules cache, + but only if it's safe to do so. + """ + clash = set(sys.modules) & set(scrubbed) + if len(clash) != 0: + # If several, choose one arbitrarily to raise an exception about + first = list(clash)[0] + raise ImportError('future module {} clashes with Py2 module' + .format(first)) + sys.modules.update(scrubbed) + + +def install_aliases(): + """ + Monkey-patches the standard library in Py2.6/7 to provide + aliases for better Py3 compatibility. 
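+
+    A sketch of the intended effect on Py2 (the names are taken from the
+    MOVES table above; this is illustrative, not exhaustive):
+
+    >>> from future import standard_library
+    >>> standard_library.install_aliases()
+    >>> from collections import Counter, OrderedDict, UserDict
+    >>> from itertools import filterfalse, zip_longest
+    >>> from subprocess import getoutput, getstatusoutput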
+ """ + if PY3: + return + # if hasattr(install_aliases, 'run_already'): + # return + for (newmodname, newobjname, oldmodname, oldobjname) in MOVES: + __import__(newmodname) + # We look up the module in sys.modules because __import__ just returns the + # top-level package: + newmod = sys.modules[newmodname] + # newmod.__future_module__ = True + + __import__(oldmodname) + oldmod = sys.modules[oldmodname] + + obj = getattr(oldmod, oldobjname) + setattr(newmod, newobjname, obj) + + # Hack for urllib so it appears to have the same structure on Py2 as on Py3 + import urllib + from future.backports.urllib import request + from future.backports.urllib import response + from future.backports.urllib import parse + from future.backports.urllib import error + from future.backports.urllib import robotparser + urllib.request = request + urllib.response = response + urllib.parse = parse + urllib.error = error + urllib.robotparser = robotparser + sys.modules['urllib.request'] = request + sys.modules['urllib.response'] = response + sys.modules['urllib.parse'] = parse + sys.modules['urllib.error'] = error + sys.modules['urllib.robotparser'] = robotparser + + # Patch the test module so it appears to have the same structure on Py2 as on Py3 + try: + import test + except ImportError: + pass + try: + from future.moves.test import support + except ImportError: + pass + else: + test.support = support + sys.modules['test.support'] = support + + # Patch the dbm module so it appears to have the same structure on Py2 as on Py3 + try: + import dbm + except ImportError: + pass + else: + from future.moves.dbm import dumb + dbm.dumb = dumb + sys.modules['dbm.dumb'] = dumb + try: + from future.moves.dbm import gnu + except ImportError: + pass + else: + dbm.gnu = gnu + sys.modules['dbm.gnu'] = gnu + try: + from future.moves.dbm import ndbm + except ImportError: + pass + else: + dbm.ndbm = ndbm + sys.modules['dbm.ndbm'] = ndbm + + # install_aliases.run_already = True + + +def install_hooks(): + """ + This function installs the future.standard_library import hook into + sys.meta_path. + """ + if PY3: + return + + install_aliases() + + flog.debug('sys.meta_path was: {0}'.format(sys.meta_path)) + flog.debug('Installing hooks ...') + + # Add it unless it's there already + newhook = RenameImport(RENAMES) + if not detect_hooks(): + sys.meta_path.append(newhook) + flog.debug('sys.meta_path is now: {0}'.format(sys.meta_path)) + + +def enable_hooks(): + """ + Deprecated. Use install_hooks() instead. This will be removed by + ``future`` v1.0. + """ + install_hooks() + + +def remove_hooks(scrub_sys_modules=False): + """ + This function removes the import hook from sys.meta_path. + """ + if PY3: + return + flog.debug('Uninstalling hooks ...') + # Loop backwards, so deleting items keeps the ordering: + for i, hook in list(enumerate(sys.meta_path))[::-1]: + if hasattr(hook, 'RENAMER'): + del sys.meta_path[i] + + # Explicit is better than implicit. In the future the interface should + # probably change so that scrubbing the import hooks requires a separate + # function call. Left as is for now for backward compatibility with + # v0.11.x. + if scrub_sys_modules: + scrub_future_sys_modules() + + +def disable_hooks(): + """ + Deprecated. Use remove_hooks() instead. This will be removed by + ``future`` v1.0. + """ + remove_hooks() + + +def detect_hooks(): + """ + Returns True if the import hooks are installed, False if not. 
+ """ + flog.debug('Detecting hooks ...') + present = any([hasattr(hook, 'RENAMER') for hook in sys.meta_path]) + if present: + flog.debug('Detected.') + else: + flog.debug('Not detected.') + return present + + +# As of v0.12, this no longer happens implicitly: +# if not PY3: +# install_hooks() + + +if not hasattr(sys, 'py2_modules'): + sys.py2_modules = {} + +def cache_py2_modules(): + """ + Currently this function is unneeded, as we are not attempting to provide import hooks + for modules with ambiguous names: email, urllib, pickle. + """ + if len(sys.py2_modules) != 0: + return + assert not detect_hooks() + import urllib + sys.py2_modules['urllib'] = urllib + + import email + sys.py2_modules['email'] = email + + import pickle + sys.py2_modules['pickle'] = pickle + + # Not all Python installations have test module. (Anaconda doesn't, for example.) + # try: + # import test + # except ImportError: + # sys.py2_modules['test'] = None + # sys.py2_modules['test'] = test + + # import dbm + # sys.py2_modules['dbm'] = dbm + + +def import_(module_name, backport=False): + """ + Pass a (potentially dotted) module name of a Python 3 standard library + module. This function imports the module compatibly on Py2 and Py3 and + returns the top-level module. + + Example use: + >>> http = import_('http.client') + >>> http = import_('http.server') + >>> urllib = import_('urllib.request') + + Then: + >>> conn = http.client.HTTPConnection(...) + >>> response = urllib.request.urlopen('http://mywebsite.com') + >>> # etc. + + Use as follows: + >>> package_name = import_(module_name) + + On Py3, equivalent to this: + + >>> import module_name + + On Py2, equivalent to this if backport=False: + + >>> from future.moves import module_name + + or to this if backport=True: + + >>> from future.backports import module_name + + except that it also handles dotted module names such as ``http.client`` + The effect then is like this: + + >>> from future.backports import module + >>> from future.backports.module import submodule + >>> module.submodule = submodule + + Note that this would be a SyntaxError in Python: + + >>> from future.backports import http.client + + """ + # Python 2.6 doesn't have importlib in the stdlib, so it requires + # the backported ``importlib`` package from PyPI as a dependency to use + # this function: + import importlib + + if PY3: + return __import__(module_name) + else: + # client.blah = blah + # Then http.client = client + # etc. + if backport: + prefix = 'future.backports' + else: + prefix = 'future.moves' + parts = prefix.split('.') + module_name.split('.') + + modules = [] + for i, part in enumerate(parts): + sofar = '.'.join(parts[:i+1]) + modules.append(importlib.import_module(sofar)) + for i, part in reversed(list(enumerate(parts))): + if i == 0: + break + setattr(modules[i-1], part, modules[i]) + + # Return the next-most top-level module after future.backports / future.moves: + return modules[2] + + +def from_import(module_name, *symbol_names, **kwargs): + """ + Example use: + >>> HTTPConnection = from_import('http.client', 'HTTPConnection') + >>> HTTPServer = from_import('http.server', 'HTTPServer') + >>> urlopen, urlparse = from_import('urllib.request', 'urlopen', 'urlparse') + + Equivalent to this on Py3: + + >>> from module_name import symbol_names[0], symbol_names[1], ... + + and this on Py2: + + >>> from future.moves.module_name import symbol_names[0], ... + + or: + + >>> from future.backports.module_name import symbol_names[0], ... 
+
+    except that it also handles dotted module names such as ``http.client``.
+    """
+    # importlib is not imported at module level; the Py2 branch below needs
+    # it (and, as with import_() above, Py2.6 requires the backported
+    # ``importlib`` package from PyPI):
+    import importlib
+
+    if PY3:
+        return __import__(module_name)
+    else:
+        if 'backport' in kwargs and bool(kwargs['backport']):
+            prefix = 'future.backports'
+        else:
+            prefix = 'future.moves'
+        parts = prefix.split('.') + module_name.split('.')
+        module = importlib.import_module(prefix + '.' + module_name)
+        output = [getattr(module, name) for name in symbol_names]
+        if len(output) == 1:
+            return output[0]
+        else:
+            return output
+
+
+class exclude_local_folder_imports(object):
+    """
+    A context-manager that prevents standard library modules like configparser
+    from being imported from the local python-future source folder on Py3.
+
+    (This was needed prior to v0.16.0 because the presence of a configparser
+    folder would otherwise have prevented setuptools from running on Py3. Maybe
+    it's not needed any more?)
+    """
+    def __init__(self, *args):
+        assert len(args) > 0
+        self.module_names = args
+        # Disallow dotted module names like http.client:
+        if any(['.' in m for m in self.module_names]):
+            raise NotImplementedError('Dotted module names are not supported')
+
+    def __enter__(self):
+        self.old_sys_path = copy.copy(sys.path)
+        self.old_sys_modules = copy.copy(sys.modules)
+        if sys.version_info[0] < 3:
+            return
+        # The presence of all these indicates we've found our source folder,
+        # because `builtins` won't have been installed in site-packages by setup.py:
+        FUTURE_SOURCE_SUBFOLDERS = ['future', 'past', 'libfuturize', 'libpasteurize', 'builtins']
+
+        # Look for the future source folder:
+        for folder in self.old_sys_path:
+            if all([os.path.exists(os.path.join(folder, subfolder))
+                    for subfolder in FUTURE_SOURCE_SUBFOLDERS]):
+                # Found it. Remove it.
+                sys.path.remove(folder)
+
+        # Ensure we import the system module:
+        for m in self.module_names:
+            # Delete the module and any submodules from sys.modules:
+            # for key in list(sys.modules):
+            #     if key == m or key.startswith(m + '.'):
+            #         try:
+            #             del sys.modules[key]
+            #         except KeyError:
+            #             pass
+            try:
+                module = __import__(m, level=0)
+            except ImportError:
+                # There's a problem importing the system module. E.g. the
+                # winreg module is not available except on Windows.
+                pass
+
+    def __exit__(self, *args):
+        # Restore sys.path and sys.modules:
+        sys.path = self.old_sys_path
+        for m in set(self.old_sys_modules.keys()) - set(sys.modules.keys()):
+            sys.modules[m] = self.old_sys_modules[m]
+
+TOP_LEVEL_MODULES = ['builtins',
+                     'copyreg',
+                     'html',
+                     'http',
+                     'queue',
+                     'reprlib',
+                     'socketserver',
+                     'test',
+                     'tkinter',
+                     'winreg',
+                     'xmlrpc',
+                     '_dummy_thread',
+                     '_markupbase',
+                     '_thread',
+                     ]
+
+def import_top_level_modules():
+    with exclude_local_folder_imports(*TOP_LEVEL_MODULES):
+        for m in TOP_LEVEL_MODULES:
+            try:
+                __import__(m)
+            except ImportError: # e.g.
winreg + pass diff --git a/minor_project/lib/python3.6/site-packages/future/standard_library/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/standard_library/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..b962c22 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/standard_library/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/tests/__init__.py b/minor_project/lib/python3.6/site-packages/future/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/minor_project/lib/python3.6/site-packages/future/tests/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/tests/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..cf6d456 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/tests/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/tests/__pycache__/base.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/tests/__pycache__/base.cpython-36.pyc new file mode 100644 index 0000000..d67b1ad Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/tests/__pycache__/base.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/tests/base.py b/minor_project/lib/python3.6/site-packages/future/tests/base.py new file mode 100644 index 0000000..4ef437b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/tests/base.py @@ -0,0 +1,539 @@ +from __future__ import print_function, absolute_import +import os +import tempfile +import unittest +import sys +import re +import warnings +import io +from textwrap import dedent + +from future.utils import bind_method, PY26, PY3, PY2, PY27 +from future.moves.subprocess import check_output, STDOUT, CalledProcessError + +if PY26: + import unittest2 as unittest + + +def reformat_code(code): + """ + Removes any leading \n and dedents. + """ + if code.startswith('\n'): + code = code[1:] + return dedent(code) + + +def order_future_lines(code): + """ + Returns the code block with any ``__future__`` import lines sorted, and + then any ``future`` import lines sorted, then any ``builtins`` import lines + sorted. + + This only sorts the lines within the expected blocks. + + See test_order_future_lines() for an example. 
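+
+    A sketch of the effect on a small block:
+
+        from __future__ import print_function
+        from __future__ import absolute_import
+        x = 1
+
+    becomes
+
+        from __future__ import absolute_import
+        from __future__ import print_function
+        x = 1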
+ """ + + # We need .splitlines(keepends=True), which doesn't exist on Py2, + # so we use this instead: + lines = code.split('\n') + + uufuture_line_numbers = [i for i, line in enumerate(lines) + if line.startswith('from __future__ import ')] + + future_line_numbers = [i for i, line in enumerate(lines) + if line.startswith('from future') + or line.startswith('from past')] + + builtins_line_numbers = [i for i, line in enumerate(lines) + if line.startswith('from builtins')] + + assert code.lstrip() == code, ('internal usage error: ' + 'dedent the code before calling order_future_lines()') + + def mymax(numbers): + return max(numbers) if len(numbers) > 0 else 0 + + def mymin(numbers): + return min(numbers) if len(numbers) > 0 else float('inf') + + assert mymax(uufuture_line_numbers) <= mymin(future_line_numbers), \ + 'the __future__ and future imports are out of order' + + # assert mymax(future_line_numbers) <= mymin(builtins_line_numbers), \ + # 'the future and builtins imports are out of order' + + uul = sorted([lines[i] for i in uufuture_line_numbers]) + sorted_uufuture_lines = dict(zip(uufuture_line_numbers, uul)) + + fl = sorted([lines[i] for i in future_line_numbers]) + sorted_future_lines = dict(zip(future_line_numbers, fl)) + + bl = sorted([lines[i] for i in builtins_line_numbers]) + sorted_builtins_lines = dict(zip(builtins_line_numbers, bl)) + + # Replace the old unsorted "from __future__ import ..." lines with the + # new sorted ones: + new_lines = [] + for i in range(len(lines)): + if i in uufuture_line_numbers: + new_lines.append(sorted_uufuture_lines[i]) + elif i in future_line_numbers: + new_lines.append(sorted_future_lines[i]) + elif i in builtins_line_numbers: + new_lines.append(sorted_builtins_lines[i]) + else: + new_lines.append(lines[i]) + return '\n'.join(new_lines) + + +class VerboseCalledProcessError(CalledProcessError): + """ + Like CalledProcessError, but it displays more information (message and + script output) for diagnosing test failures etc. + """ + def __init__(self, msg, returncode, cmd, output=None): + self.msg = msg + self.returncode = returncode + self.cmd = cmd + self.output = output + + def __str__(self): + return ("Command '%s' failed with exit status %d\nMessage: %s\nOutput: %s" + % (self.cmd, self.returncode, self.msg, self.output)) + +class FuturizeError(VerboseCalledProcessError): + pass + +class PasteurizeError(VerboseCalledProcessError): + pass + + +class CodeHandler(unittest.TestCase): + """ + Handy mixin for test classes for writing / reading / futurizing / + running .py files in the test suite. 
+ """ + def setUp(self): + """ + The outputs from the various futurize stages should have the + following headers: + """ + # After stage1: + # TODO: use this form after implementing a fixer to consolidate + # __future__ imports into a single line: + # self.headers1 = """ + # from __future__ import absolute_import, division, print_function + # """ + self.headers1 = reformat_code(""" + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + """) + + # After stage2 --all-imports: + # TODO: use this form after implementing a fixer to consolidate + # __future__ imports into a single line: + # self.headers2 = """ + # from __future__ import (absolute_import, division, + # print_function, unicode_literals) + # from future import standard_library + # from future.builtins import * + # """ + self.headers2 = reformat_code(""" + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + from __future__ import unicode_literals + from future import standard_library + standard_library.install_aliases() + from builtins import * + """) + self.interpreters = [sys.executable] + self.tempdir = tempfile.mkdtemp() + os.path.sep + pypath = os.getenv('PYTHONPATH') + if pypath: + self.env = {'PYTHONPATH': os.getcwd() + os.pathsep + pypath} + else: + self.env = {'PYTHONPATH': os.getcwd()} + + def convert(self, code, stages=(1, 2), all_imports=False, from3=False, + reformat=True, run=True, conservative=False): + """ + Converts the code block using ``futurize`` and returns the + resulting code. + + Passing stages=[1] or stages=[2] passes the flag ``--stage1`` or + ``stage2`` to ``futurize``. Passing both stages runs ``futurize`` + with both stages by default. + + If from3 is False, runs ``futurize``, converting from Python 2 to + both 2 and 3. If from3 is True, runs ``pasteurize`` to convert + from Python 3 to both 2 and 3. + + Optionally reformats the code block first using the reformat() function. + + If run is True, runs the resulting code under all Python + interpreters in self.interpreters. + """ + if reformat: + code = reformat_code(code) + self._write_test_script(code) + self._futurize_test_script(stages=stages, all_imports=all_imports, + from3=from3, conservative=conservative) + output = self._read_test_script() + if run: + for interpreter in self.interpreters: + _ = self._run_test_script(interpreter=interpreter) + return output + + def compare(self, output, expected, ignore_imports=True): + """ + Compares whether the code blocks are equal. If not, raises an + exception so the test fails. Ignores any trailing whitespace like + blank lines. + + If ignore_imports is True, passes the code blocks into the + strip_future_imports method. + + If one code block is a unicode string and the other a + byte-string, it assumes the byte-string is encoded as utf-8. + """ + if ignore_imports: + output = self.strip_future_imports(output) + expected = self.strip_future_imports(expected) + if isinstance(output, bytes) and not isinstance(expected, bytes): + output = output.decode('utf-8') + if isinstance(expected, bytes) and not isinstance(output, bytes): + expected = expected.decode('utf-8') + self.assertEqual(order_future_lines(output.rstrip()), + expected.rstrip()) + + def strip_future_imports(self, code): + """ + Strips any of these import lines: + + from __future__ import + from future + from future. 
+ from builtins + + or any line containing: + install_hooks() + or: + install_aliases() + + Limitation: doesn't handle imports split across multiple lines like + this: + + from __future__ import (absolute_import, division, print_function, + unicode_literals) + """ + output = [] + # We need .splitlines(keepends=True), which doesn't exist on Py2, + # so we use this instead: + for line in code.split('\n'): + if not (line.startswith('from __future__ import ') + or line.startswith('from future ') + or line.startswith('from builtins ') + or 'install_hooks()' in line + or 'install_aliases()' in line + # but don't match "from future_builtins" :) + or line.startswith('from future.')): + output.append(line) + return '\n'.join(output) + + def convert_check(self, before, expected, stages=(1, 2), all_imports=False, + ignore_imports=True, from3=False, run=True, + conservative=False): + """ + Convenience method that calls convert() and compare(). + + Reformats the code blocks automatically using the reformat_code() + function. + + If all_imports is passed, we add the appropriate import headers + for the stage(s) selected to the ``expected`` code-block, so they + needn't appear repeatedly in the test code. + + If ignore_imports is True, ignores the presence of any lines + beginning: + + from __future__ import ... + from future import ... + + for the purpose of the comparison. + """ + output = self.convert(before, stages=stages, all_imports=all_imports, + from3=from3, run=run, conservative=conservative) + if all_imports: + headers = self.headers2 if 2 in stages else self.headers1 + else: + headers = '' + + reformatted = reformat_code(expected) + if headers in reformatted: + headers = '' + + self.compare(output, headers + reformatted, + ignore_imports=ignore_imports) + + def unchanged(self, code, **kwargs): + """ + Convenience method to ensure the code is unchanged by the + futurize process. + """ + self.convert_check(code, code, **kwargs) + + def _write_test_script(self, code, filename='mytestscript.py'): + """ + Dedents the given code (a multiline string) and writes it out to + a file in a temporary folder like /tmp/tmpUDCn7x/mytestscript.py. 
+ """ + if isinstance(code, bytes): + code = code.decode('utf-8') + # Be explicit about encoding the temp file as UTF-8 (issue #63): + with io.open(self.tempdir + filename, 'wt', encoding='utf-8') as f: + f.write(dedent(code)) + + def _read_test_script(self, filename='mytestscript.py'): + with io.open(self.tempdir + filename, 'rt', encoding='utf-8') as f: + newsource = f.read() + return newsource + + def _futurize_test_script(self, filename='mytestscript.py', stages=(1, 2), + all_imports=False, from3=False, + conservative=False): + params = [] + stages = list(stages) + if all_imports: + params.append('--all-imports') + if from3: + script = 'pasteurize.py' + else: + script = 'futurize.py' + if stages == [1]: + params.append('--stage1') + elif stages == [2]: + params.append('--stage2') + else: + assert stages == [1, 2] + if conservative: + params.append('--conservative') + # No extra params needed + + # Absolute file path: + fn = self.tempdir + filename + call_args = [sys.executable, script] + params + ['-w', fn] + try: + output = check_output(call_args, stderr=STDOUT, env=self.env) + except CalledProcessError as e: + with open(fn) as f: + msg = ( + 'Error running the command %s\n' + '%s\n' + 'Contents of file %s:\n' + '\n' + '%s') % ( + ' '.join(call_args), + 'env=%s' % self.env, + fn, + '----\n%s\n----' % f.read(), + ) + ErrorClass = (FuturizeError if 'futurize' in script else PasteurizeError) + + if not hasattr(e, 'output'): + # The attribute CalledProcessError.output doesn't exist on Py2.6 + e.output = None + raise ErrorClass(msg, e.returncode, e.cmd, output=e.output) + return output + + def _run_test_script(self, filename='mytestscript.py', + interpreter=sys.executable): + # Absolute file path: + fn = self.tempdir + filename + try: + output = check_output([interpreter, fn], + env=self.env, stderr=STDOUT) + except CalledProcessError as e: + with open(fn) as f: + msg = ( + 'Error running the command %s\n' + '%s\n' + 'Contents of file %s:\n' + '\n' + '%s') % ( + ' '.join([interpreter, fn]), + 'env=%s' % self.env, + fn, + '----\n%s\n----' % f.read(), + ) + if not hasattr(e, 'output'): + # The attribute CalledProcessError.output doesn't exist on Py2.6 + e.output = None + raise VerboseCalledProcessError(msg, e.returncode, e.cmd, output=e.output) + return output + + +# Decorator to skip some tests on Python 2.6 ... +skip26 = unittest.skipIf(PY26, "this test is known to fail on Py2.6") + + +def expectedFailurePY3(func): + if not PY3: + return func + return unittest.expectedFailure(func) + +def expectedFailurePY26(func): + if not PY26: + return func + return unittest.expectedFailure(func) + + +def expectedFailurePY27(func): + if not PY27: + return func + return unittest.expectedFailure(func) + + +def expectedFailurePY2(func): + if not PY2: + return func + return unittest.expectedFailure(func) + + +# Renamed in Py3.3: +if not hasattr(unittest.TestCase, 'assertRaisesRegex'): + unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp + +# From Py3.3: +def assertRegex(self, text, expected_regex, msg=None): + """Fail the test unless the text matches the regular expression.""" + if isinstance(expected_regex, (str, unicode)): + assert expected_regex, "expected_regex must not be empty." 
+ expected_regex = re.compile(expected_regex) + if not expected_regex.search(text): + msg = msg or "Regex didn't match" + msg = '%s: %r not found in %r' % (msg, expected_regex.pattern, text) + raise self.failureException(msg) + +if not hasattr(unittest.TestCase, 'assertRegex'): + bind_method(unittest.TestCase, 'assertRegex', assertRegex) + +class _AssertRaisesBaseContext(object): + + def __init__(self, expected, test_case, callable_obj=None, + expected_regex=None): + self.expected = expected + self.test_case = test_case + if callable_obj is not None: + try: + self.obj_name = callable_obj.__name__ + except AttributeError: + self.obj_name = str(callable_obj) + else: + self.obj_name = None + if isinstance(expected_regex, (bytes, str)): + expected_regex = re.compile(expected_regex) + self.expected_regex = expected_regex + self.msg = None + + def _raiseFailure(self, standardMsg): + msg = self.test_case._formatMessage(self.msg, standardMsg) + raise self.test_case.failureException(msg) + + def handle(self, name, callable_obj, args, kwargs): + """ + If callable_obj is None, assertRaises/Warns is being used as a + context manager, so check for a 'msg' kwarg and return self. + If callable_obj is not None, call it passing args and kwargs. + """ + if callable_obj is None: + self.msg = kwargs.pop('msg', None) + return self + with self: + callable_obj(*args, **kwargs) + +class _AssertWarnsContext(_AssertRaisesBaseContext): + """A context manager used to implement TestCase.assertWarns* methods.""" + + def __enter__(self): + # The __warningregistry__'s need to be in a pristine state for tests + # to work properly. + for v in sys.modules.values(): + if getattr(v, '__warningregistry__', None): + v.__warningregistry__ = {} + self.warnings_manager = warnings.catch_warnings(record=True) + self.warnings = self.warnings_manager.__enter__() + warnings.simplefilter("always", self.expected) + return self + + def __exit__(self, exc_type, exc_value, tb): + self.warnings_manager.__exit__(exc_type, exc_value, tb) + if exc_type is not None: + # let unexpected exceptions pass through + return + try: + exc_name = self.expected.__name__ + except AttributeError: + exc_name = str(self.expected) + first_matching = None + for m in self.warnings: + w = m.message + if not isinstance(w, self.expected): + continue + if first_matching is None: + first_matching = w + if (self.expected_regex is not None and + not self.expected_regex.search(str(w))): + continue + # store warning for later retrieval + self.warning = w + self.filename = m.filename + self.lineno = m.lineno + return + # Now we simply try to choose a helpful failure message + if first_matching is not None: + self._raiseFailure('"{}" does not match "{}"'.format( + self.expected_regex.pattern, str(first_matching))) + if self.obj_name: + self._raiseFailure("{} not triggered by {}".format(exc_name, + self.obj_name)) + else: + self._raiseFailure("{} not triggered".format(exc_name)) + + +def assertWarns(self, expected_warning, callable_obj=None, *args, **kwargs): + """Fail unless a warning of class warnClass is triggered + by callable_obj when invoked with arguments args and keyword + arguments kwargs. If a different type of warning is + triggered, it will not be handled: depending on the other + warning filtering rules in effect, it might be silenced, printed + out, or raised as an exception. 
+ + If called with callable_obj omitted or None, will return a + context object used like this:: + + with self.assertWarns(SomeWarning): + do_something() + + An optional keyword argument 'msg' can be provided when assertWarns + is used as a context object. + + The context manager keeps a reference to the first matching + warning as the 'warning' attribute; similarly, the 'filename' + and 'lineno' attributes give you information about the line + of Python code from which the warning was triggered. + This allows you to inspect the warning after the assertion:: + + with self.assertWarns(SomeWarning) as cm: + do_something() + the_warning = cm.warning + self.assertEqual(the_warning.some_attribute, 147) + """ + context = _AssertWarnsContext(expected_warning, self, callable_obj) + return context.handle('assertWarns', callable_obj, args, kwargs) + +if not hasattr(unittest.TestCase, 'assertWarns'): + bind_method(unittest.TestCase, 'assertWarns', assertWarns) diff --git a/minor_project/lib/python3.6/site-packages/future/types/__init__.py b/minor_project/lib/python3.6/site-packages/future/types/__init__.py new file mode 100644 index 0000000..0625077 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/types/__init__.py @@ -0,0 +1,257 @@ +""" +This module contains backports the data types that were significantly changed +in the transition from Python 2 to Python 3. + +- an implementation of Python 3's bytes object (pure Python subclass of + Python 2's builtin 8-bit str type) +- an implementation of Python 3's str object (pure Python subclass of + Python 2's builtin unicode type) +- a backport of the range iterator from Py3 with slicing support + +It is used as follows:: + + from __future__ import division, absolute_import, print_function + from builtins import bytes, dict, int, range, str + +to bring in the new semantics for these functions from Python 3. And +then, for example:: + + b = bytes(b'ABCD') + assert list(b) == [65, 66, 67, 68] + assert repr(b) == "b'ABCD'" + assert [65, 66] in b + + # These raise TypeErrors: + # b + u'EFGH' + # b.split(u'B') + # bytes(b',').join([u'Fred', u'Bill']) + + + s = str(u'ABCD') + + # These raise TypeErrors: + # s.join([b'Fred', b'Bill']) + # s.startswith(b'A') + # b'B' in s + # s.find(b'A') + # s.replace(u'A', b'a') + + # This raises an AttributeError: + # s.decode('utf-8') + + assert repr(s) == 'ABCD' # consistent repr with Py3 (no u prefix) + + + for i in range(10**11)[:10]: + pass + +and:: + + class VerboseList(list): + def append(self, item): + print('Adding an item') + super().append(item) # new simpler super() function + +For more information: +--------------------- + +- future.types.newbytes +- future.types.newdict +- future.types.newint +- future.types.newobject +- future.types.newrange +- future.types.newstr + + +Notes +===== + +range() +------- +``range`` is a custom class that backports the slicing behaviour from +Python 3 (based on the ``xrange`` module by Dan Crosta). See the +``newrange`` module docstring for more details. + + +super() +------- +``super()`` is based on Ryan Kelly's ``magicsuper`` module. See the +``newsuper`` module docstring for more details. + + +round() +------- +Python 3 modifies the behaviour of ``round()`` to use "Banker's Rounding". +See http://stackoverflow.com/a/10825998. See the ``newround`` module +docstring for more details. 
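+
+newtypes
+--------
+This module also defines a ``newtypes`` dict (at the bottom of this file)
+mapping each builtin type to its backported equivalent on Py2, or to itself
+on Py3. A small sketch:
+
+    >>> from future.types import newtypes
+    >>> NewDict = newtypes[dict]     # newdict on Py2, plain dict on Py3
+    >>> d = NewDict(one=1, two=2)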
+ +""" + +from __future__ import absolute_import, division, print_function + +import functools +from numbers import Integral + +from future import utils + + +# Some utility functions to enforce strict type-separation of unicode str and +# bytes: +def disallow_types(argnums, disallowed_types): + """ + A decorator that raises a TypeError if any of the given numbered + arguments is of the corresponding given type (e.g. bytes or unicode + string). + + For example: + + @disallow_types([0, 1], [unicode, bytes]) + def f(a, b): + pass + + raises a TypeError when f is called if a unicode object is passed as + `a` or a bytes object is passed as `b`. + + This also skips over keyword arguments, so + + @disallow_types([0, 1], [unicode, bytes]) + def g(a, b=None): + pass + + doesn't raise an exception if g is called with only one argument a, + e.g.: + + g(b'Byte string') + + Example use: + + >>> class newbytes(object): + ... @disallow_types([1], [unicode]) + ... def __add__(self, other): + ... pass + + >>> newbytes('1234') + u'1234' #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + TypeError: can't concat 'bytes' to (unicode) str + """ + + def decorator(function): + + @functools.wraps(function) + def wrapper(*args, **kwargs): + # These imports are just for this decorator, and are defined here + # to prevent circular imports: + from .newbytes import newbytes + from .newint import newint + from .newstr import newstr + + errmsg = "argument can't be {0}" + for (argnum, mytype) in zip(argnums, disallowed_types): + # Handle the case where the type is passed as a string like 'newbytes'. + if isinstance(mytype, str) or isinstance(mytype, bytes): + mytype = locals()[mytype] + + # Only restrict kw args only if they are passed: + if len(args) <= argnum: + break + + # Here we use type() rather than isinstance() because + # __instancecheck__ is being overridden. E.g. + # isinstance(b'abc', newbytes) is True on Py2. + if type(args[argnum]) == mytype: + raise TypeError(errmsg.format(mytype)) + + return function(*args, **kwargs) + return wrapper + return decorator + + +def no(mytype, argnums=(1,)): + """ + A shortcut for the disallow_types decorator that disallows only one type + (in any position in argnums). + + Example use: + + >>> class newstr(object): + ... @no('bytes') + ... def __add__(self, other): + ... pass + + >>> newstr(u'1234') + b'1234' #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + TypeError: argument can't be bytes + + The object can also be passed directly, but passing the string helps + to prevent circular import problems. 
+ """ + if isinstance(argnums, Integral): + argnums = (argnums,) + disallowed_types = [mytype] * len(argnums) + return disallow_types(argnums, disallowed_types) + + +def issubset(list1, list2): + """ + Examples: + + >>> issubset([], [65, 66, 67]) + True + >>> issubset([65], [65, 66, 67]) + True + >>> issubset([65, 66], [65, 66, 67]) + True + >>> issubset([65, 67], [65, 66, 67]) + False + """ + n = len(list1) + for startpos in range(len(list2) - n + 1): + if list2[startpos:startpos+n] == list1: + return True + return False + + +if utils.PY3: + import builtins + bytes = builtins.bytes + dict = builtins.dict + int = builtins.int + list = builtins.list + object = builtins.object + range = builtins.range + str = builtins.str + + # The identity mapping + newtypes = {bytes: bytes, + dict: dict, + int: int, + list: list, + object: object, + range: range, + str: str} + + __all__ = ['newtypes'] + +else: + + from .newbytes import newbytes + from .newdict import newdict + from .newint import newint + from .newlist import newlist + from .newrange import newrange + from .newobject import newobject + from .newstr import newstr + + newtypes = {bytes: newbytes, + dict: newdict, + int: newint, + long: newint, + list: newlist, + object: newobject, + range: newrange, + str: newbytes, + unicode: newstr} + + __all__ = ['newbytes', 'newdict', 'newint', 'newlist', 'newrange', 'newstr', 'newtypes'] diff --git a/minor_project/lib/python3.6/site-packages/future/types/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..9994ae3 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newbytes.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newbytes.cpython-36.pyc new file mode 100644 index 0000000..c0909f6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newbytes.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newdict.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newdict.cpython-36.pyc new file mode 100644 index 0000000..073fe91 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newdict.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newint.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newint.cpython-36.pyc new file mode 100644 index 0000000..1aff2bd Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newint.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newlist.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newlist.cpython-36.pyc new file mode 100644 index 0000000..c96ab5b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newlist.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newmemoryview.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newmemoryview.cpython-36.pyc new file mode 100644 index 0000000..8a865fa Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newmemoryview.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newobject.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newobject.cpython-36.pyc new file mode 100644 index 0000000..d9d8230 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newobject.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newopen.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newopen.cpython-36.pyc new file mode 100644 index 0000000..93514e9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newopen.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newrange.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newrange.cpython-36.pyc new file mode 100644 index 0000000..25dde0d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newrange.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newstr.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newstr.cpython-36.pyc new file mode 100644 index 0000000..ea51c3e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/types/__pycache__/newstr.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/types/newbytes.py b/minor_project/lib/python3.6/site-packages/future/types/newbytes.py new file mode 100644 index 0000000..c9d584a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/types/newbytes.py @@ -0,0 +1,460 @@ +""" +Pure-Python implementation of a Python 3-like bytes object for Python 2. + +Why do this? Without it, the Python 2 bytes object is a very, very +different beast to the Python 3 bytes object. +""" + +from numbers import Integral +import string +import copy + +from future.utils import istext, isbytes, PY2, PY3, with_metaclass +from future.types import no, issubset +from future.types.newobject import newobject + +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + + +_builtin_bytes = bytes + +if PY3: + # We'll probably never use newstr on Py3 anyway... + unicode = str + + +class BaseNewBytes(type): + def __instancecheck__(cls, instance): + if cls == newbytes: + return isinstance(instance, _builtin_bytes) + else: + return issubclass(instance.__class__, cls) + + +def _newchr(x): + if isinstance(x, str): # this happens on pypy + return x.encode('ascii') + else: + return chr(x) + + +class newbytes(with_metaclass(BaseNewBytes, _builtin_bytes)): + """ + A backport of the Python 3 bytes object to Py2 + """ + def __new__(cls, *args, **kwargs): + """ + From the Py3 bytes docstring: + + bytes(iterable_of_ints) -> bytes + bytes(string, encoding[, errors]) -> bytes + bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer + bytes(int) -> bytes object of size given by the parameter initialized with null bytes + bytes() -> empty bytes object + + Construct an immutable array of bytes from: + - an iterable yielding integers in range(256) + - a text string encoded using the specified encoding + - any object implementing the buffer API. 
+ - an integer + """ + + encoding = None + errors = None + + if len(args) == 0: + return super(newbytes, cls).__new__(cls) + elif len(args) >= 2: + args = list(args) + if len(args) == 3: + errors = args.pop() + encoding=args.pop() + # Was: elif isinstance(args[0], newbytes): + # We use type() instead of the above because we're redefining + # this to be True for all unicode string subclasses. Warning: + # This may render newstr un-subclassable. + if type(args[0]) == newbytes: + # Special-case: for consistency with Py3.3, we return the same object + # (with the same id) if a newbytes object is passed into the + # newbytes constructor. + return args[0] + elif isinstance(args[0], _builtin_bytes): + value = args[0] + elif isinstance(args[0], unicode): + try: + if 'encoding' in kwargs: + assert encoding is None + encoding = kwargs['encoding'] + if 'errors' in kwargs: + assert errors is None + errors = kwargs['errors'] + except AssertionError: + raise TypeError('Argument given by name and position') + if encoding is None: + raise TypeError('unicode string argument without an encoding') + ### + # Was: value = args[0].encode(**kwargs) + # Python 2.6 string encode() method doesn't take kwargs: + # Use this instead: + newargs = [encoding] + if errors is not None: + newargs.append(errors) + value = args[0].encode(*newargs) + ### + elif hasattr(args[0], '__bytes__'): + value = args[0].__bytes__() + elif isinstance(args[0], Iterable): + if len(args[0]) == 0: + # This could be an empty list or tuple. Return b'' as on Py3. + value = b'' + else: + # Was: elif len(args[0])>0 and isinstance(args[0][0], Integral): + # # It's a list of integers + # But then we can't index into e.g. frozensets. Try to proceed + # anyway. + try: + value = bytearray([_newchr(x) for x in args[0]]) + except: + raise ValueError('bytes must be in range(0, 256)') + elif isinstance(args[0], Integral): + if args[0] < 0: + raise ValueError('negative count') + value = b'\x00' * args[0] + else: + value = args[0] + if type(value) == newbytes: + # Above we use type(...) rather than isinstance(...) because the + # newbytes metaclass overrides __instancecheck__. + # oldbytes(value) gives the wrong thing on Py2: the same + # result as str(value) on Py3, e.g. "b'abc'". (Issue #193). 
+ # So we handle this case separately: + return copy.copy(value) + else: + return super(newbytes, cls).__new__(cls, value) + + def __repr__(self): + return 'b' + super(newbytes, self).__repr__() + + def __str__(self): + return 'b' + "'{0}'".format(super(newbytes, self).__str__()) + + def __getitem__(self, y): + value = super(newbytes, self).__getitem__(y) + if isinstance(y, Integral): + return ord(value) + else: + return newbytes(value) + + def __getslice__(self, *args): + return self.__getitem__(slice(*args)) + + def __contains__(self, key): + if isinstance(key, int): + newbyteskey = newbytes([key]) + # Don't use isinstance() here because we only want to catch + # newbytes, not Python 2 str: + elif type(key) == newbytes: + newbyteskey = key + else: + newbyteskey = newbytes(key) + return issubset(list(newbyteskey), list(self)) + + @no(unicode) + def __add__(self, other): + return newbytes(super(newbytes, self).__add__(other)) + + @no(unicode) + def __radd__(self, left): + return newbytes(left) + self + + @no(unicode) + def __mul__(self, other): + return newbytes(super(newbytes, self).__mul__(other)) + + @no(unicode) + def __rmul__(self, other): + return newbytes(super(newbytes, self).__rmul__(other)) + + def __mod__(self, vals): + if isinstance(vals, newbytes): + vals = _builtin_bytes.__str__(vals) + + elif isinstance(vals, tuple): + newvals = [] + for v in vals: + if isinstance(v, newbytes): + v = _builtin_bytes.__str__(v) + newvals.append(v) + vals = tuple(newvals) + + elif (hasattr(vals.__class__, '__getitem__') and + hasattr(vals.__class__, 'iteritems')): + for k, v in vals.iteritems(): + if isinstance(v, newbytes): + vals[k] = _builtin_bytes.__str__(v) + + return _builtin_bytes.__mod__(self, vals) + + def __imod__(self, other): + return self.__mod__(other) + + def join(self, iterable_of_bytes): + errmsg = 'sequence item {0}: expected bytes, {1} found' + if isbytes(iterable_of_bytes) or istext(iterable_of_bytes): + raise TypeError(errmsg.format(0, type(iterable_of_bytes))) + for i, item in enumerate(iterable_of_bytes): + if istext(item): + raise TypeError(errmsg.format(i, type(item))) + return newbytes(super(newbytes, self).join(iterable_of_bytes)) + + @classmethod + def fromhex(cls, string): + # Only on Py2: + return cls(string.replace(' ', '').decode('hex')) + + @no(unicode) + def find(self, sub, *args): + return super(newbytes, self).find(sub, *args) + + @no(unicode) + def rfind(self, sub, *args): + return super(newbytes, self).rfind(sub, *args) + + @no(unicode, (1, 2)) + def replace(self, old, new, *args): + return newbytes(super(newbytes, self).replace(old, new, *args)) + + def encode(self, *args): + raise AttributeError("encode method has been disabled in newbytes") + + def decode(self, encoding='utf-8', errors='strict'): + """ + Returns a newstr (i.e. unicode subclass) + + Decode B using the codec registered for encoding. Default encoding + is 'utf-8'. errors may be given to set a different error + handling scheme. Default is 'strict' meaning that encoding errors raise + a UnicodeDecodeError. Other possible values are 'ignore' and 'replace' + as well as any other name registered with codecs.register_error that is + able to handle UnicodeDecodeErrors. + """ + # Py2 str.encode() takes encoding and errors as optional parameter, + # not keyword arguments as in Python 3 str. 
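+        # The same positional calling convention applies to str.decode(),
+        # which is why encoding and errors are passed positionally in the
+        # super() call below.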
+ + from future.types.newstr import newstr + + if errors == 'surrogateescape': + from future.utils.surrogateescape import register_surrogateescape + register_surrogateescape() + + return newstr(super(newbytes, self).decode(encoding, errors)) + + # This is currently broken: + # # We implement surrogateescape error handling here in addition rather + # # than relying on the custom error handler from + # # future.utils.surrogateescape to be registered globally, even though + # # that is fine in the case of decoding. (But not encoding: see the + # # comments in newstr.encode()``.) + # + # if errors == 'surrogateescape': + # # Decode char by char + # mybytes = [] + # for code in self: + # # Code is an int + # if 0x80 <= code <= 0xFF: + # b = 0xDC00 + code + # elif code <= 0x7F: + # b = _unichr(c).decode(encoding=encoding) + # else: + # # # It may be a bad byte + # # FIXME: What to do in this case? See the Py3 docs / tests. + # # # Try swallowing it. + # # continue + # # print("RAISE!") + # raise NotASurrogateError + # mybytes.append(b) + # return newbytes(mybytes) + # return newbytes(super(newstr, self).decode(encoding, errors)) + + @no(unicode) + def startswith(self, prefix, *args): + return super(newbytes, self).startswith(prefix, *args) + + @no(unicode) + def endswith(self, prefix, *args): + return super(newbytes, self).endswith(prefix, *args) + + @no(unicode) + def split(self, sep=None, maxsplit=-1): + # Py2 str.split() takes maxsplit as an optional parameter, not as a + # keyword argument as in Python 3 bytes. + parts = super(newbytes, self).split(sep, maxsplit) + return [newbytes(part) for part in parts] + + def splitlines(self, keepends=False): + """ + B.splitlines([keepends]) -> list of lines + + Return a list of the lines in B, breaking at line boundaries. + Line breaks are not included in the resulting list unless keepends + is given and true. + """ + # Py2 str.splitlines() takes keepends as an optional parameter, + # not as a keyword argument as in Python 3 bytes. + parts = super(newbytes, self).splitlines(keepends) + return [newbytes(part) for part in parts] + + @no(unicode) + def rsplit(self, sep=None, maxsplit=-1): + # Py2 str.rsplit() takes maxsplit as an optional parameter, not as a + # keyword argument as in Python 3 bytes. + parts = super(newbytes, self).rsplit(sep, maxsplit) + return [newbytes(part) for part in parts] + + @no(unicode) + def partition(self, sep): + parts = super(newbytes, self).partition(sep) + return tuple(newbytes(part) for part in parts) + + @no(unicode) + def rpartition(self, sep): + parts = super(newbytes, self).rpartition(sep) + return tuple(newbytes(part) for part in parts) + + @no(unicode, (1,)) + def rindex(self, sub, *args): + ''' + S.rindex(sub [,start [,end]]) -> int + + Like S.rfind() but raise ValueError when the substring is not found. + ''' + pos = self.rfind(sub, *args) + if pos == -1: + raise ValueError('substring not found') + + @no(unicode) + def index(self, sub, *args): + ''' + Returns index of sub in bytes. + Raises ValueError if byte is not in bytes and TypeError if can't + be converted bytes or its length is not 1. 
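+
+        A sketch of the intended (Py3-like) behaviour:
+
+        >>> b = newbytes(b'ABCA')
+        >>> b.index(65)       # an integer is looked up as a byte value
+        0
+        >>> b.index(b'C')
+        2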
+ ''' + if isinstance(sub, int): + if len(args) == 0: + start, end = 0, len(self) + elif len(args) == 1: + start = args[0] + elif len(args) == 2: + start, end = args + else: + raise TypeError('takes at most 3 arguments') + return list(self)[start:end].index(sub) + if not isinstance(sub, bytes): + try: + sub = self.__class__(sub) + except (TypeError, ValueError): + raise TypeError("can't convert sub to bytes") + try: + return super(newbytes, self).index(sub, *args) + except ValueError: + raise ValueError('substring not found') + + def __eq__(self, other): + if isinstance(other, (_builtin_bytes, bytearray)): + return super(newbytes, self).__eq__(other) + else: + return False + + def __ne__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__ne__(other) + else: + return True + + unorderable_err = 'unorderable types: bytes() and {0}' + + def __lt__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__lt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __le__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__le__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __gt__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__gt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __ge__(self, other): + if isinstance(other, _builtin_bytes): + return super(newbytes, self).__ge__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __native__(self): + # We can't just feed a newbytes object into str(), because + # newbytes.__str__() returns e.g. "b'blah'", consistent with Py3 bytes. + return super(newbytes, self).__str__() + + def __getattribute__(self, name): + """ + A trick to cause the ``hasattr`` builtin-fn to return False for + the 'encode' method on Py2. + """ + if name in ['encode', u'encode']: + raise AttributeError("encode method has been disabled in newbytes") + return super(newbytes, self).__getattribute__(name) + + @no(unicode) + def rstrip(self, bytes_to_strip=None): + """ + Strip trailing bytes contained in the argument. + If the argument is omitted, strip trailing ASCII whitespace. + """ + return newbytes(super(newbytes, self).rstrip(bytes_to_strip)) + + @no(unicode) + def strip(self, bytes_to_strip=None): + """ + Strip leading and trailing bytes contained in the argument. + If the argument is omitted, strip trailing ASCII whitespace. + """ + return newbytes(super(newbytes, self).strip(bytes_to_strip)) + + def lower(self): + """ + b.lower() -> copy of b + + Return a copy of b with all ASCII characters converted to lowercase. + """ + return newbytes(super(newbytes, self).lower()) + + @no(unicode) + def upper(self): + """ + b.upper() -> copy of b + + Return a copy of b with all ASCII characters converted to uppercase. + """ + return newbytes(super(newbytes, self).upper()) + + @classmethod + @no(unicode) + def maketrans(cls, frm, to): + """ + B.maketrans(frm, to) -> translation table + + Return a translation table (a bytes object of length 256) suitable + for use in the bytes or bytearray translate method where each byte + in frm is mapped to the byte at the same position in to. + The bytes objects frm and to must be of the same length. 
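+ Illustrative example (assuming `bytes` is this class on Py2):
+ >>> table = bytes.maketrans(b'abc', b'xyz')
+ >>> len(table)
+ 256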
+ """ + return newbytes(string.maketrans(frm, to)) + + +__all__ = ['newbytes'] diff --git a/minor_project/lib/python3.6/site-packages/future/types/newdict.py b/minor_project/lib/python3.6/site-packages/future/types/newdict.py new file mode 100644 index 0000000..3f3a559 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/types/newdict.py @@ -0,0 +1,111 @@ +""" +A dict subclass for Python 2 that behaves like Python 3's dict + +Example use: + +>>> from builtins import dict +>>> d1 = dict() # instead of {} for an empty dict +>>> d2 = dict(key1='value1', key2='value2') + +The keys, values and items methods now return iterators on Python 2.x +(with set-like behaviour on Python 2.7). + +>>> for d in (d1, d2): +... assert not isinstance(d.keys(), list) +... assert not isinstance(d.values(), list) +... assert not isinstance(d.items(), list) +""" + +import sys + +from future.utils import with_metaclass +from future.types.newobject import newobject + + +_builtin_dict = dict +ver = sys.version_info[:2] + + +class BaseNewDict(type): + def __instancecheck__(cls, instance): + if cls == newdict: + return isinstance(instance, _builtin_dict) + else: + return issubclass(instance.__class__, cls) + + +class newdict(with_metaclass(BaseNewDict, _builtin_dict)): + """ + A backport of the Python 3 dict object to Py2 + """ + def items(self): + """ + On Python 2.7+: + D.items() -> a set-like object providing a view on D's items + On Python 2.6: + D.items() -> an iterator over D's items + """ + if ver == (2, 7): + return self.viewitems() + elif ver == (2, 6): + return self.iteritems() + elif ver >= (3, 0): + return self.items() + + def keys(self): + """ + On Python 2.7+: + D.keys() -> a set-like object providing a view on D's keys + On Python 2.6: + D.keys() -> an iterator over D's keys + """ + if ver == (2, 7): + return self.viewkeys() + elif ver == (2, 6): + return self.iterkeys() + elif ver >= (3, 0): + return self.keys() + + def values(self): + """ + On Python 2.7+: + D.values() -> a set-like object providing a view on D's values + On Python 2.6: + D.values() -> an iterator over D's values + """ + if ver == (2, 7): + return self.viewvalues() + elif ver == (2, 6): + return self.itervalues() + elif ver >= (3, 0): + return self.values() + + def __new__(cls, *args, **kwargs): + """ + dict() -> new empty dictionary + dict(mapping) -> new dictionary initialized from a mapping object's + (key, value) pairs + dict(iterable) -> new dictionary initialized as if via: + d = {} + for k, v in iterable: + d[k] = v + dict(**kwargs) -> new dictionary initialized with the name=value pairs + in the keyword argument list. For example: dict(one=1, two=2) + """ + + if len(args) == 0: + return super(newdict, cls).__new__(cls) + elif type(args[0]) == newdict: + value = args[0] + else: + value = args[0] + return super(newdict, cls).__new__(cls, value) + + def __native__(self): + """ + Hook for the future.utils.native() function + """ + return dict(self) + + +__all__ = ['newdict'] diff --git a/minor_project/lib/python3.6/site-packages/future/types/newint.py b/minor_project/lib/python3.6/site-packages/future/types/newint.py new file mode 100644 index 0000000..748dba9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/types/newint.py @@ -0,0 +1,381 @@ +""" +Backport of Python 3's int, based on Py2's long. + +They are very similar. 
The most notable difference is: + +- representation: trailing L in Python 2 removed in Python 3 +""" +from __future__ import division + +import struct + +from future.types.newbytes import newbytes +from future.types.newobject import newobject +from future.utils import PY3, isint, istext, isbytes, with_metaclass, native + + +if PY3: + long = int + from collections.abc import Iterable +else: + from collections import Iterable + + +class BaseNewInt(type): + def __instancecheck__(cls, instance): + if cls == newint: + # Special case for Py2 short or long int + return isinstance(instance, (int, long)) + else: + return issubclass(instance.__class__, cls) + + +class newint(with_metaclass(BaseNewInt, long)): + """ + A backport of the Python 3 int object to Py2 + """ + def __new__(cls, x=0, base=10): + """ + From the Py3 int docstring: + + | int(x=0) -> integer + | int(x, base=10) -> integer + | + | Convert a number or string to an integer, or return 0 if no + | arguments are given. If x is a number, return x.__int__(). For + | floating point numbers, this truncates towards zero. + | + | If x is not a number or if base is given, then x must be a string, + | bytes, or bytearray instance representing an integer literal in the + | given base. The literal can be preceded by '+' or '-' and be + | surrounded by whitespace. The base defaults to 10. Valid bases are + | 0 and 2-36. Base 0 means to interpret the base from the string as an + | integer literal. + | >>> int('0b100', base=0) + | 4 + + """ + try: + val = x.__int__() + except AttributeError: + val = x + else: + if not isint(val): + raise TypeError('__int__ returned non-int ({0})'.format( + type(val))) + + if base != 10: + # Explicit base + if not (istext(val) or isbytes(val) or isinstance(val, bytearray)): + raise TypeError( + "int() can't convert non-string with explicit base") + try: + return super(newint, cls).__new__(cls, val, base) + except TypeError: + return super(newint, cls).__new__(cls, newbytes(val), base) + # After here, base is 10 + try: + return super(newint, cls).__new__(cls, val) + except TypeError: + # Py2 long doesn't handle bytearray input with an explicit base, so + # handle this here. 
+ # Py3: int(bytearray(b'10'), 2) == 2 + # Py2: int(bytearray(b'10'), 2) == 2 raises TypeError + # Py2: long(bytearray(b'10'), 2) == 2 raises TypeError + try: + return super(newint, cls).__new__(cls, newbytes(val)) + except: + raise TypeError("newint argument must be a string or a number," + "not '{0}'".format(type(val))) + + def __repr__(self): + """ + Without the L suffix + """ + value = super(newint, self).__repr__() + assert value[-1] == 'L' + return value[:-1] + + def __add__(self, other): + value = super(newint, self).__add__(other) + if value is NotImplemented: + return long(self) + other + return newint(value) + + def __radd__(self, other): + value = super(newint, self).__radd__(other) + if value is NotImplemented: + return other + long(self) + return newint(value) + + def __sub__(self, other): + value = super(newint, self).__sub__(other) + if value is NotImplemented: + return long(self) - other + return newint(value) + + def __rsub__(self, other): + value = super(newint, self).__rsub__(other) + if value is NotImplemented: + return other - long(self) + return newint(value) + + def __mul__(self, other): + value = super(newint, self).__mul__(other) + if isint(value): + return newint(value) + elif value is NotImplemented: + return long(self) * other + return value + + def __rmul__(self, other): + value = super(newint, self).__rmul__(other) + if isint(value): + return newint(value) + elif value is NotImplemented: + return other * long(self) + return value + + def __div__(self, other): + # We override this rather than e.g. relying on object.__div__ or + # long.__div__ because we want to wrap the value in a newint() + # call if other is another int + value = long(self) / other + if isinstance(other, (int, long)): + return newint(value) + else: + return value + + def __rdiv__(self, other): + value = other / long(self) + if isinstance(other, (int, long)): + return newint(value) + else: + return value + + def __idiv__(self, other): + # long has no __idiv__ method. 
Use __itruediv__ and cast back to + # newint: + value = self.__itruediv__(other) + if isinstance(other, (int, long)): + return newint(value) + else: + return value + + def __truediv__(self, other): + value = super(newint, self).__truediv__(other) + if value is NotImplemented: + value = long(self) / other + return value + + def __rtruediv__(self, other): + return super(newint, self).__rtruediv__(other) + + def __itruediv__(self, other): + # long has no __itruediv__ method + mylong = long(self) + mylong /= other + return mylong + + def __floordiv__(self, other): + return newint(super(newint, self).__floordiv__(other)) + + def __rfloordiv__(self, other): + return newint(super(newint, self).__rfloordiv__(other)) + + def __ifloordiv__(self, other): + # long has no __ifloordiv__ method + mylong = long(self) + mylong //= other + return newint(mylong) + + def __mod__(self, other): + value = super(newint, self).__mod__(other) + if value is NotImplemented: + return long(self) % other + return newint(value) + + def __rmod__(self, other): + value = super(newint, self).__rmod__(other) + if value is NotImplemented: + return other % long(self) + return newint(value) + + def __divmod__(self, other): + value = super(newint, self).__divmod__(other) + if value is NotImplemented: + mylong = long(self) + return (mylong // other, mylong % other) + return (newint(value[0]), newint(value[1])) + + def __rdivmod__(self, other): + value = super(newint, self).__rdivmod__(other) + if value is NotImplemented: + mylong = long(self) + return (other // mylong, other % mylong) + return (newint(value[0]), newint(value[1])) + + def __pow__(self, other): + value = super(newint, self).__pow__(other) + if value is NotImplemented: + return long(self) ** other + return newint(value) + + def __rpow__(self, other): + value = super(newint, self).__rpow__(other) + if value is NotImplemented: + return other ** long(self) + return newint(value) + + def __lshift__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for <<: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__lshift__(other)) + + def __rshift__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for >>: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__rshift__(other)) + + def __and__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for &: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__and__(other)) + + def __or__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for |: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__or__(other)) + + def __xor__(self, other): + if not isint(other): + raise TypeError( + "unsupported operand type(s) for ^: '%s' and '%s'" % + (type(self).__name__, type(other).__name__)) + return newint(super(newint, self).__xor__(other)) + + def __neg__(self): + return newint(super(newint, self).__neg__()) + + def __pos__(self): + return newint(super(newint, self).__pos__()) + + def __abs__(self): + return newint(super(newint, self).__abs__()) + + def __invert__(self): + return newint(super(newint, self).__invert__()) + + def __int__(self): + return self + + def __nonzero__(self): + return self.__bool__() + + def __bool__(self): + """ + So subclasses can override this, Py3-style + """ + return super(newint, 
self).__nonzero__() + + def __native__(self): + return long(self) + + def to_bytes(self, length, byteorder='big', signed=False): + """ + Return an array of bytes representing an integer. + + The integer is represented using length bytes. An OverflowError is + raised if the integer is not representable with the given number of + bytes. + + The byteorder argument determines the byte order used to represent the + integer. If byteorder is 'big', the most significant byte is at the + beginning of the byte array. If byteorder is 'little', the most + significant byte is at the end of the byte array. To request the native + byte order of the host system, use `sys.byteorder' as the byte order value. + + The signed keyword-only argument determines whether two's complement is + used to represent the integer. If signed is False and a negative integer + is given, an OverflowError is raised. + """ + if length < 0: + raise ValueError("length argument must be non-negative") + if length == 0 and self == 0: + return newbytes() + if signed and self < 0: + bits = length * 8 + num = (2**bits) + self + if num <= 0: + raise OverflowError("int too smal to convert") + else: + if self < 0: + raise OverflowError("can't convert negative int to unsigned") + num = self + if byteorder not in ('little', 'big'): + raise ValueError("byteorder must be either 'little' or 'big'") + h = b'%x' % num + s = newbytes((b'0'*(len(h) % 2) + h).zfill(length*2).decode('hex')) + if signed: + high_set = s[0] & 0x80 + if self > 0 and high_set: + raise OverflowError("int too big to convert") + if self < 0 and not high_set: + raise OverflowError("int too small to convert") + if len(s) > length: + raise OverflowError("int too big to convert") + return s if byteorder == 'big' else s[::-1] + + @classmethod + def from_bytes(cls, mybytes, byteorder='big', signed=False): + """ + Return the integer represented by the given array of bytes. + + The mybytes argument must either support the buffer protocol or be an + iterable object producing bytes. Bytes and bytearray are examples of + built-in objects that support the buffer protocol. + + The byteorder argument determines the byte order used to represent the + integer. If byteorder is 'big', the most significant byte is at the + beginning of the byte array. If byteorder is 'little', the most + significant byte is at the end of the byte array. To request the native + byte order of the host system, use `sys.byteorder' as the byte order value. + + The signed keyword-only argument indicates whether two's complement is + used to represent the integer. + """ + if byteorder not in ('little', 'big'): + raise ValueError("byteorder must be either 'little' or 'big'") + if isinstance(mybytes, unicode): + raise TypeError("cannot convert unicode objects to bytes") + # mybytes can also be passed as a sequence of integers on Py3. 
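+ # For illustration (assuming `int` is this newint class, e.g. via
+ # `from builtins import int` on Py2): int.from_bytes([1, 0], 'big') == 256.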
+ # Test for this: + elif isinstance(mybytes, Iterable): + mybytes = newbytes(mybytes) + b = mybytes if byteorder == 'big' else mybytes[::-1] + if len(b) == 0: + b = b'\x00' + # The encode() method has been disabled by newbytes, but Py2's + # str has it: + num = int(native(b).encode('hex'), 16) + if signed and (b[0] & 0x80): + num = num - (2 ** (len(b)*8)) + return cls(num) + + +# def _twos_comp(val, bits): +# """compute the 2's compliment of int value val""" +# if( (val&(1<<(bits-1))) != 0 ): +# val = val - (1<>> from builtins import list +>>> l1 = list() # instead of {} for an empty list +>>> l1.append('hello') +>>> l2 = l1.copy() + +""" + +import sys +import copy + +from future.utils import with_metaclass +from future.types.newobject import newobject + + +_builtin_list = list +ver = sys.version_info[:2] + + +class BaseNewList(type): + def __instancecheck__(cls, instance): + if cls == newlist: + return isinstance(instance, _builtin_list) + else: + return issubclass(instance.__class__, cls) + + +class newlist(with_metaclass(BaseNewList, _builtin_list)): + """ + A backport of the Python 3 list object to Py2 + """ + def copy(self): + """ + L.copy() -> list -- a shallow copy of L + """ + return copy.copy(self) + + def clear(self): + """L.clear() -> None -- remove all items from L""" + for i in range(len(self)): + self.pop() + + def __new__(cls, *args, **kwargs): + """ + list() -> new empty list + list(iterable) -> new list initialized from iterable's items + """ + + if len(args) == 0: + return super(newlist, cls).__new__(cls) + elif type(args[0]) == newlist: + value = args[0] + else: + value = args[0] + return super(newlist, cls).__new__(cls, value) + + def __add__(self, value): + return newlist(super(newlist, self).__add__(value)) + + def __radd__(self, left): + " left + self " + try: + return newlist(left) + self + except: + return NotImplemented + + def __getitem__(self, y): + """ + x.__getitem__(y) <==> x[y] + + Warning: a bug in Python 2.x prevents indexing via a slice from + returning a newlist object. + """ + if isinstance(y, slice): + return newlist(super(newlist, self).__getitem__(y)) + else: + return super(newlist, self).__getitem__(y) + + def __native__(self): + """ + Hook for the future.utils.native() function + """ + return list(self) + + def __nonzero__(self): + return len(self) > 0 + + +__all__ = ['newlist'] diff --git a/minor_project/lib/python3.6/site-packages/future/types/newmemoryview.py b/minor_project/lib/python3.6/site-packages/future/types/newmemoryview.py new file mode 100644 index 0000000..09f804d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/types/newmemoryview.py @@ -0,0 +1,29 @@ +""" +A pretty lame implementation of a memoryview object for Python 2.6. +""" +from numbers import Integral +import string + +from future.utils import istext, isbytes, PY2, with_metaclass +from future.types import no, issubset + +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + +# class BaseNewBytes(type): +# def __instancecheck__(cls, instance): +# return isinstance(instance, _builtin_bytes) + + +class newmemoryview(object): # with_metaclass(BaseNewBytes, _builtin_bytes)): + """ + A pretty lame backport of the Python 2.7 and Python 3.x + memoryviewview object to Py2.6. 
+ """ + def __init__(self, obj): + return obj + + +__all__ = ['newmemoryview'] diff --git a/minor_project/lib/python3.6/site-packages/future/types/newobject.py b/minor_project/lib/python3.6/site-packages/future/types/newobject.py new file mode 100644 index 0000000..31b84fc --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/types/newobject.py @@ -0,0 +1,117 @@ +""" +An object subclass for Python 2 that gives new-style classes written in the +style of Python 3 (with ``__next__`` and unicode-returning ``__str__`` methods) +the appropriate Python 2-style ``next`` and ``__unicode__`` methods for compatible. + +Example use:: + + from builtins import object + + my_unicode_str = u'Unicode string: \u5b54\u5b50' + + class A(object): + def __str__(self): + return my_unicode_str + + a = A() + print(str(a)) + + # On Python 2, these relations hold: + assert unicode(a) == my_unicode_string + assert str(a) == my_unicode_string.encode('utf-8') + + +Another example:: + + from builtins import object + + class Upper(object): + def __init__(self, iterable): + self._iter = iter(iterable) + def __next__(self): # note the Py3 interface + return next(self._iter).upper() + def __iter__(self): + return self + + assert list(Upper('hello')) == list('HELLO') + +""" + + +class newobject(object): + """ + A magical object class that provides Python 2 compatibility methods:: + next + __unicode__ + __nonzero__ + + Subclasses of this class can merely define the Python 3 methods (__next__, + __str__, and __bool__). + """ + def next(self): + if hasattr(self, '__next__'): + return type(self).__next__(self) + raise TypeError('newobject is not an iterator') + + def __unicode__(self): + # All subclasses of the builtin object should have __str__ defined. + # Note that old-style classes do not have __str__ defined. + if hasattr(self, '__str__'): + s = type(self).__str__(self) + else: + s = str(self) + if isinstance(s, unicode): + return s + else: + return s.decode('utf-8') + + def __nonzero__(self): + if hasattr(self, '__bool__'): + return type(self).__bool__(self) + if hasattr(self, '__len__'): + return type(self).__len__(self) + # object has no __nonzero__ method + return True + + # Are these ever needed? + # def __div__(self): + # return self.__truediv__() + + # def __idiv__(self, other): + # return self.__itruediv__(other) + + def __long__(self): + if not hasattr(self, '__int__'): + return NotImplemented + return self.__int__() # not type(self).__int__(self) + + # def __new__(cls, *args, **kwargs): + # """ + # dict() -> new empty dictionary + # dict(mapping) -> new dictionary initialized from a mapping object's + # (key, value) pairs + # dict(iterable) -> new dictionary initialized as if via: + # d = {} + # for k, v in iterable: + # d[k] = v + # dict(**kwargs) -> new dictionary initialized with the name=value pairs + # in the keyword argument list. 
For example: dict(one=1, two=2) + # """ + + # if len(args) == 0: + # return super(newdict, cls).__new__(cls) + # elif type(args[0]) == newdict: + # return args[0] + # else: + # value = args[0] + # return super(newdict, cls).__new__(cls, value) + + def __native__(self): + """ + Hook for the future.utils.native() function + """ + return object(self) + + __slots__ = [] + +__all__ = ['newobject'] diff --git a/minor_project/lib/python3.6/site-packages/future/types/newopen.py b/minor_project/lib/python3.6/site-packages/future/types/newopen.py new file mode 100644 index 0000000..b75d45a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/types/newopen.py @@ -0,0 +1,32 @@ +""" +A substitute for the Python 3 open() function. + +Note that io.open() is more complete but maybe slower. Even so, the +completeness may be a better default. TODO: compare these +""" + +_builtin_open = open + +class newopen(object): + """Wrapper providing key part of Python 3 open() interface. + + From IPython's py3compat.py module. License: BSD. + """ + def __init__(self, fname, mode="r", encoding="utf-8"): + self.f = _builtin_open(fname, mode) + self.enc = encoding + + def write(self, s): + return self.f.write(s.encode(self.enc)) + + def read(self, size=-1): + return self.f.read(size).decode(self.enc) + + def close(self): + return self.f.close() + + def __enter__(self): + return self + + def __exit__(self, etype, value, traceback): + self.f.close() diff --git a/minor_project/lib/python3.6/site-packages/future/types/newrange.py b/minor_project/lib/python3.6/site-packages/future/types/newrange.py new file mode 100644 index 0000000..eda01a5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/types/newrange.py @@ -0,0 +1,170 @@ +""" +Nearly identical to xrange.py, by Dan Crosta, from + + https://github.com/dcrosta/xrange.git + +This is included here in the ``future`` package rather than pointed to as +a dependency because there is no package for ``xrange`` on PyPI. It is +also tweaked to appear like a regular Python 3 ``range`` object rather +than a Python 2 xrange. + +From Dan Crosta's README: + + "A pure-Python implementation of Python 2.7's xrange built-in, with + some features backported from the Python 3.x range built-in (which + replaced xrange) in that version." + + Read more at + https://late.am/post/2012/06/18/what-the-heck-is-an-xrange +""" +from __future__ import absolute_import + +from future.utils import PY2 + +if PY2: + from collections import Sequence, Iterator +else: + from collections.abc import Sequence, Iterator +from itertools import islice + +from future.backports.misc import count # with step parameter on Py2.6 +# For backward compatibility with python-future versions < 0.14.4: +_count = count + + +class newrange(Sequence): + """ + Pure-Python backport of Python 3's range object. 
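+ Illustrative usage (assuming `range` is this class, e.g. via
+ `from builtins import range` on Py2):
+
+ >>> r = range(0, 10, 3)
+ >>> list(r)
+ [0, 3, 6, 9]
+ >>> len(r)
+ 4
+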
See `the CPython + documentation for details: + `_ + """ + + def __init__(self, *args): + if len(args) == 1: + start, stop, step = 0, args[0], 1 + elif len(args) == 2: + start, stop, step = args[0], args[1], 1 + elif len(args) == 3: + start, stop, step = args + else: + raise TypeError('range() requires 1-3 int arguments') + + try: + start, stop, step = int(start), int(stop), int(step) + except ValueError: + raise TypeError('an integer is required') + + if step == 0: + raise ValueError('range() arg 3 must not be zero') + elif step < 0: + stop = min(stop, start) + else: + stop = max(stop, start) + + self._start = start + self._stop = stop + self._step = step + self._len = (stop - start) // step + bool((stop - start) % step) + + @property + def start(self): + return self._start + + @property + def stop(self): + return self._stop + + @property + def step(self): + return self._step + + def __repr__(self): + if self._step == 1: + return 'range(%d, %d)' % (self._start, self._stop) + return 'range(%d, %d, %d)' % (self._start, self._stop, self._step) + + def __eq__(self, other): + return (isinstance(other, newrange) and + (self._len == 0 == other._len or + (self._start, self._step, self._len) == + (other._start, other._step, self._len))) + + def __len__(self): + return self._len + + def index(self, value): + """Return the 0-based position of integer `value` in + the sequence this range represents.""" + try: + diff = value - self._start + except TypeError: + raise ValueError('%r is not in range' % value) + quotient, remainder = divmod(diff, self._step) + if remainder == 0 and 0 <= quotient < self._len: + return abs(quotient) + raise ValueError('%r is not in range' % value) + + def count(self, value): + """Return the number of ocurrences of integer `value` + in the sequence this range represents.""" + # a value can occur exactly zero or one times + return int(value in self) + + def __contains__(self, value): + """Return ``True`` if the integer `value` occurs in + the sequence this range represents.""" + try: + self.index(value) + return True + except ValueError: + return False + + def __reversed__(self): + return iter(self[::-1]) + + def __getitem__(self, index): + """Return the element at position ``index`` in the sequence + this range represents, or raise :class:`IndexError` if the + position is out of range.""" + if isinstance(index, slice): + return self.__getitem_slice(index) + if index < 0: + # negative indexes access from the end + index = self._len + index + if index < 0 or index >= self._len: + raise IndexError('range object index out of range') + return self._start + index * self._step + + def __getitem_slice(self, slce): + """Return a range which represents the requested slce + of the sequence represented by this range. + """ + scaled_indices = (self._step * n for n in slce.indices(self._len)) + start_offset, stop_offset, new_step = scaled_indices + return newrange(self._start + start_offset, + self._start + stop_offset, + new_step) + + def __iter__(self): + """Return an iterator which enumerates the elements of the + sequence this range represents.""" + return range_iterator(self) + + +class range_iterator(Iterator): + """An iterator for a :class:`range`. 
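+ For illustration, ``next(iter(range(3)))`` returns ``0`` on both Py2 and Py3
+ (assuming ``range`` is the newrange class above).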
+ """ + def __init__(self, range_): + self._stepper = islice(count(range_.start, range_.step), len(range_)) + + def __iter__(self): + return self + + def __next__(self): + return next(self._stepper) + + def next(self): + return next(self._stepper) + + +__all__ = ['newrange'] diff --git a/minor_project/lib/python3.6/site-packages/future/types/newstr.py b/minor_project/lib/python3.6/site-packages/future/types/newstr.py new file mode 100644 index 0000000..8ca191f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/types/newstr.py @@ -0,0 +1,426 @@ +""" +This module redefines ``str`` on Python 2.x to be a subclass of the Py2 +``unicode`` type that behaves like the Python 3.x ``str``. + +The main differences between ``newstr`` and Python 2.x's ``unicode`` type are +the stricter type-checking and absence of a `u''` prefix in the representation. + +It is designed to be used together with the ``unicode_literals`` import +as follows: + + >>> from __future__ import unicode_literals + >>> from builtins import str, isinstance + +On Python 3.x and normally on Python 2.x, these expressions hold + + >>> str('blah') is 'blah' + True + >>> isinstance('blah', str) + True + +However, on Python 2.x, with this import: + + >>> from __future__ import unicode_literals + +the same expressions are False: + + >>> str('blah') is 'blah' + False + >>> isinstance('blah', str) + False + +This module is designed to be imported together with ``unicode_literals`` on +Python 2 to bring the meaning of ``str`` back into alignment with unprefixed +string literals (i.e. ``unicode`` subclasses). + +Note that ``str()`` (and ``print()``) would then normally call the +``__unicode__`` method on objects in Python 2. To define string +representations of your objects portably across Py3 and Py2, use the +:func:`python_2_unicode_compatible` decorator in :mod:`future.utils`. + +""" + +from numbers import Number + +from future.utils import PY3, istext, with_metaclass, isnewbytes +from future.types import no, issubset +from future.types.newobject import newobject + + +if PY3: + # We'll probably never use newstr on Py3 anyway... + unicode = str + from collections.abc import Iterable +else: + from collections import Iterable + + +class BaseNewStr(type): + def __instancecheck__(cls, instance): + if cls == newstr: + return isinstance(instance, unicode) + else: + return issubclass(instance.__class__, cls) + + +class newstr(with_metaclass(BaseNewStr, unicode)): + """ + A backport of the Python 3 str object to Py2 + """ + no_convert_msg = "Can't convert '{0}' object to str implicitly" + + def __new__(cls, *args, **kwargs): + """ + From the Py3 str docstring: + + str(object='') -> str + str(bytes_or_buffer[, encoding[, errors]]) -> str + + Create a new string object from the given object. If encoding or + errors is specified, then the object must expose a data buffer + that will be decoded using the given encoding and error handler. + Otherwise, returns the result of object.__str__() (if defined) + or repr(object). + encoding defaults to sys.getdefaultencoding(). + errors defaults to 'strict'. + + """ + if len(args) == 0: + return super(newstr, cls).__new__(cls) + # Special case: If someone requests str(str(u'abc')), return the same + # object (same id) for consistency with Py3.3. This is not true for + # other objects like list or dict. + elif type(args[0]) == newstr and cls == newstr: + return args[0] + elif isinstance(args[0], unicode): + value = args[0] + elif isinstance(args[0], bytes): # i.e. 
Py2 bytes or newbytes + if 'encoding' in kwargs or len(args) > 1: + value = args[0].decode(*args[1:], **kwargs) + else: + value = args[0].__str__() + else: + value = args[0] + return super(newstr, cls).__new__(cls, value) + + def __repr__(self): + """ + Without the u prefix + """ + + value = super(newstr, self).__repr__() + # assert value[0] == u'u' + return value[1:] + + def __getitem__(self, y): + """ + Warning: Python <= 2.7.6 has a bug that causes this method never to be called + when y is a slice object. Therefore the type of newstr()[:2] is wrong + (unicode instead of newstr). + """ + return newstr(super(newstr, self).__getitem__(y)) + + def __contains__(self, key): + errmsg = "'in ' requires string as left operand, not {0}" + # Don't use isinstance() here because we only want to catch + # newstr, not Python 2 unicode: + if type(key) == newstr: + newkey = key + elif isinstance(key, unicode) or isinstance(key, bytes) and not isnewbytes(key): + newkey = newstr(key) + else: + raise TypeError(errmsg.format(type(key))) + return issubset(list(newkey), list(self)) + + @no('newbytes') + def __add__(self, other): + return newstr(super(newstr, self).__add__(other)) + + @no('newbytes') + def __radd__(self, left): + " left + self " + try: + return newstr(left) + self + except: + return NotImplemented + + def __mul__(self, other): + return newstr(super(newstr, self).__mul__(other)) + + def __rmul__(self, other): + return newstr(super(newstr, self).__rmul__(other)) + + def join(self, iterable): + errmsg = 'sequence item {0}: expected unicode string, found bytes' + for i, item in enumerate(iterable): + # Here we use type() rather than isinstance() because + # __instancecheck__ is being overridden. E.g. + # isinstance(b'abc', newbytes) is True on Py2. + if isnewbytes(item): + raise TypeError(errmsg.format(i)) + # Support use as a staticmethod: str.join('-', ['a', 'b']) + if type(self) == newstr: + return newstr(super(newstr, self).join(iterable)) + else: + return newstr(super(newstr, newstr(self)).join(iterable)) + + @no('newbytes') + def find(self, sub, *args): + return super(newstr, self).find(sub, *args) + + @no('newbytes') + def rfind(self, sub, *args): + return super(newstr, self).rfind(sub, *args) + + @no('newbytes', (1, 2)) + def replace(self, old, new, *args): + return newstr(super(newstr, self).replace(old, new, *args)) + + def decode(self, *args): + raise AttributeError("decode method has been disabled in newstr") + + def encode(self, encoding='utf-8', errors='strict'): + """ + Returns bytes + + Encode S using the codec registered for encoding. Default encoding + is 'utf-8'. errors may be given to set a different error + handling scheme. Default is 'strict' meaning that encoding errors raise + a UnicodeEncodeError. Other possible values are 'ignore', 'replace' and + 'xmlcharrefreplace' as well as any other name registered with + codecs.register_error that can handle UnicodeEncodeErrors. + """ + from future.types.newbytes import newbytes + # Py2 unicode.encode() takes encoding and errors as optional parameter, + # not keyword arguments as in Python 3 str. + + # For the surrogateescape error handling mechanism, the + # codecs.register_error() function seems to be inadequate for an + # implementation of it when encoding. (Decoding seems fine, however.) 
+ # For example, in the case of + # u'\udcc3'.encode('ascii', 'surrogateescape_handler') + # after registering the ``surrogateescape_handler`` function in + # future.utils.surrogateescape, both Python 2.x and 3.x raise an + # exception anyway after the function is called because the unicode + # string it has to return isn't encodable strictly as ASCII. + + if errors == 'surrogateescape': + if encoding == 'utf-16': + # Known to fail here. See test_encoding_works_normally() + raise NotImplementedError('FIXME: surrogateescape handling is ' + 'not yet implemented properly') + # Encode char by char, building up list of byte-strings + mybytes = [] + for c in self: + code = ord(c) + if 0xD800 <= code <= 0xDCFF: + mybytes.append(newbytes([code - 0xDC00])) + else: + mybytes.append(c.encode(encoding=encoding)) + return newbytes(b'').join(mybytes) + return newbytes(super(newstr, self).encode(encoding, errors)) + + @no('newbytes', 1) + def startswith(self, prefix, *args): + if isinstance(prefix, Iterable): + for thing in prefix: + if isnewbytes(thing): + raise TypeError(self.no_convert_msg.format(type(thing))) + return super(newstr, self).startswith(prefix, *args) + + @no('newbytes', 1) + def endswith(self, prefix, *args): + # Note we need the decorator above as well as the isnewbytes() + # check because prefix can be either a bytes object or e.g. a + # tuple of possible prefixes. (If it's a bytes object, each item + # in it is an int.) + if isinstance(prefix, Iterable): + for thing in prefix: + if isnewbytes(thing): + raise TypeError(self.no_convert_msg.format(type(thing))) + return super(newstr, self).endswith(prefix, *args) + + @no('newbytes', 1) + def split(self, sep=None, maxsplit=-1): + # Py2 unicode.split() takes maxsplit as an optional parameter, + # not as a keyword argument as in Python 3 str. + parts = super(newstr, self).split(sep, maxsplit) + return [newstr(part) for part in parts] + + @no('newbytes', 1) + def rsplit(self, sep=None, maxsplit=-1): + # Py2 unicode.rsplit() takes maxsplit as an optional parameter, + # not as a keyword argument as in Python 3 str. + parts = super(newstr, self).rsplit(sep, maxsplit) + return [newstr(part) for part in parts] + + @no('newbytes', 1) + def partition(self, sep): + parts = super(newstr, self).partition(sep) + return tuple(newstr(part) for part in parts) + + @no('newbytes', 1) + def rpartition(self, sep): + parts = super(newstr, self).rpartition(sep) + return tuple(newstr(part) for part in parts) + + @no('newbytes', 1) + def index(self, sub, *args): + """ + Like newstr.find() but raise ValueError when the substring is not + found. + """ + pos = self.find(sub, *args) + if pos == -1: + raise ValueError('substring not found') + return pos + + def splitlines(self, keepends=False): + """ + S.splitlines(keepends=False) -> list of strings + + Return a list of the lines in S, breaking at line boundaries. + Line breaks are not included in the resulting list unless keepends + is given and true. + """ + # Py2 unicode.splitlines() takes keepends as an optional parameter, + # not as a keyword argument as in Python 3 str. 
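+ # For illustration (assuming `str` is this newstr class on Py2):
+ #     str('ab\ncd\r\nef').splitlines() == ['ab', 'cd', 'ef']
+ #     str('ab\ncd\r\nef').splitlines(True) == ['ab\n', 'cd\r\n', 'ef']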
+ parts = super(newstr, self).splitlines(keepends) + return [newstr(part) for part in parts] + + def __eq__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__eq__(other) + else: + return NotImplemented + + def __hash__(self): + if (isinstance(self, unicode) or + isinstance(self, bytes) and not isnewbytes(self)): + return super(newstr, self).__hash__() + else: + raise NotImplementedError() + + def __ne__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__ne__(other) + else: + return True + + unorderable_err = 'unorderable types: str() and {0}' + + def __lt__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__lt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __le__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__le__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __gt__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__gt__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __ge__(self, other): + if (isinstance(other, unicode) or + isinstance(other, bytes) and not isnewbytes(other)): + return super(newstr, self).__ge__(other) + raise TypeError(self.unorderable_err.format(type(other))) + + def __getattribute__(self, name): + """ + A trick to cause the ``hasattr`` builtin-fn to return False for + the 'decode' method on Py2. + """ + if name in ['decode', u'decode']: + raise AttributeError("decode method has been disabled in newstr") + return super(newstr, self).__getattribute__(name) + + def __native__(self): + """ + A hook for the future.utils.native() function. + """ + return unicode(self) + + @staticmethod + def maketrans(x, y=None, z=None): + """ + Return a translation table usable for str.translate(). + + If there is only one argument, it must be a dictionary mapping Unicode + ordinals (integers) or characters to Unicode ordinals, strings or None. + Character keys will be then converted to ordinals. + If there are two arguments, they must be strings of equal length, and + in the resulting dictionary, each character in x will be mapped to the + character at the same position in y. If there is a third argument, it + must be a string, whose characters will be mapped to None in the result. 
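+ Illustrative example (comparing by equality rather than exact repr):
+ >>> str.maketrans('abc', 'xyz') == {97: 120, 98: 121, 99: 122}
+ True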
+ """ + + if y is None: + assert z is None + if not isinstance(x, dict): + raise TypeError('if you give only one argument to maketrans it must be a dict') + result = {} + for (key, value) in x.items(): + if len(key) > 1: + raise ValueError('keys in translate table must be strings or integers') + result[ord(key)] = value + else: + if not isinstance(x, unicode) and isinstance(y, unicode): + raise TypeError('x and y must be unicode strings') + if not len(x) == len(y): + raise ValueError('the first two maketrans arguments must have equal length') + result = {} + for (xi, yi) in zip(x, y): + if len(xi) > 1: + raise ValueError('keys in translate table must be strings or integers') + result[ord(xi)] = ord(yi) + + if z is not None: + for char in z: + result[ord(char)] = None + return result + + def translate(self, table): + """ + S.translate(table) -> str + + Return a copy of the string S, where all characters have been mapped + through the given translation table, which must be a mapping of + Unicode ordinals to Unicode ordinals, strings, or None. + Unmapped characters are left untouched. Characters mapped to None + are deleted. + """ + l = [] + for c in self: + if ord(c) in table: + val = table[ord(c)] + if val is None: + continue + elif isinstance(val, unicode): + l.append(val) + else: + l.append(chr(val)) + else: + l.append(c) + return ''.join(l) + + def isprintable(self): + raise NotImplementedError('fixme') + + def isidentifier(self): + raise NotImplementedError('fixme') + + def format_map(self): + raise NotImplementedError('fixme') + + +__all__ = ['newstr'] diff --git a/minor_project/lib/python3.6/site-packages/future/utils/__init__.py b/minor_project/lib/python3.6/site-packages/future/utils/__init__.py new file mode 100644 index 0000000..46bd96d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/utils/__init__.py @@ -0,0 +1,767 @@ +""" +A selection of cross-compatible functions for Python 2 and 3. + +This module exports useful functions for 2/3 compatible code: + + * bind_method: binds functions to classes + * ``native_str_to_bytes`` and ``bytes_to_native_str`` + * ``native_str``: always equal to the native platform string object (because + this may be shadowed by imports from future.builtins) + * lists: lrange(), lmap(), lzip(), lfilter() + * iterable method compatibility: + - iteritems, iterkeys, itervalues + - viewitems, viewkeys, viewvalues + + These use the original method if available, otherwise they use items, + keys, values. + + * types: + + * text_type: unicode in Python 2, str in Python 3 + * string_types: basestring in Python 2, str in Python 3 + * binary_type: str in Python 2, bytes in Python 3 + * integer_types: (int, long) in Python 2, int in Python 3 + * class_types: (type, types.ClassType) in Python 2, type in Python 3 + + * bchr(c): + Take an integer and make a 1-character byte string + * bord(c) + Take the result of indexing on a byte string and make an integer + * tobytes(s) + Take a text string, a byte string, or a sequence of characters taken + from a byte string, and make a byte string. 
+ + * raise_from() + * raise_with_traceback() + +This module also defines these decorators: + + * ``python_2_unicode_compatible`` + * ``with_metaclass`` + * ``implements_iterator`` + +Some of the functions in this module come from the following sources: + + * Jinja2 (BSD licensed: see + https://github.com/mitsuhiko/jinja2/blob/master/LICENSE) + * Pandas compatibility module pandas.compat + * six.py by Benjamin Peterson + * Django +""" + +import types +import sys +import numbers +import functools +import copy +import inspect + + +PY3 = sys.version_info[0] >= 3 +PY34_PLUS = sys.version_info[0:2] >= (3, 4) +PY35_PLUS = sys.version_info[0:2] >= (3, 5) +PY36_PLUS = sys.version_info[0:2] >= (3, 6) +PY2 = sys.version_info[0] == 2 +PY26 = sys.version_info[0:2] == (2, 6) +PY27 = sys.version_info[0:2] == (2, 7) +PYPY = hasattr(sys, 'pypy_translation_info') + + +def python_2_unicode_compatible(cls): + """ + A decorator that defines __unicode__ and __str__ methods under Python + 2. Under Python 3, this decorator is a no-op. + + To support Python 2 and 3 with a single code base, define a __str__ + method returning unicode text and apply this decorator to the class, like + this:: + + >>> from future.utils import python_2_unicode_compatible + + >>> @python_2_unicode_compatible + ... class MyClass(object): + ... def __str__(self): + ... return u'Unicode string: \u5b54\u5b50' + + >>> a = MyClass() + + Then, after this import: + + >>> from future.builtins import str + + the following is ``True`` on both Python 3 and 2:: + + >>> str(a) == a.encode('utf-8').decode('utf-8') + True + + and, on a Unicode-enabled terminal with the right fonts, these both print the + Chinese characters for Confucius:: + + >>> print(a) + >>> print(str(a)) + + The implementation comes from django.utils.encoding. + """ + if not PY3: + cls.__unicode__ = cls.__str__ + cls.__str__ = lambda self: self.__unicode__().encode('utf-8') + return cls + + +def with_metaclass(meta, *bases): + """ + Function from jinja2/_compat.py. License: BSD. + + Use it like this:: + + class BaseForm(object): + pass + + class FormType(type): + pass + + class Form(with_metaclass(FormType, BaseForm)): + pass + + This requires a bit of explanation: the basic idea is to make a + dummy metaclass for one level of class instantiation that replaces + itself with the actual metaclass. Because of internal type checks + we also need to make sure that we downgrade the custom metaclass + for one level to something closer to type (that's why __call__ and + __init__ comes back from type etc.). + + This has the advantage over six.with_metaclass of not introducing + dummy classes into the final MRO. 
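+ For comparison, on Python 3 alone the ``Form`` class above could be written
+ directly as::
+
+ class Form(BaseForm, metaclass=FormType):
+ pass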
+ """ + class metaclass(meta): + __call__ = type.__call__ + __init__ = type.__init__ + def __new__(cls, name, this_bases, d): + if this_bases is None: + return type.__new__(cls, name, (), d) + return meta(name, bases, d) + return metaclass('temporary_class', None, {}) + + +# Definitions from pandas.compat and six.py follow: +if PY3: + def bchr(s): + return bytes([s]) + def bstr(s): + if isinstance(s, str): + return bytes(s, 'latin-1') + else: + return bytes(s) + def bord(s): + return s + + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + +else: + # Python 2 + def bchr(s): + return chr(s) + def bstr(s): + return str(s) + def bord(s): + return ord(s) + + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + +### + +if PY3: + def tobytes(s): + if isinstance(s, bytes): + return s + else: + if isinstance(s, str): + return s.encode('latin-1') + else: + return bytes(s) +else: + # Python 2 + def tobytes(s): + if isinstance(s, unicode): + return s.encode('latin-1') + else: + return ''.join(s) + +tobytes.__doc__ = """ + Encodes to latin-1 (where the first 256 chars are the same as + ASCII.) + """ + +if PY3: + def native_str_to_bytes(s, encoding='utf-8'): + return s.encode(encoding) + + def bytes_to_native_str(b, encoding='utf-8'): + return b.decode(encoding) + + def text_to_native_str(t, encoding=None): + return t +else: + # Python 2 + def native_str_to_bytes(s, encoding=None): + from future.types import newbytes # to avoid a circular import + return newbytes(s) + + def bytes_to_native_str(b, encoding=None): + return native(b) + + def text_to_native_str(t, encoding='ascii'): + """ + Use this to create a Py2 native string when "from __future__ import + unicode_literals" is in effect. + """ + return unicode(t).encode(encoding) + +native_str_to_bytes.__doc__ = """ + On Py3, returns an encoded string. + On Py2, returns a newbytes type, ignoring the ``encoding`` argument. + """ + +if PY3: + # list-producing versions of the major Python iterating functions + def lrange(*args, **kwargs): + return list(range(*args, **kwargs)) + + def lzip(*args, **kwargs): + return list(zip(*args, **kwargs)) + + def lmap(*args, **kwargs): + return list(map(*args, **kwargs)) + + def lfilter(*args, **kwargs): + return list(filter(*args, **kwargs)) +else: + import __builtin__ + # Python 2-builtin ranges produce lists + lrange = __builtin__.range + lzip = __builtin__.zip + lmap = __builtin__.map + lfilter = __builtin__.filter + + +def isidentifier(s, dotted=False): + ''' + A function equivalent to the str.isidentifier method on Py3 + ''' + if dotted: + return all(isidentifier(a) for a in s.split('.')) + if PY3: + return s.isidentifier() + else: + import re + _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$") + return bool(_name_re.match(s)) + + +def viewitems(obj, **kwargs): + """ + Function for iterating over dictionary items with the same set-like + behaviour on Py2.7 as on Py3. + + Passes kwargs to method.""" + func = getattr(obj, "viewitems", None) + if not func: + func = obj.items + return func(**kwargs) + + +def viewkeys(obj, **kwargs): + """ + Function for iterating over dictionary keys with the same set-like + behaviour on Py2.7 as on Py3. 
+ + Passes kwargs to method.""" + func = getattr(obj, "viewkeys", None) + if not func: + func = obj.keys + return func(**kwargs) + + +def viewvalues(obj, **kwargs): + """ + Function for iterating over dictionary values with the same set-like + behaviour on Py2.7 as on Py3. + + Passes kwargs to method.""" + func = getattr(obj, "viewvalues", None) + if not func: + func = obj.values + return func(**kwargs) + + +def iteritems(obj, **kwargs): + """Use this only if compatibility with Python versions before 2.7 is + required. Otherwise, prefer viewitems(). + """ + func = getattr(obj, "iteritems", None) + if not func: + func = obj.items + return func(**kwargs) + + +def iterkeys(obj, **kwargs): + """Use this only if compatibility with Python versions before 2.7 is + required. Otherwise, prefer viewkeys(). + """ + func = getattr(obj, "iterkeys", None) + if not func: + func = obj.keys + return func(**kwargs) + + +def itervalues(obj, **kwargs): + """Use this only if compatibility with Python versions before 2.7 is + required. Otherwise, prefer viewvalues(). + """ + func = getattr(obj, "itervalues", None) + if not func: + func = obj.values + return func(**kwargs) + + +def bind_method(cls, name, func): + """Bind a method to class, python 2 and python 3 compatible. + + Parameters + ---------- + + cls : type + class to receive bound method + name : basestring + name of method on class instance + func : function + function to be bound as method + + Returns + ------- + None + """ + # only python 2 has an issue with bound/unbound methods + if not PY3: + setattr(cls, name, types.MethodType(func, None, cls)) + else: + setattr(cls, name, func) + + +def getexception(): + return sys.exc_info()[1] + + +def _get_caller_globals_and_locals(): + """ + Returns the globals and locals of the calling frame. + + Is there an alternative to frame hacking here? + """ + caller_frame = inspect.stack()[2] + myglobals = caller_frame[0].f_globals + mylocals = caller_frame[0].f_locals + return myglobals, mylocals + + +def _repr_strip(mystring): + """ + Returns the string without any initial or final quotes. + """ + r = repr(mystring) + if r.startswith("'") and r.endswith("'"): + return r[1:-1] + else: + return r + + +if PY3: + def raise_from(exc, cause): + """ + Equivalent to: + + raise EXCEPTION from CAUSE + + on Python 3. (See PEP 3134). + """ + myglobals, mylocals = _get_caller_globals_and_locals() + + # We pass the exception and cause along with other globals + # when we exec(): + myglobals = myglobals.copy() + myglobals['__python_future_raise_from_exc'] = exc + myglobals['__python_future_raise_from_cause'] = cause + execstr = "raise __python_future_raise_from_exc from __python_future_raise_from_cause" + exec(execstr, myglobals, mylocals) + + def raise_(tp, value=None, tb=None): + """ + A function that matches the Python 2.x ``raise`` statement. This + allows re-raising exceptions with the cls value and traceback on + Python 2 and 3. + """ + if isinstance(tp, BaseException): + # If the first object is an instance, the type of the exception + # is the class of the instance, the instance itself is the value, + # and the second object must be None. + if value is not None: + raise TypeError("instance exception may not have a separate value") + exc = tp + elif isinstance(tp, type) and not issubclass(tp, BaseException): + # If the first object is a class, it becomes the type of the + # exception. 
+ raise TypeError("class must derive from BaseException, not %s" % tp.__name__) + else: + # The second object is used to determine the exception value: If it + # is an instance of the class, the instance becomes the exception + # value. If the second object is a tuple, it is used as the argument + # list for the class constructor; if it is None, an empty argument + # list is used, and any other object is treated as a single argument + # to the constructor. The instance so created by calling the + # constructor is used as the exception value. + if isinstance(value, tp): + exc = value + elif isinstance(value, tuple): + exc = tp(*value) + elif value is None: + exc = tp() + else: + exc = tp(value) + + if exc.__traceback__ is not tb: + raise exc.with_traceback(tb) + raise exc + + def raise_with_traceback(exc, traceback=Ellipsis): + if traceback == Ellipsis: + _, _, traceback = sys.exc_info() + raise exc.with_traceback(traceback) + +else: + def raise_from(exc, cause): + """ + Equivalent to: + + raise EXCEPTION from CAUSE + + on Python 3. (See PEP 3134). + """ + # Is either arg an exception class (e.g. IndexError) rather than + # instance (e.g. IndexError('my message here')? If so, pass the + # name of the class undisturbed through to "raise ... from ...". + if isinstance(exc, type) and issubclass(exc, Exception): + e = exc() + # exc = exc.__name__ + # execstr = "e = " + _repr_strip(exc) + "()" + # myglobals, mylocals = _get_caller_globals_and_locals() + # exec(execstr, myglobals, mylocals) + else: + e = exc + e.__suppress_context__ = False + if isinstance(cause, type) and issubclass(cause, Exception): + e.__cause__ = cause() + e.__cause__.__traceback__ = sys.exc_info()[2] + e.__suppress_context__ = True + elif cause is None: + e.__cause__ = None + e.__suppress_context__ = True + elif isinstance(cause, BaseException): + e.__cause__ = cause + object.__setattr__(e.__cause__, '__traceback__', sys.exc_info()[2]) + e.__suppress_context__ = True + else: + raise TypeError("exception causes must derive from BaseException") + e.__context__ = sys.exc_info()[1] + raise e + + exec(''' +def raise_(tp, value=None, tb=None): + raise tp, value, tb + +def raise_with_traceback(exc, traceback=Ellipsis): + if traceback == Ellipsis: + _, _, traceback = sys.exc_info() + raise exc, None, traceback +'''.strip()) + + +raise_with_traceback.__doc__ = ( +"""Raise exception with existing traceback. +If traceback is not passed, uses sys.exc_info() to get traceback.""" +) + + +# Deprecated alias for backward compatibility with ``future`` versions < 0.11: +reraise = raise_ + + +def implements_iterator(cls): + ''' + From jinja2/_compat.py. License: BSD. + + Use as a decorator like this:: + + @implements_iterator + class UppercasingIterator(object): + def __init__(self, iterable): + self._iter = iter(iterable) + def __iter__(self): + return self + def __next__(self): + return next(self._iter).upper() + + ''' + if PY3: + return cls + else: + cls.next = cls.__next__ + del cls.__next__ + return cls + +if PY3: + get_next = lambda x: x.next +else: + get_next = lambda x: x.__next__ + + +def encode_filename(filename): + if PY3: + return filename + else: + if isinstance(filename, unicode): + return filename.encode('utf-8') + return filename + + +def is_new_style(cls): + """ + Python 2.7 has both new-style and old-style classes. Old-style classes can + be pesky in some circumstances, such as when using inheritance. Use this + function to test for whether a class is new-style. (Python 3 only has + new-style classes.) 
+ """ + return hasattr(cls, '__class__') and ('__dict__' in dir(cls) + or hasattr(cls, '__slots__')) + +# The native platform string and bytes types. Useful because ``str`` and +# ``bytes`` are redefined on Py2 by ``from future.builtins import *``. +native_str = str +native_bytes = bytes + + +def istext(obj): + """ + Deprecated. Use:: + >>> isinstance(obj, str) + after this import: + >>> from future.builtins import str + """ + return isinstance(obj, type(u'')) + + +def isbytes(obj): + """ + Deprecated. Use:: + >>> isinstance(obj, bytes) + after this import: + >>> from future.builtins import bytes + """ + return isinstance(obj, type(b'')) + + +def isnewbytes(obj): + """ + Equivalent to the result of ``type(obj) == type(newbytes)`` + in other words, it is REALLY a newbytes instance, not a Py2 native str + object? + + Note that this does not cover subclasses of newbytes, and it is not + equivalent to ininstance(obj, newbytes) + """ + return type(obj).__name__ == 'newbytes' + + +def isint(obj): + """ + Deprecated. Tests whether an object is a Py3 ``int`` or either a Py2 ``int`` or + ``long``. + + Instead of using this function, you can use: + + >>> from future.builtins import int + >>> isinstance(obj, int) + + The following idiom is equivalent: + + >>> from numbers import Integral + >>> isinstance(obj, Integral) + """ + + return isinstance(obj, numbers.Integral) + + +def native(obj): + """ + On Py3, this is a no-op: native(obj) -> obj + + On Py2, returns the corresponding native Py2 types that are + superclasses for backported objects from Py3: + + >>> from builtins import str, bytes, int + + >>> native(str(u'ABC')) + u'ABC' + >>> type(native(str(u'ABC'))) + unicode + + >>> native(bytes(b'ABC')) + b'ABC' + >>> type(native(bytes(b'ABC'))) + bytes + + >>> native(int(10**20)) + 100000000000000000000L + >>> type(native(int(10**20))) + long + + Existing native types on Py2 will be returned unchanged: + + >>> type(native(u'ABC')) + unicode + """ + if hasattr(obj, '__native__'): + return obj.__native__() + else: + return obj + + +# Implementation of exec_ is from ``six``: +if PY3: + import builtins + exec_ = getattr(builtins, "exec") +else: + def exec_(code, globs=None, locs=None): + """Execute code in a namespace.""" + if globs is None: + frame = sys._getframe(1) + globs = frame.f_globals + if locs is None: + locs = frame.f_locals + del frame + elif locs is None: + locs = globs + exec("""exec code in globs, locs""") + + +# Defined here for backward compatibility: +def old_div(a, b): + """ + DEPRECATED: import ``old_div`` from ``past.utils`` instead. + + Equivalent to ``a / b`` on Python 2 without ``from __future__ import + division``. + + TODO: generalize this to other objects (like arrays etc.) + """ + if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral): + return a // b + else: + return a / b + + +def as_native_str(encoding='utf-8'): + ''' + A decorator to turn a function or method call that returns text, i.e. + unicode, into one that returns a native platform str. 
+ + Use it as a decorator like this:: + + from __future__ import unicode_literals + + class MyClass(object): + @as_native_str(encoding='ascii') + def __repr__(self): + return next(self._iter).upper() + ''' + if PY3: + return lambda f: f + else: + def encoder(f): + @functools.wraps(f) + def wrapper(*args, **kwargs): + return f(*args, **kwargs).encode(encoding=encoding) + return wrapper + return encoder + +# listvalues and listitems definitions from Nick Coghlan's (withdrawn) +# PEP 496: +try: + dict.iteritems +except AttributeError: + # Python 3 + def listvalues(d): + return list(d.values()) + def listitems(d): + return list(d.items()) +else: + # Python 2 + def listvalues(d): + return d.values() + def listitems(d): + return d.items() + +if PY3: + def ensure_new_type(obj): + return obj +else: + def ensure_new_type(obj): + from future.types.newbytes import newbytes + from future.types.newstr import newstr + from future.types.newint import newint + from future.types.newdict import newdict + + native_type = type(native(obj)) + + # Upcast only if the type is already a native (non-future) type + if issubclass(native_type, type(obj)): + # Upcast + if native_type == str: # i.e. Py2 8-bit str + return newbytes(obj) + elif native_type == unicode: + return newstr(obj) + elif native_type == int: + return newint(obj) + elif native_type == long: + return newint(obj) + elif native_type == dict: + return newdict(obj) + else: + return obj + else: + # Already a new type + assert type(obj) in [newbytes, newstr] + return obj + + +__all__ = ['PY2', 'PY26', 'PY3', 'PYPY', + 'as_native_str', 'binary_type', 'bind_method', 'bord', 'bstr', + 'bytes_to_native_str', 'class_types', 'encode_filename', + 'ensure_new_type', 'exec_', 'get_next', 'getexception', + 'implements_iterator', 'integer_types', 'is_new_style', 'isbytes', + 'isidentifier', 'isint', 'isnewbytes', 'istext', 'iteritems', + 'iterkeys', 'itervalues', 'lfilter', 'listitems', 'listvalues', + 'lmap', 'lrange', 'lzip', 'native', 'native_bytes', 'native_str', + 'native_str_to_bytes', 'old_div', + 'python_2_unicode_compatible', 'raise_', + 'raise_with_traceback', 'reraise', 'string_types', + 'text_to_native_str', 'text_type', 'tobytes', 'viewitems', + 'viewkeys', 'viewvalues', 'with_metaclass' + ] diff --git a/minor_project/lib/python3.6/site-packages/future/utils/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/utils/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..d7acf82 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/utils/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/utils/__pycache__/surrogateescape.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/future/utils/__pycache__/surrogateescape.cpython-36.pyc new file mode 100644 index 0000000..57abb2c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/future/utils/__pycache__/surrogateescape.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/future/utils/surrogateescape.py b/minor_project/lib/python3.6/site-packages/future/utils/surrogateescape.py new file mode 100644 index 0000000..0dcc9fa --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/future/utils/surrogateescape.py @@ -0,0 +1,198 @@ +""" +This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error +handler of Python 3. 
+ +Source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/misc +""" + +# This code is released under the Python license and the BSD 2-clause license + +import codecs +import sys + +from future import utils + + +FS_ERRORS = 'surrogateescape' + +# # -- Python 2/3 compatibility ------------------------------------- +# FS_ERRORS = 'my_surrogateescape' + +def u(text): + if utils.PY3: + return text + else: + return text.decode('unicode_escape') + +def b(data): + if utils.PY3: + return data.encode('latin1') + else: + return data + +if utils.PY3: + _unichr = chr + bytes_chr = lambda code: bytes((code,)) +else: + _unichr = unichr + bytes_chr = chr + +def surrogateescape_handler(exc): + """ + Pure Python implementation of the PEP 383: the "surrogateescape" error + handler of Python 3. Undecodable bytes will be replaced by a Unicode + character U+DCxx on decoding, and these are translated into the + original bytes on encoding. + """ + mystring = exc.object[exc.start:exc.end] + + try: + if isinstance(exc, UnicodeDecodeError): + # mystring is a byte-string in this case + decoded = replace_surrogate_decode(mystring) + elif isinstance(exc, UnicodeEncodeError): + # In the case of u'\udcc3'.encode('ascii', + # 'this_surrogateescape_handler'), both Python 2.x and 3.x raise an + # exception anyway after this function is called, even though I think + # it's doing what it should. It seems that the strict encoder is called + # to encode the unicode string that this function returns ... + decoded = replace_surrogate_encode(mystring) + else: + raise exc + except NotASurrogateError: + raise exc + return (decoded, exc.end) + + +class NotASurrogateError(Exception): + pass + + +def replace_surrogate_encode(mystring): + """ + Returns a (unicode) string, not the more logical bytes, because the codecs + register_error functionality expects this. + """ + decoded = [] + for ch in mystring: + # if utils.PY3: + # code = ch + # else: + code = ord(ch) + + # The following magic comes from Py3.3's Python/codecs.c file: + if not 0xD800 <= code <= 0xDCFF: + # Not a surrogate. Fail with the original exception. + raise NotASurrogateError + # mybytes = [0xe0 | (code >> 12), + # 0x80 | ((code >> 6) & 0x3f), + # 0x80 | (code & 0x3f)] + # Is this a good idea? + if 0xDC00 <= code <= 0xDC7F: + decoded.append(_unichr(code - 0xDC00)) + elif code <= 0xDCFF: + decoded.append(_unichr(code - 0xDC00)) + else: + raise NotASurrogateError + return str().join(decoded) + + +def replace_surrogate_decode(mybytes): + """ + Returns a (unicode) string + """ + decoded = [] + for ch in mybytes: + # We may be parsing newbytes (in which case ch is an int) or a native + # str on Py2 + if isinstance(ch, int): + code = ch + else: + code = ord(ch) + if 0x80 <= code <= 0xFF: + decoded.append(_unichr(0xDC00 + code)) + elif code <= 0x7F: + decoded.append(_unichr(code)) + else: + # # It may be a bad byte + # # Try swallowing it. + # continue + # print("RAISE!") + raise NotASurrogateError + return str().join(decoded) + + +def encodefilename(fn): + if FS_ENCODING == 'ascii': + # ASCII encoder of Python 2 expects that the error handler returns a + # Unicode string encodable to ASCII, whereas our surrogateescape error + # handler has to return bytes in 0x80-0xFF range. 
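+        # Illustrative example of the round trip implemented here (assuming the
+        # 'surrogateescape' handler has been registered): an undecodable byte
+        # such as 0xFF decodes to the lone surrogate U+DCFF, and the loop below
+        # maps U+DCFF back to the single byte 0xFF when the filename is encoded
+        # again.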
+ encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if code < 128: + ch = bytes_chr(code) + elif 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + else: + raise UnicodeEncodeError(FS_ENCODING, + fn, index, index+1, + 'ordinal not in range(128)') + encoded.append(ch) + return bytes().join(encoded) + elif FS_ENCODING == 'utf-8': + # UTF-8 encoder of Python 2 encodes surrogates, so U+DC80-U+DCFF + # doesn't go through our error handler + encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if 0xD800 <= code <= 0xDFFF: + if 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + encoded.append(ch) + else: + raise UnicodeEncodeError( + FS_ENCODING, + fn, index, index+1, 'surrogates not allowed') + else: + ch_utf8 = ch.encode('utf-8') + encoded.append(ch_utf8) + return bytes().join(encoded) + else: + return fn.encode(FS_ENCODING, FS_ERRORS) + +def decodefilename(fn): + return fn.decode(FS_ENCODING, FS_ERRORS) + +FS_ENCODING = 'ascii'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') +# FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]') +# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') + + +# normalize the filesystem encoding name. +# For example, we expect "utf-8", not "UTF8". +FS_ENCODING = codecs.lookup(FS_ENCODING).name + + +def register_surrogateescape(): + """ + Registers the surrogateescape error handler on Python 2 (only) + """ + if utils.PY3: + return + try: + codecs.lookup_error(FS_ERRORS) + except LookupError: + codecs.register_error(FS_ERRORS, surrogateescape_handler) + + +if __name__ == '__main__': + pass + # # Tests: + # register_surrogateescape() + + # b = decodefilename(fn) + # assert b == encoded, "%r != %r" % (b, encoded) + # c = encodefilename(b) + # assert c == fn, '%r != %r' % (c, fn) + # # print("ok") diff --git a/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/INSTALLER b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/LICENSE.rst b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/LICENSE.rst new file mode 100644 index 0000000..63664b8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/LICENSE.rst @@ -0,0 +1,34 @@ +License +------- + +License: bsd-3-clause + +Copyright (c) 2013-2020, Kim Davies. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +#. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +#. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided with + the distribution. + +#. Neither the name of the copyright holder nor the names of the + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
+   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+   USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+   DAMAGE.
diff --git a/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/METADATA b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/METADATA
new file mode 100644
index 0000000..f73c0ff
--- /dev/null
+++ b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/METADATA
@@ -0,0 +1,243 @@
+Metadata-Version: 2.1
+Name: idna
+Version: 2.10
+Summary: Internationalized Domain Names in Applications (IDNA)
+Home-page: https://github.com/kjd/idna
+Author: Kim Davies
+Author-email: kim@cynosure.com.au
+License: BSD-like
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: Name Service (DNS)
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+Support for the Internationalised Domain Names in Applications
+(IDNA) protocol as specified in `RFC 5891 `_.
+This is the latest version of the protocol and is sometimes referred to as
+“IDNA 2008”.
+
+This library also provides support for Unicode Technical Standard 46,
+`Unicode IDNA Compatibility Processing `_.
+
+This acts as a suitable replacement for the “encodings.idna” module that
+comes with the Python standard library, but only supports the
+old, deprecated IDNA specification (`RFC 3490 `_).
+
+Basic functions are simply executed:
+
+.. code-block:: pycon
+
+    # Python 3
+    >>> import idna
+    >>> idna.encode('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+    ドメイン.テスト
+
+    # Python 2
+    >>> import idna
+    >>> idna.encode(u'ドメイン.テスト')
+    'xn--eckwd4c7c.xn--zckzah'
+    >>> print idna.decode('xn--eckwd4c7c.xn--zckzah')
+    ドメイン.テスト
+
+Packages
+--------
+
+The latest tagged release version is published in the PyPI repository:
+
+.. image:: https://badge.fury.io/py/idna.svg
+    :target: http://badge.fury.io/py/idna
+
+
+Installation
+------------
+
+To install this library, you can use pip:
+
+.. code-block:: bash
+
+    $ pip install idna
+
+Alternatively, you can install the package using the bundled setup script:
+
+.. code-block:: bash
+
+    $ python setup.py install
+
+This library works with Python 2.7 and Python 3.4 or later.
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a domain
+name argument and perform a conversion to A-labels or U-labels respectively.
+
+.. code-block:: pycon
+
+    # Python 3
+    >>> import idna
+    >>> idna.encode('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+    ドメイン.テスト
+
+You may use the codec encoding and decoding methods using the
+``idna.codec`` module:
+
+.. code-block:: pycon
+
+    # Python 2
+    >>> import idna.codec
+    >>> print u'домена.испытание'.encode('idna')
+    xn--80ahd1agd.xn--80akhbyknj4f
+    >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna')
+    домена.испытание
+
+Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel``
+functions if necessary:
+
+.. code-block:: pycon
+
+    # Python 2
+    >>> idna.alabel(u'测试')
+    'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in `RFC 5895 `_, the IDNA
+specification no longer normalizes input from different potential ways a user
+may input a domain name. This functionality, known as a “mapping”, is now
+considered by the specification to be a local user-interface issue distinct
+from IDNA conversion functionality.
+
+This library provides one such mapping, that was developed by the Unicode
+Consortium. Known as `Unicode IDNA Compatibility Processing `_,
+it provides for both a regular mapping for typical applications, as well as
+a transitional mapping to help migrate from older IDNA 2003 applications.
+
+For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL
+LETTER K* is not allowed (nor are capital letters in general). UTS 46 will
+convert this into lower case prior to applying the IDNA conversion.
+
+.. code-block:: pycon
+
+    # Python 3
+    >>> import idna
+    >>> idna.encode(u'Königsgäßchen')
+    ...
+    idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
+    >>> idna.encode('Königsgäßchen', uts46=True)
+    b'xn--knigsgchen-b4a3dun'
+    >>> print(idna.decode('xn--knigsgchen-b4a3dun'))
+    königsgäßchen
+
+Transitional processing provides conversions to help transition from the older
+2003 standard to the current standard. For example, in the original IDNA
+specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two
+*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this
+conversion is not performed.
+
+.. code-block:: pycon
+
+    # Python 2
+    >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True)
+    'xn--knigsgsschen-lcb0w'
+
+Implementors should use transitional processing with caution, only in rare
+cases where conversion from legacy labels to current labels must be performed
+(i.e. IDNA implementations that pre-date 2008). For typical applications
+that just need to convert labels, transitional processing is unlikely to be
+beneficial and could produce unexpected incompatible results.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the
+new module name.
+
+Exceptions
+----------
+
+All errors raised during the conversion following the specification should
+raise an exception derived from the ``idna.IDNAError`` base class.
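+
+For example, an application that only needs to know whether its input is a
+valid IDN can catch the base class (an illustrative snippet in the style of
+the examples above):
+
+.. code-block:: pycon
+
+    # Python 3
+    >>> import idna
+    >>> try:
+    ...     idna.encode('Königsgäßchen')
+    ... except idna.IDNAError as exc:
+    ...     print('invalid IDN: {}'.format(exc))
+    invalid IDN: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed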
+ +More specific exceptions that may be generated as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and right-to-left +characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is +an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext`` +when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO +or CONTEXTJ but the contextual requirements are not satisfied.) + +Building and Diagnostics +------------------------ + +The IDNA and UTS 46 functionality relies upon pre-calculated lookup tables for +performance. These tables are derived from computing against eligibility criteria +in the respective standards. These tables are computed using the command-line +script ``tools/idna-data``. + +This tool will fetch relevant tables from the Unicode Consortium and perform the +required calculations to identify eligibility. It has three main modes: + +* ``idna-data make-libdata``. Generates ``idnadata.py`` and ``uts46data.py``, + the pre-calculated lookup tables using for IDNA and UTS 46 conversions. Implementors + who wish to track this library against a different Unicode version may use this tool + to manually generate a different version of the ``idnadata.py`` and ``uts46data.py`` + files. + +* ``idna-data make-table``. Generate a table of the IDNA disposition + (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1 of RFC + 5892 and the pre-computed tables published by `IANA `_. + +* ``idna-data U+0061``. Prints debugging output on the various properties + associated with an individual Unicode codepoint (in this case, U+0061), that are + used to assess the IDNA and UTS 46 status of a codepoint. This is helpful in debugging + or analysis. + +The tool accepts a number of arguments, described using ``idna-data -h``. Most notably, +the ``--version`` argument allows the specification of the version of Unicode to use +in computing the table data. For example, ``idna-data --version 9.0.0 make-libdata`` +will generate library data against Unicode 9.0.0. + +Note that this script requires Python 3, but all generated library data will work +in Python 2.7. + + +Testing +------- + +The library has a test suite based on each rule of the IDNA specification, as +well as tests that are provided as part of the Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +The tests are run automatically on each commit at Travis CI: + +.. 
image:: https://travis-ci.org/kjd/idna.svg?branch=master + :target: https://travis-ci.org/kjd/idna + + diff --git a/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/RECORD b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/RECORD new file mode 100644 index 0000000..070b5d4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/RECORD @@ -0,0 +1,22 @@ +idna-2.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-2.10.dist-info/LICENSE.rst,sha256=QSAUQg0kc9ugYRfD1Nng7sqm3eDKMM2VH07CvjlCbzI,1565 +idna-2.10.dist-info/METADATA,sha256=ZWCaQDBjdmSvx5EU7Cv6ORC-9NUQ6nXh1eXx38ySe40,9104 +idna-2.10.dist-info/RECORD,, +idna-2.10.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 +idna-2.10.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 +idna/__pycache__/__init__.cpython-36.pyc,, +idna/__pycache__/codec.cpython-36.pyc,, +idna/__pycache__/compat.cpython-36.pyc,, +idna/__pycache__/core.cpython-36.pyc,, +idna/__pycache__/idnadata.cpython-36.pyc,, +idna/__pycache__/intranges.cpython-36.pyc,, +idna/__pycache__/package_data.cpython-36.pyc,, +idna/__pycache__/uts46data.cpython-36.pyc,, +idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 +idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 +idna/core.py,sha256=jCoaLb3bA2tS_DDx9PpGuNTEZZN2jAzB369aP-IHYRE,11951 +idna/idnadata.py,sha256=gmzFwZWjdms3kKZ_M_vwz7-LP_SCgYfSeE03B21Qpsk,42350 +idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 +idna/package_data.py,sha256=bxBjpLnE06_1jSYKEy5svOMu1zM3OMztXVUb1tPlcp0,22 +idna/uts46data.py,sha256=lMdw2zdjkH1JUWXPPEfFUSYT3Fyj60bBmfLvvy5m7ko,202084 diff --git a/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/WHEEL b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/WHEEL new file mode 100644 index 0000000..8b701e9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.6) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/top_level.txt b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/top_level.txt new file mode 100644 index 0000000..c40472e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna-2.10.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/minor_project/lib/python3.6/site-packages/idna/__init__.py b/minor_project/lib/python3.6/site-packages/idna/__init__.py new file mode 100644 index 0000000..847bf93 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna/__init__.py @@ -0,0 +1,2 @@ +from .package_data import __version__ +from .core import * diff --git a/minor_project/lib/python3.6/site-packages/idna/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/idna/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..6528b4f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/idna/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/idna/__pycache__/codec.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/idna/__pycache__/codec.cpython-36.pyc new file mode 100644 index 0000000..ea95520 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/idna/__pycache__/codec.cpython-36.pyc differ 
diff --git a/minor_project/lib/python3.6/site-packages/idna/__pycache__/compat.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/idna/__pycache__/compat.cpython-36.pyc new file mode 100644 index 0000000..d6df4ed Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/idna/__pycache__/compat.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/idna/__pycache__/core.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/idna/__pycache__/core.cpython-36.pyc new file mode 100644 index 0000000..7551d4b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/idna/__pycache__/core.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/idna/__pycache__/idnadata.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/idna/__pycache__/idnadata.cpython-36.pyc new file mode 100644 index 0000000..81541f3 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/idna/__pycache__/idnadata.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/idna/__pycache__/intranges.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/idna/__pycache__/intranges.cpython-36.pyc new file mode 100644 index 0000000..52954a6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/idna/__pycache__/intranges.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/idna/__pycache__/package_data.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/idna/__pycache__/package_data.cpython-36.pyc new file mode 100644 index 0000000..a857d44 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/idna/__pycache__/package_data.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/idna/__pycache__/uts46data.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/idna/__pycache__/uts46data.cpython-36.pyc new file mode 100644 index 0000000..fe1e1ed Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/idna/__pycache__/uts46data.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/idna/codec.py b/minor_project/lib/python3.6/site-packages/idna/codec.py new file mode 100644 index 0000000..98c65ea --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna/codec.py @@ -0,0 +1,118 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re + +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return "", 0 + + return encode(data), len(data) + + def decode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return u"", 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return ("", 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' 
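+        # At this point `labels` only contains complete labels: when `final`
+        # is false, a possibly unfinished trailing label has been dropped
+        # above, and because the returned size only counts what was actually
+        # encoded, codecs.BufferedIncrementalEncoder keeps the remainder in
+        # its buffer and feeds it back on the next call.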
+ + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return (u"", 0) + + # IDNA allows decoding to operate on Unicode strings, too. + if isinstance(data, unicode): + labels = _unicode_dots_re.split(data) + else: + # Must be ASCII string + data = str(data) + unicode(data, "ascii") + labels = data.split(".") + + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = u'.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = u'.' + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result = u".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + +class StreamReader(Codec, codecs.StreamReader): + pass + +def getregentry(): + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/minor_project/lib/python3.6/site-packages/idna/compat.py b/minor_project/lib/python3.6/site-packages/idna/compat.py new file mode 100644 index 0000000..4d47f33 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna/compat.py @@ -0,0 +1,12 @@ +from .core import * +from .codec import * + +def ToASCII(label): + return encode(label) + +def ToUnicode(label): + return decode(label) + +def nameprep(s): + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") + diff --git a/minor_project/lib/python3.6/site-packages/idna/core.py b/minor_project/lib/python3.6/site-packages/idna/core.py new file mode 100644 index 0000000..41ec5c7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna/core.py @@ -0,0 +1,400 @@ +from . 
import idnadata +import bisect +import unicodedata +import re +import sys +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +if sys.version_info[0] >= 3: + unicode = str + unichr = chr + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp): + v = unicodedata.combining(unichr(cp)) + if v == 0: + if not unicodedata.name(unichr(cp)): + raise ValueError("Unknown character in unicodedata") + return v + +def _is_script(cp, script): + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s): + return s.encode('punycode') + +def _unot(s): + return 'U+{0:04X}'.format(s) + + +def valid_label_length(label): + + if len(label) > 63: + return False + return True + + +def valid_string_length(label, trailing_dot): + + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label, check_ltr=False): + + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label): + + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') + return True + + +def check_hyphen_ok(label): + + if label[2:4] == '--': + raise IDNAError('Label has 
disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label): + + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label, pos): + + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('L'), ord('D')]: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + if joining_type in [ord('R'), ord('D')]: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label, pos, exception=False): + + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == u'\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + +def check_label(label): + + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format( + _unot(cp_value), pos+1, repr(label))) + except ValueError: + raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label): + + try: + label = label.encode('ascii') + ulabel(label) + if not valid_label_length(label): + raise IDNAError('Label too long') + return label + except UnicodeEncodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = unicode(label) + check_label(label) + 
label = _punycode(label) + label = _alabel_prefix + label + + if not valid_label_length(label): + raise IDNAError('Label too long') + + return label + + +def ulabel(label): + + if not isinstance(label, (bytes, bytearray)): + try: + label = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + + label = label.lower() + if label.startswith(_alabel_prefix): + label = label[len(_alabel_prefix):] + if not label: + raise IDNAError('Malformed A-label, no Punycode eligible content found') + if label.decode('ascii')[-1] == '-': + raise IDNAError('A-label must not end with a hyphen') + else: + check_label(label) + return label.decode('ascii') + + label = label.decode('punycode') + check_label(label) + return label + + +def uts46_remap(domain, std3_rules=True, transitional=False): + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = u"" + try: + for pos, char in enumerate(domain): + code_point = ord(char) + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement = uts46row[2] if len(uts46row) == 3 else None + if (status == "V" or + (status == "D" and not transitional) or + (status == "3" and not std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == "M" or + (status == "3" and not std3_rules) or + (status == "D" and transitional)): + output += replacement + elif status != "I": + raise IndexError() + return unicodedata.normalize("NFC", output) + except IndexError: + raise InvalidCodepoint( + "Codepoint {0} not allowed at position {1} in {2}".format( + _unot(code_point), pos + 1, repr(domain))) + + +def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s, strict=False, uts46=False, std3_rules=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split(u'.') + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(u'') + return u'.'.join(result) diff --git a/minor_project/lib/python3.6/site-packages/idna/idnadata.py b/minor_project/lib/python3.6/site-packages/idna/idnadata.py new file mode 100644 index 0000000..a284e4c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna/idnadata.py @@ -0,0 +1,2050 @@ +# This file is automatically generated by tools/idna-data + +__version__ = "13.0.0" +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x37f00000380, + 
0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 0x1f1800001f1e, + 0x1f2000001f46, + 0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0xab650000ab66, + 0x101400001018f, + 0x101a0000101a1, + 0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004dc0, + 0x4e0000009ffd, + 0xf9000000fa6e, + 0xfa700000fada, + 0x16ff000016ff2, + 0x200000002a6de, + 0x2a7000002b735, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2f8000002fa1e, + 0x300000003134b, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5ef000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b11f, + 0x1b1500001b153, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1b0000001b001, + 0x1b1640001b168, + ), +} +joining_types = { + 0x600: 85, + 0x601: 85, + 0x602: 85, + 0x603: 85, + 0x604: 85, + 0x605: 85, + 0x608: 85, + 0x60b: 85, + 0x620: 68, + 0x621: 85, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x66e: 68, + 0x66f: 68, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x674: 85, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6dd: 85, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x70f: 84, + 0x710: 82, + 0x712: 
68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, + 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7fa: 67, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x860: 68, + 0x861: 85, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x866: 85, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86a: 82, + 0x8a0: 68, + 0x8a1: 68, + 0x8a2: 68, + 0x8a3: 68, + 0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x8ad: 85, + 0x8ae: 82, + 0x8af: 68, + 0x8b0: 68, + 0x8b1: 82, + 0x8b2: 82, + 0x8b3: 68, + 0x8b4: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, + 0x8e2: 85, + 0x1806: 85, + 0x1807: 68, + 0x180a: 67, + 0x180e: 85, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 
0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1880: 85, + 0x1881: 85, + 0x1882: 85, + 0x1883: 85, + 0x1884: 85, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18aa: 68, + 0x200c: 85, + 0x200d: 67, + 0x202f: 85, + 0x2066: 85, + 0x2067: 85, + 0x2068: 85, + 0x2069: 85, + 0xa840: 68, + 0xa841: 68, + 0xa842: 68, + 0xa843: 68, + 0xa844: 68, + 0xa845: 68, + 0xa846: 68, + 0xa847: 68, + 0xa848: 68, + 0xa849: 68, + 0xa84a: 68, + 0xa84b: 68, + 0xa84c: 68, + 0xa84d: 68, + 0xa84e: 68, + 0xa84f: 68, + 0xa850: 68, + 0xa851: 68, + 0xa852: 68, + 0xa853: 68, + 0xa854: 68, + 0xa855: 68, + 0xa856: 68, + 0xa857: 68, + 0xa858: 68, + 0xa859: 68, + 0xa85a: 68, + 0xa85b: 68, + 0xa85c: 68, + 0xa85d: 68, + 0xa85e: 68, + 0xa85f: 68, + 0xa860: 68, + 0xa861: 68, + 0xa862: 68, + 0xa863: 68, + 0xa864: 68, + 0xa865: 68, + 0xa866: 68, + 0xa867: 68, + 0xa868: 68, + 0xa869: 68, + 0xa86a: 68, + 0xa86b: 68, + 0xa86c: 68, + 0xa86d: 68, + 0xa86e: 68, + 0xa86f: 68, + 0xa870: 68, + 0xa871: 68, + 0xa872: 76, + 0xa873: 85, + 0x10ac0: 68, + 0x10ac1: 68, + 0x10ac2: 68, + 0x10ac3: 68, + 0x10ac4: 68, + 0x10ac5: 82, + 0x10ac6: 85, + 0x10ac7: 82, + 0x10ac8: 85, + 0x10ac9: 82, + 0x10aca: 82, + 0x10acb: 85, + 0x10acc: 85, + 0x10acd: 76, + 0x10ace: 82, + 0x10acf: 82, + 0x10ad0: 82, + 0x10ad1: 82, + 0x10ad2: 82, + 0x10ad3: 68, + 0x10ad4: 68, + 0x10ad5: 68, + 0x10ad6: 68, + 0x10ad7: 76, + 0x10ad8: 68, + 0x10ad9: 68, + 0x10ada: 68, + 0x10adb: 68, + 0x10adc: 68, + 0x10add: 82, + 0x10ade: 68, + 0x10adf: 68, + 0x10ae0: 68, + 0x10ae1: 82, + 0x10ae2: 85, + 0x10ae3: 85, + 0x10ae4: 82, + 0x10aeb: 68, + 0x10aec: 68, + 0x10aed: 68, + 0x10aee: 68, + 0x10aef: 82, + 0x10b80: 68, + 0x10b81: 82, + 0x10b82: 68, + 0x10b83: 82, + 0x10b84: 82, + 0x10b85: 82, + 0x10b86: 68, + 0x10b87: 68, + 0x10b88: 68, + 0x10b89: 82, + 0x10b8a: 68, + 0x10b8b: 68, + 0x10b8c: 82, + 0x10b8d: 68, + 0x10b8e: 82, + 0x10b8f: 82, + 0x10b90: 68, + 0x10b91: 82, + 0x10ba9: 82, + 0x10baa: 82, + 0x10bab: 82, + 0x10bac: 82, + 0x10bad: 68, + 0x10bae: 68, + 0x10baf: 85, + 0x10d00: 76, + 0x10d01: 68, + 0x10d02: 68, + 0x10d03: 68, + 0x10d04: 68, + 0x10d05: 68, + 0x10d06: 68, + 0x10d07: 68, + 0x10d08: 68, + 0x10d09: 68, + 0x10d0a: 68, + 0x10d0b: 68, + 0x10d0c: 68, + 0x10d0d: 68, + 0x10d0e: 68, + 0x10d0f: 68, + 0x10d10: 68, + 0x10d11: 68, + 0x10d12: 68, + 0x10d13: 68, + 0x10d14: 68, + 0x10d15: 68, + 0x10d16: 68, + 0x10d17: 68, + 0x10d18: 68, + 0x10d19: 68, + 0x10d1a: 68, + 0x10d1b: 68, + 0x10d1c: 68, + 0x10d1d: 68, + 0x10d1e: 68, + 0x10d1f: 68, + 0x10d20: 68, + 0x10d21: 68, + 0x10d22: 82, + 0x10d23: 68, + 0x10f30: 68, + 0x10f31: 68, + 0x10f32: 68, + 0x10f33: 82, + 0x10f34: 68, + 0x10f35: 68, + 0x10f36: 68, + 0x10f37: 68, + 0x10f38: 68, + 0x10f39: 68, + 0x10f3a: 68, + 0x10f3b: 68, + 0x10f3c: 68, + 0x10f3d: 68, + 0x10f3e: 68, + 0x10f3f: 68, + 0x10f40: 68, + 0x10f41: 68, + 0x10f42: 68, + 0x10f43: 68, + 0x10f44: 68, + 0x10f45: 85, + 0x10f51: 68, + 0x10f52: 68, + 0x10f53: 
68, + 0x10f54: 82, + 0x10fb0: 68, + 0x10fb1: 85, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb7: 85, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 0x10fbe: 68, + 0x10fbf: 68, + 0x10fc0: 85, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc5: 85, + 0x10fc6: 85, + 0x10fc7: 85, + 0x10fc8: 85, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, + 0x110bd: 85, + 0x110cd: 85, + 0x1e900: 68, + 0x1e901: 68, + 0x1e902: 68, + 0x1e903: 68, + 0x1e904: 68, + 0x1e905: 68, + 0x1e906: 68, + 0x1e907: 68, + 0x1e908: 68, + 0x1e909: 68, + 0x1e90a: 68, + 0x1e90b: 68, + 0x1e90c: 68, + 0x1e90d: 68, + 0x1e90e: 68, + 0x1e90f: 68, + 0x1e910: 68, + 0x1e911: 68, + 0x1e912: 68, + 0x1e913: 68, + 0x1e914: 68, + 0x1e915: 68, + 0x1e916: 68, + 0x1e917: 68, + 0x1e918: 68, + 0x1e919: 68, + 0x1e91a: 68, + 0x1e91b: 68, + 0x1e91c: 68, + 0x1e91d: 68, + 0x1e91e: 68, + 0x1e91f: 68, + 0x1e920: 68, + 0x1e921: 68, + 0x1e922: 68, + 0x1e923: 68, + 0x1e924: 68, + 0x1e925: 68, + 0x1e926: 68, + 0x1e927: 68, + 0x1e928: 68, + 0x1e929: 68, + 0x1e92a: 68, + 0x1e92b: 68, + 0x1e92c: 68, + 0x1e92d: 68, + 0x1e92e: 68, + 0x1e92f: 68, + 0x1e930: 68, + 0x1e931: 68, + 0x1e932: 68, + 0x1e933: 68, + 0x1e934: 68, + 0x1e935: 68, + 0x1e936: 68, + 0x1e937: 68, + 0x1e938: 68, + 0x1e939: 68, + 0x1e93a: 68, + 0x1e93b: 68, + 0x1e93c: 68, + 0x1e93d: 68, + 0x1e93e: 68, + 0x1e93f: 68, + 0x1e940: 68, + 0x1e941: 68, + 0x1e942: 68, + 0x1e943: 68, + 0x1e94b: 84, +} +codepoint_classes = { + 'PVALID': ( + 0x2d0000002e, + 0x300000003a, + 0x610000007b, + 0xdf000000f7, + 0xf800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010a, + 0x10b0000010c, + 0x10d0000010e, + 0x10f00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011a, + 0x11b0000011c, + 0x11d0000011e, + 0x11f00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012a, + 0x12b0000012c, + 0x12d0000012e, + 0x12f00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13a0000013b, + 0x13c0000013d, + 0x13e0000013f, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14b0000014c, + 0x14d0000014e, + 0x14f00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015a, + 0x15b0000015c, + 0x15d0000015e, + 0x15f00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016a, + 0x16b0000016c, + 0x16d0000016e, + 0x16f00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17a0000017b, + 0x17c0000017d, + 0x17e0000017f, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18c0000018e, + 0x19200000193, + 0x19500000196, + 0x1990000019c, + 0x19e0000019f, + 0x1a1000001a2, + 0x1a3000001a4, + 0x1a5000001a6, + 0x1a8000001a9, + 0x1aa000001ac, + 0x1ad000001ae, + 0x1b0000001b1, + 0x1b4000001b5, + 0x1b6000001b7, + 0x1b9000001bc, + 0x1bd000001c4, + 0x1ce000001cf, + 0x1d0000001d1, + 0x1d2000001d3, + 0x1d4000001d5, + 0x1d6000001d7, + 0x1d8000001d9, + 0x1da000001db, + 0x1dc000001de, + 0x1df000001e0, + 0x1e1000001e2, + 0x1e3000001e4, + 0x1e5000001e6, + 0x1e7000001e8, + 0x1e9000001ea, + 0x1eb000001ec, + 0x1ed000001ee, + 0x1ef000001f1, + 0x1f5000001f6, + 0x1f9000001fa, + 0x1fb000001fc, + 0x1fd000001fe, + 0x1ff00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020a, + 0x20b0000020c, + 0x20d0000020e, + 0x20f00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 
0x21700000218, + 0x2190000021a, + 0x21b0000021c, + 0x21d0000021e, + 0x21f00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022a, + 0x22b0000022c, + 0x22d0000022e, + 0x22f00000230, + 0x23100000232, + 0x2330000023a, + 0x23c0000023d, + 0x23f00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024a, + 0x24b0000024c, + 0x24d0000024e, + 0x24f000002b0, + 0x2b9000002c2, + 0x2c6000002d2, + 0x2ec000002ed, + 0x2ee000002ef, + 0x30000000340, + 0x34200000343, + 0x3460000034f, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37b0000037e, + 0x39000000391, + 0x3ac000003cf, + 0x3d7000003d8, + 0x3d9000003da, + 0x3db000003dc, + 0x3dd000003de, + 0x3df000003e0, + 0x3e1000003e2, + 0x3e3000003e4, + 0x3e5000003e6, + 0x3e7000003e8, + 0x3e9000003ea, + 0x3eb000003ec, + 0x3ed000003ee, + 0x3ef000003f0, + 0x3f3000003f4, + 0x3f8000003f9, + 0x3fb000003fd, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046a, + 0x46b0000046c, + 0x46d0000046e, + 0x46f00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047a, + 0x47b0000047c, + 0x47d0000047e, + 0x47f00000480, + 0x48100000482, + 0x48300000488, + 0x48b0000048c, + 0x48d0000048e, + 0x48f00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049a, + 0x49b0000049c, + 0x49d0000049e, + 0x49f000004a0, + 0x4a1000004a2, + 0x4a3000004a4, + 0x4a5000004a6, + 0x4a7000004a8, + 0x4a9000004aa, + 0x4ab000004ac, + 0x4ad000004ae, + 0x4af000004b0, + 0x4b1000004b2, + 0x4b3000004b4, + 0x4b5000004b6, + 0x4b7000004b8, + 0x4b9000004ba, + 0x4bb000004bc, + 0x4bd000004be, + 0x4bf000004c0, + 0x4c2000004c3, + 0x4c4000004c5, + 0x4c6000004c7, + 0x4c8000004c9, + 0x4ca000004cb, + 0x4cc000004cd, + 0x4ce000004d0, + 0x4d1000004d2, + 0x4d3000004d4, + 0x4d5000004d6, + 0x4d7000004d8, + 0x4d9000004da, + 0x4db000004dc, + 0x4dd000004de, + 0x4df000004e0, + 0x4e1000004e2, + 0x4e3000004e4, + 0x4e5000004e6, + 0x4e7000004e8, + 0x4e9000004ea, + 0x4eb000004ec, + 0x4ed000004ee, + 0x4ef000004f0, + 0x4f1000004f2, + 0x4f3000004f4, + 0x4f5000004f6, + 0x4f7000004f8, + 0x4f9000004fa, + 0x4fb000004fc, + 0x4fd000004fe, + 0x4ff00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050a, + 0x50b0000050c, + 0x50d0000050e, + 0x50f00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051a, + 0x51b0000051c, + 0x51d0000051e, + 0x51f00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052a, + 0x52b0000052c, + 0x52d0000052e, + 0x52f00000530, + 0x5590000055a, + 0x56000000587, + 0x58800000589, + 0x591000005be, + 0x5bf000005c0, + 0x5c1000005c3, + 0x5c4000005c6, + 0x5c7000005c8, + 0x5d0000005eb, + 0x5ef000005f3, + 0x6100000061b, + 0x62000000640, + 0x64100000660, + 0x66e00000675, + 0x679000006d4, + 0x6d5000006dd, + 0x6df000006e9, + 0x6ea000006f0, + 0x6fa00000700, + 0x7100000074b, + 0x74d000007b2, + 0x7c0000007f6, + 0x7fd000007fe, + 0x8000000082e, + 0x8400000085c, + 0x8600000086b, + 0x8a0000008b5, + 0x8b6000008c8, + 0x8d3000008e2, + 0x8e300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098d, + 0x98f00000991, + 0x993000009a9, + 0x9aa000009b1, + 0x9b2000009b3, + 0x9b6000009ba, + 0x9bc000009c5, + 0x9c7000009c9, + 0x9cb000009cf, + 0x9d7000009d8, + 0x9e0000009e4, + 0x9e6000009f2, + 0x9fc000009fd, + 0x9fe000009ff, + 0xa0100000a04, + 0xa0500000a0b, + 0xa0f00000a11, + 0xa1300000a29, + 0xa2a00000a31, + 0xa3200000a33, + 0xa3500000a36, + 0xa3800000a3a, + 
0xa3c00000a3d, + 0xa3e00000a43, + 0xa4700000a49, + 0xa4b00000a4e, + 0xa5100000a52, + 0xa5c00000a5d, + 0xa6600000a76, + 0xa8100000a84, + 0xa8500000a8e, + 0xa8f00000a92, + 0xa9300000aa9, + 0xaaa00000ab1, + 0xab200000ab4, + 0xab500000aba, + 0xabc00000ac6, + 0xac700000aca, + 0xacb00000ace, + 0xad000000ad1, + 0xae000000ae4, + 0xae600000af0, + 0xaf900000b00, + 0xb0100000b04, + 0xb0500000b0d, + 0xb0f00000b11, + 0xb1300000b29, + 0xb2a00000b31, + 0xb3200000b34, + 0xb3500000b3a, + 0xb3c00000b45, + 0xb4700000b49, + 0xb4b00000b4e, + 0xb5500000b58, + 0xb5f00000b64, + 0xb6600000b70, + 0xb7100000b72, + 0xb8200000b84, + 0xb8500000b8b, + 0xb8e00000b91, + 0xb9200000b96, + 0xb9900000b9b, + 0xb9c00000b9d, + 0xb9e00000ba0, + 0xba300000ba5, + 0xba800000bab, + 0xbae00000bba, + 0xbbe00000bc3, + 0xbc600000bc9, + 0xbca00000bce, + 0xbd000000bd1, + 0xbd700000bd8, + 0xbe600000bf0, + 0xc0000000c0d, + 0xc0e00000c11, + 0xc1200000c29, + 0xc2a00000c3a, + 0xc3d00000c45, + 0xc4600000c49, + 0xc4a00000c4e, + 0xc5500000c57, + 0xc5800000c5b, + 0xc6000000c64, + 0xc6600000c70, + 0xc8000000c84, + 0xc8500000c8d, + 0xc8e00000c91, + 0xc9200000ca9, + 0xcaa00000cb4, + 0xcb500000cba, + 0xcbc00000cc5, + 0xcc600000cc9, + 0xcca00000cce, + 0xcd500000cd7, + 0xcde00000cdf, + 0xce000000ce4, + 0xce600000cf0, + 0xcf100000cf3, + 0xd0000000d0d, + 0xd0e00000d11, + 0xd1200000d45, + 0xd4600000d49, + 0xd4a00000d4f, + 0xd5400000d58, + 0xd5f00000d64, + 0xd6600000d70, + 0xd7a00000d80, + 0xd8100000d84, + 0xd8500000d97, + 0xd9a00000db2, + 0xdb300000dbc, + 0xdbd00000dbe, + 0xdc000000dc7, + 0xdca00000dcb, + 0xdcf00000dd5, + 0xdd600000dd7, + 0xdd800000de0, + 0xde600000df0, + 0xdf200000df4, + 0xe0100000e33, + 0xe3400000e3b, + 0xe4000000e4f, + 0xe5000000e5a, + 0xe8100000e83, + 0xe8400000e85, + 0xe8600000e8b, + 0xe8c00000ea4, + 0xea500000ea6, + 0xea700000eb3, + 0xeb400000ebe, + 0xec000000ec5, + 0xec600000ec7, + 0xec800000ece, + 0xed000000eda, + 0xede00000ee0, + 0xf0000000f01, + 0xf0b00000f0c, + 0xf1800000f1a, + 0xf2000000f2a, + 0xf3500000f36, + 0xf3700000f38, + 0xf3900000f3a, + 0xf3e00000f43, + 0xf4400000f48, + 0xf4900000f4d, + 0xf4e00000f52, + 0xf5300000f57, + 0xf5800000f5c, + 0xf5d00000f69, + 0xf6a00000f6d, + 0xf7100000f73, + 0xf7400000f75, + 0xf7a00000f81, + 0xf8200000f85, + 0xf8600000f93, + 0xf9400000f98, + 0xf9900000f9d, + 0xf9e00000fa2, + 0xfa300000fa7, + 0xfa800000fac, + 0xfad00000fb9, + 0xfba00000fbd, + 0xfc600000fc7, + 0x10000000104a, + 0x10500000109e, + 0x10d0000010fb, + 0x10fd00001100, + 0x120000001249, + 0x124a0000124e, + 0x125000001257, + 0x125800001259, + 0x125a0000125e, + 0x126000001289, + 0x128a0000128e, + 0x1290000012b1, + 0x12b2000012b6, + 0x12b8000012bf, + 0x12c0000012c1, + 0x12c2000012c6, + 0x12c8000012d7, + 0x12d800001311, + 0x131200001316, + 0x13180000135b, + 0x135d00001360, + 0x138000001390, + 0x13a0000013f6, + 0x14010000166d, + 0x166f00001680, + 0x16810000169b, + 0x16a0000016eb, + 0x16f1000016f9, + 0x17000000170d, + 0x170e00001715, + 0x172000001735, + 0x174000001754, + 0x17600000176d, + 0x176e00001771, + 0x177200001774, + 0x1780000017b4, + 0x17b6000017d4, + 0x17d7000017d8, + 0x17dc000017de, + 0x17e0000017ea, + 0x18100000181a, + 0x182000001879, + 0x1880000018ab, + 0x18b0000018f6, + 0x19000000191f, + 0x19200000192c, + 0x19300000193c, + 0x19460000196e, + 0x197000001975, + 0x1980000019ac, + 0x19b0000019ca, + 0x19d0000019da, + 0x1a0000001a1c, + 0x1a2000001a5f, + 0x1a6000001a7d, + 0x1a7f00001a8a, + 0x1a9000001a9a, + 0x1aa700001aa8, + 0x1ab000001abe, + 0x1abf00001ac1, + 0x1b0000001b4c, + 0x1b5000001b5a, + 0x1b6b00001b74, + 0x1b8000001bf4, + 
0x1c0000001c38, + 0x1c4000001c4a, + 0x1c4d00001c7e, + 0x1cd000001cd3, + 0x1cd400001cfb, + 0x1d0000001d2c, + 0x1d2f00001d30, + 0x1d3b00001d3c, + 0x1d4e00001d4f, + 0x1d6b00001d78, + 0x1d7900001d9b, + 0x1dc000001dfa, + 0x1dfb00001e00, + 0x1e0100001e02, + 0x1e0300001e04, + 0x1e0500001e06, + 0x1e0700001e08, + 0x1e0900001e0a, + 0x1e0b00001e0c, + 0x1e0d00001e0e, + 0x1e0f00001e10, + 0x1e1100001e12, + 0x1e1300001e14, + 0x1e1500001e16, + 0x1e1700001e18, + 0x1e1900001e1a, + 0x1e1b00001e1c, + 0x1e1d00001e1e, + 0x1e1f00001e20, + 0x1e2100001e22, + 0x1e2300001e24, + 0x1e2500001e26, + 0x1e2700001e28, + 0x1e2900001e2a, + 0x1e2b00001e2c, + 0x1e2d00001e2e, + 0x1e2f00001e30, + 0x1e3100001e32, + 0x1e3300001e34, + 0x1e3500001e36, + 0x1e3700001e38, + 0x1e3900001e3a, + 0x1e3b00001e3c, + 0x1e3d00001e3e, + 0x1e3f00001e40, + 0x1e4100001e42, + 0x1e4300001e44, + 0x1e4500001e46, + 0x1e4700001e48, + 0x1e4900001e4a, + 0x1e4b00001e4c, + 0x1e4d00001e4e, + 0x1e4f00001e50, + 0x1e5100001e52, + 0x1e5300001e54, + 0x1e5500001e56, + 0x1e5700001e58, + 0x1e5900001e5a, + 0x1e5b00001e5c, + 0x1e5d00001e5e, + 0x1e5f00001e60, + 0x1e6100001e62, + 0x1e6300001e64, + 0x1e6500001e66, + 0x1e6700001e68, + 0x1e6900001e6a, + 0x1e6b00001e6c, + 0x1e6d00001e6e, + 0x1e6f00001e70, + 0x1e7100001e72, + 0x1e7300001e74, + 0x1e7500001e76, + 0x1e7700001e78, + 0x1e7900001e7a, + 0x1e7b00001e7c, + 0x1e7d00001e7e, + 0x1e7f00001e80, + 0x1e8100001e82, + 0x1e8300001e84, + 0x1e8500001e86, + 0x1e8700001e88, + 0x1e8900001e8a, + 0x1e8b00001e8c, + 0x1e8d00001e8e, + 0x1e8f00001e90, + 0x1e9100001e92, + 0x1e9300001e94, + 0x1e9500001e9a, + 0x1e9c00001e9e, + 0x1e9f00001ea0, + 0x1ea100001ea2, + 0x1ea300001ea4, + 0x1ea500001ea6, + 0x1ea700001ea8, + 0x1ea900001eaa, + 0x1eab00001eac, + 0x1ead00001eae, + 0x1eaf00001eb0, + 0x1eb100001eb2, + 0x1eb300001eb4, + 0x1eb500001eb6, + 0x1eb700001eb8, + 0x1eb900001eba, + 0x1ebb00001ebc, + 0x1ebd00001ebe, + 0x1ebf00001ec0, + 0x1ec100001ec2, + 0x1ec300001ec4, + 0x1ec500001ec6, + 0x1ec700001ec8, + 0x1ec900001eca, + 0x1ecb00001ecc, + 0x1ecd00001ece, + 0x1ecf00001ed0, + 0x1ed100001ed2, + 0x1ed300001ed4, + 0x1ed500001ed6, + 0x1ed700001ed8, + 0x1ed900001eda, + 0x1edb00001edc, + 0x1edd00001ede, + 0x1edf00001ee0, + 0x1ee100001ee2, + 0x1ee300001ee4, + 0x1ee500001ee6, + 0x1ee700001ee8, + 0x1ee900001eea, + 0x1eeb00001eec, + 0x1eed00001eee, + 0x1eef00001ef0, + 0x1ef100001ef2, + 0x1ef300001ef4, + 0x1ef500001ef6, + 0x1ef700001ef8, + 0x1ef900001efa, + 0x1efb00001efc, + 0x1efd00001efe, + 0x1eff00001f08, + 0x1f1000001f16, + 0x1f2000001f28, + 0x1f3000001f38, + 0x1f4000001f46, + 0x1f5000001f58, + 0x1f6000001f68, + 0x1f7000001f71, + 0x1f7200001f73, + 0x1f7400001f75, + 0x1f7600001f77, + 0x1f7800001f79, + 0x1f7a00001f7b, + 0x1f7c00001f7d, + 0x1fb000001fb2, + 0x1fb600001fb7, + 0x1fc600001fc7, + 0x1fd000001fd3, + 0x1fd600001fd8, + 0x1fe000001fe3, + 0x1fe400001fe8, + 0x1ff600001ff7, + 0x214e0000214f, + 0x218400002185, + 0x2c3000002c5f, + 0x2c6100002c62, + 0x2c6500002c67, + 0x2c6800002c69, + 0x2c6a00002c6b, + 0x2c6c00002c6d, + 0x2c7100002c72, + 0x2c7300002c75, + 0x2c7600002c7c, + 0x2c8100002c82, + 0x2c8300002c84, + 0x2c8500002c86, + 0x2c8700002c88, + 0x2c8900002c8a, + 0x2c8b00002c8c, + 0x2c8d00002c8e, + 0x2c8f00002c90, + 0x2c9100002c92, + 0x2c9300002c94, + 0x2c9500002c96, + 0x2c9700002c98, + 0x2c9900002c9a, + 0x2c9b00002c9c, + 0x2c9d00002c9e, + 0x2c9f00002ca0, + 0x2ca100002ca2, + 0x2ca300002ca4, + 0x2ca500002ca6, + 0x2ca700002ca8, + 0x2ca900002caa, + 0x2cab00002cac, + 0x2cad00002cae, + 0x2caf00002cb0, + 0x2cb100002cb2, + 0x2cb300002cb4, + 0x2cb500002cb6, + 
0x2cb700002cb8, + 0x2cb900002cba, + 0x2cbb00002cbc, + 0x2cbd00002cbe, + 0x2cbf00002cc0, + 0x2cc100002cc2, + 0x2cc300002cc4, + 0x2cc500002cc6, + 0x2cc700002cc8, + 0x2cc900002cca, + 0x2ccb00002ccc, + 0x2ccd00002cce, + 0x2ccf00002cd0, + 0x2cd100002cd2, + 0x2cd300002cd4, + 0x2cd500002cd6, + 0x2cd700002cd8, + 0x2cd900002cda, + 0x2cdb00002cdc, + 0x2cdd00002cde, + 0x2cdf00002ce0, + 0x2ce100002ce2, + 0x2ce300002ce5, + 0x2cec00002ced, + 0x2cee00002cf2, + 0x2cf300002cf4, + 0x2d0000002d26, + 0x2d2700002d28, + 0x2d2d00002d2e, + 0x2d3000002d68, + 0x2d7f00002d97, + 0x2da000002da7, + 0x2da800002daf, + 0x2db000002db7, + 0x2db800002dbf, + 0x2dc000002dc7, + 0x2dc800002dcf, + 0x2dd000002dd7, + 0x2dd800002ddf, + 0x2de000002e00, + 0x2e2f00002e30, + 0x300500003008, + 0x302a0000302e, + 0x303c0000303d, + 0x304100003097, + 0x30990000309b, + 0x309d0000309f, + 0x30a1000030fb, + 0x30fc000030ff, + 0x310500003130, + 0x31a0000031c0, + 0x31f000003200, + 0x340000004dc0, + 0x4e0000009ffd, + 0xa0000000a48d, + 0xa4d00000a4fe, + 0xa5000000a60d, + 0xa6100000a62c, + 0xa6410000a642, + 0xa6430000a644, + 0xa6450000a646, + 0xa6470000a648, + 0xa6490000a64a, + 0xa64b0000a64c, + 0xa64d0000a64e, + 0xa64f0000a650, + 0xa6510000a652, + 0xa6530000a654, + 0xa6550000a656, + 0xa6570000a658, + 0xa6590000a65a, + 0xa65b0000a65c, + 0xa65d0000a65e, + 0xa65f0000a660, + 0xa6610000a662, + 0xa6630000a664, + 0xa6650000a666, + 0xa6670000a668, + 0xa6690000a66a, + 0xa66b0000a66c, + 0xa66d0000a670, + 0xa6740000a67e, + 0xa67f0000a680, + 0xa6810000a682, + 0xa6830000a684, + 0xa6850000a686, + 0xa6870000a688, + 0xa6890000a68a, + 0xa68b0000a68c, + 0xa68d0000a68e, + 0xa68f0000a690, + 0xa6910000a692, + 0xa6930000a694, + 0xa6950000a696, + 0xa6970000a698, + 0xa6990000a69a, + 0xa69b0000a69c, + 0xa69e0000a6e6, + 0xa6f00000a6f2, + 0xa7170000a720, + 0xa7230000a724, + 0xa7250000a726, + 0xa7270000a728, + 0xa7290000a72a, + 0xa72b0000a72c, + 0xa72d0000a72e, + 0xa72f0000a732, + 0xa7330000a734, + 0xa7350000a736, + 0xa7370000a738, + 0xa7390000a73a, + 0xa73b0000a73c, + 0xa73d0000a73e, + 0xa73f0000a740, + 0xa7410000a742, + 0xa7430000a744, + 0xa7450000a746, + 0xa7470000a748, + 0xa7490000a74a, + 0xa74b0000a74c, + 0xa74d0000a74e, + 0xa74f0000a750, + 0xa7510000a752, + 0xa7530000a754, + 0xa7550000a756, + 0xa7570000a758, + 0xa7590000a75a, + 0xa75b0000a75c, + 0xa75d0000a75e, + 0xa75f0000a760, + 0xa7610000a762, + 0xa7630000a764, + 0xa7650000a766, + 0xa7670000a768, + 0xa7690000a76a, + 0xa76b0000a76c, + 0xa76d0000a76e, + 0xa76f0000a770, + 0xa7710000a779, + 0xa77a0000a77b, + 0xa77c0000a77d, + 0xa77f0000a780, + 0xa7810000a782, + 0xa7830000a784, + 0xa7850000a786, + 0xa7870000a789, + 0xa78c0000a78d, + 0xa78e0000a790, + 0xa7910000a792, + 0xa7930000a796, + 0xa7970000a798, + 0xa7990000a79a, + 0xa79b0000a79c, + 0xa79d0000a79e, + 0xa79f0000a7a0, + 0xa7a10000a7a2, + 0xa7a30000a7a4, + 0xa7a50000a7a6, + 0xa7a70000a7a8, + 0xa7a90000a7aa, + 0xa7af0000a7b0, + 0xa7b50000a7b6, + 0xa7b70000a7b8, + 0xa7b90000a7ba, + 0xa7bb0000a7bc, + 0xa7bd0000a7be, + 0xa7bf0000a7c0, + 0xa7c30000a7c4, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7f60000a7f8, + 0xa7fa0000a828, + 0xa82c0000a82d, + 0xa8400000a874, + 0xa8800000a8c6, + 0xa8d00000a8da, + 0xa8e00000a8f8, + 0xa8fb0000a8fc, + 0xa8fd0000a92e, + 0xa9300000a954, + 0xa9800000a9c1, + 0xa9cf0000a9da, + 0xa9e00000a9ff, + 0xaa000000aa37, + 0xaa400000aa4e, + 0xaa500000aa5a, + 0xaa600000aa77, + 0xaa7a0000aac3, + 0xaadb0000aade, + 0xaae00000aaf0, + 0xaaf20000aaf7, + 0xab010000ab07, + 0xab090000ab0f, + 0xab110000ab17, + 0xab200000ab27, + 0xab280000ab2f, + 0xab300000ab5b, + 
0xab600000ab6a, + 0xabc00000abeb, + 0xabec0000abee, + 0xabf00000abfa, + 0xac000000d7a4, + 0xfa0e0000fa10, + 0xfa110000fa12, + 0xfa130000fa15, + 0xfa1f0000fa20, + 0xfa210000fa22, + 0xfa230000fa25, + 0xfa270000fa2a, + 0xfb1e0000fb1f, + 0xfe200000fe30, + 0xfe730000fe74, + 0x100000001000c, + 0x1000d00010027, + 0x100280001003b, + 0x1003c0001003e, + 0x1003f0001004e, + 0x100500001005e, + 0x10080000100fb, + 0x101fd000101fe, + 0x102800001029d, + 0x102a0000102d1, + 0x102e0000102e1, + 0x1030000010320, + 0x1032d00010341, + 0x103420001034a, + 0x103500001037b, + 0x103800001039e, + 0x103a0000103c4, + 0x103c8000103d0, + 0x104280001049e, + 0x104a0000104aa, + 0x104d8000104fc, + 0x1050000010528, + 0x1053000010564, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1080000010806, + 0x1080800010809, + 0x1080a00010836, + 0x1083700010839, + 0x1083c0001083d, + 0x1083f00010856, + 0x1086000010877, + 0x108800001089f, + 0x108e0000108f3, + 0x108f4000108f6, + 0x1090000010916, + 0x109200001093a, + 0x10980000109b8, + 0x109be000109c0, + 0x10a0000010a04, + 0x10a0500010a07, + 0x10a0c00010a14, + 0x10a1500010a18, + 0x10a1900010a36, + 0x10a3800010a3b, + 0x10a3f00010a40, + 0x10a6000010a7d, + 0x10a8000010a9d, + 0x10ac000010ac8, + 0x10ac900010ae7, + 0x10b0000010b36, + 0x10b4000010b56, + 0x10b6000010b73, + 0x10b8000010b92, + 0x10c0000010c49, + 0x10cc000010cf3, + 0x10d0000010d28, + 0x10d3000010d3a, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, + 0x10f0000010f1d, + 0x10f2700010f28, + 0x10f3000010f51, + 0x10fb000010fc5, + 0x10fe000010ff7, + 0x1100000011047, + 0x1106600011070, + 0x1107f000110bb, + 0x110d0000110e9, + 0x110f0000110fa, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 0x11180000111c5, + 0x111c9000111cd, + 0x111ce000111db, + 0x111dc000111dd, + 0x1120000011212, + 0x1121300011238, + 0x1123e0001123f, + 0x1128000011287, + 0x1128800011289, + 0x1128a0001128e, + 0x1128f0001129e, + 0x1129f000112a9, + 0x112b0000112eb, + 0x112f0000112fa, + 0x1130000011304, + 0x113050001130d, + 0x1130f00011311, + 0x1131300011329, + 0x1132a00011331, + 0x1133200011334, + 0x113350001133a, + 0x1133b00011345, + 0x1134700011349, + 0x1134b0001134e, + 0x1135000011351, + 0x1135700011358, + 0x1135d00011364, + 0x113660001136d, + 0x1137000011375, + 0x114000001144b, + 0x114500001145a, + 0x1145e00011462, + 0x11480000114c6, + 0x114c7000114c8, + 0x114d0000114da, + 0x11580000115b6, + 0x115b8000115c1, + 0x115d8000115de, + 0x1160000011641, + 0x1164400011645, + 0x116500001165a, + 0x11680000116b9, + 0x116c0000116ca, + 0x117000001171b, + 0x1171d0001172c, + 0x117300001173a, + 0x118000001183b, + 0x118c0000118ea, + 0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, + 0x119a0000119a8, + 0x119aa000119d8, + 0x119da000119e2, + 0x119e3000119e5, + 0x11a0000011a3f, + 0x11a4700011a48, + 0x11a5000011a9a, + 0x11a9d00011a9e, + 0x11ac000011af9, + 0x11c0000011c09, + 0x11c0a00011c37, + 0x11c3800011c41, + 0x11c5000011c5a, + 0x11c7200011c90, + 0x11c9200011ca8, + 0x11ca900011cb7, + 0x11d0000011d07, + 0x11d0800011d0a, + 0x11d0b00011d37, + 0x11d3a00011d3b, + 0x11d3c00011d3e, + 0x11d3f00011d48, + 0x11d5000011d5a, + 0x11d6000011d66, + 0x11d6700011d69, + 0x11d6a00011d8f, + 0x11d9000011d92, + 0x11d9300011d99, + 0x11da000011daa, + 0x11ee000011ef7, + 0x11fb000011fb1, + 0x120000001239a, + 0x1248000012544, + 0x130000001342f, + 0x1440000014647, + 0x1680000016a39, + 0x16a4000016a5f, + 0x16a6000016a6a, + 0x16ad000016aee, + 0x16af000016af5, 
+ 0x16b0000016b37, + 0x16b4000016b44, + 0x16b5000016b5a, + 0x16b6300016b78, + 0x16b7d00016b90, + 0x16e6000016e80, + 0x16f0000016f4b, + 0x16f4f00016f88, + 0x16f8f00016fa0, + 0x16fe000016fe2, + 0x16fe300016fe5, + 0x16ff000016ff2, + 0x17000000187f8, + 0x1880000018cd6, + 0x18d0000018d09, + 0x1b0000001b11f, + 0x1b1500001b153, + 0x1b1640001b168, + 0x1b1700001b2fc, + 0x1bc000001bc6b, + 0x1bc700001bc7d, + 0x1bc800001bc89, + 0x1bc900001bc9a, + 0x1bc9d0001bc9f, + 0x1da000001da37, + 0x1da3b0001da6d, + 0x1da750001da76, + 0x1da840001da85, + 0x1da9b0001daa0, + 0x1daa10001dab0, + 0x1e0000001e007, + 0x1e0080001e019, + 0x1e01b0001e022, + 0x1e0230001e025, + 0x1e0260001e02b, + 0x1e1000001e12d, + 0x1e1300001e13e, + 0x1e1400001e14a, + 0x1e14e0001e14f, + 0x1e2c00001e2fa, + 0x1e8000001e8c5, + 0x1e8d00001e8d7, + 0x1e9220001e94c, + 0x1e9500001e95a, + 0x1fbf00001fbfa, + 0x200000002a6de, + 0x2a7000002b735, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x300000003134b, + ), + 'CONTEXTJ': ( + 0x200c0000200e, + ), + 'CONTEXTO': ( + 0xb7000000b8, + 0x37500000376, + 0x5f3000005f5, + 0x6600000066a, + 0x6f0000006fa, + 0x30fb000030fc, + ), +} diff --git a/minor_project/lib/python3.6/site-packages/idna/intranges.py b/minor_project/lib/python3.6/site-packages/idna/intranges.py new file mode 100644 index 0000000..fa8a735 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna/intranges.py @@ -0,0 +1,53 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect + +def intranges_from_list(list_): + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. 
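+
+ Illustrative example (added here for clarity; the names below are the
+ module's own helpers): the input [1, 2, 3, 9, 10] collapses to the runs
+ [1, 4) and [9, 11), so the returned representation is
+ (_encode_range(1, 4), _encode_range(9, 11)), i.e.
+ ((1 << 32) | 4, (9 << 32) | 11). The codepoint_classes ranges above
+ (e.g. 0x2d0000002e) appear to use the same packed encoding:
+ (0x2d << 32) | 0x2e, covering the half-open range [0x2d, 0x2e).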
+ """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i+1 < len(sorted_list): + if sorted_list[i] == sorted_list[i+1]-1: + continue + current_range = sorted_list[last_write+1:i+1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + +def _encode_range(start, end): + return (start << 32) | end + +def _decode_range(r): + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_, ranges): + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos-1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/minor_project/lib/python3.6/site-packages/idna/package_data.py b/minor_project/lib/python3.6/site-packages/idna/package_data.py new file mode 100644 index 0000000..ce1c521 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna/package_data.py @@ -0,0 +1,2 @@ +__version__ = '2.10' + diff --git a/minor_project/lib/python3.6/site-packages/idna/uts46data.py b/minor_project/lib/python3.6/site-packages/idna/uts46data.py new file mode 100644 index 0000000..3766dd4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/idna/uts46data.py @@ -0,0 +1,8357 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = "13.0.0" +def _seg_0(): + return [ + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', u'a'), + (0x42, 'M', u'b'), + (0x43, 'M', u'c'), + (0x44, 'M', u'd'), + (0x45, 'M', u'e'), + (0x46, 'M', u'f'), + (0x47, 'M', u'g'), + (0x48, 'M', u'h'), + (0x49, 'M', u'i'), + (0x4A, 'M', u'j'), + (0x4B, 'M', u'k'), + (0x4C, 'M', u'l'), + (0x4D, 'M', u'm'), + (0x4E, 'M', u'n'), + (0x4F, 'M', u'o'), + (0x50, 'M', u'p'), + (0x51, 'M', u'q'), + (0x52, 'M', u'r'), + (0x53, 'M', u's'), + (0x54, 'M', u't'), + (0x55, 'M', u'u'), + (0x56, 'M', u'v'), + (0x57, 'M', u'w'), + (0x58, 'M', u'x'), + (0x59, 'M', u'y'), + (0x5A, 'M', u'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + ] + +def _seg_1(): + return [ + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 
'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', u' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', u' ̈'), + (0xA9, 'V'), + (0xAA, 'M', u'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', u' Ì„'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', u'2'), + (0xB3, 'M', u'3'), + (0xB4, '3', u' Ì'), + (0xB5, 'M', u'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', u' ̧'), + (0xB9, 'M', u'1'), + (0xBA, 'M', u'o'), + (0xBB, 'V'), + (0xBC, 'M', u'1â„4'), + (0xBD, 'M', u'1â„2'), + (0xBE, 'M', u'3â„4'), + (0xBF, 'V'), + (0xC0, 'M', u'à'), + (0xC1, 'M', u'á'), + (0xC2, 'M', u'â'), + (0xC3, 'M', u'ã'), + (0xC4, 'M', u'ä'), + (0xC5, 'M', u'Ã¥'), + (0xC6, 'M', u'æ'), + (0xC7, 'M', u'ç'), + ] + +def _seg_2(): + return [ + (0xC8, 'M', u'è'), + (0xC9, 'M', u'é'), + (0xCA, 'M', u'ê'), + (0xCB, 'M', u'ë'), + (0xCC, 'M', u'ì'), + (0xCD, 'M', u'í'), + (0xCE, 'M', u'î'), + (0xCF, 'M', u'ï'), + (0xD0, 'M', u'ð'), + (0xD1, 'M', u'ñ'), + (0xD2, 'M', u'ò'), + (0xD3, 'M', u'ó'), + (0xD4, 'M', u'ô'), + (0xD5, 'M', u'õ'), + (0xD6, 'M', u'ö'), + (0xD7, 'V'), + (0xD8, 'M', u'ø'), + (0xD9, 'M', u'ù'), + (0xDA, 'M', u'ú'), + (0xDB, 'M', u'û'), + (0xDC, 'M', u'ü'), + (0xDD, 'M', u'ý'), + (0xDE, 'M', u'þ'), + (0xDF, 'D', u'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', u'Ä'), + (0x101, 'V'), + (0x102, 'M', u'ă'), + (0x103, 'V'), + (0x104, 'M', u'Ä…'), + (0x105, 'V'), + (0x106, 'M', u'ć'), + (0x107, 'V'), + (0x108, 'M', u'ĉ'), + (0x109, 'V'), + (0x10A, 'M', u'Ä‹'), + (0x10B, 'V'), + (0x10C, 'M', u'Ä'), + (0x10D, 'V'), + (0x10E, 'M', u'Ä'), + (0x10F, 'V'), + (0x110, 'M', u'Ä‘'), + (0x111, 'V'), + (0x112, 'M', u'Ä“'), + (0x113, 'V'), + (0x114, 'M', u'Ä•'), + (0x115, 'V'), + (0x116, 'M', u'Ä—'), + (0x117, 'V'), + (0x118, 'M', u'Ä™'), + (0x119, 'V'), + (0x11A, 'M', u'Ä›'), + (0x11B, 'V'), + (0x11C, 'M', u'Ä'), + (0x11D, 'V'), + (0x11E, 'M', u'ÄŸ'), + (0x11F, 'V'), + (0x120, 'M', u'Ä¡'), + (0x121, 'V'), + (0x122, 'M', u'Ä£'), + (0x123, 'V'), + (0x124, 'M', u'Ä¥'), + (0x125, 'V'), + (0x126, 'M', u'ħ'), + (0x127, 'V'), + (0x128, 'M', u'Ä©'), + (0x129, 'V'), + (0x12A, 'M', u'Ä«'), + (0x12B, 'V'), + ] + +def _seg_3(): + return [ + (0x12C, 'M', u'Ä­'), + (0x12D, 'V'), + (0x12E, 'M', u'į'), + (0x12F, 'V'), + (0x130, 'M', 
u'i̇'), + (0x131, 'V'), + (0x132, 'M', u'ij'), + (0x134, 'M', u'ĵ'), + (0x135, 'V'), + (0x136, 'M', u'Ä·'), + (0x137, 'V'), + (0x139, 'M', u'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', u'ļ'), + (0x13C, 'V'), + (0x13D, 'M', u'ľ'), + (0x13E, 'V'), + (0x13F, 'M', u'l·'), + (0x141, 'M', u'Å‚'), + (0x142, 'V'), + (0x143, 'M', u'Å„'), + (0x144, 'V'), + (0x145, 'M', u'ņ'), + (0x146, 'V'), + (0x147, 'M', u'ň'), + (0x148, 'V'), + (0x149, 'M', u'ʼn'), + (0x14A, 'M', u'Å‹'), + (0x14B, 'V'), + (0x14C, 'M', u'Å'), + (0x14D, 'V'), + (0x14E, 'M', u'Å'), + (0x14F, 'V'), + (0x150, 'M', u'Å‘'), + (0x151, 'V'), + (0x152, 'M', u'Å“'), + (0x153, 'V'), + (0x154, 'M', u'Å•'), + (0x155, 'V'), + (0x156, 'M', u'Å—'), + (0x157, 'V'), + (0x158, 'M', u'Å™'), + (0x159, 'V'), + (0x15A, 'M', u'Å›'), + (0x15B, 'V'), + (0x15C, 'M', u'Å'), + (0x15D, 'V'), + (0x15E, 'M', u'ÅŸ'), + (0x15F, 'V'), + (0x160, 'M', u'Å¡'), + (0x161, 'V'), + (0x162, 'M', u'Å£'), + (0x163, 'V'), + (0x164, 'M', u'Å¥'), + (0x165, 'V'), + (0x166, 'M', u'ŧ'), + (0x167, 'V'), + (0x168, 'M', u'Å©'), + (0x169, 'V'), + (0x16A, 'M', u'Å«'), + (0x16B, 'V'), + (0x16C, 'M', u'Å­'), + (0x16D, 'V'), + (0x16E, 'M', u'ů'), + (0x16F, 'V'), + (0x170, 'M', u'ű'), + (0x171, 'V'), + (0x172, 'M', u'ų'), + (0x173, 'V'), + (0x174, 'M', u'ŵ'), + (0x175, 'V'), + (0x176, 'M', u'Å·'), + (0x177, 'V'), + (0x178, 'M', u'ÿ'), + (0x179, 'M', u'ź'), + (0x17A, 'V'), + (0x17B, 'M', u'ż'), + (0x17C, 'V'), + (0x17D, 'M', u'ž'), + (0x17E, 'V'), + (0x17F, 'M', u's'), + (0x180, 'V'), + (0x181, 'M', u'É“'), + (0x182, 'M', u'ƃ'), + (0x183, 'V'), + (0x184, 'M', u'Æ…'), + (0x185, 'V'), + (0x186, 'M', u'É”'), + (0x187, 'M', u'ƈ'), + (0x188, 'V'), + (0x189, 'M', u'É–'), + (0x18A, 'M', u'É—'), + (0x18B, 'M', u'ÆŒ'), + (0x18C, 'V'), + (0x18E, 'M', u'Ç'), + (0x18F, 'M', u'É™'), + (0x190, 'M', u'É›'), + (0x191, 'M', u'Æ’'), + (0x192, 'V'), + (0x193, 'M', u'É '), + ] + +def _seg_4(): + return [ + (0x194, 'M', u'É£'), + (0x195, 'V'), + (0x196, 'M', u'É©'), + (0x197, 'M', u'ɨ'), + (0x198, 'M', u'Æ™'), + (0x199, 'V'), + (0x19C, 'M', u'ɯ'), + (0x19D, 'M', u'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', u'ɵ'), + (0x1A0, 'M', u'Æ¡'), + (0x1A1, 'V'), + (0x1A2, 'M', u'Æ£'), + (0x1A3, 'V'), + (0x1A4, 'M', u'Æ¥'), + (0x1A5, 'V'), + (0x1A6, 'M', u'Ê€'), + (0x1A7, 'M', u'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', u'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', u'Æ­'), + (0x1AD, 'V'), + (0x1AE, 'M', u'ʈ'), + (0x1AF, 'M', u'ư'), + (0x1B0, 'V'), + (0x1B1, 'M', u'ÊŠ'), + (0x1B2, 'M', u'Ê‹'), + (0x1B3, 'M', u'Æ´'), + (0x1B4, 'V'), + (0x1B5, 'M', u'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', u'Ê’'), + (0x1B8, 'M', u'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', u'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', u'dž'), + (0x1C7, 'M', u'lj'), + (0x1CA, 'M', u'nj'), + (0x1CD, 'M', u'ÇŽ'), + (0x1CE, 'V'), + (0x1CF, 'M', u'Ç'), + (0x1D0, 'V'), + (0x1D1, 'M', u'Ç’'), + (0x1D2, 'V'), + (0x1D3, 'M', u'Ç”'), + (0x1D4, 'V'), + (0x1D5, 'M', u'Ç–'), + (0x1D6, 'V'), + (0x1D7, 'M', u'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', u'Çš'), + (0x1DA, 'V'), + (0x1DB, 'M', u'Çœ'), + (0x1DC, 'V'), + (0x1DE, 'M', u'ÇŸ'), + (0x1DF, 'V'), + (0x1E0, 'M', u'Ç¡'), + (0x1E1, 'V'), + (0x1E2, 'M', u'Ç£'), + (0x1E3, 'V'), + (0x1E4, 'M', u'Ç¥'), + (0x1E5, 'V'), + (0x1E6, 'M', u'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', u'Ç©'), + (0x1E9, 'V'), + (0x1EA, 'M', u'Ç«'), + (0x1EB, 'V'), + (0x1EC, 'M', u'Ç­'), + (0x1ED, 'V'), + (0x1EE, 'M', u'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', u'dz'), + (0x1F4, 'M', u'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', u'Æ•'), + (0x1F7, 'M', u'Æ¿'), + (0x1F8, 'M', u'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', u'Ç»'), + 
(0x1FB, 'V'), + (0x1FC, 'M', u'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', u'Ç¿'), + (0x1FF, 'V'), + (0x200, 'M', u'È'), + (0x201, 'V'), + (0x202, 'M', u'ȃ'), + (0x203, 'V'), + (0x204, 'M', u'È…'), + (0x205, 'V'), + (0x206, 'M', u'ȇ'), + (0x207, 'V'), + (0x208, 'M', u'ȉ'), + (0x209, 'V'), + (0x20A, 'M', u'È‹'), + (0x20B, 'V'), + (0x20C, 'M', u'È'), + ] + +def _seg_5(): + return [ + (0x20D, 'V'), + (0x20E, 'M', u'È'), + (0x20F, 'V'), + (0x210, 'M', u'È‘'), + (0x211, 'V'), + (0x212, 'M', u'È“'), + (0x213, 'V'), + (0x214, 'M', u'È•'), + (0x215, 'V'), + (0x216, 'M', u'È—'), + (0x217, 'V'), + (0x218, 'M', u'È™'), + (0x219, 'V'), + (0x21A, 'M', u'È›'), + (0x21B, 'V'), + (0x21C, 'M', u'È'), + (0x21D, 'V'), + (0x21E, 'M', u'ÈŸ'), + (0x21F, 'V'), + (0x220, 'M', u'Æž'), + (0x221, 'V'), + (0x222, 'M', u'È£'), + (0x223, 'V'), + (0x224, 'M', u'È¥'), + (0x225, 'V'), + (0x226, 'M', u'ȧ'), + (0x227, 'V'), + (0x228, 'M', u'È©'), + (0x229, 'V'), + (0x22A, 'M', u'È«'), + (0x22B, 'V'), + (0x22C, 'M', u'È­'), + (0x22D, 'V'), + (0x22E, 'M', u'ȯ'), + (0x22F, 'V'), + (0x230, 'M', u'ȱ'), + (0x231, 'V'), + (0x232, 'M', u'ȳ'), + (0x233, 'V'), + (0x23A, 'M', u'â±¥'), + (0x23B, 'M', u'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', u'Æš'), + (0x23E, 'M', u'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', u'É‚'), + (0x242, 'V'), + (0x243, 'M', u'Æ€'), + (0x244, 'M', u'ʉ'), + (0x245, 'M', u'ÊŒ'), + (0x246, 'M', u'ɇ'), + (0x247, 'V'), + (0x248, 'M', u'ɉ'), + (0x249, 'V'), + (0x24A, 'M', u'É‹'), + (0x24B, 'V'), + (0x24C, 'M', u'É'), + (0x24D, 'V'), + (0x24E, 'M', u'É'), + (0x24F, 'V'), + (0x2B0, 'M', u'h'), + (0x2B1, 'M', u'ɦ'), + (0x2B2, 'M', u'j'), + (0x2B3, 'M', u'r'), + (0x2B4, 'M', u'ɹ'), + (0x2B5, 'M', u'É»'), + (0x2B6, 'M', u'Ê'), + (0x2B7, 'M', u'w'), + (0x2B8, 'M', u'y'), + (0x2B9, 'V'), + (0x2D8, '3', u' ̆'), + (0x2D9, '3', u' ̇'), + (0x2DA, '3', u' ÌŠ'), + (0x2DB, '3', u' ̨'), + (0x2DC, '3', u' ̃'), + (0x2DD, '3', u' Ì‹'), + (0x2DE, 'V'), + (0x2E0, 'M', u'É£'), + (0x2E1, 'M', u'l'), + (0x2E2, 'M', u's'), + (0x2E3, 'M', u'x'), + (0x2E4, 'M', u'Ê•'), + (0x2E5, 'V'), + (0x340, 'M', u'Ì€'), + (0x341, 'M', u'Ì'), + (0x342, 'V'), + (0x343, 'M', u'Ì“'), + (0x344, 'M', u'̈Ì'), + (0x345, 'M', u'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', u'ͱ'), + (0x371, 'V'), + (0x372, 'M', u'ͳ'), + (0x373, 'V'), + (0x374, 'M', u'ʹ'), + (0x375, 'V'), + (0x376, 'M', u'Í·'), + (0x377, 'V'), + ] + +def _seg_6(): + return [ + (0x378, 'X'), + (0x37A, '3', u' ι'), + (0x37B, 'V'), + (0x37E, '3', u';'), + (0x37F, 'M', u'ϳ'), + (0x380, 'X'), + (0x384, '3', u' Ì'), + (0x385, '3', u' ̈Ì'), + (0x386, 'M', u'ά'), + (0x387, 'M', u'·'), + (0x388, 'M', u'έ'), + (0x389, 'M', u'ή'), + (0x38A, 'M', u'ί'), + (0x38B, 'X'), + (0x38C, 'M', u'ÏŒ'), + (0x38D, 'X'), + (0x38E, 'M', u'Ï'), + (0x38F, 'M', u'ÏŽ'), + (0x390, 'V'), + (0x391, 'M', u'α'), + (0x392, 'M', u'β'), + (0x393, 'M', u'γ'), + (0x394, 'M', u'δ'), + (0x395, 'M', u'ε'), + (0x396, 'M', u'ζ'), + (0x397, 'M', u'η'), + (0x398, 'M', u'θ'), + (0x399, 'M', u'ι'), + (0x39A, 'M', u'κ'), + (0x39B, 'M', u'λ'), + (0x39C, 'M', u'μ'), + (0x39D, 'M', u'ν'), + (0x39E, 'M', u'ξ'), + (0x39F, 'M', u'ο'), + (0x3A0, 'M', u'Ï€'), + (0x3A1, 'M', u'Ï'), + (0x3A2, 'X'), + (0x3A3, 'M', u'σ'), + (0x3A4, 'M', u'Ï„'), + (0x3A5, 'M', u'Ï…'), + (0x3A6, 'M', u'φ'), + (0x3A7, 'M', u'χ'), + (0x3A8, 'M', u'ψ'), + (0x3A9, 'M', u'ω'), + (0x3AA, 'M', u'ÏŠ'), + (0x3AB, 'M', u'Ï‹'), + (0x3AC, 'V'), + (0x3C2, 'D', u'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', u'Ï—'), + (0x3D0, 'M', u'β'), + (0x3D1, 'M', u'θ'), + (0x3D2, 'M', u'Ï…'), + (0x3D3, 'M', u'Ï'), 
+ (0x3D4, 'M', u'Ï‹'), + (0x3D5, 'M', u'φ'), + (0x3D6, 'M', u'Ï€'), + (0x3D7, 'V'), + (0x3D8, 'M', u'Ï™'), + (0x3D9, 'V'), + (0x3DA, 'M', u'Ï›'), + (0x3DB, 'V'), + (0x3DC, 'M', u'Ï'), + (0x3DD, 'V'), + (0x3DE, 'M', u'ÏŸ'), + (0x3DF, 'V'), + (0x3E0, 'M', u'Ï¡'), + (0x3E1, 'V'), + (0x3E2, 'M', u'Ï£'), + (0x3E3, 'V'), + (0x3E4, 'M', u'Ï¥'), + (0x3E5, 'V'), + (0x3E6, 'M', u'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', u'Ï©'), + (0x3E9, 'V'), + (0x3EA, 'M', u'Ï«'), + (0x3EB, 'V'), + (0x3EC, 'M', u'Ï­'), + (0x3ED, 'V'), + (0x3EE, 'M', u'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', u'κ'), + (0x3F1, 'M', u'Ï'), + (0x3F2, 'M', u'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', u'θ'), + (0x3F5, 'M', u'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', u'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', u'σ'), + (0x3FA, 'M', u'Ï»'), + (0x3FB, 'V'), + (0x3FD, 'M', u'Í»'), + (0x3FE, 'M', u'ͼ'), + (0x3FF, 'M', u'ͽ'), + (0x400, 'M', u'Ñ'), + (0x401, 'M', u'Ñ‘'), + (0x402, 'M', u'Ñ’'), + ] + +def _seg_7(): + return [ + (0x403, 'M', u'Ñ“'), + (0x404, 'M', u'Ñ”'), + (0x405, 'M', u'Ñ•'), + (0x406, 'M', u'Ñ–'), + (0x407, 'M', u'Ñ—'), + (0x408, 'M', u'ј'), + (0x409, 'M', u'Ñ™'), + (0x40A, 'M', u'Ñš'), + (0x40B, 'M', u'Ñ›'), + (0x40C, 'M', u'Ñœ'), + (0x40D, 'M', u'Ñ'), + (0x40E, 'M', u'Ñž'), + (0x40F, 'M', u'ÑŸ'), + (0x410, 'M', u'а'), + (0x411, 'M', u'б'), + (0x412, 'M', u'в'), + (0x413, 'M', u'г'), + (0x414, 'M', u'д'), + (0x415, 'M', u'е'), + (0x416, 'M', u'ж'), + (0x417, 'M', u'з'), + (0x418, 'M', u'и'), + (0x419, 'M', u'й'), + (0x41A, 'M', u'к'), + (0x41B, 'M', u'л'), + (0x41C, 'M', u'м'), + (0x41D, 'M', u'н'), + (0x41E, 'M', u'о'), + (0x41F, 'M', u'п'), + (0x420, 'M', u'Ñ€'), + (0x421, 'M', u'Ñ'), + (0x422, 'M', u'Ñ‚'), + (0x423, 'M', u'у'), + (0x424, 'M', u'Ñ„'), + (0x425, 'M', u'Ñ…'), + (0x426, 'M', u'ц'), + (0x427, 'M', u'ч'), + (0x428, 'M', u'ш'), + (0x429, 'M', u'щ'), + (0x42A, 'M', u'ÑŠ'), + (0x42B, 'M', u'Ñ‹'), + (0x42C, 'M', u'ÑŒ'), + (0x42D, 'M', u'Ñ'), + (0x42E, 'M', u'ÑŽ'), + (0x42F, 'M', u'Ñ'), + (0x430, 'V'), + (0x460, 'M', u'Ñ¡'), + (0x461, 'V'), + (0x462, 'M', u'Ñ£'), + (0x463, 'V'), + (0x464, 'M', u'Ñ¥'), + (0x465, 'V'), + (0x466, 'M', u'ѧ'), + (0x467, 'V'), + (0x468, 'M', u'Ñ©'), + (0x469, 'V'), + (0x46A, 'M', u'Ñ«'), + (0x46B, 'V'), + (0x46C, 'M', u'Ñ­'), + (0x46D, 'V'), + (0x46E, 'M', u'ѯ'), + (0x46F, 'V'), + (0x470, 'M', u'ѱ'), + (0x471, 'V'), + (0x472, 'M', u'ѳ'), + (0x473, 'V'), + (0x474, 'M', u'ѵ'), + (0x475, 'V'), + (0x476, 'M', u'Ñ·'), + (0x477, 'V'), + (0x478, 'M', u'ѹ'), + (0x479, 'V'), + (0x47A, 'M', u'Ñ»'), + (0x47B, 'V'), + (0x47C, 'M', u'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', u'Ñ¿'), + (0x47F, 'V'), + (0x480, 'M', u'Ò'), + (0x481, 'V'), + (0x48A, 'M', u'Ò‹'), + (0x48B, 'V'), + (0x48C, 'M', u'Ò'), + (0x48D, 'V'), + (0x48E, 'M', u'Ò'), + (0x48F, 'V'), + (0x490, 'M', u'Ò‘'), + (0x491, 'V'), + (0x492, 'M', u'Ò“'), + (0x493, 'V'), + (0x494, 'M', u'Ò•'), + (0x495, 'V'), + (0x496, 'M', u'Ò—'), + (0x497, 'V'), + (0x498, 'M', u'Ò™'), + (0x499, 'V'), + (0x49A, 'M', u'Ò›'), + (0x49B, 'V'), + (0x49C, 'M', u'Ò'), + (0x49D, 'V'), + ] + +def _seg_8(): + return [ + (0x49E, 'M', u'ÒŸ'), + (0x49F, 'V'), + (0x4A0, 'M', u'Ò¡'), + (0x4A1, 'V'), + (0x4A2, 'M', u'Ò£'), + (0x4A3, 'V'), + (0x4A4, 'M', u'Ò¥'), + (0x4A5, 'V'), + (0x4A6, 'M', u'Ò§'), + (0x4A7, 'V'), + (0x4A8, 'M', u'Ò©'), + (0x4A9, 'V'), + (0x4AA, 'M', u'Ò«'), + (0x4AB, 'V'), + (0x4AC, 'M', u'Ò­'), + (0x4AD, 'V'), + (0x4AE, 'M', u'Ò¯'), + (0x4AF, 'V'), + (0x4B0, 'M', u'Ò±'), + (0x4B1, 'V'), + (0x4B2, 'M', u'Ò³'), + (0x4B3, 'V'), + (0x4B4, 'M', u'Òµ'), + (0x4B5, 'V'), + (0x4B6, 'M', u'Ò·'), + 
(0x4B7, 'V'), + (0x4B8, 'M', u'Ò¹'), + (0x4B9, 'V'), + (0x4BA, 'M', u'Ò»'), + (0x4BB, 'V'), + (0x4BC, 'M', u'Ò½'), + (0x4BD, 'V'), + (0x4BE, 'M', u'Ò¿'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', u'Ó‚'), + (0x4C2, 'V'), + (0x4C3, 'M', u'Ó„'), + (0x4C4, 'V'), + (0x4C5, 'M', u'Ó†'), + (0x4C6, 'V'), + (0x4C7, 'M', u'Óˆ'), + (0x4C8, 'V'), + (0x4C9, 'M', u'ÓŠ'), + (0x4CA, 'V'), + (0x4CB, 'M', u'ÓŒ'), + (0x4CC, 'V'), + (0x4CD, 'M', u'ÓŽ'), + (0x4CE, 'V'), + (0x4D0, 'M', u'Ó‘'), + (0x4D1, 'V'), + (0x4D2, 'M', u'Ó“'), + (0x4D3, 'V'), + (0x4D4, 'M', u'Ó•'), + (0x4D5, 'V'), + (0x4D6, 'M', u'Ó—'), + (0x4D7, 'V'), + (0x4D8, 'M', u'Ó™'), + (0x4D9, 'V'), + (0x4DA, 'M', u'Ó›'), + (0x4DB, 'V'), + (0x4DC, 'M', u'Ó'), + (0x4DD, 'V'), + (0x4DE, 'M', u'ÓŸ'), + (0x4DF, 'V'), + (0x4E0, 'M', u'Ó¡'), + (0x4E1, 'V'), + (0x4E2, 'M', u'Ó£'), + (0x4E3, 'V'), + (0x4E4, 'M', u'Ó¥'), + (0x4E5, 'V'), + (0x4E6, 'M', u'Ó§'), + (0x4E7, 'V'), + (0x4E8, 'M', u'Ó©'), + (0x4E9, 'V'), + (0x4EA, 'M', u'Ó«'), + (0x4EB, 'V'), + (0x4EC, 'M', u'Ó­'), + (0x4ED, 'V'), + (0x4EE, 'M', u'Ó¯'), + (0x4EF, 'V'), + (0x4F0, 'M', u'Ó±'), + (0x4F1, 'V'), + (0x4F2, 'M', u'Ó³'), + (0x4F3, 'V'), + (0x4F4, 'M', u'Óµ'), + (0x4F5, 'V'), + (0x4F6, 'M', u'Ó·'), + (0x4F7, 'V'), + (0x4F8, 'M', u'Ó¹'), + (0x4F9, 'V'), + (0x4FA, 'M', u'Ó»'), + (0x4FB, 'V'), + (0x4FC, 'M', u'Ó½'), + (0x4FD, 'V'), + (0x4FE, 'M', u'Ó¿'), + (0x4FF, 'V'), + (0x500, 'M', u'Ô'), + (0x501, 'V'), + (0x502, 'M', u'Ôƒ'), + ] + +def _seg_9(): + return [ + (0x503, 'V'), + (0x504, 'M', u'Ô…'), + (0x505, 'V'), + (0x506, 'M', u'Ô‡'), + (0x507, 'V'), + (0x508, 'M', u'Ô‰'), + (0x509, 'V'), + (0x50A, 'M', u'Ô‹'), + (0x50B, 'V'), + (0x50C, 'M', u'Ô'), + (0x50D, 'V'), + (0x50E, 'M', u'Ô'), + (0x50F, 'V'), + (0x510, 'M', u'Ô‘'), + (0x511, 'V'), + (0x512, 'M', u'Ô“'), + (0x513, 'V'), + (0x514, 'M', u'Ô•'), + (0x515, 'V'), + (0x516, 'M', u'Ô—'), + (0x517, 'V'), + (0x518, 'M', u'Ô™'), + (0x519, 'V'), + (0x51A, 'M', u'Ô›'), + (0x51B, 'V'), + (0x51C, 'M', u'Ô'), + (0x51D, 'V'), + (0x51E, 'M', u'ÔŸ'), + (0x51F, 'V'), + (0x520, 'M', u'Ô¡'), + (0x521, 'V'), + (0x522, 'M', u'Ô£'), + (0x523, 'V'), + (0x524, 'M', u'Ô¥'), + (0x525, 'V'), + (0x526, 'M', u'Ô§'), + (0x527, 'V'), + (0x528, 'M', u'Ô©'), + (0x529, 'V'), + (0x52A, 'M', u'Ô«'), + (0x52B, 'V'), + (0x52C, 'M', u'Ô­'), + (0x52D, 'V'), + (0x52E, 'M', u'Ô¯'), + (0x52F, 'V'), + (0x530, 'X'), + (0x531, 'M', u'Õ¡'), + (0x532, 'M', u'Õ¢'), + (0x533, 'M', u'Õ£'), + (0x534, 'M', u'Õ¤'), + (0x535, 'M', u'Õ¥'), + (0x536, 'M', u'Õ¦'), + (0x537, 'M', u'Õ§'), + (0x538, 'M', u'Õ¨'), + (0x539, 'M', u'Õ©'), + (0x53A, 'M', u'Õª'), + (0x53B, 'M', u'Õ«'), + (0x53C, 'M', u'Õ¬'), + (0x53D, 'M', u'Õ­'), + (0x53E, 'M', u'Õ®'), + (0x53F, 'M', u'Õ¯'), + (0x540, 'M', u'Õ°'), + (0x541, 'M', u'Õ±'), + (0x542, 'M', u'Õ²'), + (0x543, 'M', u'Õ³'), + (0x544, 'M', u'Õ´'), + (0x545, 'M', u'Õµ'), + (0x546, 'M', u'Õ¶'), + (0x547, 'M', u'Õ·'), + (0x548, 'M', u'Õ¸'), + (0x549, 'M', u'Õ¹'), + (0x54A, 'M', u'Õº'), + (0x54B, 'M', u'Õ»'), + (0x54C, 'M', u'Õ¼'), + (0x54D, 'M', u'Õ½'), + (0x54E, 'M', u'Õ¾'), + (0x54F, 'M', u'Õ¿'), + (0x550, 'M', u'Ö€'), + (0x551, 'M', u'Ö'), + (0x552, 'M', u'Ö‚'), + (0x553, 'M', u'Öƒ'), + (0x554, 'M', u'Ö„'), + (0x555, 'M', u'Ö…'), + (0x556, 'M', u'Ö†'), + (0x557, 'X'), + (0x559, 'V'), + (0x587, 'M', u'Õ¥Ö‚'), + (0x588, 'V'), + (0x58B, 'X'), + (0x58D, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5EF, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61E, 'V'), + ] + +def _seg_10(): + return [ + (0x675, 
'M', u'اٴ'), + (0x676, 'M', u'وٴ'), + (0x677, 'M', u'Û‡Ù´'), + (0x678, 'M', u'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x7FD, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x860, 'V'), + (0x86B, 'X'), + (0x8A0, 'V'), + (0x8B5, 'X'), + (0x8B6, 'V'), + (0x8C8, 'X'), + (0x8D3, 'V'), + (0x8E2, 'X'), + (0x8E3, 'V'), + (0x958, 'M', u'क़'), + (0x959, 'M', u'ख़'), + (0x95A, 'M', u'ग़'), + (0x95B, 'M', u'ज़'), + (0x95C, 'M', u'ड़'), + (0x95D, 'M', u'ढ़'), + (0x95E, 'M', u'फ़'), + (0x95F, 'M', u'य़'), + (0x960, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', u'ড়'), + (0x9DD, 'M', u'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', u'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FF, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', u'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', u'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', u'ਖ਼'), + (0xA5A, 'M', u'ਗ਼'), + (0xA5B, 'M', u'ਜ਼'), + ] + +def _seg_11(): + return [ + (0xA5C, 'V'), + (0xA5D, 'X'), + (0xA5E, 'M', u'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA77, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xAF9, 'V'), + (0xB00, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB55, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', u'ଡ଼'), + (0xB5D, 'M', u'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC00, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + ] + +def _seg_12(): + return [ + (0xC29, 'X'), + (0xC2A, 'V'), + (0xC3A, 'X'), + (0xC3D, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + 
(0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC77, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDE, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD00, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD81, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', u'à¹à¸²'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE86, 'V'), + (0xE8B, 'X'), + (0xE8C, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEB3, 'M', u'à»àº²'), + (0xEB4, 'V'), + ] + +def _seg_13(): + return [ + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', u'ຫນ'), + (0xEDD, 'M', u'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', u'་'), + (0xF0D, 'V'), + (0xF43, 'M', u'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', u'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', u'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', u'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', u'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', u'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', u'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', u'ཱུ'), + (0xF76, 'M', u'ྲྀ'), + (0xF77, 'M', u'ྲཱྀ'), + (0xF78, 'M', u'ླྀ'), + (0xF79, 'M', u'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', u'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', u'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', u'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', u'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', u'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', u'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', u'à¾à¾µ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', u'â´§'), + (0x10C8, 'X'), + (0x10CD, 'M', u'â´­'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', u'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + ] + +def _seg_14(): + return [ + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + (0x13F8, 'M', u'á°'), + 
(0x13F9, 'M', u'á±'), + (0x13FA, 'M', u'á²'), + (0x13FB, 'M', u'á³'), + (0x13FC, 'M', u'á´'), + (0x13FD, 'M', u'áµ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x170D, 'X'), + (0x170E, 'V'), + (0x1715, 'X'), + (0x1720, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1879, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1AC1, 'X'), + (0x1B00, 'V'), + (0x1B4C, 'X'), + (0x1B50, 'V'), + (0x1B7D, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + ] + +def _seg_15(): + return [ + (0x1C80, 'M', u'в'), + (0x1C81, 'M', u'д'), + (0x1C82, 'M', u'о'), + (0x1C83, 'M', u'Ñ'), + (0x1C84, 'M', u'Ñ‚'), + (0x1C86, 'M', u'ÑŠ'), + (0x1C87, 'M', u'Ñ£'), + (0x1C88, 'M', u'ꙋ'), + (0x1C89, 'X'), + (0x1C90, 'M', u'áƒ'), + (0x1C91, 'M', u'ბ'), + (0x1C92, 'M', u'გ'), + (0x1C93, 'M', u'დ'), + (0x1C94, 'M', u'ე'), + (0x1C95, 'M', u'ვ'), + (0x1C96, 'M', u'ზ'), + (0x1C97, 'M', u'თ'), + (0x1C98, 'M', u'ი'), + (0x1C99, 'M', u'კ'), + (0x1C9A, 'M', u'ლ'), + (0x1C9B, 'M', u'მ'), + (0x1C9C, 'M', u'ნ'), + (0x1C9D, 'M', u'áƒ'), + (0x1C9E, 'M', u'პ'), + (0x1C9F, 'M', u'ჟ'), + (0x1CA0, 'M', u'რ'), + (0x1CA1, 'M', u'ს'), + (0x1CA2, 'M', u'ტ'), + (0x1CA3, 'M', u'უ'), + (0x1CA4, 'M', u'ფ'), + (0x1CA5, 'M', u'ქ'), + (0x1CA6, 'M', u'ღ'), + (0x1CA7, 'M', u'ყ'), + (0x1CA8, 'M', u'შ'), + (0x1CA9, 'M', u'ჩ'), + (0x1CAA, 'M', u'ც'), + (0x1CAB, 'M', u'ძ'), + (0x1CAC, 'M', u'წ'), + (0x1CAD, 'M', u'ჭ'), + (0x1CAE, 'M', u'ხ'), + (0x1CAF, 'M', u'ჯ'), + (0x1CB0, 'M', u'ჰ'), + (0x1CB1, 'M', u'ჱ'), + (0x1CB2, 'M', u'ჲ'), + (0x1CB3, 'M', u'ჳ'), + (0x1CB4, 'M', u'ჴ'), + (0x1CB5, 'M', u'ჵ'), + (0x1CB6, 'M', u'ჶ'), + (0x1CB7, 'M', u'ჷ'), + (0x1CB8, 'M', u'ჸ'), + (0x1CB9, 'M', u'ჹ'), + (0x1CBA, 'M', u'ჺ'), + (0x1CBB, 'X'), + (0x1CBD, 'M', u'ჽ'), + (0x1CBE, 'M', u'ჾ'), + (0x1CBF, 'M', u'ჿ'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFB, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', u'a'), + (0x1D2D, 'M', u'æ'), + (0x1D2E, 'M', u'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', u'd'), + (0x1D31, 'M', u'e'), + (0x1D32, 'M', u'Ç'), + (0x1D33, 'M', u'g'), + (0x1D34, 'M', u'h'), + (0x1D35, 'M', u'i'), + (0x1D36, 'M', u'j'), + (0x1D37, 'M', u'k'), + (0x1D38, 'M', u'l'), + (0x1D39, 'M', u'm'), + (0x1D3A, 'M', u'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', u'o'), + (0x1D3D, 'M', u'È£'), + (0x1D3E, 'M', u'p'), + (0x1D3F, 'M', u'r'), + (0x1D40, 'M', u't'), + (0x1D41, 'M', u'u'), + (0x1D42, 'M', u'w'), + (0x1D43, 'M', u'a'), + (0x1D44, 'M', u'É'), + (0x1D45, 'M', u'É‘'), + 
(0x1D46, 'M', u'á´‚'), + (0x1D47, 'M', u'b'), + (0x1D48, 'M', u'd'), + (0x1D49, 'M', u'e'), + (0x1D4A, 'M', u'É™'), + (0x1D4B, 'M', u'É›'), + (0x1D4C, 'M', u'Éœ'), + (0x1D4D, 'M', u'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', u'k'), + (0x1D50, 'M', u'm'), + (0x1D51, 'M', u'Å‹'), + (0x1D52, 'M', u'o'), + ] + +def _seg_16(): + return [ + (0x1D53, 'M', u'É”'), + (0x1D54, 'M', u'á´–'), + (0x1D55, 'M', u'á´—'), + (0x1D56, 'M', u'p'), + (0x1D57, 'M', u't'), + (0x1D58, 'M', u'u'), + (0x1D59, 'M', u'á´'), + (0x1D5A, 'M', u'ɯ'), + (0x1D5B, 'M', u'v'), + (0x1D5C, 'M', u'á´¥'), + (0x1D5D, 'M', u'β'), + (0x1D5E, 'M', u'γ'), + (0x1D5F, 'M', u'δ'), + (0x1D60, 'M', u'φ'), + (0x1D61, 'M', u'χ'), + (0x1D62, 'M', u'i'), + (0x1D63, 'M', u'r'), + (0x1D64, 'M', u'u'), + (0x1D65, 'M', u'v'), + (0x1D66, 'M', u'β'), + (0x1D67, 'M', u'γ'), + (0x1D68, 'M', u'Ï'), + (0x1D69, 'M', u'φ'), + (0x1D6A, 'M', u'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', u'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', u'É’'), + (0x1D9C, 'M', u'c'), + (0x1D9D, 'M', u'É•'), + (0x1D9E, 'M', u'ð'), + (0x1D9F, 'M', u'Éœ'), + (0x1DA0, 'M', u'f'), + (0x1DA1, 'M', u'ÉŸ'), + (0x1DA2, 'M', u'É¡'), + (0x1DA3, 'M', u'É¥'), + (0x1DA4, 'M', u'ɨ'), + (0x1DA5, 'M', u'É©'), + (0x1DA6, 'M', u'ɪ'), + (0x1DA7, 'M', u'áµ»'), + (0x1DA8, 'M', u'Ê'), + (0x1DA9, 'M', u'É­'), + (0x1DAA, 'M', u'á¶…'), + (0x1DAB, 'M', u'ÊŸ'), + (0x1DAC, 'M', u'ɱ'), + (0x1DAD, 'M', u'ɰ'), + (0x1DAE, 'M', u'ɲ'), + (0x1DAF, 'M', u'ɳ'), + (0x1DB0, 'M', u'É´'), + (0x1DB1, 'M', u'ɵ'), + (0x1DB2, 'M', u'ɸ'), + (0x1DB3, 'M', u'Ê‚'), + (0x1DB4, 'M', u'ʃ'), + (0x1DB5, 'M', u'Æ«'), + (0x1DB6, 'M', u'ʉ'), + (0x1DB7, 'M', u'ÊŠ'), + (0x1DB8, 'M', u'á´œ'), + (0x1DB9, 'M', u'Ê‹'), + (0x1DBA, 'M', u'ÊŒ'), + (0x1DBB, 'M', u'z'), + (0x1DBC, 'M', u'Ê'), + (0x1DBD, 'M', u'Ê‘'), + (0x1DBE, 'M', u'Ê’'), + (0x1DBF, 'M', u'θ'), + (0x1DC0, 'V'), + (0x1DFA, 'X'), + (0x1DFB, 'V'), + (0x1E00, 'M', u'á¸'), + (0x1E01, 'V'), + (0x1E02, 'M', u'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', u'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', u'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', u'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', u'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', u'á¸'), + (0x1E0D, 'V'), + (0x1E0E, 'M', u'á¸'), + (0x1E0F, 'V'), + (0x1E10, 'M', u'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', u'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', u'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', u'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', u'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', u'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', u'á¸'), + (0x1E1D, 'V'), + (0x1E1E, 'M', u'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', u'ḡ'), + ] + +def _seg_17(): + return [ + (0x1E21, 'V'), + (0x1E22, 'M', u'ḣ'), + (0x1E23, 'V'), + (0x1E24, 'M', u'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', u'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', u'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', u'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', u'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', u'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', u'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', u'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', u'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', u'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', u'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', u'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', u'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', u'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', u'á¹'), + (0x1E41, 'V'), + (0x1E42, 'M', u'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', u'á¹…'), + (0x1E45, 'V'), + (0x1E46, 'M', u'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', u'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', u'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', u'á¹'), + (0x1E4D, 'V'), + (0x1E4E, 'M', u'á¹'), + (0x1E4F, 'V'), + (0x1E50, 'M', u'ṑ'), + (0x1E51, 'V'), 
+ (0x1E52, 'M', u'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', u'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', u'á¹—'), + (0x1E57, 'V'), + (0x1E58, 'M', u'á¹™'), + (0x1E59, 'V'), + (0x1E5A, 'M', u'á¹›'), + (0x1E5B, 'V'), + (0x1E5C, 'M', u'á¹'), + (0x1E5D, 'V'), + (0x1E5E, 'M', u'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', u'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', u'á¹£'), + (0x1E63, 'V'), + (0x1E64, 'M', u'á¹¥'), + (0x1E65, 'V'), + (0x1E66, 'M', u'á¹§'), + (0x1E67, 'V'), + (0x1E68, 'M', u'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', u'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', u'á¹­'), + (0x1E6D, 'V'), + (0x1E6E, 'M', u'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', u'á¹±'), + (0x1E71, 'V'), + (0x1E72, 'M', u'á¹³'), + (0x1E73, 'V'), + (0x1E74, 'M', u'á¹µ'), + (0x1E75, 'V'), + (0x1E76, 'M', u'á¹·'), + (0x1E77, 'V'), + (0x1E78, 'M', u'á¹¹'), + (0x1E79, 'V'), + (0x1E7A, 'M', u'á¹»'), + (0x1E7B, 'V'), + (0x1E7C, 'M', u'á¹½'), + (0x1E7D, 'V'), + (0x1E7E, 'M', u'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', u'áº'), + (0x1E81, 'V'), + (0x1E82, 'M', u'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', u'ẅ'), + ] + +def _seg_18(): + return [ + (0x1E85, 'V'), + (0x1E86, 'M', u'ẇ'), + (0x1E87, 'V'), + (0x1E88, 'M', u'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', u'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', u'áº'), + (0x1E8D, 'V'), + (0x1E8E, 'M', u'áº'), + (0x1E8F, 'V'), + (0x1E90, 'M', u'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', u'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', u'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', u'aʾ'), + (0x1E9B, 'M', u'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', u'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', u'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', u'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', u'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', u'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', u'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', u'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', u'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', u'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', u'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', u'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', u'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', u'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', u'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', u'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', u'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', u'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', u'á»'), + (0x1EC1, 'V'), + (0x1EC2, 'M', u'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', u'á»…'), + (0x1EC5, 'V'), + (0x1EC6, 'M', u'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', u'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', u'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', u'á»'), + (0x1ECD, 'V'), + (0x1ECE, 'M', u'á»'), + (0x1ECF, 'V'), + (0x1ED0, 'M', u'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', u'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', u'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', u'á»—'), + (0x1ED7, 'V'), + (0x1ED8, 'M', u'á»™'), + (0x1ED9, 'V'), + (0x1EDA, 'M', u'á»›'), + (0x1EDB, 'V'), + (0x1EDC, 'M', u'á»'), + (0x1EDD, 'V'), + (0x1EDE, 'M', u'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', u'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', u'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', u'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', u'á»§'), + (0x1EE7, 'V'), + (0x1EE8, 'M', u'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', u'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', u'á»­'), + (0x1EED, 'V'), + ] + +def _seg_19(): + return [ + (0x1EEE, 'M', u'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', u'á»±'), + (0x1EF1, 'V'), + (0x1EF2, 'M', u'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', u'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', u'á»·'), + (0x1EF7, 'V'), + (0x1EF8, 'M', u'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', u'á»»'), + (0x1EFB, 'V'), + (0x1EFC, 'M', u'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', u'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', u'á¼€'), + (0x1F09, 'M', u'á¼'), 
+ (0x1F0A, 'M', u'ἂ'), + (0x1F0B, 'M', u'ἃ'), + (0x1F0C, 'M', u'ἄ'), + (0x1F0D, 'M', u'á¼…'), + (0x1F0E, 'M', u'ἆ'), + (0x1F0F, 'M', u'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', u'á¼'), + (0x1F19, 'M', u'ἑ'), + (0x1F1A, 'M', u'á¼’'), + (0x1F1B, 'M', u'ἓ'), + (0x1F1C, 'M', u'á¼”'), + (0x1F1D, 'M', u'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', u'á¼ '), + (0x1F29, 'M', u'ἡ'), + (0x1F2A, 'M', u'á¼¢'), + (0x1F2B, 'M', u'á¼£'), + (0x1F2C, 'M', u'ἤ'), + (0x1F2D, 'M', u'á¼¥'), + (0x1F2E, 'M', u'ἦ'), + (0x1F2F, 'M', u'á¼§'), + (0x1F30, 'V'), + (0x1F38, 'M', u'á¼°'), + (0x1F39, 'M', u'á¼±'), + (0x1F3A, 'M', u'á¼²'), + (0x1F3B, 'M', u'á¼³'), + (0x1F3C, 'M', u'á¼´'), + (0x1F3D, 'M', u'á¼µ'), + (0x1F3E, 'M', u'á¼¶'), + (0x1F3F, 'M', u'á¼·'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', u'á½€'), + (0x1F49, 'M', u'á½'), + (0x1F4A, 'M', u'ὂ'), + (0x1F4B, 'M', u'ὃ'), + (0x1F4C, 'M', u'ὄ'), + (0x1F4D, 'M', u'á½…'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', u'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', u'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', u'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', u'á½—'), + (0x1F60, 'V'), + (0x1F68, 'M', u'á½ '), + (0x1F69, 'M', u'ὡ'), + (0x1F6A, 'M', u'á½¢'), + (0x1F6B, 'M', u'á½£'), + (0x1F6C, 'M', u'ὤ'), + (0x1F6D, 'M', u'á½¥'), + (0x1F6E, 'M', u'ὦ'), + (0x1F6F, 'M', u'á½§'), + (0x1F70, 'V'), + (0x1F71, 'M', u'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', u'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', u'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', u'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', u'ÏŒ'), + (0x1F7A, 'V'), + (0x1F7B, 'M', u'Ï'), + (0x1F7C, 'V'), + (0x1F7D, 'M', u'ÏŽ'), + (0x1F7E, 'X'), + (0x1F80, 'M', u'ἀι'), + (0x1F81, 'M', u'á¼Î¹'), + (0x1F82, 'M', u'ἂι'), + (0x1F83, 'M', u'ἃι'), + (0x1F84, 'M', u'ἄι'), + ] + +def _seg_20(): + return [ + (0x1F85, 'M', u'ἅι'), + (0x1F86, 'M', u'ἆι'), + (0x1F87, 'M', u'ἇι'), + (0x1F88, 'M', u'ἀι'), + (0x1F89, 'M', u'á¼Î¹'), + (0x1F8A, 'M', u'ἂι'), + (0x1F8B, 'M', u'ἃι'), + (0x1F8C, 'M', u'ἄι'), + (0x1F8D, 'M', u'ἅι'), + (0x1F8E, 'M', u'ἆι'), + (0x1F8F, 'M', u'ἇι'), + (0x1F90, 'M', u'ἠι'), + (0x1F91, 'M', u'ἡι'), + (0x1F92, 'M', u'ἢι'), + (0x1F93, 'M', u'ἣι'), + (0x1F94, 'M', u'ἤι'), + (0x1F95, 'M', u'ἥι'), + (0x1F96, 'M', u'ἦι'), + (0x1F97, 'M', u'ἧι'), + (0x1F98, 'M', u'ἠι'), + (0x1F99, 'M', u'ἡι'), + (0x1F9A, 'M', u'ἢι'), + (0x1F9B, 'M', u'ἣι'), + (0x1F9C, 'M', u'ἤι'), + (0x1F9D, 'M', u'ἥι'), + (0x1F9E, 'M', u'ἦι'), + (0x1F9F, 'M', u'ἧι'), + (0x1FA0, 'M', u'ὠι'), + (0x1FA1, 'M', u'ὡι'), + (0x1FA2, 'M', u'ὢι'), + (0x1FA3, 'M', u'ὣι'), + (0x1FA4, 'M', u'ὤι'), + (0x1FA5, 'M', u'ὥι'), + (0x1FA6, 'M', u'ὦι'), + (0x1FA7, 'M', u'ὧι'), + (0x1FA8, 'M', u'ὠι'), + (0x1FA9, 'M', u'ὡι'), + (0x1FAA, 'M', u'ὢι'), + (0x1FAB, 'M', u'ὣι'), + (0x1FAC, 'M', u'ὤι'), + (0x1FAD, 'M', u'ὥι'), + (0x1FAE, 'M', u'ὦι'), + (0x1FAF, 'M', u'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', u'ὰι'), + (0x1FB3, 'M', u'αι'), + (0x1FB4, 'M', u'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', u'ᾶι'), + (0x1FB8, 'M', u'á¾°'), + (0x1FB9, 'M', u'á¾±'), + (0x1FBA, 'M', u'á½°'), + (0x1FBB, 'M', u'ά'), + (0x1FBC, 'M', u'αι'), + (0x1FBD, '3', u' Ì“'), + (0x1FBE, 'M', u'ι'), + (0x1FBF, '3', u' Ì“'), + (0x1FC0, '3', u' Í‚'), + (0x1FC1, '3', u' ̈͂'), + (0x1FC2, 'M', u'ὴι'), + (0x1FC3, 'M', u'ηι'), + (0x1FC4, 'M', u'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', u'ῆι'), + (0x1FC8, 'M', u'á½²'), + (0x1FC9, 'M', u'έ'), + (0x1FCA, 'M', u'á½´'), + (0x1FCB, 'M', u'ή'), + (0x1FCC, 'M', u'ηι'), + (0x1FCD, '3', u' ̓̀'), + (0x1FCE, '3', u' Ì“Ì'), + (0x1FCF, '3', u' ̓͂'), + 
(0x1FD0, 'V'), + (0x1FD3, 'M', u'Î'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', u'á¿'), + (0x1FD9, 'M', u'á¿‘'), + (0x1FDA, 'M', u'á½¶'), + (0x1FDB, 'M', u'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', u' ̔̀'), + (0x1FDE, '3', u' Ì”Ì'), + (0x1FDF, '3', u' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', u'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', u'á¿ '), + (0x1FE9, 'M', u'á¿¡'), + (0x1FEA, 'M', u'ὺ'), + (0x1FEB, 'M', u'Ï'), + (0x1FEC, 'M', u'á¿¥'), + (0x1FED, '3', u' ̈̀'), + (0x1FEE, '3', u' ̈Ì'), + (0x1FEF, '3', u'`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', u'ὼι'), + (0x1FF3, 'M', u'ωι'), + ] + +def _seg_21(): + return [ + (0x1FF4, 'M', u'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + (0x1FF7, 'M', u'ῶι'), + (0x1FF8, 'M', u'ὸ'), + (0x1FF9, 'M', u'ÏŒ'), + (0x1FFA, 'M', u'á½¼'), + (0x1FFB, 'M', u'ÏŽ'), + (0x1FFC, 'M', u'ωι'), + (0x1FFD, '3', u' Ì'), + (0x1FFE, '3', u' Ì”'), + (0x1FFF, 'X'), + (0x2000, '3', u' '), + (0x200B, 'I'), + (0x200C, 'D', u''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', u'â€'), + (0x2012, 'V'), + (0x2017, '3', u' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', u' '), + (0x2030, 'V'), + (0x2033, 'M', u'′′'), + (0x2034, 'M', u'′′′'), + (0x2035, 'V'), + (0x2036, 'M', u'‵‵'), + (0x2037, 'M', u'‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', u'!!'), + (0x203D, 'V'), + (0x203E, '3', u' Ì…'), + (0x203F, 'V'), + (0x2047, '3', u'??'), + (0x2048, '3', u'?!'), + (0x2049, '3', u'!?'), + (0x204A, 'V'), + (0x2057, 'M', u'′′′′'), + (0x2058, 'V'), + (0x205F, '3', u' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', u'0'), + (0x2071, 'M', u'i'), + (0x2072, 'X'), + (0x2074, 'M', u'4'), + (0x2075, 'M', u'5'), + (0x2076, 'M', u'6'), + (0x2077, 'M', u'7'), + (0x2078, 'M', u'8'), + (0x2079, 'M', u'9'), + (0x207A, '3', u'+'), + (0x207B, 'M', u'−'), + (0x207C, '3', u'='), + (0x207D, '3', u'('), + (0x207E, '3', u')'), + (0x207F, 'M', u'n'), + (0x2080, 'M', u'0'), + (0x2081, 'M', u'1'), + (0x2082, 'M', u'2'), + (0x2083, 'M', u'3'), + (0x2084, 'M', u'4'), + (0x2085, 'M', u'5'), + (0x2086, 'M', u'6'), + (0x2087, 'M', u'7'), + (0x2088, 'M', u'8'), + (0x2089, 'M', u'9'), + (0x208A, '3', u'+'), + (0x208B, 'M', u'−'), + (0x208C, '3', u'='), + (0x208D, '3', u'('), + (0x208E, '3', u')'), + (0x208F, 'X'), + (0x2090, 'M', u'a'), + (0x2091, 'M', u'e'), + (0x2092, 'M', u'o'), + (0x2093, 'M', u'x'), + (0x2094, 'M', u'É™'), + (0x2095, 'M', u'h'), + (0x2096, 'M', u'k'), + (0x2097, 'M', u'l'), + (0x2098, 'M', u'm'), + (0x2099, 'M', u'n'), + (0x209A, 'M', u'p'), + (0x209B, 'M', u's'), + (0x209C, 'M', u't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', u'rs'), + (0x20A9, 'V'), + (0x20C0, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', u'a/c'), + (0x2101, '3', u'a/s'), + ] + +def _seg_22(): + return [ + (0x2102, 'M', u'c'), + (0x2103, 'M', u'°c'), + (0x2104, 'V'), + (0x2105, '3', u'c/o'), + (0x2106, '3', u'c/u'), + (0x2107, 'M', u'É›'), + (0x2108, 'V'), + (0x2109, 'M', u'°f'), + (0x210A, 'M', u'g'), + (0x210B, 'M', u'h'), + (0x210F, 'M', u'ħ'), + (0x2110, 'M', u'i'), + (0x2112, 'M', u'l'), + (0x2114, 'V'), + (0x2115, 'M', u'n'), + (0x2116, 'M', u'no'), + (0x2117, 'V'), + (0x2119, 'M', u'p'), + (0x211A, 'M', u'q'), + (0x211B, 'M', u'r'), + (0x211E, 'V'), + (0x2120, 'M', u'sm'), + (0x2121, 'M', u'tel'), + (0x2122, 'M', u'tm'), + (0x2123, 'V'), + (0x2124, 'M', u'z'), + (0x2125, 'V'), + (0x2126, 'M', u'ω'), + (0x2127, 'V'), + (0x2128, 'M', u'z'), + (0x2129, 'V'), + (0x212A, 'M', u'k'), + (0x212B, 'M', u'Ã¥'), + (0x212C, 'M', u'b'), + (0x212D, 'M', 
u'c'), + (0x212E, 'V'), + (0x212F, 'M', u'e'), + (0x2131, 'M', u'f'), + (0x2132, 'X'), + (0x2133, 'M', u'm'), + (0x2134, 'M', u'o'), + (0x2135, 'M', u'×'), + (0x2136, 'M', u'ב'), + (0x2137, 'M', u'×’'), + (0x2138, 'M', u'ד'), + (0x2139, 'M', u'i'), + (0x213A, 'V'), + (0x213B, 'M', u'fax'), + (0x213C, 'M', u'Ï€'), + (0x213D, 'M', u'γ'), + (0x213F, 'M', u'Ï€'), + (0x2140, 'M', u'∑'), + (0x2141, 'V'), + (0x2145, 'M', u'd'), + (0x2147, 'M', u'e'), + (0x2148, 'M', u'i'), + (0x2149, 'M', u'j'), + (0x214A, 'V'), + (0x2150, 'M', u'1â„7'), + (0x2151, 'M', u'1â„9'), + (0x2152, 'M', u'1â„10'), + (0x2153, 'M', u'1â„3'), + (0x2154, 'M', u'2â„3'), + (0x2155, 'M', u'1â„5'), + (0x2156, 'M', u'2â„5'), + (0x2157, 'M', u'3â„5'), + (0x2158, 'M', u'4â„5'), + (0x2159, 'M', u'1â„6'), + (0x215A, 'M', u'5â„6'), + (0x215B, 'M', u'1â„8'), + (0x215C, 'M', u'3â„8'), + (0x215D, 'M', u'5â„8'), + (0x215E, 'M', u'7â„8'), + (0x215F, 'M', u'1â„'), + (0x2160, 'M', u'i'), + (0x2161, 'M', u'ii'), + (0x2162, 'M', u'iii'), + (0x2163, 'M', u'iv'), + (0x2164, 'M', u'v'), + (0x2165, 'M', u'vi'), + (0x2166, 'M', u'vii'), + (0x2167, 'M', u'viii'), + (0x2168, 'M', u'ix'), + (0x2169, 'M', u'x'), + (0x216A, 'M', u'xi'), + (0x216B, 'M', u'xii'), + (0x216C, 'M', u'l'), + (0x216D, 'M', u'c'), + (0x216E, 'M', u'd'), + (0x216F, 'M', u'm'), + (0x2170, 'M', u'i'), + (0x2171, 'M', u'ii'), + (0x2172, 'M', u'iii'), + (0x2173, 'M', u'iv'), + (0x2174, 'M', u'v'), + (0x2175, 'M', u'vi'), + (0x2176, 'M', u'vii'), + (0x2177, 'M', u'viii'), + (0x2178, 'M', u'ix'), + (0x2179, 'M', u'x'), + ] + +def _seg_23(): + return [ + (0x217A, 'M', u'xi'), + (0x217B, 'M', u'xii'), + (0x217C, 'M', u'l'), + (0x217D, 'M', u'c'), + (0x217E, 'M', u'd'), + (0x217F, 'M', u'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', u'0â„3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', u'∫∫'), + (0x222D, 'M', u'∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', u'∮∮'), + (0x2230, 'M', u'∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', u'〈'), + (0x232A, 'M', u'〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', u'1'), + (0x2461, 'M', u'2'), + (0x2462, 'M', u'3'), + (0x2463, 'M', u'4'), + (0x2464, 'M', u'5'), + (0x2465, 'M', u'6'), + (0x2466, 'M', u'7'), + (0x2467, 'M', u'8'), + (0x2468, 'M', u'9'), + (0x2469, 'M', u'10'), + (0x246A, 'M', u'11'), + (0x246B, 'M', u'12'), + (0x246C, 'M', u'13'), + (0x246D, 'M', u'14'), + (0x246E, 'M', u'15'), + (0x246F, 'M', u'16'), + (0x2470, 'M', u'17'), + (0x2471, 'M', u'18'), + (0x2472, 'M', u'19'), + (0x2473, 'M', u'20'), + (0x2474, '3', u'(1)'), + (0x2475, '3', u'(2)'), + (0x2476, '3', u'(3)'), + (0x2477, '3', u'(4)'), + (0x2478, '3', u'(5)'), + (0x2479, '3', u'(6)'), + (0x247A, '3', u'(7)'), + (0x247B, '3', u'(8)'), + (0x247C, '3', u'(9)'), + (0x247D, '3', u'(10)'), + (0x247E, '3', u'(11)'), + (0x247F, '3', u'(12)'), + (0x2480, '3', u'(13)'), + (0x2481, '3', u'(14)'), + (0x2482, '3', u'(15)'), + (0x2483, '3', u'(16)'), + (0x2484, '3', u'(17)'), + (0x2485, '3', u'(18)'), + (0x2486, '3', u'(19)'), + (0x2487, '3', u'(20)'), + (0x2488, 'X'), + (0x249C, '3', u'(a)'), + (0x249D, '3', u'(b)'), + (0x249E, '3', u'(c)'), + (0x249F, '3', u'(d)'), + (0x24A0, '3', u'(e)'), + (0x24A1, '3', u'(f)'), + (0x24A2, '3', u'(g)'), + (0x24A3, '3', u'(h)'), + (0x24A4, '3', u'(i)'), + (0x24A5, '3', u'(j)'), + (0x24A6, '3', u'(k)'), + (0x24A7, '3', u'(l)'), + (0x24A8, '3', u'(m)'), + (0x24A9, '3', u'(n)'), + (0x24AA, '3', u'(o)'), + (0x24AB, 
'3', u'(p)'), + (0x24AC, '3', u'(q)'), + (0x24AD, '3', u'(r)'), + (0x24AE, '3', u'(s)'), + (0x24AF, '3', u'(t)'), + (0x24B0, '3', u'(u)'), + (0x24B1, '3', u'(v)'), + (0x24B2, '3', u'(w)'), + (0x24B3, '3', u'(x)'), + (0x24B4, '3', u'(y)'), + (0x24B5, '3', u'(z)'), + (0x24B6, 'M', u'a'), + (0x24B7, 'M', u'b'), + (0x24B8, 'M', u'c'), + (0x24B9, 'M', u'd'), + ] + +def _seg_24(): + return [ + (0x24BA, 'M', u'e'), + (0x24BB, 'M', u'f'), + (0x24BC, 'M', u'g'), + (0x24BD, 'M', u'h'), + (0x24BE, 'M', u'i'), + (0x24BF, 'M', u'j'), + (0x24C0, 'M', u'k'), + (0x24C1, 'M', u'l'), + (0x24C2, 'M', u'm'), + (0x24C3, 'M', u'n'), + (0x24C4, 'M', u'o'), + (0x24C5, 'M', u'p'), + (0x24C6, 'M', u'q'), + (0x24C7, 'M', u'r'), + (0x24C8, 'M', u's'), + (0x24C9, 'M', u't'), + (0x24CA, 'M', u'u'), + (0x24CB, 'M', u'v'), + (0x24CC, 'M', u'w'), + (0x24CD, 'M', u'x'), + (0x24CE, 'M', u'y'), + (0x24CF, 'M', u'z'), + (0x24D0, 'M', u'a'), + (0x24D1, 'M', u'b'), + (0x24D2, 'M', u'c'), + (0x24D3, 'M', u'd'), + (0x24D4, 'M', u'e'), + (0x24D5, 'M', u'f'), + (0x24D6, 'M', u'g'), + (0x24D7, 'M', u'h'), + (0x24D8, 'M', u'i'), + (0x24D9, 'M', u'j'), + (0x24DA, 'M', u'k'), + (0x24DB, 'M', u'l'), + (0x24DC, 'M', u'm'), + (0x24DD, 'M', u'n'), + (0x24DE, 'M', u'o'), + (0x24DF, 'M', u'p'), + (0x24E0, 'M', u'q'), + (0x24E1, 'M', u'r'), + (0x24E2, 'M', u's'), + (0x24E3, 'M', u't'), + (0x24E4, 'M', u'u'), + (0x24E5, 'M', u'v'), + (0x24E6, 'M', u'w'), + (0x24E7, 'M', u'x'), + (0x24E8, 'M', u'y'), + (0x24E9, 'M', u'z'), + (0x24EA, 'M', u'0'), + (0x24EB, 'V'), + (0x2A0C, 'M', u'∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', u'::='), + (0x2A75, '3', u'=='), + (0x2A76, '3', u'==='), + (0x2A77, 'V'), + (0x2ADC, 'M', u'â«Ì¸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B97, 'V'), + (0x2C00, 'M', u'â°°'), + (0x2C01, 'M', u'â°±'), + (0x2C02, 'M', u'â°²'), + (0x2C03, 'M', u'â°³'), + (0x2C04, 'M', u'â°´'), + (0x2C05, 'M', u'â°µ'), + (0x2C06, 'M', u'â°¶'), + (0x2C07, 'M', u'â°·'), + (0x2C08, 'M', u'â°¸'), + (0x2C09, 'M', u'â°¹'), + (0x2C0A, 'M', u'â°º'), + (0x2C0B, 'M', u'â°»'), + (0x2C0C, 'M', u'â°¼'), + (0x2C0D, 'M', u'â°½'), + (0x2C0E, 'M', u'â°¾'), + (0x2C0F, 'M', u'â°¿'), + (0x2C10, 'M', u'â±€'), + (0x2C11, 'M', u'â±'), + (0x2C12, 'M', u'ⱂ'), + (0x2C13, 'M', u'ⱃ'), + (0x2C14, 'M', u'ⱄ'), + (0x2C15, 'M', u'â±…'), + (0x2C16, 'M', u'ⱆ'), + (0x2C17, 'M', u'ⱇ'), + (0x2C18, 'M', u'ⱈ'), + (0x2C19, 'M', u'ⱉ'), + (0x2C1A, 'M', u'ⱊ'), + (0x2C1B, 'M', u'ⱋ'), + (0x2C1C, 'M', u'ⱌ'), + (0x2C1D, 'M', u'â±'), + (0x2C1E, 'M', u'ⱎ'), + (0x2C1F, 'M', u'â±'), + (0x2C20, 'M', u'â±'), + (0x2C21, 'M', u'ⱑ'), + (0x2C22, 'M', u'â±’'), + (0x2C23, 'M', u'ⱓ'), + (0x2C24, 'M', u'â±”'), + (0x2C25, 'M', u'ⱕ'), + ] + +def _seg_25(): + return [ + (0x2C26, 'M', u'â±–'), + (0x2C27, 'M', u'â±—'), + (0x2C28, 'M', u'ⱘ'), + (0x2C29, 'M', u'â±™'), + (0x2C2A, 'M', u'ⱚ'), + (0x2C2B, 'M', u'â±›'), + (0x2C2C, 'M', u'ⱜ'), + (0x2C2D, 'M', u'â±'), + (0x2C2E, 'M', u'ⱞ'), + (0x2C2F, 'X'), + (0x2C30, 'V'), + (0x2C5F, 'X'), + (0x2C60, 'M', u'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', u'É«'), + (0x2C63, 'M', u'áµ½'), + (0x2C64, 'M', u'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', u'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', u'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', u'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', u'É‘'), + (0x2C6E, 'M', u'ɱ'), + (0x2C6F, 'M', u'É'), + (0x2C70, 'M', u'É’'), + (0x2C71, 'V'), + (0x2C72, 'M', u'â±³'), + (0x2C73, 'V'), + (0x2C75, 'M', u'â±¶'), + (0x2C76, 'V'), + (0x2C7C, 'M', u'j'), + (0x2C7D, 'M', u'v'), + (0x2C7E, 'M', u'È¿'), + (0x2C7F, 'M', u'É€'), + (0x2C80, 'M', 
u'â²'), + (0x2C81, 'V'), + (0x2C82, 'M', u'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', u'â²…'), + (0x2C85, 'V'), + (0x2C86, 'M', u'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', u'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', u'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', u'â²'), + (0x2C8D, 'V'), + (0x2C8E, 'M', u'â²'), + (0x2C8F, 'V'), + (0x2C90, 'M', u'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', u'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', u'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', u'â²—'), + (0x2C97, 'V'), + (0x2C98, 'M', u'â²™'), + (0x2C99, 'V'), + (0x2C9A, 'M', u'â²›'), + (0x2C9B, 'V'), + (0x2C9C, 'M', u'â²'), + (0x2C9D, 'V'), + (0x2C9E, 'M', u'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', u'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', u'â²£'), + (0x2CA3, 'V'), + (0x2CA4, 'M', u'â²¥'), + (0x2CA5, 'V'), + (0x2CA6, 'M', u'â²§'), + (0x2CA7, 'V'), + (0x2CA8, 'M', u'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', u'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', u'â²­'), + (0x2CAD, 'V'), + (0x2CAE, 'M', u'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', u'â²±'), + (0x2CB1, 'V'), + (0x2CB2, 'M', u'â²³'), + (0x2CB3, 'V'), + (0x2CB4, 'M', u'â²µ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', u'â²·'), + (0x2CB7, 'V'), + (0x2CB8, 'M', u'â²¹'), + (0x2CB9, 'V'), + (0x2CBA, 'M', u'â²»'), + (0x2CBB, 'V'), + (0x2CBC, 'M', u'â²½'), + (0x2CBD, 'V'), + (0x2CBE, 'M', u'ⲿ'), + ] + +def _seg_26(): + return [ + (0x2CBF, 'V'), + (0x2CC0, 'M', u'â³'), + (0x2CC1, 'V'), + (0x2CC2, 'M', u'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', u'â³…'), + (0x2CC5, 'V'), + (0x2CC6, 'M', u'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', u'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', u'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', u'â³'), + (0x2CCD, 'V'), + (0x2CCE, 'M', u'â³'), + (0x2CCF, 'V'), + (0x2CD0, 'M', u'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', u'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', u'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', u'â³—'), + (0x2CD7, 'V'), + (0x2CD8, 'M', u'â³™'), + (0x2CD9, 'V'), + (0x2CDA, 'M', u'â³›'), + (0x2CDB, 'V'), + (0x2CDC, 'M', u'â³'), + (0x2CDD, 'V'), + (0x2CDE, 'M', u'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', u'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', u'â³£'), + (0x2CE3, 'V'), + (0x2CEB, 'M', u'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', u'â³®'), + (0x2CEE, 'V'), + (0x2CF2, 'M', u'â³³'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', u'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E53, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', u'æ¯'), + (0x2EA0, 'V'), + (0x2EF3, 'M', u'龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', u'一'), + (0x2F01, 'M', u'丨'), + (0x2F02, 'M', u'丶'), + (0x2F03, 'M', u'丿'), + (0x2F04, 'M', u'ä¹™'), + (0x2F05, 'M', u'亅'), + (0x2F06, 'M', u'二'), + (0x2F07, 'M', u'亠'), + (0x2F08, 'M', u'人'), + (0x2F09, 'M', u'å„¿'), + (0x2F0A, 'M', u'å…¥'), + (0x2F0B, 'M', u'å…«'), + (0x2F0C, 'M', u'冂'), + (0x2F0D, 'M', u'冖'), + (0x2F0E, 'M', u'冫'), + (0x2F0F, 'M', u'几'), + (0x2F10, 'M', u'凵'), + (0x2F11, 'M', u'刀'), + ] + +def _seg_27(): + return [ + (0x2F12, 'M', u'力'), + (0x2F13, 'M', u'勹'), + (0x2F14, 'M', u'匕'), + (0x2F15, 'M', u'匚'), + (0x2F16, 'M', u'匸'), + (0x2F17, 'M', u'å'), + (0x2F18, 'M', u'åœ'), + (0x2F19, 'M', u'å©'), + (0x2F1A, 'M', u'厂'), + (0x2F1B, 'M', 
u'厶'), + (0x2F1C, 'M', u'åˆ'), + (0x2F1D, 'M', u'å£'), + (0x2F1E, 'M', u'å›—'), + (0x2F1F, 'M', u'土'), + (0x2F20, 'M', u'士'), + (0x2F21, 'M', u'夂'), + (0x2F22, 'M', u'夊'), + (0x2F23, 'M', u'夕'), + (0x2F24, 'M', u'大'), + (0x2F25, 'M', u'女'), + (0x2F26, 'M', u'å­'), + (0x2F27, 'M', u'宀'), + (0x2F28, 'M', u'寸'), + (0x2F29, 'M', u'å°'), + (0x2F2A, 'M', u'å°¢'), + (0x2F2B, 'M', u'å°¸'), + (0x2F2C, 'M', u'å±®'), + (0x2F2D, 'M', u'å±±'), + (0x2F2E, 'M', u'å·›'), + (0x2F2F, 'M', u'å·¥'), + (0x2F30, 'M', u'å·±'), + (0x2F31, 'M', u'å·¾'), + (0x2F32, 'M', u'å¹²'), + (0x2F33, 'M', u'幺'), + (0x2F34, 'M', u'广'), + (0x2F35, 'M', u'å»´'), + (0x2F36, 'M', u'廾'), + (0x2F37, 'M', u'弋'), + (0x2F38, 'M', u'弓'), + (0x2F39, 'M', u'å½'), + (0x2F3A, 'M', u'彡'), + (0x2F3B, 'M', u'å½³'), + (0x2F3C, 'M', u'心'), + (0x2F3D, 'M', u'戈'), + (0x2F3E, 'M', u'戶'), + (0x2F3F, 'M', u'手'), + (0x2F40, 'M', u'支'), + (0x2F41, 'M', u'æ”´'), + (0x2F42, 'M', u'æ–‡'), + (0x2F43, 'M', u'æ–—'), + (0x2F44, 'M', u'æ–¤'), + (0x2F45, 'M', u'æ–¹'), + (0x2F46, 'M', u'æ— '), + (0x2F47, 'M', u'æ—¥'), + (0x2F48, 'M', u'æ›°'), + (0x2F49, 'M', u'月'), + (0x2F4A, 'M', u'木'), + (0x2F4B, 'M', u'欠'), + (0x2F4C, 'M', u'æ­¢'), + (0x2F4D, 'M', u'æ­¹'), + (0x2F4E, 'M', u'殳'), + (0x2F4F, 'M', u'毋'), + (0x2F50, 'M', u'比'), + (0x2F51, 'M', u'毛'), + (0x2F52, 'M', u'æ°'), + (0x2F53, 'M', u'æ°”'), + (0x2F54, 'M', u'æ°´'), + (0x2F55, 'M', u'ç«'), + (0x2F56, 'M', u'爪'), + (0x2F57, 'M', u'父'), + (0x2F58, 'M', u'爻'), + (0x2F59, 'M', u'爿'), + (0x2F5A, 'M', u'片'), + (0x2F5B, 'M', u'牙'), + (0x2F5C, 'M', u'牛'), + (0x2F5D, 'M', u'犬'), + (0x2F5E, 'M', u'玄'), + (0x2F5F, 'M', u'玉'), + (0x2F60, 'M', u'瓜'), + (0x2F61, 'M', u'瓦'), + (0x2F62, 'M', u'甘'), + (0x2F63, 'M', u'生'), + (0x2F64, 'M', u'用'), + (0x2F65, 'M', u'ç”°'), + (0x2F66, 'M', u'ç–‹'), + (0x2F67, 'M', u'ç–’'), + (0x2F68, 'M', u'ç™¶'), + (0x2F69, 'M', u'白'), + (0x2F6A, 'M', u'çš®'), + (0x2F6B, 'M', u'çš¿'), + (0x2F6C, 'M', u'ç›®'), + (0x2F6D, 'M', u'矛'), + (0x2F6E, 'M', u'矢'), + (0x2F6F, 'M', u'石'), + (0x2F70, 'M', u'示'), + (0x2F71, 'M', u'禸'), + (0x2F72, 'M', u'禾'), + (0x2F73, 'M', u'ç©´'), + (0x2F74, 'M', u'ç«‹'), + (0x2F75, 'M', u'竹'), + ] + +def _seg_28(): + return [ + (0x2F76, 'M', u'ç±³'), + (0x2F77, 'M', u'糸'), + (0x2F78, 'M', u'ç¼¶'), + (0x2F79, 'M', u'网'), + (0x2F7A, 'M', u'羊'), + (0x2F7B, 'M', u'ç¾½'), + (0x2F7C, 'M', u'è€'), + (0x2F7D, 'M', u'而'), + (0x2F7E, 'M', u'耒'), + (0x2F7F, 'M', u'耳'), + (0x2F80, 'M', u'è¿'), + (0x2F81, 'M', u'肉'), + (0x2F82, 'M', u'臣'), + (0x2F83, 'M', u'自'), + (0x2F84, 'M', u'至'), + (0x2F85, 'M', u'臼'), + (0x2F86, 'M', u'舌'), + (0x2F87, 'M', u'舛'), + (0x2F88, 'M', u'舟'), + (0x2F89, 'M', u'艮'), + (0x2F8A, 'M', u'色'), + (0x2F8B, 'M', u'艸'), + (0x2F8C, 'M', u'è™'), + (0x2F8D, 'M', u'虫'), + (0x2F8E, 'M', u'è¡€'), + (0x2F8F, 'M', u'行'), + (0x2F90, 'M', u'è¡£'), + (0x2F91, 'M', u'襾'), + (0x2F92, 'M', u'見'), + (0x2F93, 'M', u'è§’'), + (0x2F94, 'M', u'言'), + (0x2F95, 'M', u'è°·'), + (0x2F96, 'M', u'豆'), + (0x2F97, 'M', u'豕'), + (0x2F98, 'M', u'豸'), + (0x2F99, 'M', u'è²'), + (0x2F9A, 'M', u'赤'), + (0x2F9B, 'M', u'èµ°'), + (0x2F9C, 'M', u'è¶³'), + (0x2F9D, 'M', u'身'), + (0x2F9E, 'M', u'車'), + (0x2F9F, 'M', u'è¾›'), + (0x2FA0, 'M', u'è¾°'), + (0x2FA1, 'M', u'è¾µ'), + (0x2FA2, 'M', u'é‚‘'), + (0x2FA3, 'M', u'é…‰'), + (0x2FA4, 'M', u'釆'), + (0x2FA5, 'M', u'里'), + (0x2FA6, 'M', u'金'), + (0x2FA7, 'M', u'é•·'), + (0x2FA8, 'M', u'é–€'), + (0x2FA9, 'M', u'阜'), + (0x2FAA, 'M', u'éš¶'), + (0x2FAB, 'M', u'éš¹'), + (0x2FAC, 'M', u'雨'), + (0x2FAD, 'M', u'é‘'), + (0x2FAE, 'M', u'éž'), + (0x2FAF, 'M', 
u'é¢'), + (0x2FB0, 'M', u'é©'), + (0x2FB1, 'M', u'韋'), + (0x2FB2, 'M', u'韭'), + (0x2FB3, 'M', u'音'), + (0x2FB4, 'M', u'é '), + (0x2FB5, 'M', u'風'), + (0x2FB6, 'M', u'飛'), + (0x2FB7, 'M', u'食'), + (0x2FB8, 'M', u'首'), + (0x2FB9, 'M', u'香'), + (0x2FBA, 'M', u'馬'), + (0x2FBB, 'M', u'骨'), + (0x2FBC, 'M', u'高'), + (0x2FBD, 'M', u'髟'), + (0x2FBE, 'M', u'鬥'), + (0x2FBF, 'M', u'鬯'), + (0x2FC0, 'M', u'鬲'), + (0x2FC1, 'M', u'鬼'), + (0x2FC2, 'M', u'é­š'), + (0x2FC3, 'M', u'é³¥'), + (0x2FC4, 'M', u'é¹µ'), + (0x2FC5, 'M', u'鹿'), + (0x2FC6, 'M', u'麥'), + (0x2FC7, 'M', u'麻'), + (0x2FC8, 'M', u'黃'), + (0x2FC9, 'M', u'é»'), + (0x2FCA, 'M', u'黑'), + (0x2FCB, 'M', u'黹'), + (0x2FCC, 'M', u'黽'), + (0x2FCD, 'M', u'鼎'), + (0x2FCE, 'M', u'鼓'), + (0x2FCF, 'M', u'é¼ '), + (0x2FD0, 'M', u'é¼»'), + (0x2FD1, 'M', u'齊'), + (0x2FD2, 'M', u'é½’'), + (0x2FD3, 'M', u'é¾'), + (0x2FD4, 'M', u'龜'), + (0x2FD5, 'M', u'é¾ '), + (0x2FD6, 'X'), + (0x3000, '3', u' '), + (0x3001, 'V'), + (0x3002, 'M', u'.'), + ] + +def _seg_29(): + return [ + (0x3003, 'V'), + (0x3036, 'M', u'〒'), + (0x3037, 'V'), + (0x3038, 'M', u'å'), + (0x3039, 'M', u'å„'), + (0x303A, 'M', u'å…'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', u' ã‚™'), + (0x309C, '3', u' ゚'), + (0x309D, 'V'), + (0x309F, 'M', u'より'), + (0x30A0, 'V'), + (0x30FF, 'M', u'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x3130, 'X'), + (0x3131, 'M', u'á„€'), + (0x3132, 'M', u'á„'), + (0x3133, 'M', u'ᆪ'), + (0x3134, 'M', u'á„‚'), + (0x3135, 'M', u'ᆬ'), + (0x3136, 'M', u'ᆭ'), + (0x3137, 'M', u'ᄃ'), + (0x3138, 'M', u'á„„'), + (0x3139, 'M', u'á„…'), + (0x313A, 'M', u'ᆰ'), + (0x313B, 'M', u'ᆱ'), + (0x313C, 'M', u'ᆲ'), + (0x313D, 'M', u'ᆳ'), + (0x313E, 'M', u'ᆴ'), + (0x313F, 'M', u'ᆵ'), + (0x3140, 'M', u'ᄚ'), + (0x3141, 'M', u'ᄆ'), + (0x3142, 'M', u'ᄇ'), + (0x3143, 'M', u'ᄈ'), + (0x3144, 'M', u'á„¡'), + (0x3145, 'M', u'ᄉ'), + (0x3146, 'M', u'ᄊ'), + (0x3147, 'M', u'á„‹'), + (0x3148, 'M', u'ᄌ'), + (0x3149, 'M', u'á„'), + (0x314A, 'M', u'ᄎ'), + (0x314B, 'M', u'á„'), + (0x314C, 'M', u'á„'), + (0x314D, 'M', u'á„‘'), + (0x314E, 'M', u'á„’'), + (0x314F, 'M', u'á…¡'), + (0x3150, 'M', u'á…¢'), + (0x3151, 'M', u'á…£'), + (0x3152, 'M', u'á…¤'), + (0x3153, 'M', u'á…¥'), + (0x3154, 'M', u'á…¦'), + (0x3155, 'M', u'á…§'), + (0x3156, 'M', u'á…¨'), + (0x3157, 'M', u'á…©'), + (0x3158, 'M', u'á…ª'), + (0x3159, 'M', u'á…«'), + (0x315A, 'M', u'á…¬'), + (0x315B, 'M', u'á…­'), + (0x315C, 'M', u'á…®'), + (0x315D, 'M', u'á…¯'), + (0x315E, 'M', u'á…°'), + (0x315F, 'M', u'á…±'), + (0x3160, 'M', u'á…²'), + (0x3161, 'M', u'á…³'), + (0x3162, 'M', u'á…´'), + (0x3163, 'M', u'á…µ'), + (0x3164, 'X'), + (0x3165, 'M', u'á„”'), + (0x3166, 'M', u'á„•'), + (0x3167, 'M', u'ᇇ'), + (0x3168, 'M', u'ᇈ'), + (0x3169, 'M', u'ᇌ'), + (0x316A, 'M', u'ᇎ'), + (0x316B, 'M', u'ᇓ'), + (0x316C, 'M', u'ᇗ'), + (0x316D, 'M', u'ᇙ'), + (0x316E, 'M', u'ᄜ'), + (0x316F, 'M', u'á‡'), + (0x3170, 'M', u'ᇟ'), + (0x3171, 'M', u'á„'), + (0x3172, 'M', u'ᄞ'), + (0x3173, 'M', u'á„ '), + (0x3174, 'M', u'á„¢'), + (0x3175, 'M', u'á„£'), + (0x3176, 'M', u'á„§'), + (0x3177, 'M', u'á„©'), + (0x3178, 'M', u'á„«'), + (0x3179, 'M', u'ᄬ'), + (0x317A, 'M', u'á„­'), + (0x317B, 'M', u'á„®'), + (0x317C, 'M', u'ᄯ'), + (0x317D, 'M', u'ᄲ'), + (0x317E, 'M', u'á„¶'), + (0x317F, 'M', u'á…€'), + (0x3180, 'M', u'á…‡'), + ] + +def _seg_30(): + return [ + (0x3181, 'M', u'á…Œ'), + (0x3182, 'M', u'ᇱ'), + (0x3183, 'M', u'ᇲ'), + (0x3184, 'M', u'á…—'), + (0x3185, 'M', u'á…˜'), + (0x3186, 'M', u'á…™'), + (0x3187, 'M', u'ᆄ'), + (0x3188, 'M', 
u'ᆅ'), + (0x3189, 'M', u'ᆈ'), + (0x318A, 'M', u'ᆑ'), + (0x318B, 'M', u'ᆒ'), + (0x318C, 'M', u'ᆔ'), + (0x318D, 'M', u'ᆞ'), + (0x318E, 'M', u'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', u'一'), + (0x3193, 'M', u'二'), + (0x3194, 'M', u'三'), + (0x3195, 'M', u'å››'), + (0x3196, 'M', u'上'), + (0x3197, 'M', u'中'), + (0x3198, 'M', u'下'), + (0x3199, 'M', u'甲'), + (0x319A, 'M', u'ä¹™'), + (0x319B, 'M', u'丙'), + (0x319C, 'M', u'ä¸'), + (0x319D, 'M', u'天'), + (0x319E, 'M', u'地'), + (0x319F, 'M', u'人'), + (0x31A0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', u'(á„€)'), + (0x3201, '3', u'(á„‚)'), + (0x3202, '3', u'(ᄃ)'), + (0x3203, '3', u'(á„…)'), + (0x3204, '3', u'(ᄆ)'), + (0x3205, '3', u'(ᄇ)'), + (0x3206, '3', u'(ᄉ)'), + (0x3207, '3', u'(á„‹)'), + (0x3208, '3', u'(ᄌ)'), + (0x3209, '3', u'(ᄎ)'), + (0x320A, '3', u'(á„)'), + (0x320B, '3', u'(á„)'), + (0x320C, '3', u'(á„‘)'), + (0x320D, '3', u'(á„’)'), + (0x320E, '3', u'(ê°€)'), + (0x320F, '3', u'(나)'), + (0x3210, '3', u'(다)'), + (0x3211, '3', u'(ë¼)'), + (0x3212, '3', u'(마)'), + (0x3213, '3', u'(ë°”)'), + (0x3214, '3', u'(사)'), + (0x3215, '3', u'(ì•„)'), + (0x3216, '3', u'(ìž)'), + (0x3217, '3', u'(ì°¨)'), + (0x3218, '3', u'(ì¹´)'), + (0x3219, '3', u'(타)'), + (0x321A, '3', u'(파)'), + (0x321B, '3', u'(하)'), + (0x321C, '3', u'(주)'), + (0x321D, '3', u'(오전)'), + (0x321E, '3', u'(오후)'), + (0x321F, 'X'), + (0x3220, '3', u'(一)'), + (0x3221, '3', u'(二)'), + (0x3222, '3', u'(三)'), + (0x3223, '3', u'(å››)'), + (0x3224, '3', u'(五)'), + (0x3225, '3', u'(å…­)'), + (0x3226, '3', u'(七)'), + (0x3227, '3', u'(å…«)'), + (0x3228, '3', u'(ä¹)'), + (0x3229, '3', u'(å)'), + (0x322A, '3', u'(月)'), + (0x322B, '3', u'(ç«)'), + (0x322C, '3', u'(æ°´)'), + (0x322D, '3', u'(木)'), + (0x322E, '3', u'(金)'), + (0x322F, '3', u'(土)'), + (0x3230, '3', u'(æ—¥)'), + (0x3231, '3', u'(æ ª)'), + (0x3232, '3', u'(有)'), + (0x3233, '3', u'(社)'), + (0x3234, '3', u'(å)'), + (0x3235, '3', u'(特)'), + (0x3236, '3', u'(財)'), + (0x3237, '3', u'(ç¥)'), + (0x3238, '3', u'(労)'), + (0x3239, '3', u'(代)'), + (0x323A, '3', u'(呼)'), + (0x323B, '3', u'(å­¦)'), + (0x323C, '3', u'(監)'), + (0x323D, '3', u'(ä¼)'), + (0x323E, '3', u'(資)'), + (0x323F, '3', u'(å”)'), + (0x3240, '3', u'(祭)'), + (0x3241, '3', u'(休)'), + (0x3242, '3', u'(自)'), + ] + +def _seg_31(): + return [ + (0x3243, '3', u'(至)'), + (0x3244, 'M', u'å•'), + (0x3245, 'M', u'å¹¼'), + (0x3246, 'M', u'æ–‡'), + (0x3247, 'M', u'ç®'), + (0x3248, 'V'), + (0x3250, 'M', u'pte'), + (0x3251, 'M', u'21'), + (0x3252, 'M', u'22'), + (0x3253, 'M', u'23'), + (0x3254, 'M', u'24'), + (0x3255, 'M', u'25'), + (0x3256, 'M', u'26'), + (0x3257, 'M', u'27'), + (0x3258, 'M', u'28'), + (0x3259, 'M', u'29'), + (0x325A, 'M', u'30'), + (0x325B, 'M', u'31'), + (0x325C, 'M', u'32'), + (0x325D, 'M', u'33'), + (0x325E, 'M', u'34'), + (0x325F, 'M', u'35'), + (0x3260, 'M', u'á„€'), + (0x3261, 'M', u'á„‚'), + (0x3262, 'M', u'ᄃ'), + (0x3263, 'M', u'á„…'), + (0x3264, 'M', u'ᄆ'), + (0x3265, 'M', u'ᄇ'), + (0x3266, 'M', u'ᄉ'), + (0x3267, 'M', u'á„‹'), + (0x3268, 'M', u'ᄌ'), + (0x3269, 'M', u'ᄎ'), + (0x326A, 'M', u'á„'), + (0x326B, 'M', u'á„'), + (0x326C, 'M', u'á„‘'), + (0x326D, 'M', u'á„’'), + (0x326E, 'M', u'ê°€'), + (0x326F, 'M', u'나'), + (0x3270, 'M', u'다'), + (0x3271, 'M', u'ë¼'), + (0x3272, 'M', u'마'), + (0x3273, 'M', u'ë°”'), + (0x3274, 'M', u'사'), + (0x3275, 'M', u'ì•„'), + (0x3276, 'M', u'ìž'), + (0x3277, 'M', u'ì°¨'), + (0x3278, 'M', u'ì¹´'), + (0x3279, 'M', u'타'), + (0x327A, 'M', u'파'), + (0x327B, 'M', u'하'), + (0x327C, 'M', u'참고'), + (0x327D, 'M', u'주ì˜'), + 
(0x327E, 'M', u'ìš°'), + (0x327F, 'V'), + (0x3280, 'M', u'一'), + (0x3281, 'M', u'二'), + (0x3282, 'M', u'三'), + (0x3283, 'M', u'å››'), + (0x3284, 'M', u'五'), + (0x3285, 'M', u'å…­'), + (0x3286, 'M', u'七'), + (0x3287, 'M', u'å…«'), + (0x3288, 'M', u'ä¹'), + (0x3289, 'M', u'å'), + (0x328A, 'M', u'月'), + (0x328B, 'M', u'ç«'), + (0x328C, 'M', u'æ°´'), + (0x328D, 'M', u'木'), + (0x328E, 'M', u'金'), + (0x328F, 'M', u'土'), + (0x3290, 'M', u'æ—¥'), + (0x3291, 'M', u'æ ª'), + (0x3292, 'M', u'有'), + (0x3293, 'M', u'社'), + (0x3294, 'M', u'å'), + (0x3295, 'M', u'特'), + (0x3296, 'M', u'財'), + (0x3297, 'M', u'ç¥'), + (0x3298, 'M', u'労'), + (0x3299, 'M', u'秘'), + (0x329A, 'M', u'ç”·'), + (0x329B, 'M', u'女'), + (0x329C, 'M', u'é©'), + (0x329D, 'M', u'優'), + (0x329E, 'M', u'å°'), + (0x329F, 'M', u'注'), + (0x32A0, 'M', u'é …'), + (0x32A1, 'M', u'休'), + (0x32A2, 'M', u'写'), + (0x32A3, 'M', u'æ­£'), + (0x32A4, 'M', u'上'), + (0x32A5, 'M', u'中'), + (0x32A6, 'M', u'下'), + (0x32A7, 'M', u'å·¦'), + (0x32A8, 'M', u'å³'), + (0x32A9, 'M', u'医'), + (0x32AA, 'M', u'å®—'), + (0x32AB, 'M', u'å­¦'), + (0x32AC, 'M', u'監'), + (0x32AD, 'M', u'ä¼'), + ] + +def _seg_32(): + return [ + (0x32AE, 'M', u'資'), + (0x32AF, 'M', u'å”'), + (0x32B0, 'M', u'夜'), + (0x32B1, 'M', u'36'), + (0x32B2, 'M', u'37'), + (0x32B3, 'M', u'38'), + (0x32B4, 'M', u'39'), + (0x32B5, 'M', u'40'), + (0x32B6, 'M', u'41'), + (0x32B7, 'M', u'42'), + (0x32B8, 'M', u'43'), + (0x32B9, 'M', u'44'), + (0x32BA, 'M', u'45'), + (0x32BB, 'M', u'46'), + (0x32BC, 'M', u'47'), + (0x32BD, 'M', u'48'), + (0x32BE, 'M', u'49'), + (0x32BF, 'M', u'50'), + (0x32C0, 'M', u'1月'), + (0x32C1, 'M', u'2月'), + (0x32C2, 'M', u'3月'), + (0x32C3, 'M', u'4月'), + (0x32C4, 'M', u'5月'), + (0x32C5, 'M', u'6月'), + (0x32C6, 'M', u'7月'), + (0x32C7, 'M', u'8月'), + (0x32C8, 'M', u'9月'), + (0x32C9, 'M', u'10月'), + (0x32CA, 'M', u'11月'), + (0x32CB, 'M', u'12月'), + (0x32CC, 'M', u'hg'), + (0x32CD, 'M', u'erg'), + (0x32CE, 'M', u'ev'), + (0x32CF, 'M', u'ltd'), + (0x32D0, 'M', u'ã‚¢'), + (0x32D1, 'M', u'イ'), + (0x32D2, 'M', u'ウ'), + (0x32D3, 'M', u'エ'), + (0x32D4, 'M', u'オ'), + (0x32D5, 'M', u'ã‚«'), + (0x32D6, 'M', u'ã‚­'), + (0x32D7, 'M', u'ク'), + (0x32D8, 'M', u'ケ'), + (0x32D9, 'M', u'コ'), + (0x32DA, 'M', u'サ'), + (0x32DB, 'M', u'ã‚·'), + (0x32DC, 'M', u'ス'), + (0x32DD, 'M', u'ã‚»'), + (0x32DE, 'M', u'ソ'), + (0x32DF, 'M', u'ã‚¿'), + (0x32E0, 'M', u'ãƒ'), + (0x32E1, 'M', u'ツ'), + (0x32E2, 'M', u'テ'), + (0x32E3, 'M', u'ト'), + (0x32E4, 'M', u'ナ'), + (0x32E5, 'M', u'ニ'), + (0x32E6, 'M', u'ヌ'), + (0x32E7, 'M', u'ãƒ'), + (0x32E8, 'M', u'ノ'), + (0x32E9, 'M', u'ãƒ'), + (0x32EA, 'M', u'ヒ'), + (0x32EB, 'M', u'フ'), + (0x32EC, 'M', u'ヘ'), + (0x32ED, 'M', u'ホ'), + (0x32EE, 'M', u'マ'), + (0x32EF, 'M', u'ミ'), + (0x32F0, 'M', u'ム'), + (0x32F1, 'M', u'メ'), + (0x32F2, 'M', u'モ'), + (0x32F3, 'M', u'ヤ'), + (0x32F4, 'M', u'ユ'), + (0x32F5, 'M', u'ヨ'), + (0x32F6, 'M', u'ラ'), + (0x32F7, 'M', u'リ'), + (0x32F8, 'M', u'ル'), + (0x32F9, 'M', u'レ'), + (0x32FA, 'M', u'ロ'), + (0x32FB, 'M', u'ワ'), + (0x32FC, 'M', u'ヰ'), + (0x32FD, 'M', u'ヱ'), + (0x32FE, 'M', u'ヲ'), + (0x32FF, 'M', u'令和'), + (0x3300, 'M', u'アパート'), + (0x3301, 'M', u'アルファ'), + (0x3302, 'M', u'アンペア'), + (0x3303, 'M', u'アール'), + (0x3304, 'M', u'イニング'), + (0x3305, 'M', u'インãƒ'), + (0x3306, 'M', u'ウォン'), + (0x3307, 'M', u'エスクード'), + (0x3308, 'M', u'エーカー'), + (0x3309, 'M', u'オンス'), + (0x330A, 'M', u'オーム'), + (0x330B, 'M', u'カイリ'), + (0x330C, 'M', u'カラット'), + (0x330D, 'M', u'カロリー'), + (0x330E, 'M', u'ガロン'), + (0x330F, 'M', u'ガンマ'), + (0x3310, 'M', u'ギガ'), + (0x3311, 'M', 
u'ギニー'), + ] + +def _seg_33(): + return [ + (0x3312, 'M', u'キュリー'), + (0x3313, 'M', u'ギルダー'), + (0x3314, 'M', u'キロ'), + (0x3315, 'M', u'キログラム'), + (0x3316, 'M', u'キロメートル'), + (0x3317, 'M', u'キロワット'), + (0x3318, 'M', u'グラム'), + (0x3319, 'M', u'グラムトン'), + (0x331A, 'M', u'クルゼイロ'), + (0x331B, 'M', u'クローãƒ'), + (0x331C, 'M', u'ケース'), + (0x331D, 'M', u'コルナ'), + (0x331E, 'M', u'コーãƒ'), + (0x331F, 'M', u'サイクル'), + (0x3320, 'M', u'サンãƒãƒ¼ãƒ '), + (0x3321, 'M', u'シリング'), + (0x3322, 'M', u'センãƒ'), + (0x3323, 'M', u'セント'), + (0x3324, 'M', u'ダース'), + (0x3325, 'M', u'デシ'), + (0x3326, 'M', u'ドル'), + (0x3327, 'M', u'トン'), + (0x3328, 'M', u'ナノ'), + (0x3329, 'M', u'ノット'), + (0x332A, 'M', u'ãƒã‚¤ãƒ„'), + (0x332B, 'M', u'パーセント'), + (0x332C, 'M', u'パーツ'), + (0x332D, 'M', u'ãƒãƒ¼ãƒ¬ãƒ«'), + (0x332E, 'M', u'ピアストル'), + (0x332F, 'M', u'ピクル'), + (0x3330, 'M', u'ピコ'), + (0x3331, 'M', u'ビル'), + (0x3332, 'M', u'ファラッド'), + (0x3333, 'M', u'フィート'), + (0x3334, 'M', u'ブッシェル'), + (0x3335, 'M', u'フラン'), + (0x3336, 'M', u'ヘクタール'), + (0x3337, 'M', u'ペソ'), + (0x3338, 'M', u'ペニヒ'), + (0x3339, 'M', u'ヘルツ'), + (0x333A, 'M', u'ペンス'), + (0x333B, 'M', u'ページ'), + (0x333C, 'M', u'ベータ'), + (0x333D, 'M', u'ãƒã‚¤ãƒ³ãƒˆ'), + (0x333E, 'M', u'ボルト'), + (0x333F, 'M', u'ホン'), + (0x3340, 'M', u'ãƒãƒ³ãƒ‰'), + (0x3341, 'M', u'ホール'), + (0x3342, 'M', u'ホーン'), + (0x3343, 'M', u'マイクロ'), + (0x3344, 'M', u'マイル'), + (0x3345, 'M', u'マッãƒ'), + (0x3346, 'M', u'マルク'), + (0x3347, 'M', u'マンション'), + (0x3348, 'M', u'ミクロン'), + (0x3349, 'M', u'ミリ'), + (0x334A, 'M', u'ミリãƒãƒ¼ãƒ«'), + (0x334B, 'M', u'メガ'), + (0x334C, 'M', u'メガトン'), + (0x334D, 'M', u'メートル'), + (0x334E, 'M', u'ヤード'), + (0x334F, 'M', u'ヤール'), + (0x3350, 'M', u'ユアン'), + (0x3351, 'M', u'リットル'), + (0x3352, 'M', u'リラ'), + (0x3353, 'M', u'ルピー'), + (0x3354, 'M', u'ルーブル'), + (0x3355, 'M', u'レム'), + (0x3356, 'M', u'レントゲン'), + (0x3357, 'M', u'ワット'), + (0x3358, 'M', u'0点'), + (0x3359, 'M', u'1点'), + (0x335A, 'M', u'2点'), + (0x335B, 'M', u'3点'), + (0x335C, 'M', u'4点'), + (0x335D, 'M', u'5点'), + (0x335E, 'M', u'6点'), + (0x335F, 'M', u'7点'), + (0x3360, 'M', u'8点'), + (0x3361, 'M', u'9点'), + (0x3362, 'M', u'10点'), + (0x3363, 'M', u'11点'), + (0x3364, 'M', u'12点'), + (0x3365, 'M', u'13点'), + (0x3366, 'M', u'14点'), + (0x3367, 'M', u'15点'), + (0x3368, 'M', u'16点'), + (0x3369, 'M', u'17点'), + (0x336A, 'M', u'18点'), + (0x336B, 'M', u'19点'), + (0x336C, 'M', u'20点'), + (0x336D, 'M', u'21点'), + (0x336E, 'M', u'22点'), + (0x336F, 'M', u'23点'), + (0x3370, 'M', u'24点'), + (0x3371, 'M', u'hpa'), + (0x3372, 'M', u'da'), + (0x3373, 'M', u'au'), + (0x3374, 'M', u'bar'), + (0x3375, 'M', u'ov'), + ] + +def _seg_34(): + return [ + (0x3376, 'M', u'pc'), + (0x3377, 'M', u'dm'), + (0x3378, 'M', u'dm2'), + (0x3379, 'M', u'dm3'), + (0x337A, 'M', u'iu'), + (0x337B, 'M', u'å¹³æˆ'), + (0x337C, 'M', u'昭和'), + (0x337D, 'M', u'大正'), + (0x337E, 'M', u'明治'), + (0x337F, 'M', u'æ ªå¼ä¼šç¤¾'), + (0x3380, 'M', u'pa'), + (0x3381, 'M', u'na'), + (0x3382, 'M', u'μa'), + (0x3383, 'M', u'ma'), + (0x3384, 'M', u'ka'), + (0x3385, 'M', u'kb'), + (0x3386, 'M', u'mb'), + (0x3387, 'M', u'gb'), + (0x3388, 'M', u'cal'), + (0x3389, 'M', u'kcal'), + (0x338A, 'M', u'pf'), + (0x338B, 'M', u'nf'), + (0x338C, 'M', u'μf'), + (0x338D, 'M', u'μg'), + (0x338E, 'M', u'mg'), + (0x338F, 'M', u'kg'), + (0x3390, 'M', u'hz'), + (0x3391, 'M', u'khz'), + (0x3392, 'M', u'mhz'), + (0x3393, 'M', u'ghz'), + (0x3394, 'M', u'thz'), + (0x3395, 'M', u'μl'), + (0x3396, 'M', u'ml'), + (0x3397, 'M', u'dl'), + (0x3398, 'M', u'kl'), + (0x3399, 'M', u'fm'), + (0x339A, 'M', u'nm'), + (0x339B, 'M', 
u'μm'), + (0x339C, 'M', u'mm'), + (0x339D, 'M', u'cm'), + (0x339E, 'M', u'km'), + (0x339F, 'M', u'mm2'), + (0x33A0, 'M', u'cm2'), + (0x33A1, 'M', u'm2'), + (0x33A2, 'M', u'km2'), + (0x33A3, 'M', u'mm3'), + (0x33A4, 'M', u'cm3'), + (0x33A5, 'M', u'm3'), + (0x33A6, 'M', u'km3'), + (0x33A7, 'M', u'm∕s'), + (0x33A8, 'M', u'm∕s2'), + (0x33A9, 'M', u'pa'), + (0x33AA, 'M', u'kpa'), + (0x33AB, 'M', u'mpa'), + (0x33AC, 'M', u'gpa'), + (0x33AD, 'M', u'rad'), + (0x33AE, 'M', u'rad∕s'), + (0x33AF, 'M', u'rad∕s2'), + (0x33B0, 'M', u'ps'), + (0x33B1, 'M', u'ns'), + (0x33B2, 'M', u'μs'), + (0x33B3, 'M', u'ms'), + (0x33B4, 'M', u'pv'), + (0x33B5, 'M', u'nv'), + (0x33B6, 'M', u'μv'), + (0x33B7, 'M', u'mv'), + (0x33B8, 'M', u'kv'), + (0x33B9, 'M', u'mv'), + (0x33BA, 'M', u'pw'), + (0x33BB, 'M', u'nw'), + (0x33BC, 'M', u'μw'), + (0x33BD, 'M', u'mw'), + (0x33BE, 'M', u'kw'), + (0x33BF, 'M', u'mw'), + (0x33C0, 'M', u'kω'), + (0x33C1, 'M', u'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', u'bq'), + (0x33C4, 'M', u'cc'), + (0x33C5, 'M', u'cd'), + (0x33C6, 'M', u'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', u'db'), + (0x33C9, 'M', u'gy'), + (0x33CA, 'M', u'ha'), + (0x33CB, 'M', u'hp'), + (0x33CC, 'M', u'in'), + (0x33CD, 'M', u'kk'), + (0x33CE, 'M', u'km'), + (0x33CF, 'M', u'kt'), + (0x33D0, 'M', u'lm'), + (0x33D1, 'M', u'ln'), + (0x33D2, 'M', u'log'), + (0x33D3, 'M', u'lx'), + (0x33D4, 'M', u'mb'), + (0x33D5, 'M', u'mil'), + (0x33D6, 'M', u'mol'), + (0x33D7, 'M', u'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', u'ppm'), + ] + +def _seg_35(): + return [ + (0x33DA, 'M', u'pr'), + (0x33DB, 'M', u'sr'), + (0x33DC, 'M', u'sv'), + (0x33DD, 'M', u'wb'), + (0x33DE, 'M', u'v∕m'), + (0x33DF, 'M', u'a∕m'), + (0x33E0, 'M', u'1æ—¥'), + (0x33E1, 'M', u'2æ—¥'), + (0x33E2, 'M', u'3æ—¥'), + (0x33E3, 'M', u'4æ—¥'), + (0x33E4, 'M', u'5æ—¥'), + (0x33E5, 'M', u'6æ—¥'), + (0x33E6, 'M', u'7æ—¥'), + (0x33E7, 'M', u'8æ—¥'), + (0x33E8, 'M', u'9æ—¥'), + (0x33E9, 'M', u'10æ—¥'), + (0x33EA, 'M', u'11æ—¥'), + (0x33EB, 'M', u'12æ—¥'), + (0x33EC, 'M', u'13æ—¥'), + (0x33ED, 'M', u'14æ—¥'), + (0x33EE, 'M', u'15æ—¥'), + (0x33EF, 'M', u'16æ—¥'), + (0x33F0, 'M', u'17æ—¥'), + (0x33F1, 'M', u'18æ—¥'), + (0x33F2, 'M', u'19æ—¥'), + (0x33F3, 'M', u'20æ—¥'), + (0x33F4, 'M', u'21æ—¥'), + (0x33F5, 'M', u'22æ—¥'), + (0x33F6, 'M', u'23æ—¥'), + (0x33F7, 'M', u'24æ—¥'), + (0x33F8, 'M', u'25æ—¥'), + (0x33F9, 'M', u'26æ—¥'), + (0x33FA, 'M', u'27æ—¥'), + (0x33FB, 'M', u'28æ—¥'), + (0x33FC, 'M', u'29æ—¥'), + (0x33FD, 'M', u'30æ—¥'), + (0x33FE, 'M', u'31æ—¥'), + (0x33FF, 'M', u'gal'), + (0x3400, 'V'), + (0x9FFD, 'X'), + (0xA000, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', u'ê™'), + (0xA641, 'V'), + (0xA642, 'M', u'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', u'ê™…'), + (0xA645, 'V'), + (0xA646, 'M', u'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', u'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', u'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', u'ê™'), + (0xA64D, 'V'), + (0xA64E, 'M', u'ê™'), + (0xA64F, 'V'), + (0xA650, 'M', u'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', u'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', u'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', u'ê™—'), + (0xA657, 'V'), + (0xA658, 'M', u'ê™™'), + (0xA659, 'V'), + (0xA65A, 'M', u'ê™›'), + (0xA65B, 'V'), + (0xA65C, 'M', u'ê™'), + (0xA65D, 'V'), + (0xA65E, 'M', u'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', u'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', u'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', u'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', u'ê™§'), + (0xA667, 'V'), + (0xA668, 'M', u'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', u'ꙫ'), + 
(0xA66B, 'V'), + (0xA66C, 'M', u'ê™­'), + (0xA66D, 'V'), + (0xA680, 'M', u'êš'), + (0xA681, 'V'), + (0xA682, 'M', u'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', u'êš…'), + (0xA685, 'V'), + (0xA686, 'M', u'ꚇ'), + (0xA687, 'V'), + ] + +def _seg_36(): + return [ + (0xA688, 'M', u'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', u'êš‹'), + (0xA68B, 'V'), + (0xA68C, 'M', u'êš'), + (0xA68D, 'V'), + (0xA68E, 'M', u'êš'), + (0xA68F, 'V'), + (0xA690, 'M', u'êš‘'), + (0xA691, 'V'), + (0xA692, 'M', u'êš“'), + (0xA693, 'V'), + (0xA694, 'M', u'êš•'), + (0xA695, 'V'), + (0xA696, 'M', u'êš—'), + (0xA697, 'V'), + (0xA698, 'M', u'êš™'), + (0xA699, 'V'), + (0xA69A, 'M', u'êš›'), + (0xA69B, 'V'), + (0xA69C, 'M', u'ÑŠ'), + (0xA69D, 'M', u'ÑŒ'), + (0xA69E, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', u'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', u'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', u'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', u'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', u'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', u'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', u'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', u'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', u'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', u'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', u'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', u'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', u'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', u'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', u'ê'), + (0xA741, 'V'), + (0xA742, 'M', u'êƒ'), + (0xA743, 'V'), + (0xA744, 'M', u'ê…'), + (0xA745, 'V'), + (0xA746, 'M', u'ê‡'), + (0xA747, 'V'), + (0xA748, 'M', u'ê‰'), + (0xA749, 'V'), + (0xA74A, 'M', u'ê‹'), + (0xA74B, 'V'), + (0xA74C, 'M', u'ê'), + (0xA74D, 'V'), + (0xA74E, 'M', u'ê'), + (0xA74F, 'V'), + (0xA750, 'M', u'ê‘'), + (0xA751, 'V'), + (0xA752, 'M', u'ê“'), + (0xA753, 'V'), + (0xA754, 'M', u'ê•'), + (0xA755, 'V'), + (0xA756, 'M', u'ê—'), + (0xA757, 'V'), + (0xA758, 'M', u'ê™'), + (0xA759, 'V'), + (0xA75A, 'M', u'ê›'), + (0xA75B, 'V'), + (0xA75C, 'M', u'ê'), + (0xA75D, 'V'), + (0xA75E, 'M', u'êŸ'), + (0xA75F, 'V'), + (0xA760, 'M', u'ê¡'), + (0xA761, 'V'), + (0xA762, 'M', u'ê£'), + (0xA763, 'V'), + (0xA764, 'M', u'ê¥'), + (0xA765, 'V'), + (0xA766, 'M', u'ê§'), + (0xA767, 'V'), + (0xA768, 'M', u'ê©'), + (0xA769, 'V'), + (0xA76A, 'M', u'ê«'), + (0xA76B, 'V'), + (0xA76C, 'M', u'ê­'), + (0xA76D, 'V'), + (0xA76E, 'M', u'ê¯'), + ] + +def _seg_37(): + return [ + (0xA76F, 'V'), + (0xA770, 'M', u'ê¯'), + (0xA771, 'V'), + (0xA779, 'M', u'êº'), + (0xA77A, 'V'), + (0xA77B, 'M', u'ê¼'), + (0xA77C, 'V'), + (0xA77D, 'M', u'áµ¹'), + (0xA77E, 'M', u'ê¿'), + (0xA77F, 'V'), + (0xA780, 'M', u'êž'), + (0xA781, 'V'), + (0xA782, 'M', u'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', u'êž…'), + (0xA785, 'V'), + (0xA786, 'M', u'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', u'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', u'É¥'), + (0xA78E, 'V'), + (0xA790, 'M', u'êž‘'), + (0xA791, 'V'), + (0xA792, 'M', u'êž“'), + (0xA793, 'V'), + (0xA796, 'M', u'êž—'), + (0xA797, 'V'), + (0xA798, 'M', u'êž™'), + (0xA799, 'V'), + (0xA79A, 'M', u'êž›'), + (0xA79B, 'V'), + (0xA79C, 'M', u'êž'), + (0xA79D, 'V'), + (0xA79E, 'M', u'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', u'êž¡'), + (0xA7A1, 'V'), + (0xA7A2, 'M', u'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', u'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', u'êž§'), + (0xA7A7, 'V'), + (0xA7A8, 'M', u'êž©'), + (0xA7A9, 'V'), + (0xA7AA, 'M', u'ɦ'), + (0xA7AB, 'M', u'Éœ'), + (0xA7AC, 'M', u'É¡'), + (0xA7AD, 'M', u'ɬ'), + (0xA7AE, 'M', u'ɪ'), + (0xA7AF, 'V'), + (0xA7B0, 'M', u'Êž'), + (0xA7B1, 'M', u'ʇ'), + (0xA7B2, 'M', u'Ê'), + (0xA7B3, 'M', u'ê­“'), + (0xA7B4, 'M', u'êžµ'), + (0xA7B5, 'V'), + (0xA7B6, 
'M', u'êž·'), + (0xA7B7, 'V'), + (0xA7B8, 'M', u'êž¹'), + (0xA7B9, 'V'), + (0xA7BA, 'M', u'êž»'), + (0xA7BB, 'V'), + (0xA7BC, 'M', u'êž½'), + (0xA7BD, 'V'), + (0xA7BE, 'M', u'êž¿'), + (0xA7BF, 'V'), + (0xA7C0, 'X'), + (0xA7C2, 'M', u'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', u'êž”'), + (0xA7C5, 'M', u'Ê‚'), + (0xA7C6, 'M', u'á¶Ž'), + (0xA7C7, 'M', u'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', u'ꟊ'), + (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7F5, 'M', u'ꟶ'), + (0xA7F6, 'V'), + (0xA7F8, 'M', u'ħ'), + (0xA7F9, 'M', u'Å“'), + (0xA7FA, 'V'), + (0xA82D, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + ] + +def _seg_38(): + return [ + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', u'ꜧ'), + (0xAB5D, 'M', u'ꬷ'), + (0xAB5E, 'M', u'É«'), + (0xAB5F, 'M', u'ê­’'), + (0xAB60, 'V'), + (0xAB69, 'M', u'Ê'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), + (0xAB70, 'M', u'Ꭰ'), + (0xAB71, 'M', u'Ꭱ'), + (0xAB72, 'M', u'Ꭲ'), + (0xAB73, 'M', u'Ꭳ'), + (0xAB74, 'M', u'Ꭴ'), + (0xAB75, 'M', u'Ꭵ'), + (0xAB76, 'M', u'Ꭶ'), + (0xAB77, 'M', u'Ꭷ'), + (0xAB78, 'M', u'Ꭸ'), + (0xAB79, 'M', u'Ꭹ'), + (0xAB7A, 'M', u'Ꭺ'), + (0xAB7B, 'M', u'Ꭻ'), + (0xAB7C, 'M', u'Ꭼ'), + (0xAB7D, 'M', u'Ꭽ'), + (0xAB7E, 'M', u'Ꭾ'), + (0xAB7F, 'M', u'Ꭿ'), + (0xAB80, 'M', u'Ꮀ'), + (0xAB81, 'M', u'Ꮁ'), + (0xAB82, 'M', u'Ꮂ'), + (0xAB83, 'M', u'Ꮃ'), + (0xAB84, 'M', u'Ꮄ'), + (0xAB85, 'M', u'Ꮅ'), + (0xAB86, 'M', u'Ꮆ'), + (0xAB87, 'M', u'Ꮇ'), + (0xAB88, 'M', u'Ꮈ'), + (0xAB89, 'M', u'Ꮉ'), + (0xAB8A, 'M', u'Ꮊ'), + (0xAB8B, 'M', u'Ꮋ'), + (0xAB8C, 'M', u'Ꮌ'), + (0xAB8D, 'M', u'Ꮍ'), + (0xAB8E, 'M', u'Ꮎ'), + (0xAB8F, 'M', u'Ꮏ'), + (0xAB90, 'M', u'á€'), + (0xAB91, 'M', u'á'), + (0xAB92, 'M', u'á‚'), + (0xAB93, 'M', u'áƒ'), + (0xAB94, 'M', u'á„'), + (0xAB95, 'M', u'á…'), + (0xAB96, 'M', u'á†'), + (0xAB97, 'M', u'á‡'), + (0xAB98, 'M', u'áˆ'), + (0xAB99, 'M', u'á‰'), + (0xAB9A, 'M', u'áŠ'), + (0xAB9B, 'M', u'á‹'), + (0xAB9C, 'M', u'áŒ'), + (0xAB9D, 'M', u'á'), + (0xAB9E, 'M', u'áŽ'), + (0xAB9F, 'M', u'á'), + (0xABA0, 'M', u'á'), + (0xABA1, 'M', u'á‘'), + (0xABA2, 'M', u'á’'), + (0xABA3, 'M', u'á“'), + (0xABA4, 'M', u'á”'), + (0xABA5, 'M', u'á•'), + (0xABA6, 'M', u'á–'), + (0xABA7, 'M', u'á—'), + (0xABA8, 'M', u'á˜'), + (0xABA9, 'M', u'á™'), + (0xABAA, 'M', u'áš'), + (0xABAB, 'M', u'á›'), + (0xABAC, 'M', u'áœ'), + (0xABAD, 'M', u'á'), + (0xABAE, 'M', u'áž'), + (0xABAF, 'M', u'áŸ'), + (0xABB0, 'M', u'á '), + (0xABB1, 'M', u'á¡'), + (0xABB2, 'M', u'á¢'), + (0xABB3, 'M', u'á£'), + ] + +def _seg_39(): + return [ + (0xABB4, 'M', u'á¤'), + (0xABB5, 'M', u'á¥'), + (0xABB6, 'M', u'á¦'), + (0xABB7, 'M', u'á§'), + (0xABB8, 'M', u'á¨'), + (0xABB9, 'M', u'á©'), + (0xABBA, 'M', u'áª'), + (0xABBB, 'M', u'á«'), + (0xABBC, 'M', u'á¬'), + (0xABBD, 'M', u'á­'), + (0xABBE, 'M', u'á®'), + (0xABBF, 'M', u'á¯'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', u'豈'), + (0xF901, 'M', u'æ›´'), + 
(0xF902, 'M', u'車'), + (0xF903, 'M', u'賈'), + (0xF904, 'M', u'滑'), + (0xF905, 'M', u'串'), + (0xF906, 'M', u'å¥'), + (0xF907, 'M', u'龜'), + (0xF909, 'M', u'契'), + (0xF90A, 'M', u'金'), + (0xF90B, 'M', u'å–‡'), + (0xF90C, 'M', u'奈'), + (0xF90D, 'M', u'懶'), + (0xF90E, 'M', u'癩'), + (0xF90F, 'M', u'ç¾…'), + (0xF910, 'M', u'蘿'), + (0xF911, 'M', u'螺'), + (0xF912, 'M', u'裸'), + (0xF913, 'M', u'é‚'), + (0xF914, 'M', u'樂'), + (0xF915, 'M', u'æ´›'), + (0xF916, 'M', u'烙'), + (0xF917, 'M', u'çž'), + (0xF918, 'M', u'è½'), + (0xF919, 'M', u'é…ª'), + (0xF91A, 'M', u'é§±'), + (0xF91B, 'M', u'亂'), + (0xF91C, 'M', u'åµ'), + (0xF91D, 'M', u'欄'), + (0xF91E, 'M', u'爛'), + (0xF91F, 'M', u'蘭'), + (0xF920, 'M', u'鸞'), + (0xF921, 'M', u'åµ'), + (0xF922, 'M', u'æ¿«'), + (0xF923, 'M', u'è—'), + (0xF924, 'M', u'襤'), + (0xF925, 'M', u'拉'), + (0xF926, 'M', u'臘'), + (0xF927, 'M', u'è Ÿ'), + (0xF928, 'M', u'廊'), + (0xF929, 'M', u'朗'), + (0xF92A, 'M', u'浪'), + (0xF92B, 'M', u'狼'), + (0xF92C, 'M', u'郎'), + (0xF92D, 'M', u'來'), + (0xF92E, 'M', u'冷'), + (0xF92F, 'M', u'勞'), + (0xF930, 'M', u'æ“„'), + (0xF931, 'M', u'æ«“'), + (0xF932, 'M', u'çˆ'), + (0xF933, 'M', u'ç›§'), + (0xF934, 'M', u'è€'), + (0xF935, 'M', u'蘆'), + (0xF936, 'M', u'虜'), + (0xF937, 'M', u'è·¯'), + (0xF938, 'M', u'露'), + (0xF939, 'M', u'é­¯'), + (0xF93A, 'M', u'é·º'), + (0xF93B, 'M', u'碌'), + (0xF93C, 'M', u'祿'), + (0xF93D, 'M', u'ç¶ '), + (0xF93E, 'M', u'è‰'), + (0xF93F, 'M', u'錄'), + (0xF940, 'M', u'鹿'), + (0xF941, 'M', u'è«–'), + (0xF942, 'M', u'壟'), + (0xF943, 'M', u'弄'), + (0xF944, 'M', u'ç± '), + (0xF945, 'M', u'è¾'), + (0xF946, 'M', u'牢'), + (0xF947, 'M', u'磊'), + (0xF948, 'M', u'賂'), + (0xF949, 'M', u'é›·'), + (0xF94A, 'M', u'壘'), + (0xF94B, 'M', u'å±¢'), + (0xF94C, 'M', u'樓'), + (0xF94D, 'M', u'æ·š'), + (0xF94E, 'M', u'æ¼'), + ] + +def _seg_40(): + return [ + (0xF94F, 'M', u'ç´¯'), + (0xF950, 'M', u'縷'), + (0xF951, 'M', u'陋'), + (0xF952, 'M', u'å‹’'), + (0xF953, 'M', u'è‚‹'), + (0xF954, 'M', u'凜'), + (0xF955, 'M', u'凌'), + (0xF956, 'M', u'稜'), + (0xF957, 'M', u'ç¶¾'), + (0xF958, 'M', u'è±'), + (0xF959, 'M', u'陵'), + (0xF95A, 'M', u'讀'), + (0xF95B, 'M', u'æ‹'), + (0xF95C, 'M', u'樂'), + (0xF95D, 'M', u'諾'), + (0xF95E, 'M', u'丹'), + (0xF95F, 'M', u'寧'), + (0xF960, 'M', u'怒'), + (0xF961, 'M', u'率'), + (0xF962, 'M', u'ç•°'), + (0xF963, 'M', u'北'), + (0xF964, 'M', u'磻'), + (0xF965, 'M', u'便'), + (0xF966, 'M', u'復'), + (0xF967, 'M', u'ä¸'), + (0xF968, 'M', u'泌'), + (0xF969, 'M', u'數'), + (0xF96A, 'M', u'ç´¢'), + (0xF96B, 'M', u'åƒ'), + (0xF96C, 'M', u'塞'), + (0xF96D, 'M', u'çœ'), + (0xF96E, 'M', u'葉'), + (0xF96F, 'M', u'說'), + (0xF970, 'M', u'殺'), + (0xF971, 'M', u'è¾°'), + (0xF972, 'M', u'沈'), + (0xF973, 'M', u'拾'), + (0xF974, 'M', u'è‹¥'), + (0xF975, 'M', u'掠'), + (0xF976, 'M', u'ç•¥'), + (0xF977, 'M', u'亮'), + (0xF978, 'M', u'å…©'), + (0xF979, 'M', u'凉'), + (0xF97A, 'M', u'æ¢'), + (0xF97B, 'M', u'ç³§'), + (0xF97C, 'M', u'良'), + (0xF97D, 'M', u'è«’'), + (0xF97E, 'M', u'é‡'), + (0xF97F, 'M', u'勵'), + (0xF980, 'M', u'å‘‚'), + (0xF981, 'M', u'女'), + (0xF982, 'M', u'廬'), + (0xF983, 'M', u'æ—…'), + (0xF984, 'M', u'濾'), + (0xF985, 'M', u'礪'), + (0xF986, 'M', u'é–­'), + (0xF987, 'M', u'驪'), + (0xF988, 'M', u'麗'), + (0xF989, 'M', u'黎'), + (0xF98A, 'M', u'力'), + (0xF98B, 'M', u'曆'), + (0xF98C, 'M', u'æ­·'), + (0xF98D, 'M', u'è½¢'), + (0xF98E, 'M', u'å¹´'), + (0xF98F, 'M', u'æ†'), + (0xF990, 'M', u'戀'), + (0xF991, 'M', u'æ’š'), + (0xF992, 'M', u'æ¼£'), + (0xF993, 'M', u'ç…‰'), + (0xF994, 'M', u'ç’‰'), + (0xF995, 'M', u'ç§Š'), + (0xF996, 'M', u'ç·´'), + (0xF997, 
'M', u'è¯'), + (0xF998, 'M', u'輦'), + (0xF999, 'M', u'è“®'), + (0xF99A, 'M', u'連'), + (0xF99B, 'M', u'éŠ'), + (0xF99C, 'M', u'列'), + (0xF99D, 'M', u'劣'), + (0xF99E, 'M', u'å’½'), + (0xF99F, 'M', u'烈'), + (0xF9A0, 'M', u'裂'), + (0xF9A1, 'M', u'說'), + (0xF9A2, 'M', u'廉'), + (0xF9A3, 'M', u'念'), + (0xF9A4, 'M', u'æ»'), + (0xF9A5, 'M', u'æ®®'), + (0xF9A6, 'M', u'ç°¾'), + (0xF9A7, 'M', u'çµ'), + (0xF9A8, 'M', u'令'), + (0xF9A9, 'M', u'囹'), + (0xF9AA, 'M', u'寧'), + (0xF9AB, 'M', u'嶺'), + (0xF9AC, 'M', u'怜'), + (0xF9AD, 'M', u'玲'), + (0xF9AE, 'M', u'ç‘©'), + (0xF9AF, 'M', u'羚'), + (0xF9B0, 'M', u'è†'), + (0xF9B1, 'M', u'鈴'), + (0xF9B2, 'M', u'é›¶'), + ] + +def _seg_41(): + return [ + (0xF9B3, 'M', u'éˆ'), + (0xF9B4, 'M', u'é ˜'), + (0xF9B5, 'M', u'例'), + (0xF9B6, 'M', u'禮'), + (0xF9B7, 'M', u'醴'), + (0xF9B8, 'M', u'隸'), + (0xF9B9, 'M', u'惡'), + (0xF9BA, 'M', u'了'), + (0xF9BB, 'M', u'僚'), + (0xF9BC, 'M', u'寮'), + (0xF9BD, 'M', u'å°¿'), + (0xF9BE, 'M', u'æ–™'), + (0xF9BF, 'M', u'樂'), + (0xF9C0, 'M', u'燎'), + (0xF9C1, 'M', u'療'), + (0xF9C2, 'M', u'蓼'), + (0xF9C3, 'M', u'é¼'), + (0xF9C4, 'M', u'é¾'), + (0xF9C5, 'M', u'暈'), + (0xF9C6, 'M', u'阮'), + (0xF9C7, 'M', u'劉'), + (0xF9C8, 'M', u'æ»'), + (0xF9C9, 'M', u'柳'), + (0xF9CA, 'M', u'æµ'), + (0xF9CB, 'M', u'溜'), + (0xF9CC, 'M', u'ç‰'), + (0xF9CD, 'M', u'ç•™'), + (0xF9CE, 'M', u'ç¡«'), + (0xF9CF, 'M', u'ç´'), + (0xF9D0, 'M', u'類'), + (0xF9D1, 'M', u'å…­'), + (0xF9D2, 'M', u'戮'), + (0xF9D3, 'M', u'陸'), + (0xF9D4, 'M', u'倫'), + (0xF9D5, 'M', u'å´™'), + (0xF9D6, 'M', u'æ·ª'), + (0xF9D7, 'M', u'輪'), + (0xF9D8, 'M', u'律'), + (0xF9D9, 'M', u'æ…„'), + (0xF9DA, 'M', u'æ —'), + (0xF9DB, 'M', u'率'), + (0xF9DC, 'M', u'隆'), + (0xF9DD, 'M', u'利'), + (0xF9DE, 'M', u'å'), + (0xF9DF, 'M', u'å±¥'), + (0xF9E0, 'M', u'易'), + (0xF9E1, 'M', u'æŽ'), + (0xF9E2, 'M', u'梨'), + (0xF9E3, 'M', u'æ³¥'), + (0xF9E4, 'M', u'ç†'), + (0xF9E5, 'M', u'ç—¢'), + (0xF9E6, 'M', u'ç½¹'), + (0xF9E7, 'M', u'è£'), + (0xF9E8, 'M', u'裡'), + (0xF9E9, 'M', u'里'), + (0xF9EA, 'M', u'離'), + (0xF9EB, 'M', u'匿'), + (0xF9EC, 'M', u'溺'), + (0xF9ED, 'M', u'å'), + (0xF9EE, 'M', u'ç‡'), + (0xF9EF, 'M', u'ç’˜'), + (0xF9F0, 'M', u'è—º'), + (0xF9F1, 'M', u'隣'), + (0xF9F2, 'M', u'é±—'), + (0xF9F3, 'M', u'麟'), + (0xF9F4, 'M', u'æž—'), + (0xF9F5, 'M', u'æ·‹'), + (0xF9F6, 'M', u'臨'), + (0xF9F7, 'M', u'ç«‹'), + (0xF9F8, 'M', u'笠'), + (0xF9F9, 'M', u'ç²’'), + (0xF9FA, 'M', u'ç‹€'), + (0xF9FB, 'M', u'ç‚™'), + (0xF9FC, 'M', u'è­˜'), + (0xF9FD, 'M', u'什'), + (0xF9FE, 'M', u'茶'), + (0xF9FF, 'M', u'刺'), + (0xFA00, 'M', u'切'), + (0xFA01, 'M', u'度'), + (0xFA02, 'M', u'æ‹“'), + (0xFA03, 'M', u'ç³–'), + (0xFA04, 'M', u'å®…'), + (0xFA05, 'M', u'æ´ž'), + (0xFA06, 'M', u'æš´'), + (0xFA07, 'M', u'è¼»'), + (0xFA08, 'M', u'行'), + (0xFA09, 'M', u'é™'), + (0xFA0A, 'M', u'見'), + (0xFA0B, 'M', u'廓'), + (0xFA0C, 'M', u'å…€'), + (0xFA0D, 'M', u'å—€'), + (0xFA0E, 'V'), + (0xFA10, 'M', u'塚'), + (0xFA11, 'V'), + (0xFA12, 'M', u'æ™´'), + (0xFA13, 'V'), + (0xFA15, 'M', u'凞'), + (0xFA16, 'M', u'猪'), + (0xFA17, 'M', u'益'), + (0xFA18, 'M', u'礼'), + ] + +def _seg_42(): + return [ + (0xFA19, 'M', u'神'), + (0xFA1A, 'M', u'祥'), + (0xFA1B, 'M', u'ç¦'), + (0xFA1C, 'M', u'é–'), + (0xFA1D, 'M', u'ç²¾'), + (0xFA1E, 'M', u'ç¾½'), + (0xFA1F, 'V'), + (0xFA20, 'M', u'蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', u'諸'), + (0xFA23, 'V'), + (0xFA25, 'M', u'逸'), + (0xFA26, 'M', u'都'), + (0xFA27, 'V'), + (0xFA2A, 'M', u'飯'), + (0xFA2B, 'M', u'飼'), + (0xFA2C, 'M', u'館'), + (0xFA2D, 'M', u'é¶´'), + (0xFA2E, 'M', u'郞'), + (0xFA2F, 'M', u'éš·'), + (0xFA30, 'M', u'ä¾®'), + 
(0xFA31, 'M', u'僧'), + (0xFA32, 'M', u'å…'), + (0xFA33, 'M', u'勉'), + (0xFA34, 'M', u'勤'), + (0xFA35, 'M', u'å‘'), + (0xFA36, 'M', u'å–'), + (0xFA37, 'M', u'嘆'), + (0xFA38, 'M', u'器'), + (0xFA39, 'M', u'å¡€'), + (0xFA3A, 'M', u'墨'), + (0xFA3B, 'M', u'層'), + (0xFA3C, 'M', u'å±®'), + (0xFA3D, 'M', u'æ‚”'), + (0xFA3E, 'M', u'æ…¨'), + (0xFA3F, 'M', u'憎'), + (0xFA40, 'M', u'懲'), + (0xFA41, 'M', u'æ•'), + (0xFA42, 'M', u'æ—¢'), + (0xFA43, 'M', u'æš‘'), + (0xFA44, 'M', u'梅'), + (0xFA45, 'M', u'æµ·'), + (0xFA46, 'M', u'渚'), + (0xFA47, 'M', u'æ¼¢'), + (0xFA48, 'M', u'ç…®'), + (0xFA49, 'M', u'爫'), + (0xFA4A, 'M', u'ç¢'), + (0xFA4B, 'M', u'碑'), + (0xFA4C, 'M', u'社'), + (0xFA4D, 'M', u'祉'), + (0xFA4E, 'M', u'祈'), + (0xFA4F, 'M', u'ç¥'), + (0xFA50, 'M', u'祖'), + (0xFA51, 'M', u'ç¥'), + (0xFA52, 'M', u'ç¦'), + (0xFA53, 'M', u'禎'), + (0xFA54, 'M', u'ç©€'), + (0xFA55, 'M', u'çª'), + (0xFA56, 'M', u'節'), + (0xFA57, 'M', u'ç·´'), + (0xFA58, 'M', u'縉'), + (0xFA59, 'M', u'ç¹'), + (0xFA5A, 'M', u'ç½²'), + (0xFA5B, 'M', u'者'), + (0xFA5C, 'M', u'臭'), + (0xFA5D, 'M', u'艹'), + (0xFA5F, 'M', u'è‘—'), + (0xFA60, 'M', u'è¤'), + (0xFA61, 'M', u'視'), + (0xFA62, 'M', u'è¬'), + (0xFA63, 'M', u'謹'), + (0xFA64, 'M', u'賓'), + (0xFA65, 'M', u'è´ˆ'), + (0xFA66, 'M', u'è¾¶'), + (0xFA67, 'M', u'逸'), + (0xFA68, 'M', u'難'), + (0xFA69, 'M', u'響'), + (0xFA6A, 'M', u'é »'), + (0xFA6B, 'M', u'æµ'), + (0xFA6C, 'M', u'𤋮'), + (0xFA6D, 'M', u'舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', u'並'), + (0xFA71, 'M', u'况'), + (0xFA72, 'M', u'å…¨'), + (0xFA73, 'M', u'ä¾€'), + (0xFA74, 'M', u'å……'), + (0xFA75, 'M', u'冀'), + (0xFA76, 'M', u'勇'), + (0xFA77, 'M', u'勺'), + (0xFA78, 'M', u'å–'), + (0xFA79, 'M', u'å••'), + (0xFA7A, 'M', u'å–™'), + (0xFA7B, 'M', u'å—¢'), + (0xFA7C, 'M', u'塚'), + (0xFA7D, 'M', u'墳'), + (0xFA7E, 'M', u'奄'), + (0xFA7F, 'M', u'奔'), + (0xFA80, 'M', u'å©¢'), + (0xFA81, 'M', u'嬨'), + ] + +def _seg_43(): + return [ + (0xFA82, 'M', u'å»’'), + (0xFA83, 'M', u'å»™'), + (0xFA84, 'M', u'彩'), + (0xFA85, 'M', u'å¾­'), + (0xFA86, 'M', u'惘'), + (0xFA87, 'M', u'æ…Ž'), + (0xFA88, 'M', u'愈'), + (0xFA89, 'M', u'憎'), + (0xFA8A, 'M', u'æ… '), + (0xFA8B, 'M', u'懲'), + (0xFA8C, 'M', u'戴'), + (0xFA8D, 'M', u'æ„'), + (0xFA8E, 'M', u'æœ'), + (0xFA8F, 'M', u'æ‘’'), + (0xFA90, 'M', u'æ•–'), + (0xFA91, 'M', u'æ™´'), + (0xFA92, 'M', u'朗'), + (0xFA93, 'M', u'望'), + (0xFA94, 'M', u'æ–'), + (0xFA95, 'M', u'æ­¹'), + (0xFA96, 'M', u'殺'), + (0xFA97, 'M', u'æµ'), + (0xFA98, 'M', u'æ»›'), + (0xFA99, 'M', u'滋'), + (0xFA9A, 'M', u'æ¼¢'), + (0xFA9B, 'M', u'瀞'), + (0xFA9C, 'M', u'ç…®'), + (0xFA9D, 'M', u'çž§'), + (0xFA9E, 'M', u'爵'), + (0xFA9F, 'M', u'犯'), + (0xFAA0, 'M', u'猪'), + (0xFAA1, 'M', u'瑱'), + (0xFAA2, 'M', u'甆'), + (0xFAA3, 'M', u'ç”»'), + (0xFAA4, 'M', u'ç˜'), + (0xFAA5, 'M', u'瘟'), + (0xFAA6, 'M', u'益'), + (0xFAA7, 'M', u'ç››'), + (0xFAA8, 'M', u'ç›´'), + (0xFAA9, 'M', u'çŠ'), + (0xFAAA, 'M', u'ç€'), + (0xFAAB, 'M', u'磌'), + (0xFAAC, 'M', u'窱'), + (0xFAAD, 'M', u'節'), + (0xFAAE, 'M', u'ç±»'), + (0xFAAF, 'M', u'çµ›'), + (0xFAB0, 'M', u'ç·´'), + (0xFAB1, 'M', u'ç¼¾'), + (0xFAB2, 'M', u'者'), + (0xFAB3, 'M', u'è’'), + (0xFAB4, 'M', u'è¯'), + (0xFAB5, 'M', u'è¹'), + (0xFAB6, 'M', u'è¥'), + (0xFAB7, 'M', u'覆'), + (0xFAB8, 'M', u'視'), + (0xFAB9, 'M', u'調'), + (0xFABA, 'M', u'諸'), + (0xFABB, 'M', u'è«‹'), + (0xFABC, 'M', u'è¬'), + (0xFABD, 'M', u'諾'), + (0xFABE, 'M', u'è«­'), + (0xFABF, 'M', u'謹'), + (0xFAC0, 'M', u'變'), + (0xFAC1, 'M', u'è´ˆ'), + (0xFAC2, 'M', u'輸'), + (0xFAC3, 'M', u'é²'), + (0xFAC4, 'M', u'醙'), + (0xFAC5, 'M', u'鉶'), + (0xFAC6, 'M', u'陼'), + 
(0xFAC7, 'M', u'難'), + (0xFAC8, 'M', u'é–'), + (0xFAC9, 'M', u'韛'), + (0xFACA, 'M', u'響'), + (0xFACB, 'M', u'é ‹'), + (0xFACC, 'M', u'é »'), + (0xFACD, 'M', u'鬒'), + (0xFACE, 'M', u'龜'), + (0xFACF, 'M', u'𢡊'), + (0xFAD0, 'M', u'𢡄'), + (0xFAD1, 'M', u'ð£•'), + (0xFAD2, 'M', u'ã®'), + (0xFAD3, 'M', u'䀘'), + (0xFAD4, 'M', u'䀹'), + (0xFAD5, 'M', u'𥉉'), + (0xFAD6, 'M', u'ð¥³'), + (0xFAD7, 'M', u'𧻓'), + (0xFAD8, 'M', u'齃'), + (0xFAD9, 'M', u'龎'), + (0xFADA, 'X'), + (0xFB00, 'M', u'ff'), + (0xFB01, 'M', u'fi'), + (0xFB02, 'M', u'fl'), + (0xFB03, 'M', u'ffi'), + (0xFB04, 'M', u'ffl'), + (0xFB05, 'M', u'st'), + (0xFB07, 'X'), + (0xFB13, 'M', u'Õ´Õ¶'), + (0xFB14, 'M', u'Õ´Õ¥'), + (0xFB15, 'M', u'Õ´Õ«'), + (0xFB16, 'M', u'Õ¾Õ¶'), + ] + +def _seg_44(): + return [ + (0xFB17, 'M', u'Õ´Õ­'), + (0xFB18, 'X'), + (0xFB1D, 'M', u'×™Ö´'), + (0xFB1E, 'V'), + (0xFB1F, 'M', u'ײַ'), + (0xFB20, 'M', u'×¢'), + (0xFB21, 'M', u'×'), + (0xFB22, 'M', u'ד'), + (0xFB23, 'M', u'×”'), + (0xFB24, 'M', u'×›'), + (0xFB25, 'M', u'ל'), + (0xFB26, 'M', u'×'), + (0xFB27, 'M', u'ר'), + (0xFB28, 'M', u'ת'), + (0xFB29, '3', u'+'), + (0xFB2A, 'M', u'ש×'), + (0xFB2B, 'M', u'שׂ'), + (0xFB2C, 'M', u'שּ×'), + (0xFB2D, 'M', u'שּׂ'), + (0xFB2E, 'M', u'×Ö·'), + (0xFB2F, 'M', u'×Ö¸'), + (0xFB30, 'M', u'×Ö¼'), + (0xFB31, 'M', u'בּ'), + (0xFB32, 'M', u'×’Ö¼'), + (0xFB33, 'M', u'דּ'), + (0xFB34, 'M', u'×”Ö¼'), + (0xFB35, 'M', u'וּ'), + (0xFB36, 'M', u'×–Ö¼'), + (0xFB37, 'X'), + (0xFB38, 'M', u'טּ'), + (0xFB39, 'M', u'×™Ö¼'), + (0xFB3A, 'M', u'ךּ'), + (0xFB3B, 'M', u'×›Ö¼'), + (0xFB3C, 'M', u'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', u'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', u'× Ö¼'), + (0xFB41, 'M', u'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', u'×£Ö¼'), + (0xFB44, 'M', u'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', u'צּ'), + (0xFB47, 'M', u'×§Ö¼'), + (0xFB48, 'M', u'רּ'), + (0xFB49, 'M', u'שּ'), + (0xFB4A, 'M', u'תּ'), + (0xFB4B, 'M', u'וֹ'), + (0xFB4C, 'M', u'בֿ'), + (0xFB4D, 'M', u'×›Ö¿'), + (0xFB4E, 'M', u'פֿ'), + (0xFB4F, 'M', u'×ל'), + (0xFB50, 'M', u'Ù±'), + (0xFB52, 'M', u'Ù»'), + (0xFB56, 'M', u'Ù¾'), + (0xFB5A, 'M', u'Ú€'), + (0xFB5E, 'M', u'Ùº'), + (0xFB62, 'M', u'Ù¿'), + (0xFB66, 'M', u'Ù¹'), + (0xFB6A, 'M', u'Ú¤'), + (0xFB6E, 'M', u'Ú¦'), + (0xFB72, 'M', u'Ú„'), + (0xFB76, 'M', u'Úƒ'), + (0xFB7A, 'M', u'Ú†'), + (0xFB7E, 'M', u'Ú‡'), + (0xFB82, 'M', u'Ú'), + (0xFB84, 'M', u'ÚŒ'), + (0xFB86, 'M', u'ÚŽ'), + (0xFB88, 'M', u'Úˆ'), + (0xFB8A, 'M', u'Ú˜'), + (0xFB8C, 'M', u'Ú‘'), + (0xFB8E, 'M', u'Ú©'), + (0xFB92, 'M', u'Ú¯'), + (0xFB96, 'M', u'Ú³'), + (0xFB9A, 'M', u'Ú±'), + (0xFB9E, 'M', u'Úº'), + (0xFBA0, 'M', u'Ú»'), + (0xFBA4, 'M', u'Û€'), + (0xFBA6, 'M', u'Û'), + (0xFBAA, 'M', u'Ú¾'), + (0xFBAE, 'M', u'Û’'), + (0xFBB0, 'M', u'Û“'), + (0xFBB2, 'V'), + (0xFBC2, 'X'), + (0xFBD3, 'M', u'Ú­'), + (0xFBD7, 'M', u'Û‡'), + (0xFBD9, 'M', u'Û†'), + (0xFBDB, 'M', u'Ûˆ'), + (0xFBDD, 'M', u'Û‡Ù´'), + (0xFBDE, 'M', u'Û‹'), + (0xFBE0, 'M', u'Û…'), + (0xFBE2, 'M', u'Û‰'), + (0xFBE4, 'M', u'Û'), + (0xFBE8, 'M', u'Ù‰'), + (0xFBEA, 'M', u'ئا'), + (0xFBEC, 'M', u'ئە'), + (0xFBEE, 'M', u'ئو'), + (0xFBF0, 'M', u'ئۇ'), + (0xFBF2, 'M', u'ئۆ'), + ] + +def _seg_45(): + return [ + (0xFBF4, 'M', u'ئۈ'), + (0xFBF6, 'M', u'ئÛ'), + (0xFBF9, 'M', u'ئى'), + (0xFBFC, 'M', u'ÛŒ'), + (0xFC00, 'M', u'ئج'), + (0xFC01, 'M', u'ئح'), + (0xFC02, 'M', u'ئم'), + (0xFC03, 'M', u'ئى'), + (0xFC04, 'M', u'ئي'), + (0xFC05, 'M', u'بج'), + (0xFC06, 'M', u'بح'), + (0xFC07, 'M', u'بخ'), + (0xFC08, 'M', u'بم'), + (0xFC09, 'M', u'بى'), + (0xFC0A, 'M', u'بي'), + (0xFC0B, 'M', u'تج'), + (0xFC0C, 'M', 
u'تح'), + (0xFC0D, 'M', u'تخ'), + (0xFC0E, 'M', u'تم'), + (0xFC0F, 'M', u'تى'), + (0xFC10, 'M', u'تي'), + (0xFC11, 'M', u'ثج'), + (0xFC12, 'M', u'ثم'), + (0xFC13, 'M', u'ثى'), + (0xFC14, 'M', u'ثي'), + (0xFC15, 'M', u'جح'), + (0xFC16, 'M', u'جم'), + (0xFC17, 'M', u'حج'), + (0xFC18, 'M', u'حم'), + (0xFC19, 'M', u'خج'), + (0xFC1A, 'M', u'خح'), + (0xFC1B, 'M', u'خم'), + (0xFC1C, 'M', u'سج'), + (0xFC1D, 'M', u'سح'), + (0xFC1E, 'M', u'سخ'), + (0xFC1F, 'M', u'سم'), + (0xFC20, 'M', u'صح'), + (0xFC21, 'M', u'صم'), + (0xFC22, 'M', u'ضج'), + (0xFC23, 'M', u'ضح'), + (0xFC24, 'M', u'ضخ'), + (0xFC25, 'M', u'ضم'), + (0xFC26, 'M', u'طح'), + (0xFC27, 'M', u'طم'), + (0xFC28, 'M', u'ظم'), + (0xFC29, 'M', u'عج'), + (0xFC2A, 'M', u'عم'), + (0xFC2B, 'M', u'غج'), + (0xFC2C, 'M', u'غم'), + (0xFC2D, 'M', u'ÙØ¬'), + (0xFC2E, 'M', u'ÙØ­'), + (0xFC2F, 'M', u'ÙØ®'), + (0xFC30, 'M', u'ÙÙ…'), + (0xFC31, 'M', u'ÙÙ‰'), + (0xFC32, 'M', u'ÙÙŠ'), + (0xFC33, 'M', u'قح'), + (0xFC34, 'M', u'قم'), + (0xFC35, 'M', u'قى'), + (0xFC36, 'M', u'قي'), + (0xFC37, 'M', u'كا'), + (0xFC38, 'M', u'كج'), + (0xFC39, 'M', u'كح'), + (0xFC3A, 'M', u'كخ'), + (0xFC3B, 'M', u'كل'), + (0xFC3C, 'M', u'كم'), + (0xFC3D, 'M', u'كى'), + (0xFC3E, 'M', u'كي'), + (0xFC3F, 'M', u'لج'), + (0xFC40, 'M', u'لح'), + (0xFC41, 'M', u'لخ'), + (0xFC42, 'M', u'لم'), + (0xFC43, 'M', u'لى'), + (0xFC44, 'M', u'لي'), + (0xFC45, 'M', u'مج'), + (0xFC46, 'M', u'مح'), + (0xFC47, 'M', u'مخ'), + (0xFC48, 'M', u'مم'), + (0xFC49, 'M', u'مى'), + (0xFC4A, 'M', u'مي'), + (0xFC4B, 'M', u'نج'), + (0xFC4C, 'M', u'نح'), + (0xFC4D, 'M', u'نخ'), + (0xFC4E, 'M', u'نم'), + (0xFC4F, 'M', u'نى'), + (0xFC50, 'M', u'ني'), + (0xFC51, 'M', u'هج'), + (0xFC52, 'M', u'هم'), + (0xFC53, 'M', u'هى'), + (0xFC54, 'M', u'هي'), + (0xFC55, 'M', u'يج'), + (0xFC56, 'M', u'يح'), + (0xFC57, 'M', u'يخ'), + (0xFC58, 'M', u'يم'), + (0xFC59, 'M', u'يى'), + (0xFC5A, 'M', u'يي'), + (0xFC5B, 'M', u'ذٰ'), + (0xFC5C, 'M', u'رٰ'), + (0xFC5D, 'M', u'ىٰ'), + (0xFC5E, '3', u' ٌّ'), + (0xFC5F, '3', u' ÙÙ‘'), + ] + +def _seg_46(): + return [ + (0xFC60, '3', u' ÙŽÙ‘'), + (0xFC61, '3', u' ÙÙ‘'), + (0xFC62, '3', u' ÙÙ‘'), + (0xFC63, '3', u' ّٰ'), + (0xFC64, 'M', u'ئر'), + (0xFC65, 'M', u'ئز'), + (0xFC66, 'M', u'ئم'), + (0xFC67, 'M', u'ئن'), + (0xFC68, 'M', u'ئى'), + (0xFC69, 'M', u'ئي'), + (0xFC6A, 'M', u'بر'), + (0xFC6B, 'M', u'بز'), + (0xFC6C, 'M', u'بم'), + (0xFC6D, 'M', u'بن'), + (0xFC6E, 'M', u'بى'), + (0xFC6F, 'M', u'بي'), + (0xFC70, 'M', u'تر'), + (0xFC71, 'M', u'تز'), + (0xFC72, 'M', u'تم'), + (0xFC73, 'M', u'تن'), + (0xFC74, 'M', u'تى'), + (0xFC75, 'M', u'تي'), + (0xFC76, 'M', u'ثر'), + (0xFC77, 'M', u'ثز'), + (0xFC78, 'M', u'ثم'), + (0xFC79, 'M', u'ثن'), + (0xFC7A, 'M', u'ثى'), + (0xFC7B, 'M', u'ثي'), + (0xFC7C, 'M', u'ÙÙ‰'), + (0xFC7D, 'M', u'ÙÙŠ'), + (0xFC7E, 'M', u'قى'), + (0xFC7F, 'M', u'قي'), + (0xFC80, 'M', u'كا'), + (0xFC81, 'M', u'كل'), + (0xFC82, 'M', u'كم'), + (0xFC83, 'M', u'كى'), + (0xFC84, 'M', u'كي'), + (0xFC85, 'M', u'لم'), + (0xFC86, 'M', u'لى'), + (0xFC87, 'M', u'لي'), + (0xFC88, 'M', u'ما'), + (0xFC89, 'M', u'مم'), + (0xFC8A, 'M', u'نر'), + (0xFC8B, 'M', u'نز'), + (0xFC8C, 'M', u'نم'), + (0xFC8D, 'M', u'نن'), + (0xFC8E, 'M', u'نى'), + (0xFC8F, 'M', u'ني'), + (0xFC90, 'M', u'ىٰ'), + (0xFC91, 'M', u'ير'), + (0xFC92, 'M', u'يز'), + (0xFC93, 'M', u'يم'), + (0xFC94, 'M', u'ين'), + (0xFC95, 'M', u'يى'), + (0xFC96, 'M', u'يي'), + (0xFC97, 'M', u'ئج'), + (0xFC98, 'M', u'ئح'), + (0xFC99, 'M', u'ئخ'), + (0xFC9A, 'M', u'ئم'), + (0xFC9B, 'M', u'ئه'), + (0xFC9C, 'M', u'بج'), + (0xFC9D, 'M', u'بح'), + (0xFC9E, 
'M', u'بخ'), + (0xFC9F, 'M', u'بم'), + (0xFCA0, 'M', u'به'), + (0xFCA1, 'M', u'تج'), + (0xFCA2, 'M', u'تح'), + (0xFCA3, 'M', u'تخ'), + (0xFCA4, 'M', u'تم'), + (0xFCA5, 'M', u'ته'), + (0xFCA6, 'M', u'ثم'), + (0xFCA7, 'M', u'جح'), + (0xFCA8, 'M', u'جم'), + (0xFCA9, 'M', u'حج'), + (0xFCAA, 'M', u'حم'), + (0xFCAB, 'M', u'خج'), + (0xFCAC, 'M', u'خم'), + (0xFCAD, 'M', u'سج'), + (0xFCAE, 'M', u'سح'), + (0xFCAF, 'M', u'سخ'), + (0xFCB0, 'M', u'سم'), + (0xFCB1, 'M', u'صح'), + (0xFCB2, 'M', u'صخ'), + (0xFCB3, 'M', u'صم'), + (0xFCB4, 'M', u'ضج'), + (0xFCB5, 'M', u'ضح'), + (0xFCB6, 'M', u'ضخ'), + (0xFCB7, 'M', u'ضم'), + (0xFCB8, 'M', u'طح'), + (0xFCB9, 'M', u'ظم'), + (0xFCBA, 'M', u'عج'), + (0xFCBB, 'M', u'عم'), + (0xFCBC, 'M', u'غج'), + (0xFCBD, 'M', u'غم'), + (0xFCBE, 'M', u'ÙØ¬'), + (0xFCBF, 'M', u'ÙØ­'), + (0xFCC0, 'M', u'ÙØ®'), + (0xFCC1, 'M', u'ÙÙ…'), + (0xFCC2, 'M', u'قح'), + (0xFCC3, 'M', u'قم'), + ] + +def _seg_47(): + return [ + (0xFCC4, 'M', u'كج'), + (0xFCC5, 'M', u'كح'), + (0xFCC6, 'M', u'كخ'), + (0xFCC7, 'M', u'كل'), + (0xFCC8, 'M', u'كم'), + (0xFCC9, 'M', u'لج'), + (0xFCCA, 'M', u'لح'), + (0xFCCB, 'M', u'لخ'), + (0xFCCC, 'M', u'لم'), + (0xFCCD, 'M', u'له'), + (0xFCCE, 'M', u'مج'), + (0xFCCF, 'M', u'مح'), + (0xFCD0, 'M', u'مخ'), + (0xFCD1, 'M', u'مم'), + (0xFCD2, 'M', u'نج'), + (0xFCD3, 'M', u'نح'), + (0xFCD4, 'M', u'نخ'), + (0xFCD5, 'M', u'نم'), + (0xFCD6, 'M', u'نه'), + (0xFCD7, 'M', u'هج'), + (0xFCD8, 'M', u'هم'), + (0xFCD9, 'M', u'هٰ'), + (0xFCDA, 'M', u'يج'), + (0xFCDB, 'M', u'يح'), + (0xFCDC, 'M', u'يخ'), + (0xFCDD, 'M', u'يم'), + (0xFCDE, 'M', u'يه'), + (0xFCDF, 'M', u'ئم'), + (0xFCE0, 'M', u'ئه'), + (0xFCE1, 'M', u'بم'), + (0xFCE2, 'M', u'به'), + (0xFCE3, 'M', u'تم'), + (0xFCE4, 'M', u'ته'), + (0xFCE5, 'M', u'ثم'), + (0xFCE6, 'M', u'ثه'), + (0xFCE7, 'M', u'سم'), + (0xFCE8, 'M', u'سه'), + (0xFCE9, 'M', u'شم'), + (0xFCEA, 'M', u'شه'), + (0xFCEB, 'M', u'كل'), + (0xFCEC, 'M', u'كم'), + (0xFCED, 'M', u'لم'), + (0xFCEE, 'M', u'نم'), + (0xFCEF, 'M', u'نه'), + (0xFCF0, 'M', u'يم'), + (0xFCF1, 'M', u'يه'), + (0xFCF2, 'M', u'Ù€ÙŽÙ‘'), + (0xFCF3, 'M', u'Ù€ÙÙ‘'), + (0xFCF4, 'M', u'Ù€ÙÙ‘'), + (0xFCF5, 'M', u'طى'), + (0xFCF6, 'M', u'طي'), + (0xFCF7, 'M', u'عى'), + (0xFCF8, 'M', u'عي'), + (0xFCF9, 'M', u'غى'), + (0xFCFA, 'M', u'غي'), + (0xFCFB, 'M', u'سى'), + (0xFCFC, 'M', u'سي'), + (0xFCFD, 'M', u'شى'), + (0xFCFE, 'M', u'شي'), + (0xFCFF, 'M', u'حى'), + (0xFD00, 'M', u'حي'), + (0xFD01, 'M', u'جى'), + (0xFD02, 'M', u'جي'), + (0xFD03, 'M', u'خى'), + (0xFD04, 'M', u'خي'), + (0xFD05, 'M', u'صى'), + (0xFD06, 'M', u'صي'), + (0xFD07, 'M', u'ضى'), + (0xFD08, 'M', u'ضي'), + (0xFD09, 'M', u'شج'), + (0xFD0A, 'M', u'شح'), + (0xFD0B, 'M', u'شخ'), + (0xFD0C, 'M', u'شم'), + (0xFD0D, 'M', u'شر'), + (0xFD0E, 'M', u'سر'), + (0xFD0F, 'M', u'صر'), + (0xFD10, 'M', u'ضر'), + (0xFD11, 'M', u'طى'), + (0xFD12, 'M', u'طي'), + (0xFD13, 'M', u'عى'), + (0xFD14, 'M', u'عي'), + (0xFD15, 'M', u'غى'), + (0xFD16, 'M', u'غي'), + (0xFD17, 'M', u'سى'), + (0xFD18, 'M', u'سي'), + (0xFD19, 'M', u'شى'), + (0xFD1A, 'M', u'شي'), + (0xFD1B, 'M', u'حى'), + (0xFD1C, 'M', u'حي'), + (0xFD1D, 'M', u'جى'), + (0xFD1E, 'M', u'جي'), + (0xFD1F, 'M', u'خى'), + (0xFD20, 'M', u'خي'), + (0xFD21, 'M', u'صى'), + (0xFD22, 'M', u'صي'), + (0xFD23, 'M', u'ضى'), + (0xFD24, 'M', u'ضي'), + (0xFD25, 'M', u'شج'), + (0xFD26, 'M', u'شح'), + (0xFD27, 'M', u'شخ'), + ] + +def _seg_48(): + return [ + (0xFD28, 'M', u'شم'), + (0xFD29, 'M', u'شر'), + (0xFD2A, 'M', u'سر'), + (0xFD2B, 'M', u'صر'), + (0xFD2C, 'M', u'ضر'), + (0xFD2D, 'M', u'شج'), + (0xFD2E, 'M', u'شح'), + 
(0xFD2F, 'M', u'شخ'), + (0xFD30, 'M', u'شم'), + (0xFD31, 'M', u'سه'), + (0xFD32, 'M', u'شه'), + (0xFD33, 'M', u'طم'), + (0xFD34, 'M', u'سج'), + (0xFD35, 'M', u'سح'), + (0xFD36, 'M', u'سخ'), + (0xFD37, 'M', u'شج'), + (0xFD38, 'M', u'شح'), + (0xFD39, 'M', u'شخ'), + (0xFD3A, 'M', u'طم'), + (0xFD3B, 'M', u'ظم'), + (0xFD3C, 'M', u'اً'), + (0xFD3E, 'V'), + (0xFD40, 'X'), + (0xFD50, 'M', u'تجم'), + (0xFD51, 'M', u'تحج'), + (0xFD53, 'M', u'تحم'), + (0xFD54, 'M', u'تخم'), + (0xFD55, 'M', u'تمج'), + (0xFD56, 'M', u'تمح'), + (0xFD57, 'M', u'تمخ'), + (0xFD58, 'M', u'جمح'), + (0xFD5A, 'M', u'حمي'), + (0xFD5B, 'M', u'حمى'), + (0xFD5C, 'M', u'سحج'), + (0xFD5D, 'M', u'سجح'), + (0xFD5E, 'M', u'سجى'), + (0xFD5F, 'M', u'سمح'), + (0xFD61, 'M', u'سمج'), + (0xFD62, 'M', u'سمم'), + (0xFD64, 'M', u'صحح'), + (0xFD66, 'M', u'صمم'), + (0xFD67, 'M', u'شحم'), + (0xFD69, 'M', u'شجي'), + (0xFD6A, 'M', u'شمخ'), + (0xFD6C, 'M', u'شمم'), + (0xFD6E, 'M', u'ضحى'), + (0xFD6F, 'M', u'ضخم'), + (0xFD71, 'M', u'طمح'), + (0xFD73, 'M', u'طمم'), + (0xFD74, 'M', u'طمي'), + (0xFD75, 'M', u'عجم'), + (0xFD76, 'M', u'عمم'), + (0xFD78, 'M', u'عمى'), + (0xFD79, 'M', u'غمم'), + (0xFD7A, 'M', u'غمي'), + (0xFD7B, 'M', u'غمى'), + (0xFD7C, 'M', u'ÙØ®Ù…'), + (0xFD7E, 'M', u'قمح'), + (0xFD7F, 'M', u'قمم'), + (0xFD80, 'M', u'لحم'), + (0xFD81, 'M', u'لحي'), + (0xFD82, 'M', u'لحى'), + (0xFD83, 'M', u'لجج'), + (0xFD85, 'M', u'لخم'), + (0xFD87, 'M', u'لمح'), + (0xFD89, 'M', u'محج'), + (0xFD8A, 'M', u'محم'), + (0xFD8B, 'M', u'محي'), + (0xFD8C, 'M', u'مجح'), + (0xFD8D, 'M', u'مجم'), + (0xFD8E, 'M', u'مخج'), + (0xFD8F, 'M', u'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', u'مجخ'), + (0xFD93, 'M', u'همج'), + (0xFD94, 'M', u'همم'), + (0xFD95, 'M', u'نحم'), + (0xFD96, 'M', u'نحى'), + (0xFD97, 'M', u'نجم'), + (0xFD99, 'M', u'نجى'), + (0xFD9A, 'M', u'نمي'), + (0xFD9B, 'M', u'نمى'), + (0xFD9C, 'M', u'يمم'), + (0xFD9E, 'M', u'بخي'), + (0xFD9F, 'M', u'تجي'), + (0xFDA0, 'M', u'تجى'), + (0xFDA1, 'M', u'تخي'), + (0xFDA2, 'M', u'تخى'), + (0xFDA3, 'M', u'تمي'), + (0xFDA4, 'M', u'تمى'), + (0xFDA5, 'M', u'جمي'), + (0xFDA6, 'M', u'جحى'), + (0xFDA7, 'M', u'جمى'), + (0xFDA8, 'M', u'سخى'), + (0xFDA9, 'M', u'صحي'), + (0xFDAA, 'M', u'شحي'), + (0xFDAB, 'M', u'ضحي'), + (0xFDAC, 'M', u'لجي'), + (0xFDAD, 'M', u'لمي'), + (0xFDAE, 'M', u'يحي'), + ] + +def _seg_49(): + return [ + (0xFDAF, 'M', u'يجي'), + (0xFDB0, 'M', u'يمي'), + (0xFDB1, 'M', u'ممي'), + (0xFDB2, 'M', u'قمي'), + (0xFDB3, 'M', u'نحي'), + (0xFDB4, 'M', u'قمح'), + (0xFDB5, 'M', u'لحم'), + (0xFDB6, 'M', u'عمي'), + (0xFDB7, 'M', u'كمي'), + (0xFDB8, 'M', u'نجح'), + (0xFDB9, 'M', u'مخي'), + (0xFDBA, 'M', u'لجم'), + (0xFDBB, 'M', u'كمم'), + (0xFDBC, 'M', u'لجم'), + (0xFDBD, 'M', u'نجح'), + (0xFDBE, 'M', u'جحي'), + (0xFDBF, 'M', u'حجي'), + (0xFDC0, 'M', u'مجي'), + (0xFDC1, 'M', u'Ùمي'), + (0xFDC2, 'M', u'بحي'), + (0xFDC3, 'M', u'كمم'), + (0xFDC4, 'M', u'عجم'), + (0xFDC5, 'M', u'صمم'), + (0xFDC6, 'M', u'سخي'), + (0xFDC7, 'M', u'نجي'), + (0xFDC8, 'X'), + (0xFDF0, 'M', u'صلے'), + (0xFDF1, 'M', u'قلے'), + (0xFDF2, 'M', u'الله'), + (0xFDF3, 'M', u'اكبر'), + (0xFDF4, 'M', u'محمد'), + (0xFDF5, 'M', u'صلعم'), + (0xFDF6, 'M', u'رسول'), + (0xFDF7, 'M', u'عليه'), + (0xFDF8, 'M', u'وسلم'), + (0xFDF9, 'M', u'صلى'), + (0xFDFA, '3', u'صلى الله عليه وسلم'), + (0xFDFB, '3', u'جل جلاله'), + (0xFDFC, 'M', u'ریال'), + (0xFDFD, 'V'), + (0xFDFE, 'X'), + (0xFE00, 'I'), + (0xFE10, '3', u','), + (0xFE11, 'M', u'ã€'), + (0xFE12, 'X'), + (0xFE13, '3', u':'), + (0xFE14, '3', u';'), + (0xFE15, '3', u'!'), + (0xFE16, '3', u'?'), + (0xFE17, 'M', u'〖'), + 
(0xFE18, 'M', u'〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', u'—'), + (0xFE32, 'M', u'–'), + (0xFE33, '3', u'_'), + (0xFE35, '3', u'('), + (0xFE36, '3', u')'), + (0xFE37, '3', u'{'), + (0xFE38, '3', u'}'), + (0xFE39, 'M', u'〔'), + (0xFE3A, 'M', u'〕'), + (0xFE3B, 'M', u'ã€'), + (0xFE3C, 'M', u'】'), + (0xFE3D, 'M', u'《'), + (0xFE3E, 'M', u'》'), + (0xFE3F, 'M', u'〈'), + (0xFE40, 'M', u'〉'), + (0xFE41, 'M', u'「'), + (0xFE42, 'M', u'ã€'), + (0xFE43, 'M', u'『'), + (0xFE44, 'M', u'ã€'), + (0xFE45, 'V'), + (0xFE47, '3', u'['), + (0xFE48, '3', u']'), + (0xFE49, '3', u' Ì…'), + (0xFE4D, '3', u'_'), + (0xFE50, '3', u','), + (0xFE51, 'M', u'ã€'), + (0xFE52, 'X'), + (0xFE54, '3', u';'), + (0xFE55, '3', u':'), + (0xFE56, '3', u'?'), + (0xFE57, '3', u'!'), + (0xFE58, 'M', u'—'), + (0xFE59, '3', u'('), + (0xFE5A, '3', u')'), + (0xFE5B, '3', u'{'), + (0xFE5C, '3', u'}'), + (0xFE5D, 'M', u'〔'), + (0xFE5E, 'M', u'〕'), + (0xFE5F, '3', u'#'), + (0xFE60, '3', u'&'), + (0xFE61, '3', u'*'), + (0xFE62, '3', u'+'), + (0xFE63, 'M', u'-'), + (0xFE64, '3', u'<'), + (0xFE65, '3', u'>'), + (0xFE66, '3', u'='), + ] + +def _seg_50(): + return [ + (0xFE67, 'X'), + (0xFE68, '3', u'\\'), + (0xFE69, '3', u'$'), + (0xFE6A, '3', u'%'), + (0xFE6B, '3', u'@'), + (0xFE6C, 'X'), + (0xFE70, '3', u' Ù‹'), + (0xFE71, 'M', u'ـً'), + (0xFE72, '3', u' ÙŒ'), + (0xFE73, 'V'), + (0xFE74, '3', u' Ù'), + (0xFE75, 'X'), + (0xFE76, '3', u' ÙŽ'), + (0xFE77, 'M', u'Ù€ÙŽ'), + (0xFE78, '3', u' Ù'), + (0xFE79, 'M', u'Ù€Ù'), + (0xFE7A, '3', u' Ù'), + (0xFE7B, 'M', u'Ù€Ù'), + (0xFE7C, '3', u' Ù‘'), + (0xFE7D, 'M', u'ـّ'), + (0xFE7E, '3', u' Ù’'), + (0xFE7F, 'M', u'ـْ'), + (0xFE80, 'M', u'Ø¡'), + (0xFE81, 'M', u'Ø¢'), + (0xFE83, 'M', u'Ø£'), + (0xFE85, 'M', u'ؤ'), + (0xFE87, 'M', u'Ø¥'), + (0xFE89, 'M', u'ئ'), + (0xFE8D, 'M', u'ا'), + (0xFE8F, 'M', u'ب'), + (0xFE93, 'M', u'Ø©'), + (0xFE95, 'M', u'ت'), + (0xFE99, 'M', u'Ø«'), + (0xFE9D, 'M', u'ج'), + (0xFEA1, 'M', u'Ø­'), + (0xFEA5, 'M', u'Ø®'), + (0xFEA9, 'M', u'د'), + (0xFEAB, 'M', u'ذ'), + (0xFEAD, 'M', u'ر'), + (0xFEAF, 'M', u'ز'), + (0xFEB1, 'M', u'س'), + (0xFEB5, 'M', u'Ø´'), + (0xFEB9, 'M', u'ص'), + (0xFEBD, 'M', u'ض'), + (0xFEC1, 'M', u'Ø·'), + (0xFEC5, 'M', u'ظ'), + (0xFEC9, 'M', u'ع'), + (0xFECD, 'M', u'غ'), + (0xFED1, 'M', u'Ù'), + (0xFED5, 'M', u'Ù‚'), + (0xFED9, 'M', u'Ùƒ'), + (0xFEDD, 'M', u'Ù„'), + (0xFEE1, 'M', u'Ù…'), + (0xFEE5, 'M', u'Ù†'), + (0xFEE9, 'M', u'Ù‡'), + (0xFEED, 'M', u'Ùˆ'), + (0xFEEF, 'M', u'Ù‰'), + (0xFEF1, 'M', u'ÙŠ'), + (0xFEF5, 'M', u'لآ'), + (0xFEF7, 'M', u'لأ'), + (0xFEF9, 'M', u'لإ'), + (0xFEFB, 'M', u'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', u'!'), + (0xFF02, '3', u'"'), + (0xFF03, '3', u'#'), + (0xFF04, '3', u'$'), + (0xFF05, '3', u'%'), + (0xFF06, '3', u'&'), + (0xFF07, '3', u'\''), + (0xFF08, '3', u'('), + (0xFF09, '3', u')'), + (0xFF0A, '3', u'*'), + (0xFF0B, '3', u'+'), + (0xFF0C, '3', u','), + (0xFF0D, 'M', u'-'), + (0xFF0E, 'M', u'.'), + (0xFF0F, '3', u'/'), + (0xFF10, 'M', u'0'), + (0xFF11, 'M', u'1'), + (0xFF12, 'M', u'2'), + (0xFF13, 'M', u'3'), + (0xFF14, 'M', u'4'), + (0xFF15, 'M', u'5'), + (0xFF16, 'M', u'6'), + (0xFF17, 'M', u'7'), + (0xFF18, 'M', u'8'), + (0xFF19, 'M', u'9'), + (0xFF1A, '3', u':'), + (0xFF1B, '3', u';'), + (0xFF1C, '3', u'<'), + (0xFF1D, '3', u'='), + (0xFF1E, '3', u'>'), + (0xFF1F, '3', u'?'), + (0xFF20, '3', u'@'), + (0xFF21, 'M', u'a'), + (0xFF22, 'M', u'b'), + (0xFF23, 'M', u'c'), + ] + +def _seg_51(): + return [ + (0xFF24, 'M', u'd'), + (0xFF25, 'M', u'e'), + (0xFF26, 
'M', u'f'), + (0xFF27, 'M', u'g'), + (0xFF28, 'M', u'h'), + (0xFF29, 'M', u'i'), + (0xFF2A, 'M', u'j'), + (0xFF2B, 'M', u'k'), + (0xFF2C, 'M', u'l'), + (0xFF2D, 'M', u'm'), + (0xFF2E, 'M', u'n'), + (0xFF2F, 'M', u'o'), + (0xFF30, 'M', u'p'), + (0xFF31, 'M', u'q'), + (0xFF32, 'M', u'r'), + (0xFF33, 'M', u's'), + (0xFF34, 'M', u't'), + (0xFF35, 'M', u'u'), + (0xFF36, 'M', u'v'), + (0xFF37, 'M', u'w'), + (0xFF38, 'M', u'x'), + (0xFF39, 'M', u'y'), + (0xFF3A, 'M', u'z'), + (0xFF3B, '3', u'['), + (0xFF3C, '3', u'\\'), + (0xFF3D, '3', u']'), + (0xFF3E, '3', u'^'), + (0xFF3F, '3', u'_'), + (0xFF40, '3', u'`'), + (0xFF41, 'M', u'a'), + (0xFF42, 'M', u'b'), + (0xFF43, 'M', u'c'), + (0xFF44, 'M', u'd'), + (0xFF45, 'M', u'e'), + (0xFF46, 'M', u'f'), + (0xFF47, 'M', u'g'), + (0xFF48, 'M', u'h'), + (0xFF49, 'M', u'i'), + (0xFF4A, 'M', u'j'), + (0xFF4B, 'M', u'k'), + (0xFF4C, 'M', u'l'), + (0xFF4D, 'M', u'm'), + (0xFF4E, 'M', u'n'), + (0xFF4F, 'M', u'o'), + (0xFF50, 'M', u'p'), + (0xFF51, 'M', u'q'), + (0xFF52, 'M', u'r'), + (0xFF53, 'M', u's'), + (0xFF54, 'M', u't'), + (0xFF55, 'M', u'u'), + (0xFF56, 'M', u'v'), + (0xFF57, 'M', u'w'), + (0xFF58, 'M', u'x'), + (0xFF59, 'M', u'y'), + (0xFF5A, 'M', u'z'), + (0xFF5B, '3', u'{'), + (0xFF5C, '3', u'|'), + (0xFF5D, '3', u'}'), + (0xFF5E, '3', u'~'), + (0xFF5F, 'M', u'⦅'), + (0xFF60, 'M', u'⦆'), + (0xFF61, 'M', u'.'), + (0xFF62, 'M', u'「'), + (0xFF63, 'M', u'ã€'), + (0xFF64, 'M', u'ã€'), + (0xFF65, 'M', u'・'), + (0xFF66, 'M', u'ヲ'), + (0xFF67, 'M', u'ã‚¡'), + (0xFF68, 'M', u'ã‚£'), + (0xFF69, 'M', u'ã‚¥'), + (0xFF6A, 'M', u'ã‚§'), + (0xFF6B, 'M', u'ã‚©'), + (0xFF6C, 'M', u'ャ'), + (0xFF6D, 'M', u'ュ'), + (0xFF6E, 'M', u'ョ'), + (0xFF6F, 'M', u'ッ'), + (0xFF70, 'M', u'ー'), + (0xFF71, 'M', u'ã‚¢'), + (0xFF72, 'M', u'イ'), + (0xFF73, 'M', u'ウ'), + (0xFF74, 'M', u'エ'), + (0xFF75, 'M', u'オ'), + (0xFF76, 'M', u'ã‚«'), + (0xFF77, 'M', u'ã‚­'), + (0xFF78, 'M', u'ク'), + (0xFF79, 'M', u'ケ'), + (0xFF7A, 'M', u'コ'), + (0xFF7B, 'M', u'サ'), + (0xFF7C, 'M', u'ã‚·'), + (0xFF7D, 'M', u'ス'), + (0xFF7E, 'M', u'ã‚»'), + (0xFF7F, 'M', u'ソ'), + (0xFF80, 'M', u'ã‚¿'), + (0xFF81, 'M', u'ãƒ'), + (0xFF82, 'M', u'ツ'), + (0xFF83, 'M', u'テ'), + (0xFF84, 'M', u'ト'), + (0xFF85, 'M', u'ナ'), + (0xFF86, 'M', u'ニ'), + (0xFF87, 'M', u'ヌ'), + ] + +def _seg_52(): + return [ + (0xFF88, 'M', u'ãƒ'), + (0xFF89, 'M', u'ノ'), + (0xFF8A, 'M', u'ãƒ'), + (0xFF8B, 'M', u'ヒ'), + (0xFF8C, 'M', u'フ'), + (0xFF8D, 'M', u'ヘ'), + (0xFF8E, 'M', u'ホ'), + (0xFF8F, 'M', u'マ'), + (0xFF90, 'M', u'ミ'), + (0xFF91, 'M', u'ム'), + (0xFF92, 'M', u'メ'), + (0xFF93, 'M', u'モ'), + (0xFF94, 'M', u'ヤ'), + (0xFF95, 'M', u'ユ'), + (0xFF96, 'M', u'ヨ'), + (0xFF97, 'M', u'ラ'), + (0xFF98, 'M', u'リ'), + (0xFF99, 'M', u'ル'), + (0xFF9A, 'M', u'レ'), + (0xFF9B, 'M', u'ロ'), + (0xFF9C, 'M', u'ワ'), + (0xFF9D, 'M', u'ン'), + (0xFF9E, 'M', u'ã‚™'), + (0xFF9F, 'M', u'゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', u'á„€'), + (0xFFA2, 'M', u'á„'), + (0xFFA3, 'M', u'ᆪ'), + (0xFFA4, 'M', u'á„‚'), + (0xFFA5, 'M', u'ᆬ'), + (0xFFA6, 'M', u'ᆭ'), + (0xFFA7, 'M', u'ᄃ'), + (0xFFA8, 'M', u'á„„'), + (0xFFA9, 'M', u'á„…'), + (0xFFAA, 'M', u'ᆰ'), + (0xFFAB, 'M', u'ᆱ'), + (0xFFAC, 'M', u'ᆲ'), + (0xFFAD, 'M', u'ᆳ'), + (0xFFAE, 'M', u'ᆴ'), + (0xFFAF, 'M', u'ᆵ'), + (0xFFB0, 'M', u'ᄚ'), + (0xFFB1, 'M', u'ᄆ'), + (0xFFB2, 'M', u'ᄇ'), + (0xFFB3, 'M', u'ᄈ'), + (0xFFB4, 'M', u'á„¡'), + (0xFFB5, 'M', u'ᄉ'), + (0xFFB6, 'M', u'ᄊ'), + (0xFFB7, 'M', u'á„‹'), + (0xFFB8, 'M', u'ᄌ'), + (0xFFB9, 'M', u'á„'), + (0xFFBA, 'M', u'ᄎ'), + (0xFFBB, 'M', u'á„'), + (0xFFBC, 'M', u'á„'), + (0xFFBD, 'M', 
u'á„‘'), + (0xFFBE, 'M', u'á„’'), + (0xFFBF, 'X'), + (0xFFC2, 'M', u'á…¡'), + (0xFFC3, 'M', u'á…¢'), + (0xFFC4, 'M', u'á…£'), + (0xFFC5, 'M', u'á…¤'), + (0xFFC6, 'M', u'á…¥'), + (0xFFC7, 'M', u'á…¦'), + (0xFFC8, 'X'), + (0xFFCA, 'M', u'á…§'), + (0xFFCB, 'M', u'á…¨'), + (0xFFCC, 'M', u'á…©'), + (0xFFCD, 'M', u'á…ª'), + (0xFFCE, 'M', u'á…«'), + (0xFFCF, 'M', u'á…¬'), + (0xFFD0, 'X'), + (0xFFD2, 'M', u'á…­'), + (0xFFD3, 'M', u'á…®'), + (0xFFD4, 'M', u'á…¯'), + (0xFFD5, 'M', u'á…°'), + (0xFFD6, 'M', u'á…±'), + (0xFFD7, 'M', u'á…²'), + (0xFFD8, 'X'), + (0xFFDA, 'M', u'á…³'), + (0xFFDB, 'M', u'á…´'), + (0xFFDC, 'M', u'á…µ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', u'¢'), + (0xFFE1, 'M', u'£'), + (0xFFE2, 'M', u'¬'), + (0xFFE3, '3', u' Ì„'), + (0xFFE4, 'M', u'¦'), + (0xFFE5, 'M', u'Â¥'), + (0xFFE6, 'M', u'â‚©'), + (0xFFE7, 'X'), + (0xFFE8, 'M', u'│'), + (0xFFE9, 'M', u'â†'), + (0xFFEA, 'M', u'↑'), + (0xFFEB, 'M', u'→'), + (0xFFEC, 'M', u'↓'), + (0xFFED, 'M', u'â– '), + (0xFFEE, 'M', u'â—‹'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + ] + +def _seg_53(): + return [ + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019D, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', u'ð¨'), + (0x10401, 'M', u'ð©'), + (0x10402, 'M', u'ðª'), + (0x10403, 'M', u'ð«'), + (0x10404, 'M', u'ð¬'), + (0x10405, 'M', u'ð­'), + (0x10406, 'M', u'ð®'), + (0x10407, 'M', u'ð¯'), + (0x10408, 'M', u'ð°'), + (0x10409, 'M', u'ð±'), + (0x1040A, 'M', u'ð²'), + (0x1040B, 'M', u'ð³'), + (0x1040C, 'M', u'ð´'), + (0x1040D, 'M', u'ðµ'), + (0x1040E, 'M', u'ð¶'), + (0x1040F, 'M', u'ð·'), + (0x10410, 'M', u'ð¸'), + (0x10411, 'M', u'ð¹'), + (0x10412, 'M', u'ðº'), + (0x10413, 'M', u'ð»'), + (0x10414, 'M', u'ð¼'), + (0x10415, 'M', u'ð½'), + (0x10416, 'M', u'ð¾'), + (0x10417, 'M', u'ð¿'), + (0x10418, 'M', u'ð‘€'), + (0x10419, 'M', u'ð‘'), + (0x1041A, 'M', u'ð‘‚'), + (0x1041B, 'M', u'ð‘ƒ'), + (0x1041C, 'M', u'ð‘„'), + (0x1041D, 'M', u'ð‘…'), + (0x1041E, 'M', u'ð‘†'), + (0x1041F, 'M', u'ð‘‡'), + (0x10420, 'M', u'ð‘ˆ'), + (0x10421, 'M', u'ð‘‰'), + (0x10422, 'M', u'ð‘Š'), + (0x10423, 'M', u'ð‘‹'), + (0x10424, 'M', u'ð‘Œ'), + (0x10425, 'M', u'ð‘'), + (0x10426, 'M', u'ð‘Ž'), + (0x10427, 'M', u'ð‘'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x104B0, 'M', u'ð“˜'), + (0x104B1, 'M', u'ð“™'), + (0x104B2, 'M', u'ð“š'), + (0x104B3, 'M', u'ð“›'), + (0x104B4, 'M', u'ð“œ'), + (0x104B5, 'M', u'ð“'), + (0x104B6, 'M', u'ð“ž'), + (0x104B7, 'M', u'ð“Ÿ'), + (0x104B8, 'M', u'ð“ '), + (0x104B9, 'M', u'ð“¡'), + (0x104BA, 'M', u'ð“¢'), + (0x104BB, 'M', u'ð“£'), + (0x104BC, 'M', u'ð“¤'), + (0x104BD, 'M', u'ð“¥'), + (0x104BE, 'M', u'ð“¦'), + ] + +def _seg_54(): + return [ + (0x104BF, 'M', u'ð“§'), + (0x104C0, 'M', u'ð“¨'), + (0x104C1, 'M', u'ð“©'), + (0x104C2, 'M', u'ð“ª'), + (0x104C3, 'M', u'ð“«'), + (0x104C4, 'M', u'ð“¬'), + (0x104C5, 'M', u'ð“­'), + (0x104C6, 'M', u'ð“®'), + 
(0x104C7, 'M', u'ð“¯'), + (0x104C8, 'M', u'ð“°'), + (0x104C9, 'M', u'ð“±'), + (0x104CA, 'M', u'ð“²'), + (0x104CB, 'M', u'ð“³'), + (0x104CC, 'M', u'ð“´'), + (0x104CD, 'M', u'ð“µ'), + (0x104CE, 'M', u'ð“¶'), + (0x104CF, 'M', u'ð“·'), + (0x104D0, 'M', u'ð“¸'), + (0x104D1, 'M', u'ð“¹'), + (0x104D2, 'M', u'ð“º'), + (0x104D3, 'M', u'ð“»'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A36, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A49, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + ] + +def _seg_55(): + return [ + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', u'ð³€'), + (0x10C81, 'M', u'ð³'), + (0x10C82, 'M', u'ð³‚'), + (0x10C83, 'M', u'ð³ƒ'), + (0x10C84, 'M', u'ð³„'), + (0x10C85, 'M', u'ð³…'), + (0x10C86, 'M', u'ð³†'), + (0x10C87, 'M', u'ð³‡'), + (0x10C88, 'M', u'ð³ˆ'), + (0x10C89, 'M', u'ð³‰'), + (0x10C8A, 'M', u'ð³Š'), + (0x10C8B, 'M', u'ð³‹'), + (0x10C8C, 'M', u'ð³Œ'), + (0x10C8D, 'M', u'ð³'), + (0x10C8E, 'M', u'ð³Ž'), + (0x10C8F, 'M', u'ð³'), + (0x10C90, 'M', u'ð³'), + (0x10C91, 'M', u'ð³‘'), + (0x10C92, 'M', u'ð³’'), + (0x10C93, 'M', u'ð³“'), + (0x10C94, 'M', u'ð³”'), + (0x10C95, 'M', u'ð³•'), + (0x10C96, 'M', u'ð³–'), + (0x10C97, 'M', u'ð³—'), + (0x10C98, 'M', u'ð³˜'), + (0x10C99, 'M', u'ð³™'), + (0x10C9A, 'M', u'ð³š'), + (0x10C9B, 'M', u'ð³›'), + (0x10C9C, 'M', u'ð³œ'), + (0x10C9D, 'M', u'ð³'), + (0x10C9E, 'M', u'ð³ž'), + (0x10C9F, 'M', u'ð³Ÿ'), + (0x10CA0, 'M', u'ð³ '), + (0x10CA1, 'M', u'ð³¡'), + (0x10CA2, 'M', u'ð³¢'), + (0x10CA3, 'M', u'ð³£'), + (0x10CA4, 'M', u'ð³¤'), + (0x10CA5, 'M', u'ð³¥'), + (0x10CA6, 'M', u'ð³¦'), + (0x10CA7, 'M', u'ð³§'), + (0x10CA8, 'M', u'ð³¨'), + (0x10CA9, 'M', u'ð³©'), + (0x10CAA, 'M', u'ð³ª'), + (0x10CAB, 'M', u'ð³«'), + (0x10CAC, 'M', u'ð³¬'), + (0x10CAD, 'M', u'ð³­'), + (0x10CAE, 'M', u'ð³®'), + (0x10CAF, 'M', u'ð³¯'), + (0x10CB0, 'M', u'ð³°'), + (0x10CB1, 'M', u'ð³±'), + (0x10CB2, 'M', u'ð³²'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D28, 'X'), + (0x10D30, 'V'), + (0x10D3A, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), + (0x10F00, 'V'), + (0x10F28, 'X'), + (0x10F30, 'V'), + (0x10F5A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + 
(0x10FF7, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11070, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + (0x110C2, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11148, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + ] + +def _seg_56(): + return [ + (0x11213, 'V'), + (0x1123F, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133B, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + (0x11400, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x11462, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116B9, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171B, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11740, 'X'), + (0x11800, 'V'), + (0x1183C, 'X'), + (0x118A0, 'M', u'ð‘£€'), + (0x118A1, 'M', u'ð‘£'), + (0x118A2, 'M', u'𑣂'), + (0x118A3, 'M', u'𑣃'), + (0x118A4, 'M', u'𑣄'), + (0x118A5, 'M', u'ð‘£…'), + (0x118A6, 'M', u'𑣆'), + (0x118A7, 'M', u'𑣇'), + (0x118A8, 'M', u'𑣈'), + (0x118A9, 'M', u'𑣉'), + (0x118AA, 'M', u'𑣊'), + (0x118AB, 'M', u'𑣋'), + (0x118AC, 'M', u'𑣌'), + (0x118AD, 'M', u'ð‘£'), + (0x118AE, 'M', u'𑣎'), + (0x118AF, 'M', u'ð‘£'), + (0x118B0, 'M', u'ð‘£'), + (0x118B1, 'M', u'𑣑'), + (0x118B2, 'M', u'ð‘£’'), + (0x118B3, 'M', u'𑣓'), + (0x118B4, 'M', u'ð‘£”'), + (0x118B5, 'M', u'𑣕'), + (0x118B6, 'M', u'ð‘£–'), + (0x118B7, 'M', u'ð‘£—'), + ] + +def _seg_57(): + return [ + (0x118B8, 'M', u'𑣘'), + (0x118B9, 'M', u'ð‘£™'), + (0x118BA, 'M', u'𑣚'), + (0x118BB, 'M', u'ð‘£›'), + (0x118BC, 'M', u'𑣜'), + (0x118BD, 'M', u'ð‘£'), + (0x118BE, 'M', u'𑣞'), + (0x118BF, 'M', u'𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11AA3, 'X'), + (0x11AC0, 'V'), + (0x11AF9, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + 
(0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), + (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x11D60, 'V'), + (0x11D66, 'X'), + (0x11D67, 'V'), + (0x11D69, 'X'), + (0x11D6A, 'V'), + (0x11D8F, 'X'), + (0x11D90, 'V'), + (0x11D92, 'X'), + (0x11D93, 'V'), + (0x11D99, 'X'), + (0x11DA0, 'V'), + (0x11DAA, 'X'), + (0x11EE0, 'V'), + (0x11EF9, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + (0x11FF2, 'X'), + (0x11FFF, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + ] + +def _seg_58(): + return [ + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16A70, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16E40, 'M', u'ð–¹ '), + (0x16E41, 'M', u'𖹡'), + (0x16E42, 'M', u'ð–¹¢'), + (0x16E43, 'M', u'ð–¹£'), + (0x16E44, 'M', u'𖹤'), + (0x16E45, 'M', u'ð–¹¥'), + (0x16E46, 'M', u'𖹦'), + (0x16E47, 'M', u'ð–¹§'), + (0x16E48, 'M', u'𖹨'), + (0x16E49, 'M', u'𖹩'), + (0x16E4A, 'M', u'𖹪'), + (0x16E4B, 'M', u'𖹫'), + (0x16E4C, 'M', u'𖹬'), + (0x16E4D, 'M', u'ð–¹­'), + (0x16E4E, 'M', u'ð–¹®'), + (0x16E4F, 'M', u'𖹯'), + (0x16E50, 'M', u'ð–¹°'), + (0x16E51, 'M', u'ð–¹±'), + (0x16E52, 'M', u'ð–¹²'), + (0x16E53, 'M', u'ð–¹³'), + (0x16E54, 'M', u'ð–¹´'), + (0x16E55, 'M', u'ð–¹µ'), + (0x16E56, 'M', u'ð–¹¶'), + (0x16E57, 'M', u'ð–¹·'), + (0x16E58, 'M', u'𖹸'), + (0x16E59, 'M', u'ð–¹¹'), + (0x16E5A, 'M', u'𖹺'), + (0x16E5B, 'M', u'ð–¹»'), + (0x16E5C, 'M', u'ð–¹¼'), + (0x16E5D, 'M', u'ð–¹½'), + (0x16E5E, 'M', u'ð–¹¾'), + (0x16E5F, 'M', u'𖹿'), + (0x16E60, 'V'), + (0x16E9B, 'X'), + (0x16F00, 'V'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), + (0x17000, 'V'), + (0x187F8, 'X'), + (0x18800, 'V'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), + (0x1B000, 'V'), + (0x1B11F, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B164, 'V'), + (0x1B168, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', u'ð…—ð…¥'), + (0x1D15F, 'M', u'ð…˜ð…¥'), + (0x1D160, 'M', u'ð…˜ð…¥ð…®'), + (0x1D161, 'M', u'ð…˜ð…¥ð…¯'), + (0x1D162, 'M', u'ð…˜ð…¥ð…°'), + (0x1D163, 'M', u'ð…˜ð…¥ð…±'), + (0x1D164, 'M', u'ð…˜ð…¥ð…²'), + (0x1D165, 'V'), + ] + +def _seg_59(): + return [ + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', u'ð†¹ð…¥'), + (0x1D1BC, 'M', u'ð†ºð…¥'), + (0x1D1BD, 'M', u'ð†¹ð…¥ð…®'), + (0x1D1BE, 'M', u'ð†ºð…¥ð…®'), + (0x1D1BF, 'M', u'ð†¹ð…¥ð…¯'), + (0x1D1C0, 'M', u'ð†ºð…¥ð…¯'), + (0x1D1C1, 'V'), + (0x1D1E9, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D2E0, 'V'), + (0x1D2F4, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D379, 'X'), + (0x1D400, 'M', u'a'), + (0x1D401, 'M', u'b'), + (0x1D402, 'M', u'c'), + (0x1D403, 'M', u'd'), + (0x1D404, 'M', u'e'), + (0x1D405, 'M', u'f'), + 
(0x1D406, 'M', u'g'), + (0x1D407, 'M', u'h'), + (0x1D408, 'M', u'i'), + (0x1D409, 'M', u'j'), + (0x1D40A, 'M', u'k'), + (0x1D40B, 'M', u'l'), + (0x1D40C, 'M', u'm'), + (0x1D40D, 'M', u'n'), + (0x1D40E, 'M', u'o'), + (0x1D40F, 'M', u'p'), + (0x1D410, 'M', u'q'), + (0x1D411, 'M', u'r'), + (0x1D412, 'M', u's'), + (0x1D413, 'M', u't'), + (0x1D414, 'M', u'u'), + (0x1D415, 'M', u'v'), + (0x1D416, 'M', u'w'), + (0x1D417, 'M', u'x'), + (0x1D418, 'M', u'y'), + (0x1D419, 'M', u'z'), + (0x1D41A, 'M', u'a'), + (0x1D41B, 'M', u'b'), + (0x1D41C, 'M', u'c'), + (0x1D41D, 'M', u'd'), + (0x1D41E, 'M', u'e'), + (0x1D41F, 'M', u'f'), + (0x1D420, 'M', u'g'), + (0x1D421, 'M', u'h'), + (0x1D422, 'M', u'i'), + (0x1D423, 'M', u'j'), + (0x1D424, 'M', u'k'), + (0x1D425, 'M', u'l'), + (0x1D426, 'M', u'm'), + (0x1D427, 'M', u'n'), + (0x1D428, 'M', u'o'), + (0x1D429, 'M', u'p'), + (0x1D42A, 'M', u'q'), + (0x1D42B, 'M', u'r'), + (0x1D42C, 'M', u's'), + (0x1D42D, 'M', u't'), + (0x1D42E, 'M', u'u'), + (0x1D42F, 'M', u'v'), + (0x1D430, 'M', u'w'), + (0x1D431, 'M', u'x'), + (0x1D432, 'M', u'y'), + (0x1D433, 'M', u'z'), + (0x1D434, 'M', u'a'), + (0x1D435, 'M', u'b'), + (0x1D436, 'M', u'c'), + (0x1D437, 'M', u'd'), + (0x1D438, 'M', u'e'), + (0x1D439, 'M', u'f'), + (0x1D43A, 'M', u'g'), + (0x1D43B, 'M', u'h'), + (0x1D43C, 'M', u'i'), + (0x1D43D, 'M', u'j'), + (0x1D43E, 'M', u'k'), + (0x1D43F, 'M', u'l'), + (0x1D440, 'M', u'm'), + (0x1D441, 'M', u'n'), + (0x1D442, 'M', u'o'), + (0x1D443, 'M', u'p'), + (0x1D444, 'M', u'q'), + (0x1D445, 'M', u'r'), + (0x1D446, 'M', u's'), + (0x1D447, 'M', u't'), + (0x1D448, 'M', u'u'), + (0x1D449, 'M', u'v'), + (0x1D44A, 'M', u'w'), + (0x1D44B, 'M', u'x'), + (0x1D44C, 'M', u'y'), + (0x1D44D, 'M', u'z'), + (0x1D44E, 'M', u'a'), + (0x1D44F, 'M', u'b'), + (0x1D450, 'M', u'c'), + (0x1D451, 'M', u'd'), + ] + +def _seg_60(): + return [ + (0x1D452, 'M', u'e'), + (0x1D453, 'M', u'f'), + (0x1D454, 'M', u'g'), + (0x1D455, 'X'), + (0x1D456, 'M', u'i'), + (0x1D457, 'M', u'j'), + (0x1D458, 'M', u'k'), + (0x1D459, 'M', u'l'), + (0x1D45A, 'M', u'm'), + (0x1D45B, 'M', u'n'), + (0x1D45C, 'M', u'o'), + (0x1D45D, 'M', u'p'), + (0x1D45E, 'M', u'q'), + (0x1D45F, 'M', u'r'), + (0x1D460, 'M', u's'), + (0x1D461, 'M', u't'), + (0x1D462, 'M', u'u'), + (0x1D463, 'M', u'v'), + (0x1D464, 'M', u'w'), + (0x1D465, 'M', u'x'), + (0x1D466, 'M', u'y'), + (0x1D467, 'M', u'z'), + (0x1D468, 'M', u'a'), + (0x1D469, 'M', u'b'), + (0x1D46A, 'M', u'c'), + (0x1D46B, 'M', u'd'), + (0x1D46C, 'M', u'e'), + (0x1D46D, 'M', u'f'), + (0x1D46E, 'M', u'g'), + (0x1D46F, 'M', u'h'), + (0x1D470, 'M', u'i'), + (0x1D471, 'M', u'j'), + (0x1D472, 'M', u'k'), + (0x1D473, 'M', u'l'), + (0x1D474, 'M', u'm'), + (0x1D475, 'M', u'n'), + (0x1D476, 'M', u'o'), + (0x1D477, 'M', u'p'), + (0x1D478, 'M', u'q'), + (0x1D479, 'M', u'r'), + (0x1D47A, 'M', u's'), + (0x1D47B, 'M', u't'), + (0x1D47C, 'M', u'u'), + (0x1D47D, 'M', u'v'), + (0x1D47E, 'M', u'w'), + (0x1D47F, 'M', u'x'), + (0x1D480, 'M', u'y'), + (0x1D481, 'M', u'z'), + (0x1D482, 'M', u'a'), + (0x1D483, 'M', u'b'), + (0x1D484, 'M', u'c'), + (0x1D485, 'M', u'd'), + (0x1D486, 'M', u'e'), + (0x1D487, 'M', u'f'), + (0x1D488, 'M', u'g'), + (0x1D489, 'M', u'h'), + (0x1D48A, 'M', u'i'), + (0x1D48B, 'M', u'j'), + (0x1D48C, 'M', u'k'), + (0x1D48D, 'M', u'l'), + (0x1D48E, 'M', u'm'), + (0x1D48F, 'M', u'n'), + (0x1D490, 'M', u'o'), + (0x1D491, 'M', u'p'), + (0x1D492, 'M', u'q'), + (0x1D493, 'M', u'r'), + (0x1D494, 'M', u's'), + (0x1D495, 'M', u't'), + (0x1D496, 'M', u'u'), + (0x1D497, 'M', u'v'), + (0x1D498, 'M', u'w'), + 
(0x1D499, 'M', u'x'), + (0x1D49A, 'M', u'y'), + (0x1D49B, 'M', u'z'), + (0x1D49C, 'M', u'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', u'c'), + (0x1D49F, 'M', u'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', u'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', u'j'), + (0x1D4A6, 'M', u'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', u'n'), + (0x1D4AA, 'M', u'o'), + (0x1D4AB, 'M', u'p'), + (0x1D4AC, 'M', u'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', u's'), + (0x1D4AF, 'M', u't'), + (0x1D4B0, 'M', u'u'), + (0x1D4B1, 'M', u'v'), + (0x1D4B2, 'M', u'w'), + (0x1D4B3, 'M', u'x'), + (0x1D4B4, 'M', u'y'), + (0x1D4B5, 'M', u'z'), + (0x1D4B6, 'M', u'a'), + (0x1D4B7, 'M', u'b'), + (0x1D4B8, 'M', u'c'), + ] + +def _seg_61(): + return [ + (0x1D4B9, 'M', u'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', u'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', u'h'), + (0x1D4BE, 'M', u'i'), + (0x1D4BF, 'M', u'j'), + (0x1D4C0, 'M', u'k'), + (0x1D4C1, 'M', u'l'), + (0x1D4C2, 'M', u'm'), + (0x1D4C3, 'M', u'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', u'p'), + (0x1D4C6, 'M', u'q'), + (0x1D4C7, 'M', u'r'), + (0x1D4C8, 'M', u's'), + (0x1D4C9, 'M', u't'), + (0x1D4CA, 'M', u'u'), + (0x1D4CB, 'M', u'v'), + (0x1D4CC, 'M', u'w'), + (0x1D4CD, 'M', u'x'), + (0x1D4CE, 'M', u'y'), + (0x1D4CF, 'M', u'z'), + (0x1D4D0, 'M', u'a'), + (0x1D4D1, 'M', u'b'), + (0x1D4D2, 'M', u'c'), + (0x1D4D3, 'M', u'd'), + (0x1D4D4, 'M', u'e'), + (0x1D4D5, 'M', u'f'), + (0x1D4D6, 'M', u'g'), + (0x1D4D7, 'M', u'h'), + (0x1D4D8, 'M', u'i'), + (0x1D4D9, 'M', u'j'), + (0x1D4DA, 'M', u'k'), + (0x1D4DB, 'M', u'l'), + (0x1D4DC, 'M', u'm'), + (0x1D4DD, 'M', u'n'), + (0x1D4DE, 'M', u'o'), + (0x1D4DF, 'M', u'p'), + (0x1D4E0, 'M', u'q'), + (0x1D4E1, 'M', u'r'), + (0x1D4E2, 'M', u's'), + (0x1D4E3, 'M', u't'), + (0x1D4E4, 'M', u'u'), + (0x1D4E5, 'M', u'v'), + (0x1D4E6, 'M', u'w'), + (0x1D4E7, 'M', u'x'), + (0x1D4E8, 'M', u'y'), + (0x1D4E9, 'M', u'z'), + (0x1D4EA, 'M', u'a'), + (0x1D4EB, 'M', u'b'), + (0x1D4EC, 'M', u'c'), + (0x1D4ED, 'M', u'd'), + (0x1D4EE, 'M', u'e'), + (0x1D4EF, 'M', u'f'), + (0x1D4F0, 'M', u'g'), + (0x1D4F1, 'M', u'h'), + (0x1D4F2, 'M', u'i'), + (0x1D4F3, 'M', u'j'), + (0x1D4F4, 'M', u'k'), + (0x1D4F5, 'M', u'l'), + (0x1D4F6, 'M', u'm'), + (0x1D4F7, 'M', u'n'), + (0x1D4F8, 'M', u'o'), + (0x1D4F9, 'M', u'p'), + (0x1D4FA, 'M', u'q'), + (0x1D4FB, 'M', u'r'), + (0x1D4FC, 'M', u's'), + (0x1D4FD, 'M', u't'), + (0x1D4FE, 'M', u'u'), + (0x1D4FF, 'M', u'v'), + (0x1D500, 'M', u'w'), + (0x1D501, 'M', u'x'), + (0x1D502, 'M', u'y'), + (0x1D503, 'M', u'z'), + (0x1D504, 'M', u'a'), + (0x1D505, 'M', u'b'), + (0x1D506, 'X'), + (0x1D507, 'M', u'd'), + (0x1D508, 'M', u'e'), + (0x1D509, 'M', u'f'), + (0x1D50A, 'M', u'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', u'j'), + (0x1D50E, 'M', u'k'), + (0x1D50F, 'M', u'l'), + (0x1D510, 'M', u'm'), + (0x1D511, 'M', u'n'), + (0x1D512, 'M', u'o'), + (0x1D513, 'M', u'p'), + (0x1D514, 'M', u'q'), + (0x1D515, 'X'), + (0x1D516, 'M', u's'), + (0x1D517, 'M', u't'), + (0x1D518, 'M', u'u'), + (0x1D519, 'M', u'v'), + (0x1D51A, 'M', u'w'), + (0x1D51B, 'M', u'x'), + (0x1D51C, 'M', u'y'), + (0x1D51D, 'X'), + ] + +def _seg_62(): + return [ + (0x1D51E, 'M', u'a'), + (0x1D51F, 'M', u'b'), + (0x1D520, 'M', u'c'), + (0x1D521, 'M', u'd'), + (0x1D522, 'M', u'e'), + (0x1D523, 'M', u'f'), + (0x1D524, 'M', u'g'), + (0x1D525, 'M', u'h'), + (0x1D526, 'M', u'i'), + (0x1D527, 'M', u'j'), + (0x1D528, 'M', u'k'), + (0x1D529, 'M', u'l'), + (0x1D52A, 'M', u'm'), + (0x1D52B, 'M', u'n'), + (0x1D52C, 'M', u'o'), + (0x1D52D, 'M', u'p'), + (0x1D52E, 'M', u'q'), + (0x1D52F, 'M', u'r'), + (0x1D530, 'M', u's'), + 
(0x1D531, 'M', u't'), + (0x1D532, 'M', u'u'), + (0x1D533, 'M', u'v'), + (0x1D534, 'M', u'w'), + (0x1D535, 'M', u'x'), + (0x1D536, 'M', u'y'), + (0x1D537, 'M', u'z'), + (0x1D538, 'M', u'a'), + (0x1D539, 'M', u'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', u'd'), + (0x1D53C, 'M', u'e'), + (0x1D53D, 'M', u'f'), + (0x1D53E, 'M', u'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', u'i'), + (0x1D541, 'M', u'j'), + (0x1D542, 'M', u'k'), + (0x1D543, 'M', u'l'), + (0x1D544, 'M', u'm'), + (0x1D545, 'X'), + (0x1D546, 'M', u'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', u's'), + (0x1D54B, 'M', u't'), + (0x1D54C, 'M', u'u'), + (0x1D54D, 'M', u'v'), + (0x1D54E, 'M', u'w'), + (0x1D54F, 'M', u'x'), + (0x1D550, 'M', u'y'), + (0x1D551, 'X'), + (0x1D552, 'M', u'a'), + (0x1D553, 'M', u'b'), + (0x1D554, 'M', u'c'), + (0x1D555, 'M', u'd'), + (0x1D556, 'M', u'e'), + (0x1D557, 'M', u'f'), + (0x1D558, 'M', u'g'), + (0x1D559, 'M', u'h'), + (0x1D55A, 'M', u'i'), + (0x1D55B, 'M', u'j'), + (0x1D55C, 'M', u'k'), + (0x1D55D, 'M', u'l'), + (0x1D55E, 'M', u'm'), + (0x1D55F, 'M', u'n'), + (0x1D560, 'M', u'o'), + (0x1D561, 'M', u'p'), + (0x1D562, 'M', u'q'), + (0x1D563, 'M', u'r'), + (0x1D564, 'M', u's'), + (0x1D565, 'M', u't'), + (0x1D566, 'M', u'u'), + (0x1D567, 'M', u'v'), + (0x1D568, 'M', u'w'), + (0x1D569, 'M', u'x'), + (0x1D56A, 'M', u'y'), + (0x1D56B, 'M', u'z'), + (0x1D56C, 'M', u'a'), + (0x1D56D, 'M', u'b'), + (0x1D56E, 'M', u'c'), + (0x1D56F, 'M', u'd'), + (0x1D570, 'M', u'e'), + (0x1D571, 'M', u'f'), + (0x1D572, 'M', u'g'), + (0x1D573, 'M', u'h'), + (0x1D574, 'M', u'i'), + (0x1D575, 'M', u'j'), + (0x1D576, 'M', u'k'), + (0x1D577, 'M', u'l'), + (0x1D578, 'M', u'm'), + (0x1D579, 'M', u'n'), + (0x1D57A, 'M', u'o'), + (0x1D57B, 'M', u'p'), + (0x1D57C, 'M', u'q'), + (0x1D57D, 'M', u'r'), + (0x1D57E, 'M', u's'), + (0x1D57F, 'M', u't'), + (0x1D580, 'M', u'u'), + (0x1D581, 'M', u'v'), + (0x1D582, 'M', u'w'), + (0x1D583, 'M', u'x'), + ] + +def _seg_63(): + return [ + (0x1D584, 'M', u'y'), + (0x1D585, 'M', u'z'), + (0x1D586, 'M', u'a'), + (0x1D587, 'M', u'b'), + (0x1D588, 'M', u'c'), + (0x1D589, 'M', u'd'), + (0x1D58A, 'M', u'e'), + (0x1D58B, 'M', u'f'), + (0x1D58C, 'M', u'g'), + (0x1D58D, 'M', u'h'), + (0x1D58E, 'M', u'i'), + (0x1D58F, 'M', u'j'), + (0x1D590, 'M', u'k'), + (0x1D591, 'M', u'l'), + (0x1D592, 'M', u'm'), + (0x1D593, 'M', u'n'), + (0x1D594, 'M', u'o'), + (0x1D595, 'M', u'p'), + (0x1D596, 'M', u'q'), + (0x1D597, 'M', u'r'), + (0x1D598, 'M', u's'), + (0x1D599, 'M', u't'), + (0x1D59A, 'M', u'u'), + (0x1D59B, 'M', u'v'), + (0x1D59C, 'M', u'w'), + (0x1D59D, 'M', u'x'), + (0x1D59E, 'M', u'y'), + (0x1D59F, 'M', u'z'), + (0x1D5A0, 'M', u'a'), + (0x1D5A1, 'M', u'b'), + (0x1D5A2, 'M', u'c'), + (0x1D5A3, 'M', u'd'), + (0x1D5A4, 'M', u'e'), + (0x1D5A5, 'M', u'f'), + (0x1D5A6, 'M', u'g'), + (0x1D5A7, 'M', u'h'), + (0x1D5A8, 'M', u'i'), + (0x1D5A9, 'M', u'j'), + (0x1D5AA, 'M', u'k'), + (0x1D5AB, 'M', u'l'), + (0x1D5AC, 'M', u'm'), + (0x1D5AD, 'M', u'n'), + (0x1D5AE, 'M', u'o'), + (0x1D5AF, 'M', u'p'), + (0x1D5B0, 'M', u'q'), + (0x1D5B1, 'M', u'r'), + (0x1D5B2, 'M', u's'), + (0x1D5B3, 'M', u't'), + (0x1D5B4, 'M', u'u'), + (0x1D5B5, 'M', u'v'), + (0x1D5B6, 'M', u'w'), + (0x1D5B7, 'M', u'x'), + (0x1D5B8, 'M', u'y'), + (0x1D5B9, 'M', u'z'), + (0x1D5BA, 'M', u'a'), + (0x1D5BB, 'M', u'b'), + (0x1D5BC, 'M', u'c'), + (0x1D5BD, 'M', u'd'), + (0x1D5BE, 'M', u'e'), + (0x1D5BF, 'M', u'f'), + (0x1D5C0, 'M', u'g'), + (0x1D5C1, 'M', u'h'), + (0x1D5C2, 'M', u'i'), + (0x1D5C3, 'M', u'j'), + (0x1D5C4, 'M', u'k'), + (0x1D5C5, 'M', u'l'), + (0x1D5C6, 'M', u'm'), + 
(0x1D5C7, 'M', u'n'), + (0x1D5C8, 'M', u'o'), + (0x1D5C9, 'M', u'p'), + (0x1D5CA, 'M', u'q'), + (0x1D5CB, 'M', u'r'), + (0x1D5CC, 'M', u's'), + (0x1D5CD, 'M', u't'), + (0x1D5CE, 'M', u'u'), + (0x1D5CF, 'M', u'v'), + (0x1D5D0, 'M', u'w'), + (0x1D5D1, 'M', u'x'), + (0x1D5D2, 'M', u'y'), + (0x1D5D3, 'M', u'z'), + (0x1D5D4, 'M', u'a'), + (0x1D5D5, 'M', u'b'), + (0x1D5D6, 'M', u'c'), + (0x1D5D7, 'M', u'd'), + (0x1D5D8, 'M', u'e'), + (0x1D5D9, 'M', u'f'), + (0x1D5DA, 'M', u'g'), + (0x1D5DB, 'M', u'h'), + (0x1D5DC, 'M', u'i'), + (0x1D5DD, 'M', u'j'), + (0x1D5DE, 'M', u'k'), + (0x1D5DF, 'M', u'l'), + (0x1D5E0, 'M', u'm'), + (0x1D5E1, 'M', u'n'), + (0x1D5E2, 'M', u'o'), + (0x1D5E3, 'M', u'p'), + (0x1D5E4, 'M', u'q'), + (0x1D5E5, 'M', u'r'), + (0x1D5E6, 'M', u's'), + (0x1D5E7, 'M', u't'), + ] + +def _seg_64(): + return [ + (0x1D5E8, 'M', u'u'), + (0x1D5E9, 'M', u'v'), + (0x1D5EA, 'M', u'w'), + (0x1D5EB, 'M', u'x'), + (0x1D5EC, 'M', u'y'), + (0x1D5ED, 'M', u'z'), + (0x1D5EE, 'M', u'a'), + (0x1D5EF, 'M', u'b'), + (0x1D5F0, 'M', u'c'), + (0x1D5F1, 'M', u'd'), + (0x1D5F2, 'M', u'e'), + (0x1D5F3, 'M', u'f'), + (0x1D5F4, 'M', u'g'), + (0x1D5F5, 'M', u'h'), + (0x1D5F6, 'M', u'i'), + (0x1D5F7, 'M', u'j'), + (0x1D5F8, 'M', u'k'), + (0x1D5F9, 'M', u'l'), + (0x1D5FA, 'M', u'm'), + (0x1D5FB, 'M', u'n'), + (0x1D5FC, 'M', u'o'), + (0x1D5FD, 'M', u'p'), + (0x1D5FE, 'M', u'q'), + (0x1D5FF, 'M', u'r'), + (0x1D600, 'M', u's'), + (0x1D601, 'M', u't'), + (0x1D602, 'M', u'u'), + (0x1D603, 'M', u'v'), + (0x1D604, 'M', u'w'), + (0x1D605, 'M', u'x'), + (0x1D606, 'M', u'y'), + (0x1D607, 'M', u'z'), + (0x1D608, 'M', u'a'), + (0x1D609, 'M', u'b'), + (0x1D60A, 'M', u'c'), + (0x1D60B, 'M', u'd'), + (0x1D60C, 'M', u'e'), + (0x1D60D, 'M', u'f'), + (0x1D60E, 'M', u'g'), + (0x1D60F, 'M', u'h'), + (0x1D610, 'M', u'i'), + (0x1D611, 'M', u'j'), + (0x1D612, 'M', u'k'), + (0x1D613, 'M', u'l'), + (0x1D614, 'M', u'm'), + (0x1D615, 'M', u'n'), + (0x1D616, 'M', u'o'), + (0x1D617, 'M', u'p'), + (0x1D618, 'M', u'q'), + (0x1D619, 'M', u'r'), + (0x1D61A, 'M', u's'), + (0x1D61B, 'M', u't'), + (0x1D61C, 'M', u'u'), + (0x1D61D, 'M', u'v'), + (0x1D61E, 'M', u'w'), + (0x1D61F, 'M', u'x'), + (0x1D620, 'M', u'y'), + (0x1D621, 'M', u'z'), + (0x1D622, 'M', u'a'), + (0x1D623, 'M', u'b'), + (0x1D624, 'M', u'c'), + (0x1D625, 'M', u'd'), + (0x1D626, 'M', u'e'), + (0x1D627, 'M', u'f'), + (0x1D628, 'M', u'g'), + (0x1D629, 'M', u'h'), + (0x1D62A, 'M', u'i'), + (0x1D62B, 'M', u'j'), + (0x1D62C, 'M', u'k'), + (0x1D62D, 'M', u'l'), + (0x1D62E, 'M', u'm'), + (0x1D62F, 'M', u'n'), + (0x1D630, 'M', u'o'), + (0x1D631, 'M', u'p'), + (0x1D632, 'M', u'q'), + (0x1D633, 'M', u'r'), + (0x1D634, 'M', u's'), + (0x1D635, 'M', u't'), + (0x1D636, 'M', u'u'), + (0x1D637, 'M', u'v'), + (0x1D638, 'M', u'w'), + (0x1D639, 'M', u'x'), + (0x1D63A, 'M', u'y'), + (0x1D63B, 'M', u'z'), + (0x1D63C, 'M', u'a'), + (0x1D63D, 'M', u'b'), + (0x1D63E, 'M', u'c'), + (0x1D63F, 'M', u'd'), + (0x1D640, 'M', u'e'), + (0x1D641, 'M', u'f'), + (0x1D642, 'M', u'g'), + (0x1D643, 'M', u'h'), + (0x1D644, 'M', u'i'), + (0x1D645, 'M', u'j'), + (0x1D646, 'M', u'k'), + (0x1D647, 'M', u'l'), + (0x1D648, 'M', u'm'), + (0x1D649, 'M', u'n'), + (0x1D64A, 'M', u'o'), + (0x1D64B, 'M', u'p'), + ] + +def _seg_65(): + return [ + (0x1D64C, 'M', u'q'), + (0x1D64D, 'M', u'r'), + (0x1D64E, 'M', u's'), + (0x1D64F, 'M', u't'), + (0x1D650, 'M', u'u'), + (0x1D651, 'M', u'v'), + (0x1D652, 'M', u'w'), + (0x1D653, 'M', u'x'), + (0x1D654, 'M', u'y'), + (0x1D655, 'M', u'z'), + (0x1D656, 'M', u'a'), + (0x1D657, 'M', u'b'), + 
(0x1D658, 'M', u'c'), + (0x1D659, 'M', u'd'), + (0x1D65A, 'M', u'e'), + (0x1D65B, 'M', u'f'), + (0x1D65C, 'M', u'g'), + (0x1D65D, 'M', u'h'), + (0x1D65E, 'M', u'i'), + (0x1D65F, 'M', u'j'), + (0x1D660, 'M', u'k'), + (0x1D661, 'M', u'l'), + (0x1D662, 'M', u'm'), + (0x1D663, 'M', u'n'), + (0x1D664, 'M', u'o'), + (0x1D665, 'M', u'p'), + (0x1D666, 'M', u'q'), + (0x1D667, 'M', u'r'), + (0x1D668, 'M', u's'), + (0x1D669, 'M', u't'), + (0x1D66A, 'M', u'u'), + (0x1D66B, 'M', u'v'), + (0x1D66C, 'M', u'w'), + (0x1D66D, 'M', u'x'), + (0x1D66E, 'M', u'y'), + (0x1D66F, 'M', u'z'), + (0x1D670, 'M', u'a'), + (0x1D671, 'M', u'b'), + (0x1D672, 'M', u'c'), + (0x1D673, 'M', u'd'), + (0x1D674, 'M', u'e'), + (0x1D675, 'M', u'f'), + (0x1D676, 'M', u'g'), + (0x1D677, 'M', u'h'), + (0x1D678, 'M', u'i'), + (0x1D679, 'M', u'j'), + (0x1D67A, 'M', u'k'), + (0x1D67B, 'M', u'l'), + (0x1D67C, 'M', u'm'), + (0x1D67D, 'M', u'n'), + (0x1D67E, 'M', u'o'), + (0x1D67F, 'M', u'p'), + (0x1D680, 'M', u'q'), + (0x1D681, 'M', u'r'), + (0x1D682, 'M', u's'), + (0x1D683, 'M', u't'), + (0x1D684, 'M', u'u'), + (0x1D685, 'M', u'v'), + (0x1D686, 'M', u'w'), + (0x1D687, 'M', u'x'), + (0x1D688, 'M', u'y'), + (0x1D689, 'M', u'z'), + (0x1D68A, 'M', u'a'), + (0x1D68B, 'M', u'b'), + (0x1D68C, 'M', u'c'), + (0x1D68D, 'M', u'd'), + (0x1D68E, 'M', u'e'), + (0x1D68F, 'M', u'f'), + (0x1D690, 'M', u'g'), + (0x1D691, 'M', u'h'), + (0x1D692, 'M', u'i'), + (0x1D693, 'M', u'j'), + (0x1D694, 'M', u'k'), + (0x1D695, 'M', u'l'), + (0x1D696, 'M', u'm'), + (0x1D697, 'M', u'n'), + (0x1D698, 'M', u'o'), + (0x1D699, 'M', u'p'), + (0x1D69A, 'M', u'q'), + (0x1D69B, 'M', u'r'), + (0x1D69C, 'M', u's'), + (0x1D69D, 'M', u't'), + (0x1D69E, 'M', u'u'), + (0x1D69F, 'M', u'v'), + (0x1D6A0, 'M', u'w'), + (0x1D6A1, 'M', u'x'), + (0x1D6A2, 'M', u'y'), + (0x1D6A3, 'M', u'z'), + (0x1D6A4, 'M', u'ı'), + (0x1D6A5, 'M', u'È·'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', u'α'), + (0x1D6A9, 'M', u'β'), + (0x1D6AA, 'M', u'γ'), + (0x1D6AB, 'M', u'δ'), + (0x1D6AC, 'M', u'ε'), + (0x1D6AD, 'M', u'ζ'), + (0x1D6AE, 'M', u'η'), + (0x1D6AF, 'M', u'θ'), + (0x1D6B0, 'M', u'ι'), + ] + +def _seg_66(): + return [ + (0x1D6B1, 'M', u'κ'), + (0x1D6B2, 'M', u'λ'), + (0x1D6B3, 'M', u'μ'), + (0x1D6B4, 'M', u'ν'), + (0x1D6B5, 'M', u'ξ'), + (0x1D6B6, 'M', u'ο'), + (0x1D6B7, 'M', u'Ï€'), + (0x1D6B8, 'M', u'Ï'), + (0x1D6B9, 'M', u'θ'), + (0x1D6BA, 'M', u'σ'), + (0x1D6BB, 'M', u'Ï„'), + (0x1D6BC, 'M', u'Ï…'), + (0x1D6BD, 'M', u'φ'), + (0x1D6BE, 'M', u'χ'), + (0x1D6BF, 'M', u'ψ'), + (0x1D6C0, 'M', u'ω'), + (0x1D6C1, 'M', u'∇'), + (0x1D6C2, 'M', u'α'), + (0x1D6C3, 'M', u'β'), + (0x1D6C4, 'M', u'γ'), + (0x1D6C5, 'M', u'δ'), + (0x1D6C6, 'M', u'ε'), + (0x1D6C7, 'M', u'ζ'), + (0x1D6C8, 'M', u'η'), + (0x1D6C9, 'M', u'θ'), + (0x1D6CA, 'M', u'ι'), + (0x1D6CB, 'M', u'κ'), + (0x1D6CC, 'M', u'λ'), + (0x1D6CD, 'M', u'μ'), + (0x1D6CE, 'M', u'ν'), + (0x1D6CF, 'M', u'ξ'), + (0x1D6D0, 'M', u'ο'), + (0x1D6D1, 'M', u'Ï€'), + (0x1D6D2, 'M', u'Ï'), + (0x1D6D3, 'M', u'σ'), + (0x1D6D5, 'M', u'Ï„'), + (0x1D6D6, 'M', u'Ï…'), + (0x1D6D7, 'M', u'φ'), + (0x1D6D8, 'M', u'χ'), + (0x1D6D9, 'M', u'ψ'), + (0x1D6DA, 'M', u'ω'), + (0x1D6DB, 'M', u'∂'), + (0x1D6DC, 'M', u'ε'), + (0x1D6DD, 'M', u'θ'), + (0x1D6DE, 'M', u'κ'), + (0x1D6DF, 'M', u'φ'), + (0x1D6E0, 'M', u'Ï'), + (0x1D6E1, 'M', u'Ï€'), + (0x1D6E2, 'M', u'α'), + (0x1D6E3, 'M', u'β'), + (0x1D6E4, 'M', u'γ'), + (0x1D6E5, 'M', u'δ'), + (0x1D6E6, 'M', u'ε'), + (0x1D6E7, 'M', u'ζ'), + (0x1D6E8, 'M', u'η'), + (0x1D6E9, 'M', u'θ'), + (0x1D6EA, 'M', u'ι'), + (0x1D6EB, 'M', u'κ'), + (0x1D6EC, 'M', 
u'λ'), + (0x1D6ED, 'M', u'μ'), + (0x1D6EE, 'M', u'ν'), + (0x1D6EF, 'M', u'ξ'), + (0x1D6F0, 'M', u'ο'), + (0x1D6F1, 'M', u'Ï€'), + (0x1D6F2, 'M', u'Ï'), + (0x1D6F3, 'M', u'θ'), + (0x1D6F4, 'M', u'σ'), + (0x1D6F5, 'M', u'Ï„'), + (0x1D6F6, 'M', u'Ï…'), + (0x1D6F7, 'M', u'φ'), + (0x1D6F8, 'M', u'χ'), + (0x1D6F9, 'M', u'ψ'), + (0x1D6FA, 'M', u'ω'), + (0x1D6FB, 'M', u'∇'), + (0x1D6FC, 'M', u'α'), + (0x1D6FD, 'M', u'β'), + (0x1D6FE, 'M', u'γ'), + (0x1D6FF, 'M', u'δ'), + (0x1D700, 'M', u'ε'), + (0x1D701, 'M', u'ζ'), + (0x1D702, 'M', u'η'), + (0x1D703, 'M', u'θ'), + (0x1D704, 'M', u'ι'), + (0x1D705, 'M', u'κ'), + (0x1D706, 'M', u'λ'), + (0x1D707, 'M', u'μ'), + (0x1D708, 'M', u'ν'), + (0x1D709, 'M', u'ξ'), + (0x1D70A, 'M', u'ο'), + (0x1D70B, 'M', u'Ï€'), + (0x1D70C, 'M', u'Ï'), + (0x1D70D, 'M', u'σ'), + (0x1D70F, 'M', u'Ï„'), + (0x1D710, 'M', u'Ï…'), + (0x1D711, 'M', u'φ'), + (0x1D712, 'M', u'χ'), + (0x1D713, 'M', u'ψ'), + (0x1D714, 'M', u'ω'), + (0x1D715, 'M', u'∂'), + (0x1D716, 'M', u'ε'), + ] + +def _seg_67(): + return [ + (0x1D717, 'M', u'θ'), + (0x1D718, 'M', u'κ'), + (0x1D719, 'M', u'φ'), + (0x1D71A, 'M', u'Ï'), + (0x1D71B, 'M', u'Ï€'), + (0x1D71C, 'M', u'α'), + (0x1D71D, 'M', u'β'), + (0x1D71E, 'M', u'γ'), + (0x1D71F, 'M', u'δ'), + (0x1D720, 'M', u'ε'), + (0x1D721, 'M', u'ζ'), + (0x1D722, 'M', u'η'), + (0x1D723, 'M', u'θ'), + (0x1D724, 'M', u'ι'), + (0x1D725, 'M', u'κ'), + (0x1D726, 'M', u'λ'), + (0x1D727, 'M', u'μ'), + (0x1D728, 'M', u'ν'), + (0x1D729, 'M', u'ξ'), + (0x1D72A, 'M', u'ο'), + (0x1D72B, 'M', u'Ï€'), + (0x1D72C, 'M', u'Ï'), + (0x1D72D, 'M', u'θ'), + (0x1D72E, 'M', u'σ'), + (0x1D72F, 'M', u'Ï„'), + (0x1D730, 'M', u'Ï…'), + (0x1D731, 'M', u'φ'), + (0x1D732, 'M', u'χ'), + (0x1D733, 'M', u'ψ'), + (0x1D734, 'M', u'ω'), + (0x1D735, 'M', u'∇'), + (0x1D736, 'M', u'α'), + (0x1D737, 'M', u'β'), + (0x1D738, 'M', u'γ'), + (0x1D739, 'M', u'δ'), + (0x1D73A, 'M', u'ε'), + (0x1D73B, 'M', u'ζ'), + (0x1D73C, 'M', u'η'), + (0x1D73D, 'M', u'θ'), + (0x1D73E, 'M', u'ι'), + (0x1D73F, 'M', u'κ'), + (0x1D740, 'M', u'λ'), + (0x1D741, 'M', u'μ'), + (0x1D742, 'M', u'ν'), + (0x1D743, 'M', u'ξ'), + (0x1D744, 'M', u'ο'), + (0x1D745, 'M', u'Ï€'), + (0x1D746, 'M', u'Ï'), + (0x1D747, 'M', u'σ'), + (0x1D749, 'M', u'Ï„'), + (0x1D74A, 'M', u'Ï…'), + (0x1D74B, 'M', u'φ'), + (0x1D74C, 'M', u'χ'), + (0x1D74D, 'M', u'ψ'), + (0x1D74E, 'M', u'ω'), + (0x1D74F, 'M', u'∂'), + (0x1D750, 'M', u'ε'), + (0x1D751, 'M', u'θ'), + (0x1D752, 'M', u'κ'), + (0x1D753, 'M', u'φ'), + (0x1D754, 'M', u'Ï'), + (0x1D755, 'M', u'Ï€'), + (0x1D756, 'M', u'α'), + (0x1D757, 'M', u'β'), + (0x1D758, 'M', u'γ'), + (0x1D759, 'M', u'δ'), + (0x1D75A, 'M', u'ε'), + (0x1D75B, 'M', u'ζ'), + (0x1D75C, 'M', u'η'), + (0x1D75D, 'M', u'θ'), + (0x1D75E, 'M', u'ι'), + (0x1D75F, 'M', u'κ'), + (0x1D760, 'M', u'λ'), + (0x1D761, 'M', u'μ'), + (0x1D762, 'M', u'ν'), + (0x1D763, 'M', u'ξ'), + (0x1D764, 'M', u'ο'), + (0x1D765, 'M', u'Ï€'), + (0x1D766, 'M', u'Ï'), + (0x1D767, 'M', u'θ'), + (0x1D768, 'M', u'σ'), + (0x1D769, 'M', u'Ï„'), + (0x1D76A, 'M', u'Ï…'), + (0x1D76B, 'M', u'φ'), + (0x1D76C, 'M', u'χ'), + (0x1D76D, 'M', u'ψ'), + (0x1D76E, 'M', u'ω'), + (0x1D76F, 'M', u'∇'), + (0x1D770, 'M', u'α'), + (0x1D771, 'M', u'β'), + (0x1D772, 'M', u'γ'), + (0x1D773, 'M', u'δ'), + (0x1D774, 'M', u'ε'), + (0x1D775, 'M', u'ζ'), + (0x1D776, 'M', u'η'), + (0x1D777, 'M', u'θ'), + (0x1D778, 'M', u'ι'), + (0x1D779, 'M', u'κ'), + (0x1D77A, 'M', u'λ'), + (0x1D77B, 'M', u'μ'), + ] + +def _seg_68(): + return [ + (0x1D77C, 'M', u'ν'), + (0x1D77D, 'M', u'ξ'), + (0x1D77E, 'M', u'ο'), + 
(0x1D77F, 'M', u'Ï€'), + (0x1D780, 'M', u'Ï'), + (0x1D781, 'M', u'σ'), + (0x1D783, 'M', u'Ï„'), + (0x1D784, 'M', u'Ï…'), + (0x1D785, 'M', u'φ'), + (0x1D786, 'M', u'χ'), + (0x1D787, 'M', u'ψ'), + (0x1D788, 'M', u'ω'), + (0x1D789, 'M', u'∂'), + (0x1D78A, 'M', u'ε'), + (0x1D78B, 'M', u'θ'), + (0x1D78C, 'M', u'κ'), + (0x1D78D, 'M', u'φ'), + (0x1D78E, 'M', u'Ï'), + (0x1D78F, 'M', u'Ï€'), + (0x1D790, 'M', u'α'), + (0x1D791, 'M', u'β'), + (0x1D792, 'M', u'γ'), + (0x1D793, 'M', u'δ'), + (0x1D794, 'M', u'ε'), + (0x1D795, 'M', u'ζ'), + (0x1D796, 'M', u'η'), + (0x1D797, 'M', u'θ'), + (0x1D798, 'M', u'ι'), + (0x1D799, 'M', u'κ'), + (0x1D79A, 'M', u'λ'), + (0x1D79B, 'M', u'μ'), + (0x1D79C, 'M', u'ν'), + (0x1D79D, 'M', u'ξ'), + (0x1D79E, 'M', u'ο'), + (0x1D79F, 'M', u'Ï€'), + (0x1D7A0, 'M', u'Ï'), + (0x1D7A1, 'M', u'θ'), + (0x1D7A2, 'M', u'σ'), + (0x1D7A3, 'M', u'Ï„'), + (0x1D7A4, 'M', u'Ï…'), + (0x1D7A5, 'M', u'φ'), + (0x1D7A6, 'M', u'χ'), + (0x1D7A7, 'M', u'ψ'), + (0x1D7A8, 'M', u'ω'), + (0x1D7A9, 'M', u'∇'), + (0x1D7AA, 'M', u'α'), + (0x1D7AB, 'M', u'β'), + (0x1D7AC, 'M', u'γ'), + (0x1D7AD, 'M', u'δ'), + (0x1D7AE, 'M', u'ε'), + (0x1D7AF, 'M', u'ζ'), + (0x1D7B0, 'M', u'η'), + (0x1D7B1, 'M', u'θ'), + (0x1D7B2, 'M', u'ι'), + (0x1D7B3, 'M', u'κ'), + (0x1D7B4, 'M', u'λ'), + (0x1D7B5, 'M', u'μ'), + (0x1D7B6, 'M', u'ν'), + (0x1D7B7, 'M', u'ξ'), + (0x1D7B8, 'M', u'ο'), + (0x1D7B9, 'M', u'Ï€'), + (0x1D7BA, 'M', u'Ï'), + (0x1D7BB, 'M', u'σ'), + (0x1D7BD, 'M', u'Ï„'), + (0x1D7BE, 'M', u'Ï…'), + (0x1D7BF, 'M', u'φ'), + (0x1D7C0, 'M', u'χ'), + (0x1D7C1, 'M', u'ψ'), + (0x1D7C2, 'M', u'ω'), + (0x1D7C3, 'M', u'∂'), + (0x1D7C4, 'M', u'ε'), + (0x1D7C5, 'M', u'θ'), + (0x1D7C6, 'M', u'κ'), + (0x1D7C7, 'M', u'φ'), + (0x1D7C8, 'M', u'Ï'), + (0x1D7C9, 'M', u'Ï€'), + (0x1D7CA, 'M', u'Ï'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', u'0'), + (0x1D7CF, 'M', u'1'), + (0x1D7D0, 'M', u'2'), + (0x1D7D1, 'M', u'3'), + (0x1D7D2, 'M', u'4'), + (0x1D7D3, 'M', u'5'), + (0x1D7D4, 'M', u'6'), + (0x1D7D5, 'M', u'7'), + (0x1D7D6, 'M', u'8'), + (0x1D7D7, 'M', u'9'), + (0x1D7D8, 'M', u'0'), + (0x1D7D9, 'M', u'1'), + (0x1D7DA, 'M', u'2'), + (0x1D7DB, 'M', u'3'), + (0x1D7DC, 'M', u'4'), + (0x1D7DD, 'M', u'5'), + (0x1D7DE, 'M', u'6'), + (0x1D7DF, 'M', u'7'), + (0x1D7E0, 'M', u'8'), + (0x1D7E1, 'M', u'9'), + (0x1D7E2, 'M', u'0'), + (0x1D7E3, 'M', u'1'), + ] + +def _seg_69(): + return [ + (0x1D7E4, 'M', u'2'), + (0x1D7E5, 'M', u'3'), + (0x1D7E6, 'M', u'4'), + (0x1D7E7, 'M', u'5'), + (0x1D7E8, 'M', u'6'), + (0x1D7E9, 'M', u'7'), + (0x1D7EA, 'M', u'8'), + (0x1D7EB, 'M', u'9'), + (0x1D7EC, 'M', u'0'), + (0x1D7ED, 'M', u'1'), + (0x1D7EE, 'M', u'2'), + (0x1D7EF, 'M', u'3'), + (0x1D7F0, 'M', u'4'), + (0x1D7F1, 'M', u'5'), + (0x1D7F2, 'M', u'6'), + (0x1D7F3, 'M', u'7'), + (0x1D7F4, 'M', u'8'), + (0x1D7F5, 'M', u'9'), + (0x1D7F6, 'M', u'0'), + (0x1D7F7, 'M', u'1'), + (0x1D7F8, 'M', u'2'), + (0x1D7F9, 'M', u'3'), + (0x1D7FA, 'M', u'4'), + (0x1D7FB, 'M', u'5'), + (0x1D7FC, 'M', u'6'), + (0x1D7FD, 'M', u'7'), + (0x1D7FE, 'M', u'8'), + (0x1D7FF, 'M', u'9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), + (0x1E800, 
'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', u'𞤢'), + (0x1E901, 'M', u'𞤣'), + (0x1E902, 'M', u'𞤤'), + (0x1E903, 'M', u'𞤥'), + (0x1E904, 'M', u'𞤦'), + (0x1E905, 'M', u'𞤧'), + (0x1E906, 'M', u'𞤨'), + (0x1E907, 'M', u'𞤩'), + (0x1E908, 'M', u'𞤪'), + (0x1E909, 'M', u'𞤫'), + (0x1E90A, 'M', u'𞤬'), + (0x1E90B, 'M', u'𞤭'), + (0x1E90C, 'M', u'𞤮'), + (0x1E90D, 'M', u'𞤯'), + (0x1E90E, 'M', u'𞤰'), + (0x1E90F, 'M', u'𞤱'), + (0x1E910, 'M', u'𞤲'), + (0x1E911, 'M', u'𞤳'), + (0x1E912, 'M', u'𞤴'), + (0x1E913, 'M', u'𞤵'), + (0x1E914, 'M', u'𞤶'), + (0x1E915, 'M', u'𞤷'), + (0x1E916, 'M', u'𞤸'), + (0x1E917, 'M', u'𞤹'), + (0x1E918, 'M', u'𞤺'), + (0x1E919, 'M', u'𞤻'), + (0x1E91A, 'M', u'𞤼'), + (0x1E91B, 'M', u'𞤽'), + (0x1E91C, 'M', u'𞤾'), + (0x1E91D, 'M', u'𞤿'), + (0x1E91E, 'M', u'𞥀'), + (0x1E91F, 'M', u'ðž¥'), + (0x1E920, 'M', u'𞥂'), + (0x1E921, 'M', u'𞥃'), + (0x1E922, 'V'), + (0x1E94C, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + (0x1E960, 'X'), + ] + +def _seg_70(): + return [ + (0x1EC71, 'V'), + (0x1ECB5, 'X'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), + (0x1EE00, 'M', u'ا'), + (0x1EE01, 'M', u'ب'), + (0x1EE02, 'M', u'ج'), + (0x1EE03, 'M', u'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', u'Ùˆ'), + (0x1EE06, 'M', u'ز'), + (0x1EE07, 'M', u'Ø­'), + (0x1EE08, 'M', u'Ø·'), + (0x1EE09, 'M', u'ÙŠ'), + (0x1EE0A, 'M', u'Ùƒ'), + (0x1EE0B, 'M', u'Ù„'), + (0x1EE0C, 'M', u'Ù…'), + (0x1EE0D, 'M', u'Ù†'), + (0x1EE0E, 'M', u'س'), + (0x1EE0F, 'M', u'ع'), + (0x1EE10, 'M', u'Ù'), + (0x1EE11, 'M', u'ص'), + (0x1EE12, 'M', u'Ù‚'), + (0x1EE13, 'M', u'ر'), + (0x1EE14, 'M', u'Ø´'), + (0x1EE15, 'M', u'ت'), + (0x1EE16, 'M', u'Ø«'), + (0x1EE17, 'M', u'Ø®'), + (0x1EE18, 'M', u'ذ'), + (0x1EE19, 'M', u'ض'), + (0x1EE1A, 'M', u'ظ'), + (0x1EE1B, 'M', u'غ'), + (0x1EE1C, 'M', u'Ù®'), + (0x1EE1D, 'M', u'Úº'), + (0x1EE1E, 'M', u'Ú¡'), + (0x1EE1F, 'M', u'Ù¯'), + (0x1EE20, 'X'), + (0x1EE21, 'M', u'ب'), + (0x1EE22, 'M', u'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', u'Ù‡'), + (0x1EE25, 'X'), + (0x1EE27, 'M', u'Ø­'), + (0x1EE28, 'X'), + (0x1EE29, 'M', u'ÙŠ'), + (0x1EE2A, 'M', u'Ùƒ'), + (0x1EE2B, 'M', u'Ù„'), + (0x1EE2C, 'M', u'Ù…'), + (0x1EE2D, 'M', u'Ù†'), + (0x1EE2E, 'M', u'س'), + (0x1EE2F, 'M', u'ع'), + (0x1EE30, 'M', u'Ù'), + (0x1EE31, 'M', u'ص'), + (0x1EE32, 'M', u'Ù‚'), + (0x1EE33, 'X'), + (0x1EE34, 'M', u'Ø´'), + (0x1EE35, 'M', u'ت'), + (0x1EE36, 'M', u'Ø«'), + (0x1EE37, 'M', u'Ø®'), + (0x1EE38, 'X'), + (0x1EE39, 'M', u'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', u'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', u'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', u'Ø­'), + (0x1EE48, 'X'), + (0x1EE49, 'M', u'ÙŠ'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', u'Ù„'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', u'Ù†'), + (0x1EE4E, 'M', u'س'), + (0x1EE4F, 'M', u'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', u'ص'), + (0x1EE52, 'M', u'Ù‚'), + (0x1EE53, 'X'), + (0x1EE54, 'M', u'Ø´'), + (0x1EE55, 'X'), + (0x1EE57, 'M', u'Ø®'), + (0x1EE58, 'X'), + (0x1EE59, 'M', u'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', u'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', u'Úº'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', u'Ù¯'), + (0x1EE60, 'X'), + (0x1EE61, 'M', u'ب'), + (0x1EE62, 'M', u'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', u'Ù‡'), + (0x1EE65, 'X'), + (0x1EE67, 'M', u'Ø­'), + (0x1EE68, 'M', u'Ø·'), + (0x1EE69, 'M', u'ÙŠ'), + (0x1EE6A, 'M', u'Ùƒ'), + ] + +def _seg_71(): + return [ + (0x1EE6B, 'X'), + (0x1EE6C, 'M', u'Ù…'), + (0x1EE6D, 'M', u'Ù†'), + (0x1EE6E, 'M', u'س'), + (0x1EE6F, 'M', u'ع'), + (0x1EE70, 'M', u'Ù'), + (0x1EE71, 'M', u'ص'), + (0x1EE72, 'M', u'Ù‚'), + (0x1EE73, 'X'), + (0x1EE74, 'M', 
u'Ø´'), + (0x1EE75, 'M', u'ت'), + (0x1EE76, 'M', u'Ø«'), + (0x1EE77, 'M', u'Ø®'), + (0x1EE78, 'X'), + (0x1EE79, 'M', u'ض'), + (0x1EE7A, 'M', u'ظ'), + (0x1EE7B, 'M', u'غ'), + (0x1EE7C, 'M', u'Ù®'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', u'Ú¡'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', u'ا'), + (0x1EE81, 'M', u'ب'), + (0x1EE82, 'M', u'ج'), + (0x1EE83, 'M', u'د'), + (0x1EE84, 'M', u'Ù‡'), + (0x1EE85, 'M', u'Ùˆ'), + (0x1EE86, 'M', u'ز'), + (0x1EE87, 'M', u'Ø­'), + (0x1EE88, 'M', u'Ø·'), + (0x1EE89, 'M', u'ÙŠ'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', u'Ù„'), + (0x1EE8C, 'M', u'Ù…'), + (0x1EE8D, 'M', u'Ù†'), + (0x1EE8E, 'M', u'س'), + (0x1EE8F, 'M', u'ع'), + (0x1EE90, 'M', u'Ù'), + (0x1EE91, 'M', u'ص'), + (0x1EE92, 'M', u'Ù‚'), + (0x1EE93, 'M', u'ر'), + (0x1EE94, 'M', u'Ø´'), + (0x1EE95, 'M', u'ت'), + (0x1EE96, 'M', u'Ø«'), + (0x1EE97, 'M', u'Ø®'), + (0x1EE98, 'M', u'ذ'), + (0x1EE99, 'M', u'ض'), + (0x1EE9A, 'M', u'ظ'), + (0x1EE9B, 'M', u'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', u'ب'), + (0x1EEA2, 'M', u'ج'), + (0x1EEA3, 'M', u'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', u'Ùˆ'), + (0x1EEA6, 'M', u'ز'), + (0x1EEA7, 'M', u'Ø­'), + (0x1EEA8, 'M', u'Ø·'), + (0x1EEA9, 'M', u'ÙŠ'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', u'Ù„'), + (0x1EEAC, 'M', u'Ù…'), + (0x1EEAD, 'M', u'Ù†'), + (0x1EEAE, 'M', u'س'), + (0x1EEAF, 'M', u'ع'), + (0x1EEB0, 'M', u'Ù'), + (0x1EEB1, 'M', u'ص'), + (0x1EEB2, 'M', u'Ù‚'), + (0x1EEB3, 'M', u'ر'), + (0x1EEB4, 'M', u'Ø´'), + (0x1EEB5, 'M', u'ت'), + (0x1EEB6, 'M', u'Ø«'), + (0x1EEB7, 'M', u'Ø®'), + (0x1EEB8, 'M', u'ذ'), + (0x1EEB9, 'M', u'ض'), + (0x1EEBA, 'M', u'ظ'), + (0x1EEBB, 'M', u'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', u'0,'), + (0x1F102, '3', u'1,'), + (0x1F103, '3', u'2,'), + (0x1F104, '3', u'3,'), + (0x1F105, '3', u'4,'), + (0x1F106, '3', u'5,'), + (0x1F107, '3', u'6,'), + (0x1F108, '3', u'7,'), + ] + +def _seg_72(): + return [ + (0x1F109, '3', u'8,'), + (0x1F10A, '3', u'9,'), + (0x1F10B, 'V'), + (0x1F110, '3', u'(a)'), + (0x1F111, '3', u'(b)'), + (0x1F112, '3', u'(c)'), + (0x1F113, '3', u'(d)'), + (0x1F114, '3', u'(e)'), + (0x1F115, '3', u'(f)'), + (0x1F116, '3', u'(g)'), + (0x1F117, '3', u'(h)'), + (0x1F118, '3', u'(i)'), + (0x1F119, '3', u'(j)'), + (0x1F11A, '3', u'(k)'), + (0x1F11B, '3', u'(l)'), + (0x1F11C, '3', u'(m)'), + (0x1F11D, '3', u'(n)'), + (0x1F11E, '3', u'(o)'), + (0x1F11F, '3', u'(p)'), + (0x1F120, '3', u'(q)'), + (0x1F121, '3', u'(r)'), + (0x1F122, '3', u'(s)'), + (0x1F123, '3', u'(t)'), + (0x1F124, '3', u'(u)'), + (0x1F125, '3', u'(v)'), + (0x1F126, '3', u'(w)'), + (0x1F127, '3', u'(x)'), + (0x1F128, '3', u'(y)'), + (0x1F129, '3', u'(z)'), + (0x1F12A, 'M', u'〔s〕'), + (0x1F12B, 'M', u'c'), + (0x1F12C, 'M', u'r'), + (0x1F12D, 'M', u'cd'), + (0x1F12E, 'M', u'wz'), + (0x1F12F, 'V'), + (0x1F130, 'M', u'a'), + (0x1F131, 'M', u'b'), + (0x1F132, 'M', u'c'), + (0x1F133, 'M', u'd'), + (0x1F134, 'M', u'e'), + (0x1F135, 'M', u'f'), + (0x1F136, 'M', u'g'), + (0x1F137, 'M', u'h'), + (0x1F138, 'M', u'i'), + (0x1F139, 'M', u'j'), + (0x1F13A, 'M', u'k'), + (0x1F13B, 'M', u'l'), + (0x1F13C, 'M', u'm'), + (0x1F13D, 'M', u'n'), + (0x1F13E, 'M', u'o'), + (0x1F13F, 'M', u'p'), + (0x1F140, 'M', u'q'), + (0x1F141, 'M', u'r'), + (0x1F142, 'M', u's'), + (0x1F143, 'M', u't'), + (0x1F144, 'M', u'u'), + (0x1F145, 'M', u'v'), + (0x1F146, 'M', u'w'), + (0x1F147, 
'M', u'x'), + (0x1F148, 'M', u'y'), + (0x1F149, 'M', u'z'), + (0x1F14A, 'M', u'hv'), + (0x1F14B, 'M', u'mv'), + (0x1F14C, 'M', u'sd'), + (0x1F14D, 'M', u'ss'), + (0x1F14E, 'M', u'ppv'), + (0x1F14F, 'M', u'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', u'mc'), + (0x1F16B, 'M', u'md'), + (0x1F16C, 'M', u'mr'), + (0x1F16D, 'V'), + (0x1F190, 'M', u'dj'), + (0x1F191, 'V'), + (0x1F1AE, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', u'ã»ã‹'), + (0x1F201, 'M', u'ココ'), + (0x1F202, 'M', u'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', u'手'), + (0x1F211, 'M', u'å­—'), + (0x1F212, 'M', u'åŒ'), + (0x1F213, 'M', u'デ'), + (0x1F214, 'M', u'二'), + (0x1F215, 'M', u'多'), + (0x1F216, 'M', u'è§£'), + (0x1F217, 'M', u'天'), + (0x1F218, 'M', u'交'), + (0x1F219, 'M', u'映'), + (0x1F21A, 'M', u'ç„¡'), + (0x1F21B, 'M', u'æ–™'), + (0x1F21C, 'M', u'å‰'), + (0x1F21D, 'M', u'後'), + (0x1F21E, 'M', u'å†'), + (0x1F21F, 'M', u'æ–°'), + (0x1F220, 'M', u'åˆ'), + (0x1F221, 'M', u'終'), + (0x1F222, 'M', u'生'), + (0x1F223, 'M', u'販'), + ] + +def _seg_73(): + return [ + (0x1F224, 'M', u'声'), + (0x1F225, 'M', u'å¹'), + (0x1F226, 'M', u'æ¼”'), + (0x1F227, 'M', u'投'), + (0x1F228, 'M', u'æ•'), + (0x1F229, 'M', u'一'), + (0x1F22A, 'M', u'三'), + (0x1F22B, 'M', u'éŠ'), + (0x1F22C, 'M', u'å·¦'), + (0x1F22D, 'M', u'中'), + (0x1F22E, 'M', u'å³'), + (0x1F22F, 'M', u'指'), + (0x1F230, 'M', u'èµ°'), + (0x1F231, 'M', u'打'), + (0x1F232, 'M', u'ç¦'), + (0x1F233, 'M', u'空'), + (0x1F234, 'M', u'åˆ'), + (0x1F235, 'M', u'満'), + (0x1F236, 'M', u'有'), + (0x1F237, 'M', u'月'), + (0x1F238, 'M', u'申'), + (0x1F239, 'M', u'割'), + (0x1F23A, 'M', u'å–¶'), + (0x1F23B, 'M', u'é…'), + (0x1F23C, 'X'), + (0x1F240, 'M', u'〔本〕'), + (0x1F241, 'M', u'〔三〕'), + (0x1F242, 'M', u'〔二〕'), + (0x1F243, 'M', u'〔安〕'), + (0x1F244, 'M', u'〔点〕'), + (0x1F245, 'M', u'〔打〕'), + (0x1F246, 'M', u'〔盗〕'), + (0x1F247, 'M', u'〔å‹ã€•'), + (0x1F248, 'M', u'〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', u'å¾—'), + (0x1F251, 'M', u'å¯'), + (0x1F252, 'X'), + (0x1F260, 'V'), + (0x1F266, 'X'), + (0x1F300, 'V'), + (0x1F6D8, 'X'), + (0x1F6E0, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6FD, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x1F780, 'V'), + (0x1F7D9, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), + (0x1F800, 'V'), + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), + (0x1F900, 'V'), + (0x1F979, 'X'), + (0x1F97A, 'V'), + (0x1F9CC, 'X'), + (0x1F9CD, 'V'), + (0x1FA54, 'X'), + (0x1FA60, 'V'), + (0x1FA6E, 'X'), + (0x1FA70, 'V'), + (0x1FA75, 'X'), + (0x1FA78, 'V'), + (0x1FA7B, 'X'), + (0x1FA80, 'V'), + (0x1FA87, 'X'), + (0x1FA90, 'V'), + (0x1FAA9, 'X'), + (0x1FAB0, 'V'), + (0x1FAB7, 'X'), + (0x1FAC0, 'V'), + (0x1FAC3, 'X'), + (0x1FAD0, 'V'), + (0x1FAD7, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', u'0'), + (0x1FBF1, 'M', u'1'), + (0x1FBF2, 'M', u'2'), + (0x1FBF3, 'M', u'3'), + (0x1FBF4, 'M', u'4'), + (0x1FBF5, 'M', u'5'), + (0x1FBF6, 'M', u'6'), + (0x1FBF7, 'M', u'7'), + (0x1FBF8, 'M', u'8'), + (0x1FBF9, 'M', u'9'), + ] + +def _seg_74(): + return [ + (0x1FBFA, 'X'), + (0x20000, 'V'), + (0x2A6DE, 'X'), + (0x2A700, 'V'), + (0x2B735, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2F800, 'M', u'丽'), + (0x2F801, 'M', u'丸'), + (0x2F802, 'M', u'ä¹'), + (0x2F803, 'M', u'ð „¢'), + (0x2F804, 'M', u'ä½ '), + (0x2F805, 'M', u'ä¾®'), + (0x2F806, 'M', u'ä¾»'), + (0x2F807, 
'M', u'倂'), + (0x2F808, 'M', u'åº'), + (0x2F809, 'M', u'å‚™'), + (0x2F80A, 'M', u'僧'), + (0x2F80B, 'M', u'åƒ'), + (0x2F80C, 'M', u'ã’ž'), + (0x2F80D, 'M', u'𠘺'), + (0x2F80E, 'M', u'å…'), + (0x2F80F, 'M', u'å…”'), + (0x2F810, 'M', u'å…¤'), + (0x2F811, 'M', u'å…·'), + (0x2F812, 'M', u'𠔜'), + (0x2F813, 'M', u'ã’¹'), + (0x2F814, 'M', u'å…§'), + (0x2F815, 'M', u'å†'), + (0x2F816, 'M', u'ð •‹'), + (0x2F817, 'M', u'冗'), + (0x2F818, 'M', u'冤'), + (0x2F819, 'M', u'仌'), + (0x2F81A, 'M', u'冬'), + (0x2F81B, 'M', u'况'), + (0x2F81C, 'M', u'𩇟'), + (0x2F81D, 'M', u'凵'), + (0x2F81E, 'M', u'刃'), + (0x2F81F, 'M', u'㓟'), + (0x2F820, 'M', u'刻'), + (0x2F821, 'M', u'剆'), + (0x2F822, 'M', u'割'), + (0x2F823, 'M', u'剷'), + (0x2F824, 'M', u'㔕'), + (0x2F825, 'M', u'勇'), + (0x2F826, 'M', u'勉'), + (0x2F827, 'M', u'勤'), + (0x2F828, 'M', u'勺'), + (0x2F829, 'M', u'包'), + (0x2F82A, 'M', u'匆'), + (0x2F82B, 'M', u'北'), + (0x2F82C, 'M', u'å‰'), + (0x2F82D, 'M', u'å‘'), + (0x2F82E, 'M', u'åš'), + (0x2F82F, 'M', u'å³'), + (0x2F830, 'M', u'å½'), + (0x2F831, 'M', u'å¿'), + (0x2F834, 'M', u'𠨬'), + (0x2F835, 'M', u'ç°'), + (0x2F836, 'M', u'åŠ'), + (0x2F837, 'M', u'åŸ'), + (0x2F838, 'M', u'ð ­£'), + (0x2F839, 'M', u'å«'), + (0x2F83A, 'M', u'å±'), + (0x2F83B, 'M', u'å†'), + (0x2F83C, 'M', u'å’ž'), + (0x2F83D, 'M', u'å¸'), + (0x2F83E, 'M', u'呈'), + (0x2F83F, 'M', u'周'), + (0x2F840, 'M', u'å’¢'), + (0x2F841, 'M', u'å“¶'), + (0x2F842, 'M', u'å”'), + (0x2F843, 'M', u'å•“'), + (0x2F844, 'M', u'å•£'), + (0x2F845, 'M', u'å–„'), + (0x2F847, 'M', u'å–™'), + (0x2F848, 'M', u'å–«'), + (0x2F849, 'M', u'å–³'), + (0x2F84A, 'M', u'å—‚'), + (0x2F84B, 'M', u'圖'), + (0x2F84C, 'M', u'嘆'), + (0x2F84D, 'M', u'圗'), + (0x2F84E, 'M', u'噑'), + (0x2F84F, 'M', u'å™´'), + (0x2F850, 'M', u'切'), + (0x2F851, 'M', u'壮'), + (0x2F852, 'M', u'城'), + (0x2F853, 'M', u'埴'), + (0x2F854, 'M', u'å '), + (0x2F855, 'M', u'åž‹'), + (0x2F856, 'M', u'å ²'), + (0x2F857, 'M', u'å ±'), + (0x2F858, 'M', u'墬'), + (0x2F859, 'M', u'𡓤'), + (0x2F85A, 'M', u'売'), + (0x2F85B, 'M', u'壷'), + ] + +def _seg_75(): + return [ + (0x2F85C, 'M', u'夆'), + (0x2F85D, 'M', u'多'), + (0x2F85E, 'M', u'夢'), + (0x2F85F, 'M', u'奢'), + (0x2F860, 'M', u'𡚨'), + (0x2F861, 'M', u'𡛪'), + (0x2F862, 'M', u'姬'), + (0x2F863, 'M', u'娛'), + (0x2F864, 'M', u'娧'), + (0x2F865, 'M', u'姘'), + (0x2F866, 'M', u'婦'), + (0x2F867, 'M', u'ã›®'), + (0x2F868, 'X'), + (0x2F869, 'M', u'嬈'), + (0x2F86A, 'M', u'嬾'), + (0x2F86C, 'M', u'𡧈'), + (0x2F86D, 'M', u'寃'), + (0x2F86E, 'M', u'寘'), + (0x2F86F, 'M', u'寧'), + (0x2F870, 'M', u'寳'), + (0x2F871, 'M', u'𡬘'), + (0x2F872, 'M', u'寿'), + (0x2F873, 'M', u'å°†'), + (0x2F874, 'X'), + (0x2F875, 'M', u'å°¢'), + (0x2F876, 'M', u'ãž'), + (0x2F877, 'M', u'å± '), + (0x2F878, 'M', u'å±®'), + (0x2F879, 'M', u'å³€'), + (0x2F87A, 'M', u'å²'), + (0x2F87B, 'M', u'ð¡·¤'), + (0x2F87C, 'M', u'嵃'), + (0x2F87D, 'M', u'ð¡·¦'), + (0x2F87E, 'M', u'åµ®'), + (0x2F87F, 'M', u'嵫'), + (0x2F880, 'M', u'åµ¼'), + (0x2F881, 'M', u'å·¡'), + (0x2F882, 'M', u'å·¢'), + (0x2F883, 'M', u'ã ¯'), + (0x2F884, 'M', u'å·½'), + (0x2F885, 'M', u'帨'), + (0x2F886, 'M', u'帽'), + (0x2F887, 'M', u'幩'), + (0x2F888, 'M', u'ã¡¢'), + (0x2F889, 'M', u'𢆃'), + (0x2F88A, 'M', u'㡼'), + (0x2F88B, 'M', u'庰'), + (0x2F88C, 'M', u'庳'), + (0x2F88D, 'M', u'庶'), + (0x2F88E, 'M', u'廊'), + (0x2F88F, 'M', u'𪎒'), + (0x2F890, 'M', u'廾'), + (0x2F891, 'M', u'𢌱'), + (0x2F893, 'M', u'èˆ'), + (0x2F894, 'M', u'å¼¢'), + (0x2F896, 'M', u'㣇'), + (0x2F897, 'M', u'𣊸'), + (0x2F898, 'M', u'𦇚'), + (0x2F899, 'M', u'å½¢'), + (0x2F89A, 'M', u'彫'), + (0x2F89B, 'M', u'㣣'), + 
(0x2F89C, 'M', u'徚'), + (0x2F89D, 'M', u'å¿'), + (0x2F89E, 'M', u'å¿—'), + (0x2F89F, 'M', u'忹'), + (0x2F8A0, 'M', u'æ‚'), + (0x2F8A1, 'M', u'㤺'), + (0x2F8A2, 'M', u'㤜'), + (0x2F8A3, 'M', u'æ‚”'), + (0x2F8A4, 'M', u'𢛔'), + (0x2F8A5, 'M', u'惇'), + (0x2F8A6, 'M', u'æ…ˆ'), + (0x2F8A7, 'M', u'æ…Œ'), + (0x2F8A8, 'M', u'æ…Ž'), + (0x2F8A9, 'M', u'æ…Œ'), + (0x2F8AA, 'M', u'æ…º'), + (0x2F8AB, 'M', u'憎'), + (0x2F8AC, 'M', u'憲'), + (0x2F8AD, 'M', u'憤'), + (0x2F8AE, 'M', u'憯'), + (0x2F8AF, 'M', u'懞'), + (0x2F8B0, 'M', u'懲'), + (0x2F8B1, 'M', u'懶'), + (0x2F8B2, 'M', u'æˆ'), + (0x2F8B3, 'M', u'戛'), + (0x2F8B4, 'M', u'æ‰'), + (0x2F8B5, 'M', u'抱'), + (0x2F8B6, 'M', u'æ‹”'), + (0x2F8B7, 'M', u'æ'), + (0x2F8B8, 'M', u'𢬌'), + (0x2F8B9, 'M', u'挽'), + (0x2F8BA, 'M', u'拼'), + (0x2F8BB, 'M', u'æ¨'), + (0x2F8BC, 'M', u'掃'), + (0x2F8BD, 'M', u'æ¤'), + (0x2F8BE, 'M', u'𢯱'), + (0x2F8BF, 'M', u'æ¢'), + (0x2F8C0, 'M', u'æ…'), + (0x2F8C1, 'M', u'掩'), + (0x2F8C2, 'M', u'㨮'), + ] + +def _seg_76(): + return [ + (0x2F8C3, 'M', u'æ‘©'), + (0x2F8C4, 'M', u'摾'), + (0x2F8C5, 'M', u'æ’'), + (0x2F8C6, 'M', u'æ‘·'), + (0x2F8C7, 'M', u'㩬'), + (0x2F8C8, 'M', u'æ•'), + (0x2F8C9, 'M', u'敬'), + (0x2F8CA, 'M', u'𣀊'), + (0x2F8CB, 'M', u'æ—£'), + (0x2F8CC, 'M', u'書'), + (0x2F8CD, 'M', u'晉'), + (0x2F8CE, 'M', u'㬙'), + (0x2F8CF, 'M', u'æš‘'), + (0x2F8D0, 'M', u'㬈'), + (0x2F8D1, 'M', u'㫤'), + (0x2F8D2, 'M', u'冒'), + (0x2F8D3, 'M', u'冕'), + (0x2F8D4, 'M', u'最'), + (0x2F8D5, 'M', u'æšœ'), + (0x2F8D6, 'M', u'è‚­'), + (0x2F8D7, 'M', u'ä™'), + (0x2F8D8, 'M', u'朗'), + (0x2F8D9, 'M', u'望'), + (0x2F8DA, 'M', u'朡'), + (0x2F8DB, 'M', u'æž'), + (0x2F8DC, 'M', u'æ“'), + (0x2F8DD, 'M', u'ð£ƒ'), + (0x2F8DE, 'M', u'ã­‰'), + (0x2F8DF, 'M', u'柺'), + (0x2F8E0, 'M', u'æž…'), + (0x2F8E1, 'M', u'æ¡’'), + (0x2F8E2, 'M', u'梅'), + (0x2F8E3, 'M', u'𣑭'), + (0x2F8E4, 'M', u'梎'), + (0x2F8E5, 'M', u'æ Ÿ'), + (0x2F8E6, 'M', u'椔'), + (0x2F8E7, 'M', u'ã®'), + (0x2F8E8, 'M', u'楂'), + (0x2F8E9, 'M', u'榣'), + (0x2F8EA, 'M', u'槪'), + (0x2F8EB, 'M', u'檨'), + (0x2F8EC, 'M', u'𣚣'), + (0x2F8ED, 'M', u'æ«›'), + (0x2F8EE, 'M', u'ã°˜'), + (0x2F8EF, 'M', u'次'), + (0x2F8F0, 'M', u'𣢧'), + (0x2F8F1, 'M', u'æ­”'), + (0x2F8F2, 'M', u'㱎'), + (0x2F8F3, 'M', u'æ­²'), + (0x2F8F4, 'M', u'殟'), + (0x2F8F5, 'M', u'殺'), + (0x2F8F6, 'M', u'æ®»'), + (0x2F8F7, 'M', u'ð£ª'), + (0x2F8F8, 'M', u'ð¡´‹'), + (0x2F8F9, 'M', u'𣫺'), + (0x2F8FA, 'M', u'汎'), + (0x2F8FB, 'M', u'𣲼'), + (0x2F8FC, 'M', u'沿'), + (0x2F8FD, 'M', u'æ³'), + (0x2F8FE, 'M', u'æ±§'), + (0x2F8FF, 'M', u'æ´–'), + (0x2F900, 'M', u'æ´¾'), + (0x2F901, 'M', u'æµ·'), + (0x2F902, 'M', u'æµ'), + (0x2F903, 'M', u'浩'), + (0x2F904, 'M', u'浸'), + (0x2F905, 'M', u'æ¶…'), + (0x2F906, 'M', u'𣴞'), + (0x2F907, 'M', u'æ´´'), + (0x2F908, 'M', u'港'), + (0x2F909, 'M', u'æ¹®'), + (0x2F90A, 'M', u'ã´³'), + (0x2F90B, 'M', u'滋'), + (0x2F90C, 'M', u'滇'), + (0x2F90D, 'M', u'𣻑'), + (0x2F90E, 'M', u'æ·¹'), + (0x2F90F, 'M', u'æ½®'), + (0x2F910, 'M', u'𣽞'), + (0x2F911, 'M', u'𣾎'), + (0x2F912, 'M', u'濆'), + (0x2F913, 'M', u'瀹'), + (0x2F914, 'M', u'瀞'), + (0x2F915, 'M', u'瀛'), + (0x2F916, 'M', u'ã¶–'), + (0x2F917, 'M', u'çŠ'), + (0x2F918, 'M', u'ç½'), + (0x2F919, 'M', u'ç·'), + (0x2F91A, 'M', u'ç‚­'), + (0x2F91B, 'M', u'𠔥'), + (0x2F91C, 'M', u'ç……'), + (0x2F91D, 'M', u'𤉣'), + (0x2F91E, 'M', u'熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', u'爨'), + (0x2F921, 'M', u'爵'), + (0x2F922, 'M', u'ç‰'), + (0x2F923, 'M', u'𤘈'), + (0x2F924, 'M', u'犀'), + (0x2F925, 'M', u'犕'), + (0x2F926, 'M', u'𤜵'), + ] + +def _seg_77(): + return [ + (0x2F927, 'M', u'𤠔'), + (0x2F928, 'M', u'çº'), + (0x2F929, 
'M', u'王'), + (0x2F92A, 'M', u'㺬'), + (0x2F92B, 'M', u'玥'), + (0x2F92C, 'M', u'㺸'), + (0x2F92E, 'M', u'瑇'), + (0x2F92F, 'M', u'瑜'), + (0x2F930, 'M', u'瑱'), + (0x2F931, 'M', u'ç’…'), + (0x2F932, 'M', u'瓊'), + (0x2F933, 'M', u'ã¼›'), + (0x2F934, 'M', u'甤'), + (0x2F935, 'M', u'𤰶'), + (0x2F936, 'M', u'甾'), + (0x2F937, 'M', u'𤲒'), + (0x2F938, 'M', u'ç•°'), + (0x2F939, 'M', u'𢆟'), + (0x2F93A, 'M', u'ç˜'), + (0x2F93B, 'M', u'𤾡'), + (0x2F93C, 'M', u'𤾸'), + (0x2F93D, 'M', u'ð¥„'), + (0x2F93E, 'M', u'㿼'), + (0x2F93F, 'M', u'䀈'), + (0x2F940, 'M', u'ç›´'), + (0x2F941, 'M', u'𥃳'), + (0x2F942, 'M', u'𥃲'), + (0x2F943, 'M', u'𥄙'), + (0x2F944, 'M', u'𥄳'), + (0x2F945, 'M', u'眞'), + (0x2F946, 'M', u'真'), + (0x2F948, 'M', u'çŠ'), + (0x2F949, 'M', u'䀹'), + (0x2F94A, 'M', u'çž‹'), + (0x2F94B, 'M', u'ä†'), + (0x2F94C, 'M', u'ä‚–'), + (0x2F94D, 'M', u'ð¥'), + (0x2F94E, 'M', u'硎'), + (0x2F94F, 'M', u'碌'), + (0x2F950, 'M', u'磌'), + (0x2F951, 'M', u'䃣'), + (0x2F952, 'M', u'𥘦'), + (0x2F953, 'M', u'祖'), + (0x2F954, 'M', u'𥚚'), + (0x2F955, 'M', u'𥛅'), + (0x2F956, 'M', u'ç¦'), + (0x2F957, 'M', u'ç§«'), + (0x2F958, 'M', u'䄯'), + (0x2F959, 'M', u'ç©€'), + (0x2F95A, 'M', u'穊'), + (0x2F95B, 'M', u'ç©'), + (0x2F95C, 'M', u'𥥼'), + (0x2F95D, 'M', u'𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', u'䈂'), + (0x2F961, 'M', u'𥮫'), + (0x2F962, 'M', u'篆'), + (0x2F963, 'M', u'築'), + (0x2F964, 'M', u'䈧'), + (0x2F965, 'M', u'𥲀'), + (0x2F966, 'M', u'ç³’'), + (0x2F967, 'M', u'䊠'), + (0x2F968, 'M', u'糨'), + (0x2F969, 'M', u'ç³£'), + (0x2F96A, 'M', u'ç´€'), + (0x2F96B, 'M', u'𥾆'), + (0x2F96C, 'M', u'çµ£'), + (0x2F96D, 'M', u'äŒ'), + (0x2F96E, 'M', u'ç·‡'), + (0x2F96F, 'M', u'縂'), + (0x2F970, 'M', u'ç¹…'), + (0x2F971, 'M', u'䌴'), + (0x2F972, 'M', u'𦈨'), + (0x2F973, 'M', u'𦉇'), + (0x2F974, 'M', u'ä™'), + (0x2F975, 'M', u'𦋙'), + (0x2F976, 'M', u'罺'), + (0x2F977, 'M', u'𦌾'), + (0x2F978, 'M', u'羕'), + (0x2F979, 'M', u'翺'), + (0x2F97A, 'M', u'者'), + (0x2F97B, 'M', u'𦓚'), + (0x2F97C, 'M', u'𦔣'), + (0x2F97D, 'M', u'è '), + (0x2F97E, 'M', u'𦖨'), + (0x2F97F, 'M', u'è°'), + (0x2F980, 'M', u'ð£Ÿ'), + (0x2F981, 'M', u'ä•'), + (0x2F982, 'M', u'育'), + (0x2F983, 'M', u'脃'), + (0x2F984, 'M', u'ä‹'), + (0x2F985, 'M', u'脾'), + (0x2F986, 'M', u'媵'), + (0x2F987, 'M', u'𦞧'), + (0x2F988, 'M', u'𦞵'), + (0x2F989, 'M', u'𣎓'), + (0x2F98A, 'M', u'𣎜'), + (0x2F98B, 'M', u'èˆ'), + (0x2F98C, 'M', u'舄'), + (0x2F98D, 'M', u'辞'), + ] + +def _seg_78(): + return [ + (0x2F98E, 'M', u'ä‘«'), + (0x2F98F, 'M', u'芑'), + (0x2F990, 'M', u'芋'), + (0x2F991, 'M', u'èŠ'), + (0x2F992, 'M', u'劳'), + (0x2F993, 'M', u'花'), + (0x2F994, 'M', u'芳'), + (0x2F995, 'M', u'芽'), + (0x2F996, 'M', u'苦'), + (0x2F997, 'M', u'𦬼'), + (0x2F998, 'M', u'è‹¥'), + (0x2F999, 'M', u'èŒ'), + (0x2F99A, 'M', u'è£'), + (0x2F99B, 'M', u'莭'), + (0x2F99C, 'M', u'茣'), + (0x2F99D, 'M', u'莽'), + (0x2F99E, 'M', u'è§'), + (0x2F99F, 'M', u'è‘—'), + (0x2F9A0, 'M', u'è“'), + (0x2F9A1, 'M', u'èŠ'), + (0x2F9A2, 'M', u'èŒ'), + (0x2F9A3, 'M', u'èœ'), + (0x2F9A4, 'M', u'𦰶'), + (0x2F9A5, 'M', u'𦵫'), + (0x2F9A6, 'M', u'𦳕'), + (0x2F9A7, 'M', u'䔫'), + (0x2F9A8, 'M', u'蓱'), + (0x2F9A9, 'M', u'蓳'), + (0x2F9AA, 'M', u'è”–'), + (0x2F9AB, 'M', u'ð§Š'), + (0x2F9AC, 'M', u'蕤'), + (0x2F9AD, 'M', u'𦼬'), + (0x2F9AE, 'M', u'ä•'), + (0x2F9AF, 'M', u'ä•¡'), + (0x2F9B0, 'M', u'𦾱'), + (0x2F9B1, 'M', u'𧃒'), + (0x2F9B2, 'M', u'ä•«'), + (0x2F9B3, 'M', u'è™'), + (0x2F9B4, 'M', u'虜'), + (0x2F9B5, 'M', u'è™§'), + (0x2F9B6, 'M', u'虩'), + (0x2F9B7, 'M', u'èš©'), + (0x2F9B8, 'M', u'蚈'), + (0x2F9B9, 'M', u'蜎'), + (0x2F9BA, 'M', u'蛢'), + (0x2F9BB, 'M', u'è¹'), + 
(0x2F9BC, 'M', u'蜨'), + (0x2F9BD, 'M', u'è«'), + (0x2F9BE, 'M', u'螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', u'蟡'), + (0x2F9C1, 'M', u'è '), + (0x2F9C2, 'M', u'ä—¹'), + (0x2F9C3, 'M', u'è¡ '), + (0x2F9C4, 'M', u'è¡£'), + (0x2F9C5, 'M', u'ð§™§'), + (0x2F9C6, 'M', u'裗'), + (0x2F9C7, 'M', u'裞'), + (0x2F9C8, 'M', u'䘵'), + (0x2F9C9, 'M', u'裺'), + (0x2F9CA, 'M', u'ã’»'), + (0x2F9CB, 'M', u'ð§¢®'), + (0x2F9CC, 'M', u'𧥦'), + (0x2F9CD, 'M', u'äš¾'), + (0x2F9CE, 'M', u'䛇'), + (0x2F9CF, 'M', u'誠'), + (0x2F9D0, 'M', u'è«­'), + (0x2F9D1, 'M', u'變'), + (0x2F9D2, 'M', u'豕'), + (0x2F9D3, 'M', u'𧲨'), + (0x2F9D4, 'M', u'貫'), + (0x2F9D5, 'M', u'è³'), + (0x2F9D6, 'M', u'è´›'), + (0x2F9D7, 'M', u'èµ·'), + (0x2F9D8, 'M', u'𧼯'), + (0x2F9D9, 'M', u'ð  „'), + (0x2F9DA, 'M', u'è·‹'), + (0x2F9DB, 'M', u'è¶¼'), + (0x2F9DC, 'M', u'è·°'), + (0x2F9DD, 'M', u'𠣞'), + (0x2F9DE, 'M', u'è»”'), + (0x2F9DF, 'M', u'輸'), + (0x2F9E0, 'M', u'𨗒'), + (0x2F9E1, 'M', u'𨗭'), + (0x2F9E2, 'M', u'é‚”'), + (0x2F9E3, 'M', u'郱'), + (0x2F9E4, 'M', u'é„‘'), + (0x2F9E5, 'M', u'𨜮'), + (0x2F9E6, 'M', u'é„›'), + (0x2F9E7, 'M', u'鈸'), + (0x2F9E8, 'M', u'é‹—'), + (0x2F9E9, 'M', u'鋘'), + (0x2F9EA, 'M', u'鉼'), + (0x2F9EB, 'M', u'é¹'), + (0x2F9EC, 'M', u'é•'), + (0x2F9ED, 'M', u'𨯺'), + (0x2F9EE, 'M', u'é–‹'), + (0x2F9EF, 'M', u'䦕'), + (0x2F9F0, 'M', u'é–·'), + (0x2F9F1, 'M', u'𨵷'), + ] + +def _seg_79(): + return [ + (0x2F9F2, 'M', u'䧦'), + (0x2F9F3, 'M', u'雃'), + (0x2F9F4, 'M', u'å¶²'), + (0x2F9F5, 'M', u'霣'), + (0x2F9F6, 'M', u'ð©……'), + (0x2F9F7, 'M', u'𩈚'), + (0x2F9F8, 'M', u'ä©®'), + (0x2F9F9, 'M', u'ä©¶'), + (0x2F9FA, 'M', u'韠'), + (0x2F9FB, 'M', u'ð©Š'), + (0x2F9FC, 'M', u'䪲'), + (0x2F9FD, 'M', u'ð©’–'), + (0x2F9FE, 'M', u'é ‹'), + (0x2FA00, 'M', u'é ©'), + (0x2FA01, 'M', u'ð©–¶'), + (0x2FA02, 'M', u'飢'), + (0x2FA03, 'M', u'䬳'), + (0x2FA04, 'M', u'餩'), + (0x2FA05, 'M', u'馧'), + (0x2FA06, 'M', u'é§‚'), + (0x2FA07, 'M', u'é§¾'), + (0x2FA08, 'M', u'䯎'), + (0x2FA09, 'M', u'𩬰'), + (0x2FA0A, 'M', u'鬒'), + (0x2FA0B, 'M', u'é±€'), + (0x2FA0C, 'M', u'é³½'), + (0x2FA0D, 'M', u'䳎'), + (0x2FA0E, 'M', u'ä³­'), + (0x2FA0F, 'M', u'éµ§'), + (0x2FA10, 'M', u'𪃎'), + (0x2FA11, 'M', u'䳸'), + (0x2FA12, 'M', u'𪄅'), + (0x2FA13, 'M', u'𪈎'), + (0x2FA14, 'M', u'𪊑'), + (0x2FA15, 'M', u'麻'), + (0x2FA16, 'M', u'äµ–'), + (0x2FA17, 'M', u'黹'), + (0x2FA18, 'M', u'黾'), + (0x2FA19, 'M', u'é¼…'), + (0x2FA1A, 'M', u'é¼'), + (0x2FA1B, 'M', u'é¼–'), + (0x2FA1C, 'M', u'é¼»'), + (0x2FA1D, 'M', u'𪘀'), + (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), + ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + 
_seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() +) diff --git a/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/INSTALLER b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/LICENSE.txt b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..0f469af --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/LICENSE.txt @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2008-2016, The joblib developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
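The ``uts46data`` tuple assembled above is a sorted table of ``(start_codepoint, status[, mapping])`` entries: each entry applies to every code point from its start value up to (but not including) the start of the next entry, with ``'V'`` marking valid ranges, ``'X'`` disallowed ranges, and ``'M'`` code points mapped to a replacement string (other status codes such as ``'3'`` and ``'I'`` also appear). The sketch below is only an illustration of how such a table can be consulted with a binary search; it uses a tiny hypothetical ``DEMO_TABLE`` and a hypothetical ``lookup`` helper rather than the lookup code that actually ships with the ``idna`` package::

    import bisect

    # Hypothetical three-entry table in the same shape as the entries above.
    # Each tuple is (start_codepoint, status[, mapping]); starts are sorted.
    DEMO_TABLE = (
        (0x41, 'M', u'a'),   # range starting at U+0041: mapped, replacement given
        (0x61, 'V'),         # range starting at U+0061: valid as-is
        (0x7B, 'X'),         # range starting at U+007B: disallowed
    )

    def lookup(code_point, table=DEMO_TABLE):
        """Return the entry whose range covers ``code_point``.

        Assumes ``code_point`` is not below the first entry's start.
        """
        starts = [entry[0] for entry in table]
        index = bisect.bisect_right(starts, code_point) - 1
        return table[index]

    print(lookup(ord('A')))   # entry covering 'A' -> status 'M', mapped to u'a'
    print(lookup(ord('c')))   # entry covering 'c' -> status 'V' (valid)

Splitting the real table across the ``_seg_N()`` helpers appears to be a way of keeping each function's literal small; once concatenated into a single flat, sorted tuple, one binary search of this kind is enough to classify any code point.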
diff --git a/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/METADATA b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/METADATA new file mode 100644 index 0000000..600ac21 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/METADATA @@ -0,0 +1,198 @@ +Metadata-Version: 2.1 +Name: joblib +Version: 1.0.0 +Summary: Lightweight pipelining with Python functions +Home-page: https://joblib.readthedocs.io +Author: Gael Varoquaux +Author-email: gael.varoquaux@normalesup.org +License: BSD +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: Intended Audience :: Education +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Scientific/Engineering +Classifier: Topic :: Utilities +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst + +|PyPi| |Azure| |Codecov| + +.. |PyPi| image:: https://badge.fury.io/py/joblib.svg + :target: https://badge.fury.io/py/joblib + :alt: Joblib version + +.. |Azure| image:: https://dev.azure.com/joblib/joblib/_apis/build/status/joblib.joblib?branchName=master + :target: https://dev.azure.com/joblib/joblib/_build?definitionId=3&_a=summary&branchFilter=40 + :alt: Codecov coverage + +.. |Codecov| image:: https://codecov.io/gh/joblib/joblib/branch/master/graph/badge.svg + :target: https://codecov.io/gh/joblib/joblib + :alt: Codecov coverage + + +The homepage of joblib with user documentation is located on: + +https://joblib.readthedocs.io + +Getting the latest code +======================= + +To get the latest code using git, simply type:: + + git clone git://github.com/joblib/joblib.git + +If you don't have git installed, you can download a zip or tarball +of the latest code: http://github.com/joblib/joblib/archives/master + +Installing +========== + +You can use `pip` to install joblib:: + + pip install joblib + +from any directory or:: + + python setup.py install + +from the source directory. + +Dependencies +============ + +- Joblib has no mandatory dependencies besides Python (supported versions are + 2.7+ and 3.4+). +- Joblib has an optional dependency on Numpy (at least version 1.6.1) for array + manipulation. +- Joblib includes its own vendored copy of + `loky `_ for process management. +- Joblib can efficiently dump and load numpy arrays but does not require numpy + to be installed. +- Joblib has an optional dependency on + `python-lz4 `_ as a faster alternative to + zlib and gzip for compressed serialization. +- Joblib has an optional dependency on psutil to mitigate memory leaks in + parallel worker processes. +- Some examples require external dependencies such as pandas. See the + instructions in the `Building the docs`_ section for details. + +Workflow to contribute +====================== + +To contribute to joblib, first create an account on `github +`_. Once this is done, fork the `joblib repository +`_ to have your own repository, +clone it using 'git clone' on the computers where you want to work. 
Make +your changes in your clone, push them to your github account, test them +on several computers, and when you are happy with them, send a pull +request to the main repository. + +Running the test suite +====================== + +To run the test suite, you need the pytest (version >= 3) and coverage modules. +Run the test suite using:: + + pytest joblib + +from the root of the project. + +Building the docs +================= + +To build the docs, you need to have sphinx (>=1.4) and some dependencies +installed:: + + pip install -U -r .readthedocs-requirements.txt + +The docs can then be built with the following command:: + + make doc + +The html docs are located in the ``doc/_build/html`` directory. + + +Making a source tarball +======================= + +To create a source tarball, e.g. for packaging or distributing, run the +following command:: + + python setup.py sdist + +The tarball will be created in the `dist` directory. This command will +compile the docs, and the resulting tarball can be installed with +no extra dependencies other than the Python standard library. You will need +setuptools and sphinx. + +Making a release and uploading it to PyPI +========================================= + +These commands are only run by the project manager to make a release and +upload it to PyPI:: + + python setup.py sdist bdist_wheel + twine upload dist/* + + +Note that the documentation should automatically get updated at each git +push. If that is not the case, try building the docs locally and resolve +any doc build errors (in particular when running the examples). + +Updating the changelog +====================== + +Changes are listed in the CHANGES.rst file. They must be updated manually, +but the following git command may be used to generate the lines:: + + git log --abbrev-commit --date=short --no-merges --sparse + +Licensing +--------- + +joblib is **BSD-licensed** (3-clause): + + This software is OSI Certified Open Source Software. + OSI Certified is a certification mark of the Open Source Initiative. + + Copyright (c) 2009-2011, joblib developers + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name of Gael Varoquaux nor the names of other joblib + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + **This software is provided by the copyright holders and contributors + "as is" and any express or implied warranties, including, but not + limited to, the implied warranties of merchantability and fitness for + a particular purpose are disclaimed.
In no event shall the copyright + owner or contributors be liable for any direct, indirect, incidental, + special, exemplary, or consequential damages (including, but not + limited to, procurement of substitute goods or services; loss of use, + data, or profits; or business interruption) however caused and on any + theory of liability, whether in contract, strict liability, or tort + (including negligence or otherwise) arising in any way out of the use + of this software, even if advised of the possibility of such + damage.** + + diff --git a/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/RECORD b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/RECORD new file mode 100644 index 0000000..989509e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/RECORD @@ -0,0 +1,231 @@ +joblib-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +joblib-1.0.0.dist-info/LICENSE.txt,sha256=-OWIkGu9oHPojgnkwRnCbbbHLJsFncP-e-fgi-_0y60,1527 +joblib-1.0.0.dist-info/METADATA,sha256=hr2xEredlXESKfw8QM2H42aT6FII6WeWc9friofngm8,6821 +joblib-1.0.0.dist-info/RECORD,, +joblib-1.0.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +joblib-1.0.0.dist-info/top_level.txt,sha256=P0LsoZ45gBL7ckL4lqQt7tdbrHD4xlVYhffmhHeeT_U,7 +joblib/__init__.py,sha256=sTGmir5X9GzCall0xc-un43_c2QcqNHVxO1ztlELFjQ,4993 +joblib/__pycache__/__init__.cpython-36.pyc,, +joblib/__pycache__/_dask.cpython-36.pyc,, +joblib/__pycache__/_deprecated_format_stack.cpython-36.pyc,, +joblib/__pycache__/_deprecated_my_exceptions.cpython-36.pyc,, +joblib/__pycache__/_memmapping_reducer.cpython-36.pyc,, +joblib/__pycache__/_multiprocessing_helpers.cpython-36.pyc,, +joblib/__pycache__/_parallel_backends.cpython-36.pyc,, +joblib/__pycache__/_store_backends.cpython-36.pyc,, +joblib/__pycache__/backports.cpython-36.pyc,, +joblib/__pycache__/compressor.cpython-36.pyc,, +joblib/__pycache__/disk.cpython-36.pyc,, +joblib/__pycache__/executor.cpython-36.pyc,, +joblib/__pycache__/format_stack.cpython-36.pyc,, +joblib/__pycache__/func_inspect.cpython-36.pyc,, +joblib/__pycache__/hashing.cpython-36.pyc,, +joblib/__pycache__/logger.cpython-36.pyc,, +joblib/__pycache__/memory.cpython-36.pyc,, +joblib/__pycache__/my_exceptions.cpython-36.pyc,, +joblib/__pycache__/numpy_pickle.cpython-36.pyc,, +joblib/__pycache__/numpy_pickle_compat.cpython-36.pyc,, +joblib/__pycache__/numpy_pickle_utils.cpython-36.pyc,, +joblib/__pycache__/parallel.cpython-36.pyc,, +joblib/__pycache__/pool.cpython-36.pyc,, +joblib/__pycache__/testing.cpython-36.pyc,, +joblib/_dask.py,sha256=tLaIhQoMP3gwXkCj-M0ekiaZwFlm-O52jnps1DADVg4,12552 +joblib/_deprecated_format_stack.py,sha256=_pPLwMH6hydhpPCNxRRTCkbcsKu-SdM0H-cxn5X-EDE,14505 +joblib/_deprecated_my_exceptions.py,sha256=q8QyZCf_sFBM6PUKaebZMTdIIyx8yeBuOfbyxZX7mA0,4134 +joblib/_memmapping_reducer.py,sha256=bRYIohUo-jxUlDz8u8shlcfiIkaktF6oYf3Z4gQQ6tA,28069 +joblib/_multiprocessing_helpers.py,sha256=fyJ3VqIdJZa5uUZTKI5P3aCVRxIspL9sDpu0J3xnclg,1885 +joblib/_parallel_backends.py,sha256=SIPG_0SjajAaGTKIxczIdE_cH7liHMXGi7xSnmaXdEg,23805 +joblib/_store_backends.py,sha256=E0I9SxDl2C9U0zzv5-lVZW3dwK9Ji0TZlSAIdcErsMs,14435 +joblib/backports.py,sha256=sKTyZ1jHFn27mozaxtKACI_8DmvRqpFxPy_cNxiYd5A,2778 +joblib/compressor.py,sha256=NGWbMpH3E0gm6OFUzKswNqqnj_i8Jx4Cht8aTb0esSw,19770 +joblib/disk.py,sha256=cfoYLnIU6NSBxGeeMcU1mkductw_9PuLaGxBRmpdpro,4386 +joblib/executor.py,sha256=wF4pTwot1wRzGLqozbQ8m46SN40RSUFVMLxprqxG2AY,5316 
+joblib/externals/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +joblib/externals/__pycache__/__init__.cpython-36.pyc,, +joblib/externals/cloudpickle/__init__.py,sha256=ODsgHCkqRefRS8i3PlPjQ601MxIcAdiF9issSbfGkLo,333 +joblib/externals/cloudpickle/__pycache__/__init__.cpython-36.pyc,, +joblib/externals/cloudpickle/__pycache__/cloudpickle.cpython-36.pyc,, +joblib/externals/cloudpickle/__pycache__/cloudpickle_fast.cpython-36.pyc,, +joblib/externals/cloudpickle/__pycache__/compat.cpython-36.pyc,, +joblib/externals/cloudpickle/cloudpickle.py,sha256=Dif7Yv5JpyTF4HpQJuRSUXOG6I8KkjxIUaaM9n4fck4,30284 +joblib/externals/cloudpickle/cloudpickle_fast.py,sha256=Hfzw2IE_s7-7zWsBp4FDwtZhA_Lmgg0LO7V5Mosq44k,30485 +joblib/externals/cloudpickle/compat.py,sha256=FQfviKQTNfK27JY6hXo35_EwkayliBg3o9V_htuF8e4,354 +joblib/externals/loky/__init__.py,sha256=_7Jq6Sz5ffRbs9SqUw2C8Tv5Eb-8I0cDGyYtCP86FLQ,1072 +joblib/externals/loky/__pycache__/__init__.cpython-36.pyc,, +joblib/externals/loky/__pycache__/_base.cpython-36.pyc,, +joblib/externals/loky/__pycache__/cloudpickle_wrapper.cpython-36.pyc,, +joblib/externals/loky/__pycache__/process_executor.cpython-36.pyc,, +joblib/externals/loky/__pycache__/reusable_executor.cpython-36.pyc,, +joblib/externals/loky/_base.py,sha256=Ze4r2g-HqBaQuBnXydo4FA1zmAVuTbRa_8GFadu7Wls,23423 +joblib/externals/loky/backend/__init__.py,sha256=HIn7kzGoXCowleEzLikOjptBPLDjAXWpVe3DdxiCTWQ,398 +joblib/externals/loky/backend/__pycache__/__init__.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/_posix_reduction.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/_posix_wait.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/_win_wait.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/compat.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/compat_posix.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/compat_win32.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/context.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/fork_exec.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/managers.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/popen_loky_posix.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/popen_loky_win32.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/process.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/queues.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/reduction.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/semlock.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/spawn.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/synchronize.cpython-36.pyc,, +joblib/externals/loky/backend/__pycache__/utils.cpython-36.pyc,, +joblib/externals/loky/backend/_posix_reduction.py,sha256=kzZ00XEIZkCT6YmkArwy2QVgF30mWgkGyktjPxBVLdQ,2223 +joblib/externals/loky/backend/_posix_wait.py,sha256=4GDzBDe1kiHxHPGA9By5Zh2xpvsOf4zK9R5nuBjee3U,3319 +joblib/externals/loky/backend/_win_reduction.py,sha256=Zhqi-2SQsn-mOCiyd8GoTkzhgG-q-gw9VN6intLzk9M,3724 +joblib/externals/loky/backend/_win_wait.py,sha256=TaPjFsCWYhPgtzUZBjb961ShvEeuog5h_nc_bGG--gM,1956 +joblib/externals/loky/backend/compat.py,sha256=-wqR1Z_M-VlANX7htToCBHtWWQ7DFPFaZ3nWcKoGE1Q,995 +joblib/externals/loky/backend/compat_posix.py,sha256=V-0QGfaSWHDv2hgTxMgrhaf6ZyihutTnjd2Xy5FswD0,334 
+joblib/externals/loky/backend/compat_win32.py,sha256=V9MsGseX2aib89DChKDfC2PgLrYtbNyATJb3OWKtRn8,1407 +joblib/externals/loky/backend/context.py,sha256=cByeOe4XHvmzC2DreuYkhucpxTy9tMeSOrhKBjt5rhI,13847 +joblib/externals/loky/backend/fork_exec.py,sha256=FkUlRNNVq-eYHsYPD5fHbyMkB_5I1nYz7AV_r6OEzI0,1372 +joblib/externals/loky/backend/managers.py,sha256=3amteDFgQ2Xxqaobv-W-8pYdzDd6NgTtwT8SmluB9Us,1836 +joblib/externals/loky/backend/popen_loky_posix.py,sha256=8Iy-pMFmp-UJ8ZPcYZgPNah1nEVbANkFrNxtNIvkcuw,7113 +joblib/externals/loky/backend/popen_loky_win32.py,sha256=gib6vwolIzndU-ag1hzepADkOuabW_9T-fmVD98ahaM,5720 +joblib/externals/loky/backend/process.py,sha256=3s86s4Ca-QibEN6haOTvBFRip_I5SovXBLAAhwx6WTk,3526 +joblib/externals/loky/backend/queues.py,sha256=bo_by1TiLyeCu4Y3Nei9Ly46D-_yjRP6bD0R8yupd3E,8990 +joblib/externals/loky/backend/reduction.py,sha256=5RgFf1UV51QzSUc1rW6_mOrH72shkIUZcNJYTCU1NeU,9663 +joblib/externals/loky/backend/resource_tracker.py,sha256=eQxKCE087ouWI71WCTngfGsom5F9xjjXBk-cUApHUg4,14821 +joblib/externals/loky/backend/semlock.py,sha256=5d7SXHLyw4AZROLZHwsZ9N7FgrrBLMzPB5YAPDWlu1o,8918 +joblib/externals/loky/backend/spawn.py,sha256=LDazsCB6G_RgX80YWWxDfSsKQCx0V3opeq4FWUjzUJg,9207 +joblib/externals/loky/backend/synchronize.py,sha256=6ayerlMy0nXU3jGooHwus7mY5WVRZoMZ8qbVsAuUkhk,11381 +joblib/externals/loky/backend/utils.py,sha256=GcKkfL1_kk6oDn-YC6a9mW_xyF0Vvt4M-t96iiNB5nY,5691 +joblib/externals/loky/cloudpickle_wrapper.py,sha256=U4vl1aG_W8u0_2blqua5np86wG4-21L5cjup79cD3Ww,3964 +joblib/externals/loky/process_executor.py,sha256=8znUy4vENw_9n4FgKnaM4jCqUFLJrFlhAi8JRUpnPcI,47800 +joblib/externals/loky/reusable_executor.py,sha256=ghk_MNmKhFJ3h23Zd2q17Tza52piw67P9F1ckPn4CbA,10236 +joblib/format_stack.py,sha256=I5WFLlnyYhrYEtZWJXtVhv70B7AwWV_pL4ElvgnIW1o,1045 +joblib/func_inspect.py,sha256=UFvSiZPYaE9h-yA25tmLvTddht9CIAsHq_UF5HSI0ec,12996 +joblib/hashing.py,sha256=PRgtDXkrv5WaX1njDJXrvcJpx53W35XnIpImUeUom4E,10536 +joblib/logger.py,sha256=xjTDhqjpQU8DjIqrL35IiZ1z7HJ-fgbVc-7Ijcii3Eg,5129 +joblib/memory.py,sha256=vQjIo3FmisO8NfyCNPfsNRZcsYCn1ROnwn0FCc7FaDM,40816 +joblib/my_exceptions.py,sha256=-TkSV9Uy6kVSOSm870CQFYHIbIM6CJKlQtrrM-XRDAw,962 +joblib/numpy_pickle.py,sha256=i4-eEmfJTpLYJTLqruEmXpLgN8NZSHmV-WMjbbOOwBI,23355 +joblib/numpy_pickle_compat.py,sha256=_bHo1GUOOVYti2pt2Qw_uBUguNfNK6W9MyVmPYpYHH4,8383 +joblib/numpy_pickle_utils.py,sha256=4KofLHG4PO1kwKeeawEtEAYB2txRDXY_EHx9-FxcM3A,7768 +joblib/parallel.py,sha256=EK-0nVnDd4qP0FO77C1juF87JPY6MI3zfzCRJqYvNXE,46534 +joblib/pool.py,sha256=JuG9uuE3KJwR8gJZIGzwJqObJD0xFuUhQP2IYvgQab0,14334 +joblib/test/__init__.py,sha256=bkIwY5OneyPcRn2VuzQlIFdtW5Cwo1mUJ7IfSztDO9c,73 +joblib/test/__pycache__/__init__.cpython-36.pyc,, +joblib/test/__pycache__/common.cpython-36.pyc,, +joblib/test/__pycache__/test_backports.cpython-36.pyc,, +joblib/test/__pycache__/test_dask.cpython-36.pyc,, +joblib/test/__pycache__/test_deprecated_objects.cpython-36.pyc,, +joblib/test/__pycache__/test_disk.cpython-36.pyc,, +joblib/test/__pycache__/test_format_stack.cpython-36.pyc,, +joblib/test/__pycache__/test_func_inspect.cpython-36.pyc,, +joblib/test/__pycache__/test_func_inspect_special_encoding.cpython-36.pyc,, +joblib/test/__pycache__/test_hashing.cpython-36.pyc,, +joblib/test/__pycache__/test_init.cpython-36.pyc,, +joblib/test/__pycache__/test_logger.cpython-36.pyc,, +joblib/test/__pycache__/test_memmapping.cpython-36.pyc,, +joblib/test/__pycache__/test_memory.cpython-36.pyc,, +joblib/test/__pycache__/test_module.cpython-36.pyc,, 
+joblib/test/__pycache__/test_my_exceptions.cpython-36.pyc,, +joblib/test/__pycache__/test_numpy_pickle.cpython-36.pyc,, +joblib/test/__pycache__/test_numpy_pickle_compat.cpython-36.pyc,, +joblib/test/__pycache__/test_numpy_pickle_utils.cpython-36.pyc,, +joblib/test/__pycache__/test_parallel.cpython-36.pyc,, +joblib/test/__pycache__/test_store_backends.cpython-36.pyc,, +joblib/test/__pycache__/test_testing.cpython-36.pyc,, +joblib/test/__pycache__/testutils.cpython-36.pyc,, +joblib/test/common.py,sha256=gWDIvGl8Ns6vPTBvItuSFoEWigI0EPXSoPkSvqSM4zM,3283 +joblib/test/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +joblib/test/data/__pycache__/__init__.cpython-36.pyc,, +joblib/test/data/__pycache__/create_numpy_pickle.cpython-36.pyc,, +joblib/test/data/create_numpy_pickle.py,sha256=8cmbXZ3azzwMN_OzwPObt6K10IPmxXQgDAJkFjRiNU4,3458 +joblib/test/data/joblib_0.10.0_compressed_pickle_py27_np16.gz,sha256=QYRH6Q2DSGVorjCSqWCxjTWCMOJKyew4Nl2qmfQVvQ8,769 +joblib/test/data/joblib_0.10.0_compressed_pickle_py27_np17.gz,sha256=ofTozM_KlPJa50TR8FCwc09mMmO6OO0GQhgUBLNIsXs,757 +joblib/test/data/joblib_0.10.0_compressed_pickle_py33_np18.gz,sha256=2eIVeA-XjOaT5IEQ6tI2UuHG3hwhiRciMmkBmPcIh4g,792 +joblib/test/data/joblib_0.10.0_compressed_pickle_py34_np19.gz,sha256=Gr2z_1tVWDH1H3_wCVHmakknf8KqeHKT8Yz4d1vmUCM,794 +joblib/test/data/joblib_0.10.0_compressed_pickle_py35_np19.gz,sha256=pWw_xuDbOkECqu1KGf1OFU7s2VbzC2v5F5iXhE7TwB4,790 +joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl,sha256=icRQjj374B-AHk5znxre0T9oWUHokoHIBQ8MqKo8l-U,986 +joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.bz2,sha256=oYQVIyMiUxyRgWSuBBSOvCWKzToA-kUpcoQWdV4UoV4,997 +joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.gzip,sha256=Jpv3iGcDgKTv-O4nZsUreIbUK7qnt2cugZ-VMgNeEDQ,798 +joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.lzma,sha256=c0wu0x8pPv4BcStj7pE61rZpf68FLG_pNzQZ4e82zH8,660 +joblib/test/data/joblib_0.10.0_pickle_py27_np17.pkl.xz,sha256=77FG1FDG0GHQav-1bxc4Tn9ky6ubUW_MbE0_iGmz5wc,712 +joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl,sha256=4GTC7s_cWNVShERn2nvVbspZYJgyK_0man4TEqvdVzU,1068 +joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.bz2,sha256=6G1vbs_iYmz2kYJ6w4qB1k7D67UnxUMus0S4SWeBtFo,1000 +joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.gzip,sha256=tlRUWeJS1BXmcwtLNSNK9L0hDHekFl07CqWxTShinmY,831 +joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.lzma,sha256=CorPwnfv3rR5hjNtJI01-sEBMOnkSxNlRVaWTszMopA,694 +joblib/test/data/joblib_0.10.0_pickle_py33_np18.pkl.xz,sha256=Dppj3MffOKsKETeptEtDaxPOv6MA6xnbpK5LzlDQ-oE,752 +joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl,sha256=HL5Fb1uR9aPLjjhoOPJ2wwM1Qyo1FCZoYYd2HVw0Fos,1068 +joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.bz2,sha256=Pyr2fqZnwfUxXdyrBr-kRwBYY8HA_Yi7fgSguKy5pUs,1021 +joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.gzip,sha256=os8NJjQI9FhnlZM-Ay9dX_Uo35gZnoJCgQSIVvcBPfE,831 +joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.lzma,sha256=Q_0y43qU7_GqAabJ8y3PWVhOisurnCAq3GzuCu04V58,697 +joblib/test/data/joblib_0.10.0_pickle_py34_np19.pkl.xz,sha256=BNfmiQfpeLVpdfkwlJK4hJ5Cpgl0vreVyekyc5d_PNM,752 +joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl,sha256=l7nvLolhBDIdPFznOz3lBHiMOPBPCMi1bXop1tFSCpY,1068 +joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.bz2,sha256=pqGpuIS-ZU4uP8mkglHs8MaSDiVcPy7l3XHYJSppRgY,1005 +joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.gzip,sha256=YRFXE6LEb6qK72yPqnXdqQVY8Ts8xKUS9PWQKhLxWvk,833 
+joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.lzma,sha256=Bf7gCUeTuTjCkbcIdyZYz69irblX4SAVQEzxCnMQhNU,701 +joblib/test/data/joblib_0.10.0_pickle_py35_np19.pkl.xz,sha256=As8w2LGWwwNmKy3QNdKljK63Yq46gjRf_RJ0lh5_WqA,752 +joblib/test/data/joblib_0.11.0_compressed_pickle_py36_np111.gz,sha256=1WrnXDqDoNEPYOZX1Q5Wr2463b8vVV6fw4Wm5S4bMt4,800 +joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl,sha256=XmsOFxeC1f1aYdGETclG6yfF9rLoB11DayOAhDMULrw,1068 +joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.bz2,sha256=vI2yWb50LKL_NgZyd_XkoD5teIg93uI42mWnx9ee-AQ,991 +joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.gzip,sha256=1WrnXDqDoNEPYOZX1Q5Wr2463b8vVV6fw4Wm5S4bMt4,800 +joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.lzma,sha256=IWA0JlZG2ur53HgTUDl1m7q79dcVq6b0VOq33gKoJU0,715 +joblib/test/data/joblib_0.11.0_pickle_py36_np111.pkl.xz,sha256=3Xh_NbMZdBjYx7ynfJ3Fyke28izSRSSzzNB0z5D4k9Y,752 +joblib/test/data/joblib_0.8.4_compressed_pickle_py27_np17.gz,sha256=Sp-ZT7i6pj5on2gbptszu7RarzJpOmHJ67UKOmCPQMg,659 +joblib/test/data/joblib_0.9.2_compressed_pickle_py27_np16.gz,sha256=NLtDrvo2XIH0KvUUAvhOqMeoXEjGW0IuTk_osu5XiDw,658 +joblib/test/data/joblib_0.9.2_compressed_pickle_py27_np17.gz,sha256=NLtDrvo2XIH0KvUUAvhOqMeoXEjGW0IuTk_osu5XiDw,658 +joblib/test/data/joblib_0.9.2_compressed_pickle_py34_np19.gz,sha256=nzO9iiGkG3KbBdrF3usOho8higkrDj_lmICUzxZyF_Y,673 +joblib/test/data/joblib_0.9.2_compressed_pickle_py35_np19.gz,sha256=nzO9iiGkG3KbBdrF3usOho8higkrDj_lmICUzxZyF_Y,673 +joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl,sha256=naijdk2xIeKdIa3mfJw0JlmOdtiN6uRM1yOJg6-M73M,670 +joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120 +joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120 +joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_03.npy,sha256=oMRa4qKJhBy-uiRDt-uqOzHAqencxzKUrKVynaAJJAU,236 +joblib/test/data/joblib_0.9.2_pickle_py27_np16.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104 +joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl,sha256=LynX8dLOygfxDfFywOgm7wgWOhSxLG7z-oDsU6X83Dw,670 +joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120 +joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120 +joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_03.npy,sha256=oMRa4qKJhBy-uiRDt-uqOzHAqencxzKUrKVynaAJJAU,236 +joblib/test/data/joblib_0.9.2_pickle_py27_np17.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104 +joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl,sha256=w9TLxpDTzp5TI6cU6lRvMsAasXEChcQgGE9s30sm_CU,691 +joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120 +joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120 +joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_03.npy,sha256=jt6aZKUrJdfbMJUJVsl47As5MrfRSs1avGMhbmS6vec,307 +joblib/test/data/joblib_0.9.2_pickle_py33_np18.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104 +joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl,sha256=ilOBAOaulLFvKrD32S1NfnpiK-LfzA9rC3O2I7xROuI,691 +joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120 +joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120 
+joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_03.npy,sha256=jt6aZKUrJdfbMJUJVsl47As5MrfRSs1avGMhbmS6vec,307 +joblib/test/data/joblib_0.9.2_pickle_py34_np19.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104 +joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl,sha256=WfDVIqKcMzzh1gSAshIfzBoIpdLdZQuG79yYf5kfpOo,691 +joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_01.npy,sha256=DvvX2c5-7DpuCg20HnleA5bMo9awN9rWxhtGSEPSiAk,120 +joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_02.npy,sha256=HBzzbLeB-8whuVO7CgtF3wktoOrg52WILlljzNcBBbE,120 +joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_03.npy,sha256=jt6aZKUrJdfbMJUJVsl47As5MrfRSs1avGMhbmS6vec,307 +joblib/test/data/joblib_0.9.2_pickle_py35_np19.pkl_04.npy,sha256=PsviRClLqT4IR5sWwbmpQR41af9mDtBFncodJBOB3wU,104 +joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz,sha256=8jYfWJsx0oY2J-3LlmEigK5cClnJSW2J2rfeSTZw-Ts,802 +joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz_01.npy.z,sha256=YT9VvT3sEl2uWlOyvH2CkyE9Sok4od9O3kWtgeuUUqE,43 +joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz_02.npy.z,sha256=txA5RDI0PRuiU_UNKY8pGp-zQgQQ9vaVvMi60hOPaVs,43 +joblib/test/data/joblib_0.9.4.dev0_compressed_cache_size_pickle_py35_np19.gz_03.npy.z,sha256=d3AwICvU2MpSNjh2aPIsdJeGZLlDjANAF1Soa6uM0Po,37 +joblib/test/test_backports.py,sha256=Y9bhGa6H-K_FgLkDyXaSHzpaWk148Rjn8R9IKCKdy-k,1175 +joblib/test/test_dask.py,sha256=lpWgUOjiF2PwPID7ji55bb4XSqZFU9lenKzDpXy_zVo,17042 +joblib/test/test_deprecated_objects.py,sha256=7eTuMu0A-uSoHuBywBFmSS7YUctL03n8AVgyxvznHc4,1249 +joblib/test/test_disk.py,sha256=wJd1o9nLzqEjLqxxkgB9S7-UcKjHPQ8qK5l0czcNp0o,2205 +joblib/test/test_format_stack.py,sha256=wTtjRlp0edNv7_NzxZU6DAVJQoebL-lnGsUEMwVZXpM,4250 +joblib/test/test_func_inspect.py,sha256=KDvLtRziyA_r6GiNEh05_zhlx9ntJW7abSuQ6cqJajU,8935 +joblib/test/test_func_inspect_special_encoding.py,sha256=oHbMTPOK3XI0YVoS0GsouJ-GfM_neP4GOIJC-TKnNgU,146 +joblib/test/test_hashing.py,sha256=QYYiEURI8sksZmy4nfy_VQeJSHCaeeGAOyCzEbDfhkM,16076 +joblib/test/test_init.py,sha256=bgNF-9CIJl1MFNA75LBWOaiNtvduVfuvglz_u9Tt8Uc,422 +joblib/test/test_logger.py,sha256=WjxDzpRmdwj_Uyt2R-S3DFRT9CGTk7G2DWucU3sqbu8,985 +joblib/test/test_memmapping.py,sha256=ADfy92kxC5anaWjCf43swvEUGrJ_4VW5A7_q0pih36E,42154 +joblib/test/test_memory.py,sha256=169E8KmLLtUTdqBmPxOzB4DNRByinqkJXMDTxz_Zgwg,43755 +joblib/test/test_module.py,sha256=MQESEUZv2rq8bEoPpoAf0KkN7bsQSq9b3EKIxvsQ6FM,1832 +joblib/test/test_my_exceptions.py,sha256=4_1xlIwbgEt6_bqaQ8lGyTfUAV7RhLMRRbQKIyYOTA8,2066 +joblib/test/test_numpy_pickle.py,sha256=ruXWIAZeUF9t1ng-lpWhkeV955NjSS841sK71xPHTxk,36786 +joblib/test/test_numpy_pickle_compat.py,sha256=C5OiaFrqmxYD57fr_LpmItd6OOZPeOMfo9RVr6ZZIkk,624 +joblib/test/test_numpy_pickle_utils.py,sha256=PJVVgr-v3so9oAf9LblASRCpt-wXAo19FvsUpw-fZjI,421 +joblib/test/test_parallel.py,sha256=haQ-DxFJWXuXKIYChKUh3XNrES_33J2Vb1mvUdAzo7c,59653 +joblib/test/test_store_backends.py,sha256=fZh0_E5Rj5VTJ_UzH3autHpWwEaWQvWTiQB8felVAN4,1942 +joblib/test/test_testing.py,sha256=I-EkdKHWdHu8m5fo2NnyB0AqR8zAOJ01WKKvyZYRneY,2467 +joblib/test/testutils.py,sha256=6a7zVJm1kg6M-t4CH9tz8A6rMdC6ZY9sU6wBB8C7Zzo,251 +joblib/testing.py,sha256=rCkEMsUeNQocS2Ns-Gmlg4ldanDyDFUODBmgMyuOJ8M,2180 diff --git a/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/WHEEL b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/WHEEL new file mode 100644 index 0000000..385faab --- /dev/null +++ 
b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/top_level.txt b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..ca4af27 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib-1.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +joblib diff --git a/minor_project/lib/python3.6/site-packages/joblib/__init__.py b/minor_project/lib/python3.6/site-packages/joblib/__init__.py new file mode 100644 index 0000000..5793ed0 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/__init__.py @@ -0,0 +1,139 @@ +"""Joblib is a set of tools to provide **lightweight pipelining in +Python**. In particular: + +1. transparent disk-caching of functions and lazy re-evaluation + (memoize pattern) + +2. easy simple parallel computing + +Joblib is optimized to be **fast** and **robust** on large +data in particular and has specific optimizations for `numpy` arrays. It is +**BSD-licensed**. + + + ==================== =============================================== + **Documentation:** https://joblib.readthedocs.io + + **Download:** https://pypi.python.org/pypi/joblib#downloads + + **Source code:** https://github.com/joblib/joblib + + **Report issues:** https://github.com/joblib/joblib/issues + ==================== =============================================== + + +Vision +-------- + +The vision is to provide tools to easily achieve better performance and +reproducibility when working with long running jobs. + + * **Avoid computing the same thing twice**: code is often rerun again and + again, for instance when prototyping computational-heavy jobs (as in + scientific development), but hand-crafted solutions to alleviate this + issue are error-prone and often lead to unreproducible results. + + * **Persist to disk transparently**: efficiently persisting + arbitrary objects containing large data is hard. Using + joblib's caching mechanism avoids hand-written persistence and + implicitly links the file on disk to the execution context of + the original Python object. As a result, joblib's persistence is + good for resuming an application status or computational job, eg + after a crash. + +Joblib addresses these problems while **leaving your code and your flow +control as unmodified as possible** (no framework, no new paradigms). + +Main features +------------------ + +1) **Transparent and fast disk-caching of output value:** a memoize or + make-like functionality for Python functions that works well for + arbitrary Python objects, including very large numpy arrays. Separate + persistence and flow-execution logic from domain logic or algorithmic + code by writing the operations as a set of steps with well-defined + inputs and outputs: Python functions. Joblib can save their + computation to disk and rerun it only if necessary:: + + >>> from joblib import Memory + >>> cachedir = 'your_cache_dir_goes_here' + >>> mem = Memory(cachedir) + >>> import numpy as np + >>> a = np.vander(np.arange(3)).astype(np.float) + >>> square = mem.cache(np.square) + >>> b = square(a) # doctest: +ELLIPSIS + ________________________________________________________________________________ + [Memory] Calling square... 
+ square(array([[0., 0., 1.], + [1., 1., 1.], + [4., 2., 1.]])) + ___________________________________________________________square - 0...s, 0.0min + + >>> c = square(a) + >>> # The above call did not trigger an evaluation + +2) **Embarrassingly parallel helper:** to make it easy to write readable + parallel code and debug it quickly:: + + >>> from joblib import Parallel, delayed + >>> from math import sqrt + >>> Parallel(n_jobs=1)(delayed(sqrt)(i**2) for i in range(10)) + [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0] + + +3) **Fast compressed Persistence**: a replacement for pickle to work + efficiently on Python objects containing large data ( + *joblib.dump* & *joblib.load* ). + +.. + >>> import shutil ; shutil.rmtree(cachedir) + +""" + +# PEP0440 compatible formatted version, see: +# https://www.python.org/dev/peps/pep-0440/ +# +# Generic release markers: +# X.Y +# X.Y.Z # For bugfix releases +# +# Admissible pre-release markers: +# X.YaN # Alpha release +# X.YbN # Beta release +# X.YrcN # Release Candidate +# X.Y # Final release +# +# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer. +# 'X.Y.dev0' is the canonical version of 'X.Y.dev' +# +__version__ = '1.0.0' + + +import os +from .memory import Memory, MemorizedResult, register_store_backend +from .logger import PrintTime +from .logger import Logger +from .hashing import hash +from .numpy_pickle import dump +from .numpy_pickle import load +from .compressor import register_compressor +from .parallel import Parallel +from .parallel import delayed +from .parallel import cpu_count +from .parallel import register_parallel_backend +from .parallel import parallel_backend +from .parallel import effective_n_jobs + +from .externals.loky import wrap_non_picklable_objects + + +__all__ = ['Memory', 'MemorizedResult', 'PrintTime', 'Logger', 'hash', 'dump', + 'load', 'Parallel', 'delayed', 'cpu_count', 'effective_n_jobs', + 'register_parallel_backend', 'parallel_backend', + 'register_store_backend', 'register_compressor', + 'wrap_non_picklable_objects'] + + +# Workaround issue discovered in intel-openmp 2019.5: +# https://github.com/ContinuumIO/anaconda-issues/issues/11294 +os.environ.setdefault("KMP_INIT_AT_FORK", "FALSE") diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..acb84f1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_dask.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_dask.cpython-36.pyc new file mode 100644 index 0000000..acaf888 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_dask.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_deprecated_format_stack.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_deprecated_format_stack.cpython-36.pyc new file mode 100644 index 0000000..7e6d58b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_deprecated_format_stack.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_deprecated_my_exceptions.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_deprecated_my_exceptions.cpython-36.pyc new file mode 
100644 index 0000000..0cac670 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_deprecated_my_exceptions.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_memmapping_reducer.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_memmapping_reducer.cpython-36.pyc new file mode 100644 index 0000000..6d52b06 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_memmapping_reducer.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_multiprocessing_helpers.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_multiprocessing_helpers.cpython-36.pyc new file mode 100644 index 0000000..b1e02bc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_multiprocessing_helpers.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_parallel_backends.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_parallel_backends.cpython-36.pyc new file mode 100644 index 0000000..b5b7ea8 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_parallel_backends.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_store_backends.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_store_backends.cpython-36.pyc new file mode 100644 index 0000000..38b2e5f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/_store_backends.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/backports.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/backports.cpython-36.pyc new file mode 100644 index 0000000..fdd9fbc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/backports.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/compressor.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/compressor.cpython-36.pyc new file mode 100644 index 0000000..1cb2727 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/compressor.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/disk.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/disk.cpython-36.pyc new file mode 100644 index 0000000..6cdf9cd Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/disk.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/executor.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/executor.cpython-36.pyc new file mode 100644 index 0000000..e879cf7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/executor.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/format_stack.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/format_stack.cpython-36.pyc new file mode 100644 index 0000000..7713269 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/format_stack.cpython-36.pyc differ diff --git 
a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/func_inspect.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/func_inspect.cpython-36.pyc new file mode 100644 index 0000000..33831dc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/func_inspect.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/hashing.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/hashing.cpython-36.pyc new file mode 100644 index 0000000..7c54e00 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/hashing.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/logger.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/logger.cpython-36.pyc new file mode 100644 index 0000000..bbe7c79 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/logger.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/memory.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/memory.cpython-36.pyc new file mode 100644 index 0000000..461bb42 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/memory.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/my_exceptions.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/my_exceptions.cpython-36.pyc new file mode 100644 index 0000000..f2307d9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/my_exceptions.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/numpy_pickle.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/numpy_pickle.cpython-36.pyc new file mode 100644 index 0000000..1b86cb0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/numpy_pickle.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/numpy_pickle_compat.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/numpy_pickle_compat.cpython-36.pyc new file mode 100644 index 0000000..f42c491 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/numpy_pickle_compat.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/numpy_pickle_utils.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/numpy_pickle_utils.cpython-36.pyc new file mode 100644 index 0000000..64da994 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/numpy_pickle_utils.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/parallel.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/parallel.cpython-36.pyc new file mode 100644 index 0000000..2243069 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/parallel.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/pool.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/pool.cpython-36.pyc new file mode 100644 index 0000000..76c35d6 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/pool.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/__pycache__/testing.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/testing.cpython-36.pyc new file mode 100644 index 0000000..90ab46d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/__pycache__/testing.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/_dask.py b/minor_project/lib/python3.6/site-packages/joblib/_dask.py new file mode 100644 index 0000000..0da978e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/_dask.py @@ -0,0 +1,357 @@ +from __future__ import print_function, division, absolute_import + +import asyncio +import concurrent.futures +import contextlib + +import time +from uuid import uuid4 +import weakref + +from .parallel import AutoBatchingMixin, ParallelBackendBase, BatchedCalls +from .parallel import parallel_backend + +try: + import distributed +except ImportError: + distributed = None + +if distributed is not None: + from dask.utils import funcname, itemgetter + from dask.sizeof import sizeof + from dask.distributed import ( + Client, + as_completed, + get_client, + secede, + rejoin + ) + from distributed.utils import thread_state + + try: + # asyncio.TimeoutError, Python3-only error thrown by recent versions of + # distributed + from distributed.utils import TimeoutError as _TimeoutError + except ImportError: + from tornado.gen import TimeoutError as _TimeoutError + + +def is_weakrefable(obj): + try: + weakref.ref(obj) + return True + except TypeError: + return False + + +class _WeakKeyDictionary: + """A variant of weakref.WeakKeyDictionary for unhashable objects. + + This datastructure is used to store futures for broadcasted data objects + such as large numpy arrays or pandas dataframes that are not hashable and + therefore cannot be used as keys of traditional python dicts. + + Futhermore using a dict with id(array) as key is not safe because the + Python is likely to reuse id of recently collected arrays. + """ + + def __init__(self): + self._data = {} + + def __getitem__(self, obj): + ref, val = self._data[id(obj)] + if ref() is not obj: + # In case of a race condition with on_destroy. + raise KeyError(obj) + return val + + def __setitem__(self, obj, value): + key = id(obj) + try: + ref, _ = self._data[key] + if ref() is not obj: + # In case of race condition with on_destroy. + raise KeyError(obj) + except KeyError: + # Insert the new entry in the mapping along with a weakref + # callback to automatically delete the entry from the mapping + # as soon as the object used as key is garbage collected. 
+ def on_destroy(_): + del self._data[key] + ref = weakref.ref(obj, on_destroy) + self._data[key] = ref, value + + def __len__(self): + return len(self._data) + + def clear(self): + self._data.clear() + + +def _funcname(x): + try: + if isinstance(x, list): + x = x[0][0] + except Exception: + pass + return funcname(x) + + +def _make_tasks_summary(tasks): + """Summarize of list of (func, args, kwargs) function calls""" + unique_funcs = {func for func, args, kwargs in tasks} + + if len(unique_funcs) == 1: + mixed = False + else: + mixed = True + return len(tasks), mixed, _funcname(tasks) + + +class Batch: + """dask-compatible wrapper that executes a batch of tasks""" + def __init__(self, tasks): + # collect some metadata from the tasks to ease Batch calls + # introspection when debugging + self._num_tasks, self._mixed, self._funcname = _make_tasks_summary( + tasks + ) + + def __call__(self, tasks=None): + results = [] + with parallel_backend('dask'): + for func, args, kwargs in tasks: + results.append(func(*args, **kwargs)) + return results + + def __repr__(self): + descr = f"batch_of_{self._funcname}_{self._num_tasks}_calls" + if self._mixed: + descr = "mixed_" + descr + return descr + + +def _joblib_probe_task(): + # Noop used by the joblib connector to probe when workers are ready. + pass + + +class DaskDistributedBackend(AutoBatchingMixin, ParallelBackendBase): + MIN_IDEAL_BATCH_DURATION = 0.2 + MAX_IDEAL_BATCH_DURATION = 1.0 + supports_timeout = True + + def __init__(self, scheduler_host=None, scatter=None, + client=None, loop=None, wait_for_workers_timeout=10, + **submit_kwargs): + super().__init__() + + if distributed is None: + msg = ("You are trying to use 'dask' as a joblib parallel backend " + "but dask is not installed. Please install dask " + "to fix this error.") + raise ValueError(msg) + + if client is None: + if scheduler_host: + client = Client(scheduler_host, loop=loop, + set_as_default=False) + else: + try: + client = get_client() + except ValueError as e: + msg = ("To use Joblib with Dask first create a Dask Client" + "\n\n" + " from dask.distributed import Client\n" + " client = Client()\n" + "or\n" + " client = Client('scheduler-address:8786')") + raise ValueError(msg) from e + + self.client = client + + if scatter is not None and not isinstance(scatter, (list, tuple)): + raise TypeError("scatter must be a list/tuple, got " + "`%s`" % type(scatter).__name__) + + if scatter is not None and len(scatter) > 0: + # Keep a reference to the scattered data to keep the ids the same + self._scatter = list(scatter) + scattered = self.client.scatter(scatter, broadcast=True) + self.data_futures = {id(x): f for x, f in zip(scatter, scattered)} + else: + self._scatter = [] + self.data_futures = {} + self.wait_for_workers_timeout = wait_for_workers_timeout + self.submit_kwargs = submit_kwargs + self.waiting_futures = as_completed( + [], + loop=client.loop, + with_results=True, + raise_errors=False + ) + self._results = {} + self._callbacks = {} + + async def _collect(self): + while self._continue: + async for future, result in self.waiting_futures: + cf_future = self._results.pop(future) + callback = self._callbacks.pop(future) + if future.status == "error": + typ, exc, tb = result + cf_future.set_exception(exc) + else: + cf_future.set_result(result) + callback(result) + await asyncio.sleep(0.01) + + def __reduce__(self): + return (DaskDistributedBackend, ()) + + def get_nested_backend(self): + return DaskDistributedBackend(client=self.client), -1 + + def configure(self, n_jobs=1, 
parallel=None, **backend_args): + self.parallel = parallel + return self.effective_n_jobs(n_jobs) + + def start_call(self): + self._continue = True + self.client.loop.add_callback(self._collect) + self.call_data_futures = _WeakKeyDictionary() + + def stop_call(self): + # The explicit call to clear is required to break a cycling reference + # to the futures. + self._continue = False + # wait for the future collection routine (self._backend._collect) to + # finish in order to limit asyncio warnings due to aborting _collect + # during a following backend termination call + time.sleep(0.01) + self.call_data_futures.clear() + + def effective_n_jobs(self, n_jobs): + effective_n_jobs = sum(self.client.ncores().values()) + if effective_n_jobs != 0 or not self.wait_for_workers_timeout: + return effective_n_jobs + + # If there is no worker, schedule a probe task to wait for the workers + # to come up and be available. If the dask cluster is in adaptive mode + # task might cause the cluster to provision some workers. + try: + self.client.submit(_joblib_probe_task).result( + timeout=self.wait_for_workers_timeout) + except _TimeoutError as e: + error_msg = ( + "DaskDistributedBackend has no worker after {} seconds. " + "Make sure that workers are started and can properly connect " + "to the scheduler and increase the joblib/dask connection " + "timeout with:\n\n" + "parallel_backend('dask', wait_for_workers_timeout={})" + ).format(self.wait_for_workers_timeout, + max(10, 2 * self.wait_for_workers_timeout)) + raise TimeoutError(error_msg) from e + return sum(self.client.ncores().values()) + + async def _to_func_args(self, func): + itemgetters = dict() + + # Futures that are dynamically generated during a single call to + # Parallel.__call__. + call_data_futures = getattr(self, 'call_data_futures', None) + + async def maybe_to_futures(args): + out = [] + for arg in args: + arg_id = id(arg) + if arg_id in itemgetters: + out.append(itemgetters[arg_id]) + continue + + f = self.data_futures.get(arg_id, None) + if f is None and call_data_futures is not None: + try: + f = call_data_futures[arg] + except KeyError: + pass + if f is None: + if is_weakrefable(arg) and sizeof(arg) > 1e3: + # Automatically scatter large objects to some of + # the workers to avoid duplicated data transfers. + # Rely on automated inter-worker data stealing if + # more workers need to reuse this data + # concurrently. 
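This backend is normally activated through parallel_backend('dask') once a dask.distributed Client exists, exactly as the error messages above spell out. A minimal usage sketch under that assumption; the local Client() and the timeout value are illustrative, not taken from this file:

import math
from dask.distributed import Client
from joblib import Parallel, delayed, parallel_backend

client = Client()  # or Client('scheduler-address:8786') for a remote scheduler
with parallel_backend('dask', wait_for_workers_timeout=10):
    # Calls are batched by the backend above and submitted to the Dask workers.
    results = Parallel()(delayed(math.sqrt)(i ** 2) for i in range(10))
print(results)  # same computation as the doctest in joblib/__init__.py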
+ # set hash=False - nested scatter calls (i.e + # calling client.scatter inside a dask worker) + # using hash=True often raise CancelledError, + # see dask/distributed#3703 + [f] = await self.client.scatter( + [arg], + asynchronous=True, + hash=False + ) + call_data_futures[arg] = f + + if f is not None: + out.append(f) + else: + out.append(arg) + return out + + tasks = [] + for f, args, kwargs in func.items: + args = list(await maybe_to_futures(args)) + kwargs = dict(zip(kwargs.keys(), + await maybe_to_futures(kwargs.values()))) + tasks.append((f, args, kwargs)) + + return (Batch(tasks), tasks) + + def apply_async(self, func, callback=None): + + cf_future = concurrent.futures.Future() + cf_future.get = cf_future.result # achieve AsyncResult API + + async def f(func, callback): + batch, tasks = await self._to_func_args(func) + key = f'{repr(batch)}-{uuid4().hex}' + + dask_future = self.client.submit( + batch, tasks=tasks, key=key, **self.submit_kwargs + ) + self.waiting_futures.add(dask_future) + self._callbacks[dask_future] = callback + self._results[dask_future] = cf_future + + self.client.loop.add_callback(f, func, callback) + + return cf_future + + def abort_everything(self, ensure_ready=True): + """ Tell the client to cancel any task submitted via this instance + + joblib.Parallel will never access those results + """ + with self.waiting_futures.lock: + self.waiting_futures.futures.clear() + while not self.waiting_futures.queue.empty(): + self.waiting_futures.queue.get() + + @contextlib.contextmanager + def retrieval_context(self): + """Override ParallelBackendBase.retrieval_context to avoid deadlocks. + + This removes thread from the worker's thread pool (using 'secede'). + Seceding avoids deadlock in nested parallelism settings. + """ + # See 'joblib.Parallel.__call__' and 'joblib.Parallel.retrieve' for how + # this is used. + if hasattr(thread_state, 'execution_state'): + # we are in a worker. Secede to avoid deadlock. + secede() + + yield + + if hasattr(thread_state, 'execution_state'): + rejoin() diff --git a/minor_project/lib/python3.6/site-packages/joblib/_deprecated_format_stack.py b/minor_project/lib/python3.6/site-packages/joblib/_deprecated_format_stack.py new file mode 100644 index 0000000..0593b87 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/_deprecated_format_stack.py @@ -0,0 +1,397 @@ +""" +Represent an exception with a lot of information. + +Provides 2 useful functions: + +format_exc: format an exception into a complete traceback, with full + debugging instruction. + +format_outer_frames: format the current position in the stack call. + +Adapted from IPython's VerboseTB. 
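The (deprecated) format_exc helper documented above turns a sys.exc_info() triple into a verbose, IPython-style traceback report. A small sketch under that assumption, using the signature shown further below and a throwaway ZeroDivisionError as the example failure:

import sys
from joblib._deprecated_format_stack import format_exc

try:
    1 / 0
except Exception:
    etype, evalue, etb = sys.exc_info()
    # context controls how many source lines are shown around each frame.
    print(format_exc(etype, evalue, etb, context=5, tb_offset=0))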
+""" +# Authors: Gael Varoquaux < gael dot varoquaux at normalesup dot org > +# Nathaniel Gray +# Fernando Perez +# Copyright: 2010, Gael Varoquaux +# 2001-2004, Fernando Perez +# 2001 Nathaniel Gray +# License: BSD 3 clause +# flake8: noqa + + +import inspect +import keyword +import linecache +import os +import pydoc +import sys +import time +import tokenize +import traceback + +INDENT = ' ' * 8 + + +############################################################################### +# some internal-use functions +def safe_repr(value): + """Hopefully pretty robust repr equivalent.""" + # this is pretty horrible but should always return *something* + try: + return pydoc.text.repr(value) + except KeyboardInterrupt: + raise + except: + try: + return repr(value) + except KeyboardInterrupt: + raise + except: + try: + # all still in an except block so we catch + # getattr raising + name = getattr(value, '__name__', None) + if name: + # ick, recursion + return safe_repr(name) + klass = getattr(value, '__class__', None) + if klass: + return '%s instance' % safe_repr(klass) + except KeyboardInterrupt: + raise + except: + return 'UNRECOVERABLE REPR FAILURE' + + +def eq_repr(value, repr=safe_repr): + return '=%s' % repr(value) + + +############################################################################### +def uniq_stable(elems): + """uniq_stable(elems) -> list + + Return from an iterable, a list of all the unique elements in the input, + but maintaining the order in which they first appear. + + A naive solution to this problem which just makes a dictionary with the + elements as keys fails to respect the stability condition, since + dictionaries are unsorted by nature. + + Note: All elements in the input must be hashable. + """ + unique = [] + unique_set = set() + for nn in elems: + if nn not in unique_set: + unique.append(nn) + unique_set.add(nn) + return unique + + +############################################################################### +def fix_frame_records_filenames(records): + """Try to fix the filenames in each record from inspect.getinnerframes(). + + Particularly, modules loaded from within zip files have useless filenames + attached to their code object, and inspect.getinnerframes() just uses it. + """ + fixed_records = [] + for frame, filename, line_no, func_name, lines, index in records: + # Look inside the frame's globals dictionary for __file__, which should + # be better. + better_fn = frame.f_globals.get('__file__', None) + if isinstance(better_fn, str): + # Check the type just in case someone did something weird with + # __file__. It might also be None if the error occurred during + # import. 
+ filename = better_fn + fixed_records.append((frame, filename, line_no, func_name, lines, + index)) + return fixed_records + + +def _fixed_getframes(etb, context=1, tb_offset=0): + LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5 + + records = fix_frame_records_filenames(inspect.getinnerframes(etb, context)) + + # If the error is at the console, don't build any context, since it would + # otherwise produce 5 blank lines printed out (there is no file at the + # console) + rec_check = records[tb_offset:] + try: + rname = rec_check[0][1] + if rname == '' or rname.endswith(''): + return rec_check + except IndexError: + pass + + aux = traceback.extract_tb(etb) + assert len(records) == len(aux) + for i, (file, lnum, _, _) in enumerate(aux): + maybe_start = lnum - 1 - context // 2 + start = max(maybe_start, 0) + end = start + context + lines = linecache.getlines(file)[start:end] + buf = list(records[i]) + buf[LNUM_POS] = lnum + buf[INDEX_POS] = lnum - 1 - start + buf[LINES_POS] = lines + records[i] = tuple(buf) + return records[tb_offset:] + + +def _format_traceback_lines(lnum, index, lines, lvals=None): + numbers_width = 7 + res = [] + i = lnum - index + + for line in lines: + if i == lnum: + # This is the line with the error + pad = numbers_width - len(str(i)) + if pad >= 3: + marker = '-' * (pad - 3) + '-> ' + elif pad == 2: + marker = '> ' + elif pad == 1: + marker = '>' + else: + marker = '' + num = marker + str(i) + else: + num = '%*s' % (numbers_width, i) + line = '%s %s' % (num, line) + + res.append(line) + if lvals and i == lnum: + res.append(lvals + '\n') + i = i + 1 + return res + + +def format_records(records): # , print_globals=False): + # Loop over all records printing context and info + frames = [] + abspath = os.path.abspath + for frame, file, lnum, func, lines, index in records: + try: + file = file and abspath(file) or '?' + except OSError: + # if file is '' or something not in the filesystem, + # the abspath call will throw an OSError. Just ignore it and + # keep the original file string. + pass + + if file.endswith('.pyc'): + file = file[:-4] + '.py' + + link = file + + args, varargs, varkw, locals = inspect.getargvalues(frame) + + if func == '?': + call = '' + else: + # Decide whether to include variable details or not + try: + call = 'in %s%s' % (func, inspect.formatargvalues(args, + varargs, varkw, locals, + formatvalue=eq_repr)) + except KeyError: + # Very odd crash from inspect.formatargvalues(). The + # scenario under which it appeared was a call to + # view(array,scale) in NumTut.view.view(), where scale had + # been defined as a scalar (it should be a tuple). Somehow + # inspect messes up resolving the argument list of view() + # and barfs out. At some point I should dig into this one + # and file a bug report about it. + print("\nJoblib's exception reporting continues...\n") + call = 'in %s(***failed resolving arguments***)' % func + + # Initialize a list of names on the current line, which the + # tokenizer below will populate. + names = [] + + def tokeneater(token_type, token, start, end, line): + """Stateful tokeneater which builds dotted names. + + The list of names it appends to (from the enclosing scope) can + contain repeated composite names. This is unavoidable, since + there is no way to disambiguate partial dotted structures until + the full list is known. The caller is responsible for pruning + the final list of duplicates before using it.""" + + # build composite names + if token == '.': + try: + names[-1] += '.' 
+ # store state so the next token is added for x.y.z names + tokeneater.name_cont = True + return + except IndexError: + pass + if token_type == tokenize.NAME and token not in keyword.kwlist: + if tokeneater.name_cont: + # Dotted names + names[-1] += token + tokeneater.name_cont = False + else: + # Regular new names. We append everything, the caller + # will be responsible for pruning the list later. It's + # very tricky to try to prune as we go, b/c composite + # names can fool us. The pruning at the end is easy + # to do (or the caller can print a list with repeated + # names if so desired. + names.append(token) + elif token_type == tokenize.NEWLINE: + raise IndexError + # we need to store a bit of state in the tokenizer to build + # dotted names + tokeneater.name_cont = False + + def linereader(file=file, lnum=[lnum], getline=linecache.getline): + line = getline(file, lnum[0]) + lnum[0] += 1 + return line + + # Build the list of names on this line of code where the exception + # occurred. + try: + # This builds the names list in-place by capturing it from the + # enclosing scope. + for token in tokenize.generate_tokens(linereader): + tokeneater(*token) + except (IndexError, UnicodeDecodeError, SyntaxError): + # signals exit of tokenizer + # SyntaxError can happen when trying to tokenize + # a compiled (e.g. .so or .pyd) extension + pass + except tokenize.TokenError as msg: + _m = ("An unexpected error occurred while tokenizing input file %s\n" + "The following traceback may be corrupted or invalid\n" + "The error message is: %s\n" % (file, msg)) + print(_m) + + # prune names list of duplicates, but keep the right order + unique_names = uniq_stable(names) + + # Start loop over vars + lvals = [] + for name_full in unique_names: + name_base = name_full.split('.', 1)[0] + if name_base in frame.f_code.co_varnames: + if name_base in locals.keys(): + try: + value = safe_repr(eval(name_full, locals)) + except: + value = "undefined" + else: + value = "undefined" + name = name_full + lvals.append('%s = %s' % (name, value)) + #elif print_globals: + # if frame.f_globals.has_key(name_base): + # try: + # value = safe_repr(eval(name_full,frame.f_globals)) + # except: + # value = "undefined" + # else: + # value = "undefined" + # name = 'global %s' % name_full + # lvals.append('%s = %s' % (name,value)) + if lvals: + lvals = '%s%s' % (INDENT, ('\n%s' % INDENT).join(lvals)) + else: + lvals = '' + + level = '%s\n%s %s\n' % (75 * '.', link, call) + + if index is None: + frames.append(level) + else: + frames.append('%s%s' % (level, ''.join( + _format_traceback_lines(lnum, index, lines, lvals)))) + + return frames + + +############################################################################### +def format_exc(etype, evalue, etb, context=5, tb_offset=0): + """ Return a nice text document describing the traceback. 
+ + Parameters + ----------- + etype, evalue, etb: as returned by sys.exc_info + context: number of lines of the source file to plot + tb_offset: the number of stack frame not to use (0 = use all) + + """ + # some locals + try: + etype = etype.__name__ + except AttributeError: + pass + + # Header with the exception type, python version, and date + pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable + date = time.ctime(time.time()) + pid = 'PID: %i' % os.getpid() + + head = '%s%s%s\n%s%s%s' % ( + etype, ' ' * (75 - len(str(etype)) - len(date)), + date, pid, ' ' * (75 - len(str(pid)) - len(pyver)), + pyver) + + # Drop topmost frames if requested + records = _fixed_getframes(etb, context, tb_offset) + + # Get (safely) a string form of the exception info + try: + etype_str, evalue_str = map(str, (etype, evalue)) + except BaseException: + # User exception is improperly defined. + etype, evalue = str, sys.exc_info()[:2] + etype_str, evalue_str = map(str, (etype, evalue)) + # ... and format it + exception = ['%s: %s' % (etype_str, evalue_str)] + frames = format_records(records) + return '%s\n%s\n%s' % (head, '\n'.join(frames), ''.join(exception[0])) + + +############################################################################### +def format_outer_frames(context=5, stack_start=None, stack_end=None, + ignore_ipython=True): + LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5 + records = inspect.getouterframes(inspect.currentframe()) + output = list() + + for i, (frame, filename, line_no, func_name, lines, index) \ + in enumerate(records): + # Look inside the frame's globals dictionary for __file__, which should + # be better. + better_fn = frame.f_globals.get('__file__', None) + if isinstance(better_fn, str): + # Check the type just in case someone did something weird with + # __file__. It might also be None if the error occurred during + # import. + filename = better_fn + if filename.endswith('.pyc'): + filename = filename[:-4] + '.py' + if ignore_ipython: + # Hack to avoid printing the internals of IPython + if (os.path.basename(filename) in ('iplib.py', 'py3compat.py') + and func_name in ('execfile', 'safe_execfile', 'runcode')): + break + maybe_start = line_no - 1 - context // 2 + start = max(maybe_start, 0) + end = start + context + lines = linecache.getlines(filename)[start:end] + buf = list(records[i]) + buf[LNUM_POS] = line_no + buf[INDEX_POS] = line_no - 1 - start + buf[LINES_POS] = lines + output.append(tuple(buf)) + return '\n'.join(format_records(output[stack_end:stack_start:-1])) diff --git a/minor_project/lib/python3.6/site-packages/joblib/_deprecated_my_exceptions.py b/minor_project/lib/python3.6/site-packages/joblib/_deprecated_my_exceptions.py new file mode 100644 index 0000000..7b879d2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/_deprecated_my_exceptions.py @@ -0,0 +1,115 @@ +""" +Exceptions + +This module is deprecated and will be removed in joblib 0.16. +""" +# Author: Gael Varoquaux < gael dot varoquaux at normalesup dot org > +# Copyright: 2010, Gael Varoquaux +# License: BSD 3 clause + + +class JoblibException(Exception): + """A simple exception with an error message that you can get to.""" + def __init__(self, *args): + # We need to implement __init__ so that it is picked in the + # multiple heritance hierarchy in the class created in + # _mk_exception. 
Note: in Python 2, if you implement __init__ + # in your exception class you need to set .args correctly, + # otherwise you can dump an exception instance with pickle but + # not load it (at load time an empty .args will be passed to + # the constructor). Also we want to be explicit and not use + # 'super' here. Using 'super' can cause a sibling class method + # to be called and we have no control the sibling class method + # constructor signature in the exception returned by + # _mk_exception. + Exception.__init__(self, *args) + + def __repr__(self): + if hasattr(self, 'args') and len(self.args) > 0: + message = self.args[0] + else: + message = '' + + name = self.__class__.__name__ + return '%s\n%s\n%s\n%s' % (name, 75 * '_', message, 75 * '_') + + __str__ = __repr__ + + +class TransportableException(JoblibException): + """An exception containing all the info to wrap an original + exception and recreate it. + """ + + def __init__(self, message, etype): + # The next line set the .args correctly. This is needed to + # make the exception loadable with pickle + JoblibException.__init__(self, message, etype) + self.message = message + self.etype = etype + + def unwrap(self, context_message=""): + report = """\ +%s +--------------------------------------------------------------------------- +Joblib worker traceback: +--------------------------------------------------------------------------- +%s""" % (context_message, self.message) + # Unwrap the exception to a JoblibException + exception_type = _mk_exception(self.etype)[0] + return exception_type(report) + + +_exception_mapping = dict() + + +def _mk_exception(exception, name=None): + if issubclass(exception, JoblibException): + # No need to wrap recursively JoblibException + return exception, exception.__name__ + + # Create an exception inheriting from both JoblibException + # and that exception + if name is None: + name = exception.__name__ + this_name = 'Joblib%s' % name + if this_name in _exception_mapping: + # Avoid creating twice the same exception + this_exception = _exception_mapping[this_name] + else: + if exception is Exception: + # JoblibException is already a subclass of Exception. No + # need to use multiple inheritance + return JoblibException, this_name + try: + this_exception = type( + this_name, (JoblibException, exception), {}) + _exception_mapping[this_name] = this_exception + except TypeError: + # This happens if "Cannot create a consistent method + # resolution order", e.g. because 'exception' is a + # subclass of JoblibException or 'exception' is not an + # acceptable base class + this_exception = JoblibException + + return this_exception, this_name + + +def _mk_common_exceptions(): + namespace = dict() + import builtins as _builtin_exceptions + common_exceptions = filter( + lambda x: x.endswith('Error'), + dir(_builtin_exceptions)) + + for name in common_exceptions: + obj = getattr(_builtin_exceptions, name) + if isinstance(obj, type) and issubclass(obj, BaseException): + this_obj, this_name = _mk_exception(obj, name=name) + namespace[this_name] = this_obj + return namespace + + +# Updating module locals so that the exceptions pickle right. 
AFAIK this +# works only at module-creation time +locals().update(_mk_common_exceptions()) diff --git a/minor_project/lib/python3.6/site-packages/joblib/_memmapping_reducer.py b/minor_project/lib/python3.6/site-packages/joblib/_memmapping_reducer.py new file mode 100644 index 0000000..d583822 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/_memmapping_reducer.py @@ -0,0 +1,664 @@ +""" +Reducer using memory mapping for numpy arrays +""" +# Author: Thomas Moreau +# Copyright: 2017, Thomas Moreau +# License: BSD 3 clause + +from mmap import mmap +import errno +import os +import stat +import threading +import atexit +import tempfile +import time +import warnings +import weakref +from uuid import uuid4 +from multiprocessing import util + +from pickle import whichmodule, loads, dumps, HIGHEST_PROTOCOL, PicklingError + +try: + WindowsError +except NameError: + WindowsError = type(None) + +try: + import numpy as np + from numpy.lib.stride_tricks import as_strided +except ImportError: + np = None + +from .numpy_pickle import dump, load, load_temporary_memmap +from .backports import make_memmap +from .disk import delete_folder +from .externals.loky.backend import resource_tracker + +# Some system have a ramdisk mounted by default, we can use it instead of /tmp +# as the default folder to dump big arrays to share with subprocesses. +SYSTEM_SHARED_MEM_FS = '/dev/shm' + +# Minimal number of bytes available on SYSTEM_SHARED_MEM_FS to consider using +# it as the default folder to dump big arrays to share with subprocesses. +SYSTEM_SHARED_MEM_FS_MIN_SIZE = int(2e9) + +# Folder and file permissions to chmod temporary files generated by the +# memmapping pool. Only the owner of the Python process can access the +# temporary files and folder. +FOLDER_PERMISSIONS = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR +FILE_PERMISSIONS = stat.S_IRUSR | stat.S_IWUSR + +# Set used in joblib workers, referencing the filenames of temporary memmaps +# created by joblib to speed up data communication. In child processes, we add +# a finalizer to these memmaps that sends a maybe_unlink call to the +# resource_tracker, in order to free main memory as fast as possible. +JOBLIB_MMAPS = set() + + +def _log_and_unlink(filename): + from .externals.loky.backend.resource_tracker import _resource_tracker + util.debug( + "[FINALIZER CALL] object mapping to {} about to be deleted," + " decrementing the refcount of the file (pid: {})".format( + os.path.basename(filename), os.getpid())) + _resource_tracker.maybe_unlink(filename, "file") + + +def add_maybe_unlink_finalizer(memmap): + util.debug( + "[FINALIZER ADD] adding finalizer to {} (id {}, filename {}, pid {})" + "".format(type(memmap), id(memmap), os.path.basename(memmap.filename), + os.getpid())) + weakref.finalize(memmap, _log_and_unlink, memmap.filename) + + +def unlink_file(filename): + """Wrapper around os.unlink with a retry mechanism. + + The retry mechanism has been implemented primarily to overcome a race + condition happening during the finalizer of a np.memmap: when a process + holding the last reference to a mmap-backed np.memmap/np.array is about to + delete this array (and close the reference), it sends a maybe_unlink + request to the resource_tracker. This request can be processed faster than + it takes for the last reference of the memmap to be closed, yielding (on + Windows) a PermissionError in the resource_tracker loop. 
+ """ + NUM_RETRIES = 10 + for retry_no in range(1, NUM_RETRIES + 1): + try: + os.unlink(filename) + break + except PermissionError: + util.debug( + '[ResourceTracker] tried to unlink {}, got ' + 'PermissionError'.format(filename) + ) + if retry_no == NUM_RETRIES: + raise + else: + time.sleep(.2) + + +resource_tracker._CLEANUP_FUNCS['file'] = unlink_file + + +class _WeakArrayKeyMap: + """A variant of weakref.WeakKeyDictionary for unhashable numpy arrays. + + This datastructure will be used with numpy arrays as obj keys, therefore we + do not use the __get__ / __set__ methods to avoid any conflict with the + numpy fancy indexing syntax. + """ + + def __init__(self): + self._data = {} + + def get(self, obj): + ref, val = self._data[id(obj)] + if ref() is not obj: + # In case of race condition with on_destroy: could never be + # triggered by the joblib tests with CPython. + raise KeyError(obj) + return val + + def set(self, obj, value): + key = id(obj) + try: + ref, _ = self._data[key] + if ref() is not obj: + # In case of race condition with on_destroy: could never be + # triggered by the joblib tests with CPython. + raise KeyError(obj) + except KeyError: + # Insert the new entry in the mapping along with a weakref + # callback to automatically delete the entry from the mapping + # as soon as the object used as key is garbage collected. + def on_destroy(_): + del self._data[key] + ref = weakref.ref(obj, on_destroy) + self._data[key] = ref, value + + def __getstate__(self): + raise PicklingError("_WeakArrayKeyMap is not pickleable") + + +############################################################################### +# Support for efficient transient pickling of numpy data structures + + +def _get_backing_memmap(a): + """Recursively look up the original np.memmap instance base if any.""" + b = getattr(a, 'base', None) + if b is None: + # TODO: check scipy sparse datastructure if scipy is installed + # a nor its descendants do not have a memmap base + return None + + elif isinstance(b, mmap): + # a is already a real memmap instance. + return a + + else: + # Recursive exploration of the base ancestry + return _get_backing_memmap(b) + + +def _get_temp_dir(pool_folder_name, temp_folder=None): + """Get the full path to a subfolder inside the temporary folder. + + Parameters + ---------- + pool_folder_name : str + Sub-folder name used for the serialization of a pool instance. + + temp_folder: str, optional + Folder to be used by the pool for memmapping large arrays + for sharing memory with worker processes. If None, this will try in + order: + + - a folder pointed by the JOBLIB_TEMP_FOLDER environment + variable, + - /dev/shm if the folder exists and is writable: this is a + RAMdisk filesystem available by default on modern Linux + distributions, + - the default system temporary folder that can be + overridden with TMP, TMPDIR or TEMP environment + variables, typically /tmp under Unix operating systems. + + Returns + ------- + pool_folder : str + full path to the temporary folder + use_shared_mem : bool + whether the temporary folder is written to the system shared memory + folder or some other temporary folder. 
+ """ + use_shared_mem = False + if temp_folder is None: + temp_folder = os.environ.get('JOBLIB_TEMP_FOLDER', None) + if temp_folder is None: + if os.path.exists(SYSTEM_SHARED_MEM_FS): + try: + shm_stats = os.statvfs(SYSTEM_SHARED_MEM_FS) + available_nbytes = shm_stats.f_bsize * shm_stats.f_bavail + if available_nbytes > SYSTEM_SHARED_MEM_FS_MIN_SIZE: + # Try to see if we have write access to the shared mem + # folder only if it is reasonably large (that is 2GB or + # more). + temp_folder = SYSTEM_SHARED_MEM_FS + pool_folder = os.path.join(temp_folder, pool_folder_name) + if not os.path.exists(pool_folder): + os.makedirs(pool_folder) + use_shared_mem = True + except (IOError, OSError): + # Missing rights in the /dev/shm partition, fallback to regular + # temp folder. + temp_folder = None + if temp_folder is None: + # Fallback to the default tmp folder, typically /tmp + temp_folder = tempfile.gettempdir() + temp_folder = os.path.abspath(os.path.expanduser(temp_folder)) + pool_folder = os.path.join(temp_folder, pool_folder_name) + return pool_folder, use_shared_mem + + +def has_shareable_memory(a): + """Return True if a is backed by some mmap buffer directly or not.""" + return _get_backing_memmap(a) is not None + + +def _strided_from_memmap(filename, dtype, mode, offset, order, shape, strides, + total_buffer_len, unlink_on_gc_collect): + """Reconstruct an array view on a memory mapped file.""" + if mode == 'w+': + # Do not zero the original data when unpickling + mode = 'r+' + + if strides is None: + # Simple, contiguous memmap + return make_memmap( + filename, dtype=dtype, shape=shape, mode=mode, offset=offset, + order=order, unlink_on_gc_collect=unlink_on_gc_collect + ) + else: + # For non-contiguous data, memmap the total enclosing buffer and then + # extract the non-contiguous view with the stride-tricks API + base = make_memmap( + filename, dtype=dtype, shape=total_buffer_len, offset=offset, + mode=mode, order=order, unlink_on_gc_collect=unlink_on_gc_collect + ) + return as_strided(base, shape=shape, strides=strides) + + +def _reduce_memmap_backed(a, m): + """Pickling reduction for memmap backed arrays. + + a is expected to be an instance of np.ndarray (or np.memmap) + m is expected to be an instance of np.memmap on the top of the ``base`` + attribute ancestry of a. ``m.base`` should be the real python mmap object. + """ + # offset that comes from the striding differences between a and m + util.debug('[MEMMAP REDUCE] reducing a memmap-backed array ' + '(shape, {}, pid: {})'.format(a.shape, os.getpid())) + a_start, a_end = np.byte_bounds(a) + m_start = np.byte_bounds(m)[0] + offset = a_start - m_start + + # offset from the backing memmap + offset += m.offset + + if m.flags['F_CONTIGUOUS']: + order = 'F' + else: + # The backing memmap buffer is necessarily contiguous hence C if not + # Fortran + order = 'C' + + if a.flags['F_CONTIGUOUS'] or a.flags['C_CONTIGUOUS']: + # If the array is a contiguous view, no need to pass the strides + strides = None + total_buffer_len = None + else: + # Compute the total number of items to map from which the strided + # view will be extracted. 
+ strides = a.strides + total_buffer_len = (a_end - a_start) // a.itemsize + + return (_strided_from_memmap, + (m.filename, a.dtype, m.mode, offset, order, a.shape, strides, + total_buffer_len, False)) + + +def reduce_array_memmap_backward(a): + """reduce a np.array or a np.memmap from a child process""" + m = _get_backing_memmap(a) + if isinstance(m, np.memmap) and m.filename not in JOBLIB_MMAPS: + # if a is backed by a memmaped file, reconstruct a using the + # memmaped file. + return _reduce_memmap_backed(a, m) + else: + # a is either a regular (not memmap-backed) numpy array, or an array + # backed by a shared temporary file created by joblib. In the latter + # case, in order to limit the lifespan of these temporary files, we + # serialize the memmap as a regular numpy array, and decref the + # file backing the memmap (done implicitly in a previously registered + # finalizer, see ``unlink_on_gc_collect`` for more details) + return ( + loads, (dumps(np.asarray(a), protocol=HIGHEST_PROTOCOL), ) + ) + + +class ArrayMemmapForwardReducer(object): + """Reducer callable to dump large arrays to memmap files. + + Parameters + ---------- + max_nbytes: int + Threshold to trigger memmapping of large arrays to files created + a folder. + temp_folder_resolver: callable + An callable in charge of resolving a temporary folder name where files + for backing memmapped arrays are created. + mmap_mode: 'r', 'r+' or 'c' + Mode for the created memmap datastructure. See the documentation of + numpy.memmap for more details. Note: 'w+' is coerced to 'r+' + automatically to avoid zeroing the data on unpickling. + verbose: int, optional, 0 by default + If verbose > 0, memmap creations are logged. + If verbose > 1, both memmap creations, reuse and array pickling are + logged. + prewarm: bool, optional, False by default. + Force a read on newly memmapped array to make sure that OS pre-cache it + memory. This can be useful to avoid concurrent disk access when the + same data array is passed to different worker processes. + """ + + def __init__(self, max_nbytes, temp_folder_resolver, mmap_mode, + unlink_on_gc_collect, verbose=0, prewarm=True): + self._max_nbytes = max_nbytes + self._temp_folder_resolver = temp_folder_resolver + self._mmap_mode = mmap_mode + self.verbose = int(verbose) + if prewarm == "auto": + self._prewarm = not self._temp_folder.startswith( + SYSTEM_SHARED_MEM_FS + ) + else: + self._prewarm = prewarm + self._prewarm = prewarm + self._memmaped_arrays = _WeakArrayKeyMap() + self._temporary_memmaped_filenames = set() + self._unlink_on_gc_collect = unlink_on_gc_collect + + @property + def _temp_folder(self): + return self._temp_folder_resolver() + + def __reduce__(self): + # The ArrayMemmapForwardReducer is passed to the children processes: it + # needs to be pickled but the _WeakArrayKeyMap need to be skipped as + # it's only guaranteed to be consistent with the parent process memory + # garbage collection. + # Although this reducer is pickled, it is not needed in its destination + # process (child processes), as we only use this reducer to send + # memmaps from the parent process to the children processes. For this + # reason, we can afford skipping the resolver, (which would otherwise + # be unpicklable), and pass it as None instead. 
+ args = (self._max_nbytes, None, self._mmap_mode, + self._unlink_on_gc_collect) + kwargs = { + 'verbose': self.verbose, + 'prewarm': self._prewarm, + } + return ArrayMemmapForwardReducer, args, kwargs + + def __call__(self, a): + m = _get_backing_memmap(a) + if m is not None and isinstance(m, np.memmap): + # a is already backed by a memmap file, let's reuse it directly + return _reduce_memmap_backed(a, m) + + if (not a.dtype.hasobject and self._max_nbytes is not None and + a.nbytes > self._max_nbytes): + # check that the folder exists (lazily create the pool temp folder + # if required) + try: + os.makedirs(self._temp_folder) + os.chmod(self._temp_folder, FOLDER_PERMISSIONS) + except OSError as e: + if e.errno != errno.EEXIST: + raise e + + try: + basename = self._memmaped_arrays.get(a) + except KeyError: + # Generate a new unique random filename. The process and thread + # ids are only useful for debugging purpose and to make it + # easier to cleanup orphaned files in case of hard process + # kill (e.g. by "kill -9" or segfault). + basename = "{}-{}-{}.pkl".format( + os.getpid(), id(threading.current_thread()), uuid4().hex) + self._memmaped_arrays.set(a, basename) + filename = os.path.join(self._temp_folder, basename) + + # In case the same array with the same content is passed several + # times to the pool subprocess children, serialize it only once + + is_new_memmap = filename not in self._temporary_memmaped_filenames + + # add the memmap to the list of temporary memmaps created by joblib + self._temporary_memmaped_filenames.add(filename) + + if self._unlink_on_gc_collect: + # Bump reference count of the memmap by 1 to account for + # shared usage of the memmap by a child process. The + # corresponding decref call will be executed upon calling + # resource_tracker.maybe_unlink, registered as a finalizer in + # the child. + # the incref/decref calls here are only possible when the child + # and the parent share the same resource_tracker. It is not the + # case for the multiprocessing backend, but it does not matter + # because unlinking a memmap from a child process is only + # useful to control the memory usage of long-lasting child + # processes, while the multiprocessing-based pools terminate + # their workers at the end of a map() call. + resource_tracker.register(filename, "file") + + if is_new_memmap: + # Incref each temporary memmap created by joblib one extra + # time. This means that these memmaps will only be deleted + # once an extra maybe_unlink() is called, which is done once + # all the jobs have completed (or been canceled) in the + # Parallel._terminate_backend() method. + resource_tracker.register(filename, "file") + + if not os.path.exists(filename): + util.debug( + "[ARRAY DUMP] Pickling new array (shape={}, dtype={}) " + "creating a new memmap at {}".format( + a.shape, a.dtype, filename)) + for dumped_filename in dump(a, filename): + os.chmod(dumped_filename, FILE_PERMISSIONS) + + if self._prewarm: + # Warm up the data by accessing it. This operation ensures + # that the disk access required to create the memmapping + # file are performed in the reducing process and avoids + # concurrent memmap creation in multiple children + # processes. 
+ load(filename, mmap_mode=self._mmap_mode).max() + + else: + util.debug( + "[ARRAY DUMP] Pickling known array (shape={}, dtype={}) " + "reusing memmap file: {}".format( + a.shape, a.dtype, os.path.basename(filename))) + + # The worker process will use joblib.load to memmap the data + return ( + (load_temporary_memmap, (filename, self._mmap_mode, + self._unlink_on_gc_collect)) + ) + else: + # do not convert a into memmap, let pickler do its usual copy with + # the default system pickler + util.debug( + '[ARRAY DUMP] Pickling array (NO MEMMAPPING) (shape={}, ' + ' dtype={}).'.format(a.shape, a.dtype)) + return (loads, (dumps(a, protocol=HIGHEST_PROTOCOL),)) + + +def get_memmapping_reducers( + forward_reducers=None, backward_reducers=None, + temp_folder_resolver=None, max_nbytes=1e6, mmap_mode='r', verbose=0, + prewarm=False, unlink_on_gc_collect=True, **kwargs): + """Construct a pair of memmapping reducer linked to a tmpdir. + + This function manage the creation and the clean up of the temporary folders + underlying the memory maps and should be use to get the reducers necessary + to construct joblib pool or executor. + """ + if forward_reducers is None: + forward_reducers = dict() + if backward_reducers is None: + backward_reducers = dict() + + if np is not None: + # Register smart numpy.ndarray reducers that detects memmap backed + # arrays and that is also able to dump to memmap large in-memory + # arrays over the max_nbytes threshold + forward_reduce_ndarray = ArrayMemmapForwardReducer( + max_nbytes, temp_folder_resolver, mmap_mode, unlink_on_gc_collect, + verbose, prewarm=prewarm) + forward_reducers[np.ndarray] = forward_reduce_ndarray + forward_reducers[np.memmap] = forward_reduce_ndarray + + # Communication from child process to the parent process always + # pickles in-memory numpy.ndarray without dumping them as memmap + # to avoid confusing the caller and make it tricky to collect the + # temporary folder + backward_reducers[np.ndarray] = reduce_array_memmap_backward + backward_reducers[np.memmap] = reduce_array_memmap_backward + + return forward_reducers, backward_reducers + + +class TemporaryResourcesManager(object): + """Stateful object able to manage temporary folder and pickles + + It exposes: + - a per-context folder name resolving API that memmap-based reducers will + rely on to know where to pickle the temporary memmaps + - a temporary file/folder management API that internally uses the + resource_tracker. + """ + + def __init__(self, temp_folder_root=None, context_id=None): + self._current_temp_folder = None + self._temp_folder_root = temp_folder_root + self._use_shared_mem = None + self._cached_temp_folders = dict() + self._id = uuid4().hex + self._finalizers = {} + if context_id is None: + # It would be safer to not assign a default context id (less silent + # bugs), but doing this while maintaining backward compatibility + # with the previous, context-unaware version get_memmaping_executor + # exposes exposes too many low-level details. + context_id = uuid4().hex + self.set_current_context(context_id) + + def set_current_context(self, context_id): + self._current_context_id = context_id + self.register_new_context(context_id) + + def register_new_context(self, context_id): + # Prepare a sub-folder name specific to a context (usually a unique id + # generated by each instance of the Parallel class). Do not create in + # advance to spare FS write access if no array is to be dumped). 
+ if context_id in self._cached_temp_folders: + return + else: + # During its lifecycle, one Parallel object can have several + # executors associated to it (for instance, if a loky worker raises + # an exception, joblib shutdowns the executor and instantly + # recreates a new one before raising the error - see + # ``ensure_ready``. Because we don't want two executors tied to + # the same Parallel object (and thus the same context id) to + # register/use/delete the same folder, we also add an id specific + # to the current Manager (and thus specific to its associated + # executor) to the folder name. + new_folder_name = ( + "joblib_memmapping_folder_{}_{}_{}".format( + os.getpid(), self._id, context_id) + ) + new_folder_path, _ = _get_temp_dir( + new_folder_name, self._temp_folder_root + ) + self.register_folder_finalizer(new_folder_path, context_id) + self._cached_temp_folders[context_id] = new_folder_path + + def resolve_temp_folder_name(self): + """Return a folder name specific to the currently activated context""" + return self._cached_temp_folders[self._current_context_id] + + def _unregister_context(self, context_id=None): + if context_id is None: + for context_id in list(self._cached_temp_folders): + self._unregister_context(context_id) + else: + temp_folder = self._cached_temp_folders[context_id] + finalizer = self._finalizers[context_id] + + resource_tracker.unregister(temp_folder, "folder") + atexit.unregister(finalizer) + + self._cached_temp_folders.pop(context_id) + self._finalizers.pop(context_id) + + # resource management API + + def register_folder_finalizer(self, pool_subfolder, context_id): + # Register the garbage collector at program exit in case caller forgets + # to call terminate explicitly: note we do not pass any reference to + # ensure that this callback won't prevent garbage collection of + # parallel instance and related file handler resources such as POSIX + # semaphores and pipes + pool_module_name = whichmodule(delete_folder, 'delete_folder') + resource_tracker.register(pool_subfolder, "folder") + + def _cleanup(): + # In some cases the Python runtime seems to set delete_folder to + # None just before exiting when accessing the delete_folder + # function from the closure namespace. So instead we reimport + # the delete_folder function explicitly. + # https://github.com/joblib/joblib/issues/328 + # We cannot just use from 'joblib.pool import delete_folder' + # because joblib should only use relative imports to allow + # easy vendoring. 
+ delete_folder = __import__( + pool_module_name, fromlist=['delete_folder']).delete_folder + try: + delete_folder(pool_subfolder, allow_non_empty=True) + resource_tracker.unregister(pool_subfolder, "folder") + except OSError: + warnings.warn("Failed to delete temporary folder: {}" + .format(pool_subfolder)) + + self._finalizers[context_id] = atexit.register(_cleanup) + + def _unlink_temporary_resources(self, context_id=None): + """Unlink temporary resources created by a process-based pool""" + if context_id is None: + # iterate over a copy of the cache keys because + # unlink_temporary_resources further deletes an entry in this + # cache + for context_id in self._cached_temp_folders.copy(): + self._unlink_temporary_resources(context_id) + else: + temp_folder = self._cached_temp_folders[context_id] + if os.path.exists(temp_folder): + for filename in os.listdir(temp_folder): + resource_tracker.maybe_unlink( + os.path.join(temp_folder, filename), "file" + ) + self._try_delete_folder( + allow_non_empty=False, context_id=context_id + ) + + def _unregister_temporary_resources(self, context_id=None): + """Unregister temporary resources created by a process-based pool""" + if context_id is None: + for context_id in self._cached_temp_folders: + self._unregister_temporary_resources(context_id) + else: + temp_folder = self._cached_temp_folders[context_id] + if os.path.exists(temp_folder): + for filename in os.listdir(temp_folder): + resource_tracker.unregister( + os.path.join(temp_folder, filename), "file" + ) + + def _try_delete_folder(self, allow_non_empty, context_id=None): + if context_id is None: + # ditto + for context_id in self._cached_temp_folders.copy(): + self._try_delete_folder( + allow_non_empty=allow_non_empty, context_id=context_id + ) + else: + temp_folder = self._cached_temp_folders[context_id] + try: + delete_folder( + temp_folder, allow_non_empty=allow_non_empty + ) + # Now that this folder is deleted, we can forget about it + self._unregister_context(context_id) + + except OSError: + # Temporary folder cannot be deleted right now. No need to + # handle it though, as this folder will be cleaned up by an + # atexit finalizer registered by the memmapping_reducer. + pass diff --git a/minor_project/lib/python3.6/site-packages/joblib/_multiprocessing_helpers.py b/minor_project/lib/python3.6/site-packages/joblib/_multiprocessing_helpers.py new file mode 100644 index 0000000..1c5de2f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/_multiprocessing_helpers.py @@ -0,0 +1,52 @@ +"""Helper module to factorize the conditional multiprocessing import logic + +We use a distinct module to simplify import statements and avoid introducing +circular dependencies (for instance for the assert_spawning name). +""" +import os +import warnings + + +# Obtain possible configuration from the environment, assuming 1 (on) +# by default, upon 0 set to None. Should instructively fail if some non +# 0/1 value is set. +mp = int(os.environ.get('JOBLIB_MULTIPROCESSING', 1)) or None +if mp: + try: + import multiprocessing as mp + except ImportError: + mp = None + +# 2nd stage: validate that locking is available on the system and +# issue a warning if not +if mp is not None: + try: + # try to create a named semaphore using SemLock to make sure they are + # available on this platform. We use the low level object + # _multiprocessing.SemLock to avoid spawning a resource tracker on + # Unix system or changing the default backend. 
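As the surrounding comments note, joblib falls back to serial operation when JOBLIB_MULTIPROCESSING is set to 0 or when named semaphores cannot be created. A minimal sketch of forcing that fallback, assuming the variable is set before joblib is first imported:

import os
os.environ['JOBLIB_MULTIPROCESSING'] = '0'  # must precede the first joblib import

from joblib import Parallel, delayed

# With multiprocessing disabled, the calls below effectively run sequentially
# in the calling process.
print(Parallel(n_jobs=2)(delayed(abs)(-i) for i in range(3)))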
+ import tempfile + from _multiprocessing import SemLock + + _rand = tempfile._RandomNameSequence() + for i in range(100): + try: + name = '/joblib-{}-{}' .format( + os.getpid(), next(_rand)) + _sem = SemLock(0, 0, 1, name=name, unlink=True) + del _sem # cleanup + break + except FileExistsError as e: # pragma: no cover + if i >= 99: + raise FileExistsError( + 'cannot find name for semaphore') from e + except (FileExistsError, AttributeError, ImportError, OSError) as e: + mp = None + warnings.warn('%s. joblib will operate in serial mode' % (e,)) + + +# 3rd stage: backward compat for the assert_spawning helper +if mp is not None: + from multiprocessing.context import assert_spawning +else: + assert_spawning = None diff --git a/minor_project/lib/python3.6/site-packages/joblib/_parallel_backends.py b/minor_project/lib/python3.6/site-packages/joblib/_parallel_backends.py new file mode 100644 index 0000000..4264528 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/_parallel_backends.py @@ -0,0 +1,610 @@ +""" +Backends for embarrassingly parallel code. +""" + +import gc +import os +import warnings +import threading +import functools +import contextlib +from abc import ABCMeta, abstractmethod + +from .my_exceptions import WorkerInterrupt +from ._multiprocessing_helpers import mp + +if mp is not None: + from .pool import MemmappingPool + from multiprocessing.pool import ThreadPool + from .executor import get_memmapping_executor + + # Compat between concurrent.futures and multiprocessing TimeoutError + from multiprocessing import TimeoutError + from concurrent.futures._base import TimeoutError as CfTimeoutError + from .externals.loky import process_executor, cpu_count + + +class ParallelBackendBase(metaclass=ABCMeta): + """Helper abc which defines all methods a ParallelBackend must implement""" + + supports_timeout = False + supports_inner_max_num_threads = False + nesting_level = None + + def __init__(self, nesting_level=None, inner_max_num_threads=None, + **kwargs): + super().__init__(**kwargs) + self.nesting_level = nesting_level + self.inner_max_num_threads = inner_max_num_threads + + MAX_NUM_THREADS_VARS = [ + 'OMP_NUM_THREADS', 'OPENBLAS_NUM_THREADS', 'MKL_NUM_THREADS', + 'BLIS_NUM_THREADS', 'VECLIB_MAXIMUM_THREADS', 'NUMBA_NUM_THREADS', + 'NUMEXPR_NUM_THREADS', + ] + + TBB_ENABLE_IPC_VAR = "ENABLE_IPC" + + @abstractmethod + def effective_n_jobs(self, n_jobs): + """Determine the number of jobs that can actually run in parallel + + n_jobs is the number of workers requested by the callers. Passing + n_jobs=-1 means requesting all available workers for instance matching + the number of CPU cores on the worker host(s). + + This method should return a guesstimate of the number of workers that + can actually perform work concurrently. The primary use case is to make + it possible for the caller to know in how many chunks to slice the + work. + + In general working on larger data chunks is more efficient (less + scheduling overhead and better use of CPU cache prefetching heuristics) + as long as all the workers have enough work to do. + """ + + @abstractmethod + def apply_async(self, func, callback=None): + """Schedule a func to be run""" + + def configure(self, n_jobs=1, parallel=None, prefer=None, require=None, + **backend_args): + """Reconfigure the backend and return the number of workers. + + This makes it possible to reuse an existing backend instance for + successive independent calls to Parallel with different parameters. 
+ """ + self.parallel = parallel + return self.effective_n_jobs(n_jobs) + + def start_call(self): + """Call-back method called at the beginning of a Parallel call""" + + def stop_call(self): + """Call-back method called at the end of a Parallel call""" + + def terminate(self): + """Shutdown the workers and free the shared memory.""" + + def compute_batch_size(self): + """Determine the optimal batch size""" + return 1 + + def batch_completed(self, batch_size, duration): + """Callback indicate how long it took to run a batch""" + + def get_exceptions(self): + """List of exception types to be captured.""" + return [] + + def abort_everything(self, ensure_ready=True): + """Abort any running tasks + + This is called when an exception has been raised when executing a tasks + and all the remaining tasks will be ignored and can therefore be + aborted to spare computation resources. + + If ensure_ready is True, the backend should be left in an operating + state as future tasks might be re-submitted via that same backend + instance. + + If ensure_ready is False, the implementer of this method can decide + to leave the backend in a closed / terminated state as no new task + are expected to be submitted to this backend. + + Setting ensure_ready to False is an optimization that can be leveraged + when aborting tasks via killing processes from a local process pool + managed by the backend it-self: if we expect no new tasks, there is no + point in re-creating new workers. + """ + # Does nothing by default: to be overridden in subclasses when + # canceling tasks is possible. + pass + + def get_nested_backend(self): + """Backend instance to be used by nested Parallel calls. + + By default a thread-based backend is used for the first level of + nesting. Beyond, switch to sequential backend to avoid spawning too + many threads on the host. + """ + nesting_level = getattr(self, 'nesting_level', 0) + 1 + if nesting_level > 1: + return SequentialBackend(nesting_level=nesting_level), None + else: + return ThreadingBackend(nesting_level=nesting_level), None + + @contextlib.contextmanager + def retrieval_context(self): + """Context manager to manage an execution context. + + Calls to Parallel.retrieve will be made inside this context. + + By default, this does nothing. It may be useful for subclasses to + handle nested parallelism. In particular, it may be required to avoid + deadlocks if a backend manages a fixed number of workers, when those + workers may be asked to do nested Parallel calls. Without + 'retrieval_context' this could lead to deadlock, as all the workers + managed by the backend may be "busy" waiting for the nested parallel + calls to finish, but the backend has no free workers to execute those + tasks. + """ + yield + + def _prepare_worker_env(self, n_jobs): + """Return environment variables limiting threadpools in external libs. + + This function return a dict containing environment variables to pass + when creating a pool of process. These environment variables limit the + number of threads to `n_threads` for OpenMP, MKL, Accelerated and + OpenBLAS libraries in the child processes. + """ + explicit_n_threads = self.inner_max_num_threads + default_n_threads = str(max(cpu_count() // n_jobs, 1)) + + # Set the inner environment variables to self.inner_max_num_threads if + # it is given. Else, default to cpu_count // n_jobs unless the variable + # is already present in the parent process environment. 
+ env = {} + for var in self.MAX_NUM_THREADS_VARS: + if explicit_n_threads is None: + var_value = os.environ.get(var, None) + if var_value is None: + var_value = default_n_threads + else: + var_value = str(explicit_n_threads) + + env[var] = var_value + + if self.TBB_ENABLE_IPC_VAR not in os.environ: + # To avoid over-subscription when using TBB, let the TBB schedulers + # use Inter Process Communication to coordinate: + env[self.TBB_ENABLE_IPC_VAR] = "1" + return env + + @staticmethod + def in_main_thread(): + return isinstance(threading.current_thread(), threading._MainThread) + + +class SequentialBackend(ParallelBackendBase): + """A ParallelBackend which will execute all batches sequentially. + + Does not use/create any threading objects, and hence has minimal + overhead. Used when n_jobs == 1. + """ + + uses_threads = True + supports_sharedmem = True + + def effective_n_jobs(self, n_jobs): + """Determine the number of jobs which are going to run in parallel""" + if n_jobs == 0: + raise ValueError('n_jobs == 0 in Parallel has no meaning') + return 1 + + def apply_async(self, func, callback=None): + """Schedule a func to be run""" + result = ImmediateResult(func) + if callback: + callback(result) + return result + + def get_nested_backend(self): + # import is not top level to avoid cyclic import errors. + from .parallel import get_active_backend + + # SequentialBackend should neither change the nesting level, the + # default backend or the number of jobs. Just return the current one. + return get_active_backend() + + +class PoolManagerMixin(object): + """A helper class for managing pool of workers.""" + + _pool = None + + def effective_n_jobs(self, n_jobs): + """Determine the number of jobs which are going to run in parallel""" + if n_jobs == 0: + raise ValueError('n_jobs == 0 in Parallel has no meaning') + elif mp is None or n_jobs is None: + # multiprocessing is not available or disabled, fallback + # to sequential mode + return 1 + elif n_jobs < 0: + n_jobs = max(cpu_count() + 1 + n_jobs, 1) + return n_jobs + + def terminate(self): + """Shutdown the process or thread pool""" + if self._pool is not None: + self._pool.close() + self._pool.terminate() # terminate does a join() + self._pool = None + + def _get_pool(self): + """Used by apply_async to make it possible to implement lazy init""" + return self._pool + + def apply_async(self, func, callback=None): + """Schedule a func to be run""" + return self._get_pool().apply_async( + SafeFunction(func), callback=callback) + + def abort_everything(self, ensure_ready=True): + """Shutdown the pool and restart a new one with the same parameters""" + self.terminate() + if ensure_ready: + self.configure(n_jobs=self.parallel.n_jobs, parallel=self.parallel, + **self.parallel._backend_args) + + +class AutoBatchingMixin(object): + """A helper class for automagically batching jobs.""" + + # In seconds, should be big enough to hide multiprocessing dispatching + # overhead. + # This settings was found by running benchmarks/bench_auto_batching.py + # with various parameters on various platforms. + MIN_IDEAL_BATCH_DURATION = .2 + + # Should not be too high to avoid stragglers: long jobs running alone + # on a single worker while other workers have no work to process any more. 
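# (Editor's note, illustrative only: together the MIN/MAX constants define
#  the target window for the smoothed batch duration. A rough sketch of the
#  adaptation rule implemented in ``compute_batch_size`` below:
#
#      if 0 < smoothed_duration < MIN_IDEAL_BATCH_DURATION:        # too fast
#          batch_size = min(2 * old_size,
#                           2 * int(old_size * MIN_IDEAL_BATCH_DURATION
#                                   / smoothed_duration))
#      elif smoothed_duration > MAX_IDEAL_BATCH_DURATION and old_size >= 2:
#          batch_size = max(1, 2 * int(old_size * MIN_IDEAL_BATCH_DURATION
#                                      / smoothed_duration))       # too slow
#
#  i.e. batches grow by at most 2x per adjustment and shrink quickly towards
#  the 0.2 s target.)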
+ MAX_IDEAL_BATCH_DURATION = 2 + + # Batching counters default values + _DEFAULT_EFFECTIVE_BATCH_SIZE = 1 + _DEFAULT_SMOOTHED_BATCH_DURATION = 0.0 + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._effective_batch_size = self._DEFAULT_EFFECTIVE_BATCH_SIZE + self._smoothed_batch_duration = self._DEFAULT_SMOOTHED_BATCH_DURATION + + def compute_batch_size(self): + """Determine the optimal batch size""" + old_batch_size = self._effective_batch_size + batch_duration = self._smoothed_batch_duration + if (batch_duration > 0 and + batch_duration < self.MIN_IDEAL_BATCH_DURATION): + # The current batch size is too small: the duration of the + # processing of a batch of task is not large enough to hide + # the scheduling overhead. + ideal_batch_size = int(old_batch_size * + self.MIN_IDEAL_BATCH_DURATION / + batch_duration) + # Multiply by two to limit oscilations between min and max. + ideal_batch_size *= 2 + + # dont increase the batch size too fast to limit huge batch sizes + # potentially leading to starving worker + batch_size = min(2 * old_batch_size, ideal_batch_size) + + batch_size = max(batch_size, 1) + + self._effective_batch_size = batch_size + if self.parallel.verbose >= 10: + self.parallel._print( + "Batch computation too fast (%.4fs.) " + "Setting batch_size=%d.", (batch_duration, batch_size)) + elif (batch_duration > self.MAX_IDEAL_BATCH_DURATION and + old_batch_size >= 2): + # The current batch size is too big. If we schedule overly long + # running batches some CPUs might wait with nothing left to do + # while a couple of CPUs a left processing a few long running + # batches. Better reduce the batch size a bit to limit the + # likelihood of scheduling such stragglers. + + # decrease the batch size quickly to limit potential starving + ideal_batch_size = int( + old_batch_size * self.MIN_IDEAL_BATCH_DURATION / batch_duration + ) + # Multiply by two to limit oscilations between min and max. + batch_size = max(2 * ideal_batch_size, 1) + self._effective_batch_size = batch_size + if self.parallel.verbose >= 10: + self.parallel._print( + "Batch computation too slow (%.4fs.) " + "Setting batch_size=%d.", (batch_duration, batch_size)) + else: + # No batch size adjustment + batch_size = old_batch_size + + if batch_size != old_batch_size: + # Reset estimation of the smoothed mean batch duration: this + # estimate is updated in the multiprocessing apply_async + # CallBack as long as the batch_size is constant. Therefore + # we need to reset the estimate whenever we re-tune the batch + # size. + self._smoothed_batch_duration = \ + self._DEFAULT_SMOOTHED_BATCH_DURATION + + return batch_size + + def batch_completed(self, batch_size, duration): + """Callback indicate how long it took to run a batch""" + if batch_size == self._effective_batch_size: + # Update the smoothed streaming estimate of the duration of a batch + # from dispatch to completion + old_duration = self._smoothed_batch_duration + if old_duration == self._DEFAULT_SMOOTHED_BATCH_DURATION: + # First record of duration for this batch size after the last + # reset. + new_duration = duration + else: + # Update the exponentially weighted average of the duration of + # batch for the current effective size. + new_duration = 0.8 * old_duration + 0.2 * duration + self._smoothed_batch_duration = new_duration + + def reset_batch_stats(self): + """Reset batch statistics to default values. + + This avoids interferences with future jobs. 
+ """ + self._effective_batch_size = self._DEFAULT_EFFECTIVE_BATCH_SIZE + self._smoothed_batch_duration = self._DEFAULT_SMOOTHED_BATCH_DURATION + + +class ThreadingBackend(PoolManagerMixin, ParallelBackendBase): + """A ParallelBackend which will use a thread pool to execute batches in. + + This is a low-overhead backend but it suffers from the Python Global + Interpreter Lock if the called function relies a lot on Python objects. + Mostly useful when the execution bottleneck is a compiled extension that + explicitly releases the GIL (for instance a Cython loop wrapped in a "with + nogil" block or an expensive call to a library such as NumPy). + + The actual thread pool is lazily initialized: the actual thread pool + construction is delayed to the first call to apply_async. + + ThreadingBackend is used as the default backend for nested calls. + """ + + supports_timeout = True + uses_threads = True + supports_sharedmem = True + + def configure(self, n_jobs=1, parallel=None, **backend_args): + """Build a process or thread pool and return the number of workers""" + n_jobs = self.effective_n_jobs(n_jobs) + if n_jobs == 1: + # Avoid unnecessary overhead and use sequential backend instead. + raise FallbackToBackend( + SequentialBackend(nesting_level=self.nesting_level)) + self.parallel = parallel + self._n_jobs = n_jobs + return n_jobs + + def _get_pool(self): + """Lazily initialize the thread pool + + The actual pool of worker threads is only initialized at the first + call to apply_async. + """ + if self._pool is None: + self._pool = ThreadPool(self._n_jobs) + return self._pool + + +class MultiprocessingBackend(PoolManagerMixin, AutoBatchingMixin, + ParallelBackendBase): + """A ParallelBackend which will use a multiprocessing.Pool. + + Will introduce some communication and memory overhead when exchanging + input and output data with the with the worker Python processes. + However, does not suffer from the Python Global Interpreter Lock. + """ + + supports_timeout = True + + def effective_n_jobs(self, n_jobs): + """Determine the number of jobs which are going to run in parallel. + + This also checks if we are attempting to create a nested parallel + loop. 
+ """ + if mp is None: + return 1 + + if mp.current_process().daemon: + # Daemonic processes cannot have children + if n_jobs != 1: + warnings.warn( + 'Multiprocessing-backed parallel loops cannot be nested,' + ' setting n_jobs=1', + stacklevel=3) + return 1 + + if process_executor._CURRENT_DEPTH > 0: + # Mixing loky and multiprocessing in nested loop is not supported + if n_jobs != 1: + warnings.warn( + 'Multiprocessing-backed parallel loops cannot be nested,' + ' below loky, setting n_jobs=1', + stacklevel=3) + return 1 + + elif not (self.in_main_thread() or self.nesting_level == 0): + # Prevent posix fork inside in non-main posix threads + if n_jobs != 1: + warnings.warn( + 'Multiprocessing-backed parallel loops cannot be nested' + ' below threads, setting n_jobs=1', + stacklevel=3) + return 1 + + return super(MultiprocessingBackend, self).effective_n_jobs(n_jobs) + + def configure(self, n_jobs=1, parallel=None, prefer=None, require=None, + **memmappingpool_args): + """Build a process or thread pool and return the number of workers""" + n_jobs = self.effective_n_jobs(n_jobs) + if n_jobs == 1: + raise FallbackToBackend( + SequentialBackend(nesting_level=self.nesting_level)) + + # Make sure to free as much memory as possible before forking + gc.collect() + self._pool = MemmappingPool(n_jobs, **memmappingpool_args) + self.parallel = parallel + return n_jobs + + def terminate(self): + """Shutdown the process or thread pool""" + super(MultiprocessingBackend, self).terminate() + self.reset_batch_stats() + + +class LokyBackend(AutoBatchingMixin, ParallelBackendBase): + """Managing pool of workers with loky instead of multiprocessing.""" + + supports_timeout = True + supports_inner_max_num_threads = True + + def configure(self, n_jobs=1, parallel=None, prefer=None, require=None, + idle_worker_timeout=300, **memmappingexecutor_args): + """Build a process executor and return the number of workers""" + n_jobs = self.effective_n_jobs(n_jobs) + if n_jobs == 1: + raise FallbackToBackend( + SequentialBackend(nesting_level=self.nesting_level)) + + self._workers = get_memmapping_executor( + n_jobs, timeout=idle_worker_timeout, + env=self._prepare_worker_env(n_jobs=n_jobs), + context_id=parallel._id, **memmappingexecutor_args) + self.parallel = parallel + return n_jobs + + def effective_n_jobs(self, n_jobs): + """Determine the number of jobs which are going to run in parallel""" + if n_jobs == 0: + raise ValueError('n_jobs == 0 in Parallel has no meaning') + elif mp is None or n_jobs is None: + # multiprocessing is not available or disabled, fallback + # to sequential mode + return 1 + elif mp.current_process().daemon: + # Daemonic processes cannot have children + if n_jobs != 1: + warnings.warn( + 'Loky-backed parallel loops cannot be called in a' + ' multiprocessing, setting n_jobs=1', + stacklevel=3) + return 1 + elif not (self.in_main_thread() or self.nesting_level == 0): + # Prevent posix fork inside in non-main posix threads + if n_jobs != 1: + warnings.warn( + 'Loky-backed parallel loops cannot be nested below ' + 'threads, setting n_jobs=1', + stacklevel=3) + return 1 + elif n_jobs < 0: + n_jobs = max(cpu_count() + 1 + n_jobs, 1) + return n_jobs + + def apply_async(self, func, callback=None): + """Schedule a func to be run""" + future = self._workers.submit(SafeFunction(func)) + future.get = functools.partial(self.wrap_future_result, future) + if callback is not None: + future.add_done_callback(callback) + return future + + @staticmethod + def wrap_future_result(future, timeout=None): + 
"""Wrapper for Future.result to implement the same behaviour as + AsyncResults.get from multiprocessing.""" + try: + return future.result(timeout=timeout) + except CfTimeoutError as e: + raise TimeoutError from e + + def terminate(self): + if self._workers is not None: + # Don't terminate the workers as we want to reuse them in later + # calls, but cleanup the temporary resources that the Parallel call + # created. This 'hack' requires a private, low-level operation. + self._workers._temp_folder_manager._unlink_temporary_resources( + context_id=self.parallel._id + ) + self._workers = None + + self.reset_batch_stats() + + def abort_everything(self, ensure_ready=True): + """Shutdown the workers and restart a new one with the same parameters + """ + self._workers.terminate(kill_workers=True) + self._workers = None + + if ensure_ready: + self.configure(n_jobs=self.parallel.n_jobs, parallel=self.parallel) + + +class ImmediateResult(object): + def __init__(self, batch): + # Don't delay the application, to avoid keeping the input + # arguments in memory + self.results = batch() + + def get(self): + return self.results + + +class SafeFunction(object): + """Wrapper that handles the serialization of exception tracebacks. + + TODO python2_drop: check whether SafeFunction is still needed since we + dropped support for Python 2. If not needed anymore it should be + deprecated. + + If an exception is triggered when calling the inner function, a copy of + the full traceback is captured to make it possible to serialize + it so that it can be rendered in a different Python process. + + """ + def __init__(self, func): + self.func = func + + def __call__(self, *args, **kwargs): + try: + return self.func(*args, **kwargs) + except KeyboardInterrupt as e: + # We capture the KeyboardInterrupt and reraise it as + # something different, as multiprocessing does not + # interrupt processing for a KeyboardInterrupt + raise WorkerInterrupt() from e + except BaseException: + # Rely on Python 3 built-in Remote Traceback reporting + raise + + +class FallbackToBackend(Exception): + """Raised when configuration should fallback to another backend""" + + def __init__(self, backend): + self.backend = backend diff --git a/minor_project/lib/python3.6/site-packages/joblib/_store_backends.py b/minor_project/lib/python3.6/site-packages/joblib/_store_backends.py new file mode 100644 index 0000000..d4389ed --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/_store_backends.py @@ -0,0 +1,414 @@ +"""Storage providers backends for Memory caching.""" + +import re +import os +import os.path +import datetime +import json +import shutil +import warnings +import collections +import operator +import threading +from abc import ABCMeta, abstractmethod + +from .backports import concurrency_safe_rename +from .disk import mkdirp, memstr_to_bytes, rm_subdirs +from . 
import numpy_pickle + +CacheItemInfo = collections.namedtuple('CacheItemInfo', + 'path size last_access') + + +def concurrency_safe_write(object_to_write, filename, write_func): + """Writes an object into a unique file in a concurrency-safe way.""" + thread_id = id(threading.current_thread()) + temporary_filename = '{}.thread-{}-pid-{}'.format( + filename, thread_id, os.getpid()) + write_func(object_to_write, temporary_filename) + + return temporary_filename + + +class StoreBackendBase(metaclass=ABCMeta): + """Helper Abstract Base Class which defines all methods that + a StorageBackend must implement.""" + + location = None + + @abstractmethod + def _open_item(self, f, mode): + """Opens an item on the store and return a file-like object. + + This method is private and only used by the StoreBackendMixin object. + + Parameters + ---------- + f: a file-like object + The file-like object where an item is stored and retrieved + mode: string, optional + the mode in which the file-like object is opened allowed valued are + 'rb', 'wb' + + Returns + ------- + a file-like object + """ + + @abstractmethod + def _item_exists(self, location): + """Checks if an item location exists in the store. + + This method is private and only used by the StoreBackendMixin object. + + Parameters + ---------- + location: string + The location of an item. On a filesystem, this corresponds to the + absolute path, including the filename, of a file. + + Returns + ------- + True if the item exists, False otherwise + """ + + @abstractmethod + def _move_item(self, src, dst): + """Moves an item from src to dst in the store. + + This method is private and only used by the StoreBackendMixin object. + + Parameters + ---------- + src: string + The source location of an item + dst: string + The destination location of an item + """ + + @abstractmethod + def create_location(self, location): + """Creates a location on the store. + + Parameters + ---------- + location: string + The location in the store. On a filesystem, this corresponds to a + directory. + """ + + @abstractmethod + def clear_location(self, location): + """Clears a location on the store. + + Parameters + ---------- + location: string + The location in the store. On a filesystem, this corresponds to a + directory or a filename absolute path + """ + + @abstractmethod + def get_items(self): + """Returns the whole list of items available in the store. + + Returns + ------- + The list of items identified by their ids (e.g filename in a + filesystem). + """ + + @abstractmethod + def configure(self, location, verbose=0, backend_options=dict()): + """Configures the store. + + Parameters + ---------- + location: string + The base location used by the store. On a filesystem, this + corresponds to a directory. + verbose: int + The level of verbosity of the store + backend_options: dict + Contains a dictionnary of named paremeters used to configure the + store backend. + """ + + +class StoreBackendMixin(object): + """Class providing all logic for managing the store in a generic way. + + The StoreBackend subclass has to implement 3 methods: create_location, + clear_location and configure. The StoreBackend also has to provide + a private _open_item, _item_exists and _move_item methods. The _open_item + method has to have the same signature as the builtin open and return a + file-like object. 
+ """ + + def load_item(self, path, verbose=1, msg=None): + """Load an item from the store given its path as a list of + strings.""" + full_path = os.path.join(self.location, *path) + + if verbose > 1: + if verbose < 10: + print('{0}...'.format(msg)) + else: + print('{0} from {1}'.format(msg, full_path)) + + mmap_mode = (None if not hasattr(self, 'mmap_mode') + else self.mmap_mode) + + filename = os.path.join(full_path, 'output.pkl') + if not self._item_exists(filename): + raise KeyError("Non-existing item (may have been " + "cleared).\nFile %s does not exist" % filename) + + # file-like object cannot be used when mmap_mode is set + if mmap_mode is None: + with self._open_item(filename, "rb") as f: + item = numpy_pickle.load(f) + else: + item = numpy_pickle.load(filename, mmap_mode=mmap_mode) + return item + + def dump_item(self, path, item, verbose=1): + """Dump an item in the store at the path given as a list of + strings.""" + try: + item_path = os.path.join(self.location, *path) + if not self._item_exists(item_path): + self.create_location(item_path) + filename = os.path.join(item_path, 'output.pkl') + if verbose > 10: + print('Persisting in %s' % item_path) + + def write_func(to_write, dest_filename): + with self._open_item(dest_filename, "wb") as f: + numpy_pickle.dump(to_write, f, + compress=self.compress) + + self._concurrency_safe_write(item, filename, write_func) + except: # noqa: E722 + " Race condition in the creation of the directory " + + def clear_item(self, path): + """Clear the item at the path, given as a list of strings.""" + item_path = os.path.join(self.location, *path) + if self._item_exists(item_path): + self.clear_location(item_path) + + def contains_item(self, path): + """Check if there is an item at the path, given as a list of + strings""" + item_path = os.path.join(self.location, *path) + filename = os.path.join(item_path, 'output.pkl') + + return self._item_exists(filename) + + def get_item_info(self, path): + """Return information about item.""" + return {'location': os.path.join(self.location, + *path)} + + def get_metadata(self, path): + """Return actual metadata of an item.""" + try: + item_path = os.path.join(self.location, *path) + filename = os.path.join(item_path, 'metadata.json') + with self._open_item(filename, 'rb') as f: + return json.loads(f.read().decode('utf-8')) + except: # noqa: E722 + return {} + + def store_metadata(self, path, metadata): + """Store metadata of a computation.""" + try: + item_path = os.path.join(self.location, *path) + self.create_location(item_path) + filename = os.path.join(item_path, 'metadata.json') + + def write_func(to_write, dest_filename): + with self._open_item(dest_filename, "wb") as f: + f.write(json.dumps(to_write).encode('utf-8')) + + self._concurrency_safe_write(metadata, filename, write_func) + except: # noqa: E722 + pass + + def contains_path(self, path): + """Check cached function is available in store.""" + func_path = os.path.join(self.location, *path) + return self.object_exists(func_path) + + def clear_path(self, path): + """Clear all items with a common path in the store.""" + func_path = os.path.join(self.location, *path) + if self._item_exists(func_path): + self.clear_location(func_path) + + def store_cached_func_code(self, path, func_code=None): + """Store the code of the cached function.""" + func_path = os.path.join(self.location, *path) + if not self._item_exists(func_path): + self.create_location(func_path) + + if func_code is not None: + filename = os.path.join(func_path, "func_code.py") + with 
self._open_item(filename, 'wb') as f: + f.write(func_code.encode('utf-8')) + + def get_cached_func_code(self, path): + """Store the code of the cached function.""" + path += ['func_code.py', ] + filename = os.path.join(self.location, *path) + try: + with self._open_item(filename, 'rb') as f: + return f.read().decode('utf-8') + except: # noqa: E722 + raise + + def get_cached_func_info(self, path): + """Return information related to the cached function if it exists.""" + return {'location': os.path.join(self.location, *path)} + + def clear(self): + """Clear the whole store content.""" + self.clear_location(self.location) + + def reduce_store_size(self, bytes_limit): + """Reduce store size to keep it under the given bytes limit.""" + items_to_delete = self._get_items_to_delete(bytes_limit) + + for item in items_to_delete: + if self.verbose > 10: + print('Deleting item {0}'.format(item)) + try: + self.clear_location(item.path) + except OSError: + # Even with ignore_errors=True shutil.rmtree can raise OSError + # with: + # [Errno 116] Stale file handle if another process has deleted + # the folder already. + pass + + def _get_items_to_delete(self, bytes_limit): + """Get items to delete to keep the store under a size limit.""" + if isinstance(bytes_limit, str): + bytes_limit = memstr_to_bytes(bytes_limit) + + items = self.get_items() + size = sum(item.size for item in items) + + to_delete_size = size - bytes_limit + if to_delete_size < 0: + return [] + + # We want to delete first the cache items that were accessed a + # long time ago + items.sort(key=operator.attrgetter('last_access')) + + items_to_delete = [] + size_so_far = 0 + + for item in items: + if size_so_far > to_delete_size: + break + + items_to_delete.append(item) + size_so_far += item.size + + return items_to_delete + + def _concurrency_safe_write(self, to_write, filename, write_func): + """Writes an object into a file in a concurrency-safe way.""" + temporary_filename = concurrency_safe_write(to_write, + filename, write_func) + self._move_item(temporary_filename, filename) + + def __repr__(self): + """Printable representation of the store location.""" + return '{class_name}(location="{location}")'.format( + class_name=self.__class__.__name__, location=self.location) + + +class FileSystemStoreBackend(StoreBackendBase, StoreBackendMixin): + """A StoreBackend used with local or network file systems.""" + + _open_item = staticmethod(open) + _item_exists = staticmethod(os.path.exists) + _move_item = staticmethod(concurrency_safe_rename) + + def clear_location(self, location): + """Delete location on store.""" + if (location == self.location): + rm_subdirs(location) + else: + shutil.rmtree(location, ignore_errors=True) + + def create_location(self, location): + """Create object location on store""" + mkdirp(location) + + def get_items(self): + """Returns the whole list of items available in the store.""" + items = [] + + for dirpath, _, filenames in os.walk(self.location): + is_cache_hash_dir = re.match('[a-f0-9]{32}', + os.path.basename(dirpath)) + + if is_cache_hash_dir: + output_filename = os.path.join(dirpath, 'output.pkl') + try: + last_access = os.path.getatime(output_filename) + except OSError: + try: + last_access = os.path.getatime(dirpath) + except OSError: + # The directory has already been deleted + continue + + last_access = datetime.datetime.fromtimestamp(last_access) + try: + full_filenames = [os.path.join(dirpath, fn) + for fn in filenames] + dirsize = sum(os.path.getsize(fn) + for fn in full_filenames) + except OSError: + 
# Either output_filename or one of the files in + # dirpath does not exist any more. We assume this + # directory is being cleaned by another process already + continue + + items.append(CacheItemInfo(dirpath, dirsize, + last_access)) + + return items + + def configure(self, location, verbose=1, backend_options=None): + """Configure the store backend. + + For this backend, valid store options are 'compress' and 'mmap_mode' + """ + if backend_options is None: + backend_options = {} + + # setup location directory + self.location = location + if not os.path.exists(self.location): + mkdirp(self.location) + + # item can be stored compressed for faster I/O + self.compress = backend_options.get('compress', False) + + # FileSystemStoreBackend can be used with mmap_mode options under + # certain conditions. + mmap_mode = backend_options.get('mmap_mode') + if self.compress and mmap_mode is not None: + warnings.warn('Compressed items cannot be memmapped in a ' + 'filesystem store. Option will be ignored.', + stacklevel=2) + + self.mmap_mode = mmap_mode + self.verbose = verbose diff --git a/minor_project/lib/python3.6/site-packages/joblib/backports.py b/minor_project/lib/python3.6/site-packages/joblib/backports.py new file mode 100644 index 0000000..cb2f723 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/backports.py @@ -0,0 +1,78 @@ +""" +Backports of fixes for joblib dependencies +""" +import os +import time + +from distutils.version import LooseVersion +from os.path import basename +from multiprocessing import util + + +try: + import numpy as np + + def make_memmap(filename, dtype='uint8', mode='r+', offset=0, + shape=None, order='C', unlink_on_gc_collect=False): + """Custom memmap constructor compatible with numpy.memmap. + + This function: + - is a backport the numpy memmap offset fix (See + https://github.com/numpy/numpy/pull/8443 for more details. + The numpy fix is available starting numpy 1.13) + - adds ``unlink_on_gc_collect``, which specifies explicitly whether + the process re-constructing the memmap owns a reference to the + underlying file. If set to True, it adds a finalizer to the + newly-created memmap that sends a maybe_unlink request for the + memmaped file to resource_tracker. + """ + util.debug( + "[MEMMAP READ] creating a memmap (shape {}, filename {}, " + "pid {})".format(shape, basename(filename), os.getpid()) + ) + + mm = np.memmap(filename, dtype=dtype, mode=mode, offset=offset, + shape=shape, order=order) + if LooseVersion(np.__version__) < '1.13': + mm.offset = offset + if unlink_on_gc_collect: + from ._memmapping_reducer import add_maybe_unlink_finalizer + add_maybe_unlink_finalizer(mm) + return mm +except ImportError: + def make_memmap(filename, dtype='uint8', mode='r+', offset=0, + shape=None, order='C', unlink_on_gc_collect=False): + raise NotImplementedError( + "'joblib.backports.make_memmap' should not be used " + 'if numpy is not installed.') + + +if os.name == 'nt': + # https://github.com/joblib/joblib/issues/540 + access_denied_errors = (5, 13) + from os import replace + + def concurrency_safe_rename(src, dst): + """Renames ``src`` into ``dst`` overwriting ``dst`` if it exists. + + On Windows os.replace can yield permission errors if executed by two + different processes. 
+ """ + max_sleep_time = 1 + total_sleep_time = 0 + sleep_time = 0.001 + while total_sleep_time < max_sleep_time: + try: + replace(src, dst) + break + except Exception as exc: + if getattr(exc, 'winerror', None) in access_denied_errors: + time.sleep(sleep_time) + total_sleep_time += sleep_time + sleep_time *= 2 + else: + raise + else: + raise +else: + from os import replace as concurrency_safe_rename # noqa diff --git a/minor_project/lib/python3.6/site-packages/joblib/compressor.py b/minor_project/lib/python3.6/site-packages/joblib/compressor.py new file mode 100644 index 0000000..0dbd3dc --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/compressor.py @@ -0,0 +1,570 @@ +"""Classes and functions for managing compressors.""" + +import io +import zlib +from distutils.version import LooseVersion + +try: + from threading import RLock +except ImportError: + from dummy_threading import RLock + +try: + import bz2 +except ImportError: + bz2 = None + +try: + import lz4 + from lz4.frame import LZ4FrameFile +except ImportError: + lz4 = None + +try: + import lzma +except ImportError: + lzma = None + + +LZ4_NOT_INSTALLED_ERROR = ('LZ4 is not installed. Install it with pip: ' + 'https://python-lz4.readthedocs.io/') + +# Registered compressors +_COMPRESSORS = {} + +# Magic numbers of supported compression file formats. +_ZFILE_PREFIX = b'ZF' # used with pickle files created before 0.9.3. +_ZLIB_PREFIX = b'\x78' +_GZIP_PREFIX = b'\x1f\x8b' +_BZ2_PREFIX = b'BZ' +_XZ_PREFIX = b'\xfd\x37\x7a\x58\x5a' +_LZMA_PREFIX = b'\x5d\x00' +_LZ4_PREFIX = b'\x04\x22\x4D\x18' + + +def register_compressor(compressor_name, compressor, + force=False): + """Register a new compressor. + + Parameters + ----------- + compressor_name: str. + The name of the compressor. + compressor: CompressorWrapper + An instance of a 'CompressorWrapper'. + """ + global _COMPRESSORS + if not isinstance(compressor_name, str): + raise ValueError("Compressor name should be a string, " + "'{}' given.".format(compressor_name)) + + if not isinstance(compressor, CompressorWrapper): + raise ValueError("Compressor should implement the CompressorWrapper " + "interface, '{}' given.".format(compressor)) + + if (compressor.fileobj_factory is not None and + (not hasattr(compressor.fileobj_factory, 'read') or + not hasattr(compressor.fileobj_factory, 'write') or + not hasattr(compressor.fileobj_factory, 'seek') or + not hasattr(compressor.fileobj_factory, 'tell'))): + raise ValueError("Compressor 'fileobj_factory' attribute should " + "implement the file object interface, '{}' given." + .format(compressor.fileobj_factory)) + + if compressor_name in _COMPRESSORS and not force: + raise ValueError("Compressor '{}' already registered." + .format(compressor_name)) + + _COMPRESSORS[compressor_name] = compressor + + +class CompressorWrapper(): + """A wrapper around a compressor file object. + + Attributes + ---------- + obj: a file-like object + The object must implement the buffer interface and will be used + internally to compress/decompress the data. + prefix: bytestring + A bytestring corresponding to the magic number that identifies the + file format associated to the compressor. + extention: str + The file extension used to automatically select this compressor during + a dump to a file. 
+ """ + + def __init__(self, obj, prefix=b'', extension=''): + self.fileobj_factory = obj + self.prefix = prefix + self.extension = extension + + def compressor_file(self, fileobj, compresslevel=None): + """Returns an instance of a compressor file object.""" + if compresslevel is None: + return self.fileobj_factory(fileobj, 'wb') + else: + return self.fileobj_factory(fileobj, 'wb', + compresslevel=compresslevel) + + def decompressor_file(self, fileobj): + """Returns an instance of a decompressor file object.""" + return self.fileobj_factory(fileobj, 'rb') + + +class BZ2CompressorWrapper(CompressorWrapper): + + prefix = _BZ2_PREFIX + extension = '.bz2' + + def __init__(self): + if bz2 is not None: + self.fileobj_factory = bz2.BZ2File + else: + self.fileobj_factory = None + + def _check_versions(self): + if bz2 is None: + raise ValueError('bz2 module is not compiled on your python ' + 'standard library.') + + def compressor_file(self, fileobj, compresslevel=None): + """Returns an instance of a compressor file object.""" + self._check_versions() + if compresslevel is None: + return self.fileobj_factory(fileobj, 'wb') + else: + return self.fileobj_factory(fileobj, 'wb', + compresslevel=compresslevel) + + def decompressor_file(self, fileobj): + """Returns an instance of a decompressor file object.""" + self._check_versions() + fileobj = self.fileobj_factory(fileobj, 'rb') + return fileobj + + +class LZMACompressorWrapper(CompressorWrapper): + + prefix = _LZMA_PREFIX + extension = '.lzma' + _lzma_format_name = 'FORMAT_ALONE' + + def __init__(self): + if lzma is not None: + self.fileobj_factory = lzma.LZMAFile + self._lzma_format = getattr(lzma, self._lzma_format_name) + else: + self.fileobj_factory = None + + def _check_versions(self): + if lzma is None: + raise ValueError('lzma module is not compiled on your python ' + 'standard library.') + + def compressor_file(self, fileobj, compresslevel=None): + """Returns an instance of a compressor file object.""" + if compresslevel is None: + return self.fileobj_factory(fileobj, 'wb', + format=self._lzma_format) + else: + return self.fileobj_factory(fileobj, 'wb', + format=self._lzma_format, + preset=compresslevel) + + def decompressor_file(self, fileobj): + """Returns an instance of a decompressor file object.""" + return lzma.LZMAFile(fileobj, 'rb') + + +class XZCompressorWrapper(LZMACompressorWrapper): + + prefix = _XZ_PREFIX + extension = '.xz' + _lzma_format_name = 'FORMAT_XZ' + + +class LZ4CompressorWrapper(CompressorWrapper): + + prefix = _LZ4_PREFIX + extension = '.lz4' + + def __init__(self): + if lz4 is not None: + self.fileobj_factory = LZ4FrameFile + else: + self.fileobj_factory = None + + def _check_versions(self): + if lz4 is None: + raise ValueError(LZ4_NOT_INSTALLED_ERROR) + lz4_version = lz4.__version__ + if lz4_version.startswith("v"): + lz4_version = lz4_version[1:] + if LooseVersion(lz4_version) < LooseVersion('0.19'): + raise ValueError(LZ4_NOT_INSTALLED_ERROR) + + def compressor_file(self, fileobj, compresslevel=None): + """Returns an instance of a compressor file object.""" + self._check_versions() + if compresslevel is None: + return self.fileobj_factory(fileobj, 'wb') + else: + return self.fileobj_factory(fileobj, 'wb', + compression_level=compresslevel) + + def decompressor_file(self, fileobj): + """Returns an instance of a decompressor file object.""" + self._check_versions() + return self.fileobj_factory(fileobj, 'rb') + + +############################################################################### +# base file 
compression/decompression object definition +_MODE_CLOSED = 0 +_MODE_READ = 1 +_MODE_READ_EOF = 2 +_MODE_WRITE = 3 +_BUFFER_SIZE = 8192 + + +class BinaryZlibFile(io.BufferedIOBase): + """A file object providing transparent zlib (de)compression. + + TODO python2_drop: is it still needed since we dropped Python 2 support A + BinaryZlibFile can act as a wrapper for an existing file object, or refer + directly to a named file on disk. + + Note that BinaryZlibFile provides only a *binary* file interface: data read + is returned as bytes, and data to be written should be given as bytes. + + This object is an adaptation of the BZ2File object and is compatible with + versions of python >= 2.7. + + If filename is a str or bytes object, it gives the name + of the file to be opened. Otherwise, it should be a file object, + which will be used to read or write the compressed data. + + mode can be 'rb' for reading (default) or 'wb' for (over)writing + + If mode is 'wb', compresslevel can be a number between 1 + and 9 specifying the level of compression: 1 produces the least + compression, and 9 produces the most compression. 3 is the default. + """ + + wbits = zlib.MAX_WBITS + + def __init__(self, filename, mode="rb", compresslevel=3): + # This lock must be recursive, so that BufferedIOBase's + # readline(), readlines() and writelines() don't deadlock. + self._lock = RLock() + self._fp = None + self._closefp = False + self._mode = _MODE_CLOSED + self._pos = 0 + self._size = -1 + self.compresslevel = compresslevel + + if not isinstance(compresslevel, int) or not (1 <= compresslevel <= 9): + raise ValueError("'compresslevel' must be an integer " + "between 1 and 9. You provided 'compresslevel={}'" + .format(compresslevel)) + + if mode == "rb": + self._mode = _MODE_READ + self._decompressor = zlib.decompressobj(self.wbits) + self._buffer = b"" + self._buffer_offset = 0 + elif mode == "wb": + self._mode = _MODE_WRITE + self._compressor = zlib.compressobj(self.compresslevel, + zlib.DEFLATED, self.wbits, + zlib.DEF_MEM_LEVEL, 0) + else: + raise ValueError("Invalid mode: %r" % (mode,)) + + if isinstance(filename, str): + self._fp = io.open(filename, mode) + self._closefp = True + elif hasattr(filename, "read") or hasattr(filename, "write"): + self._fp = filename + else: + raise TypeError("filename must be a str or bytes object, " + "or a file") + + def close(self): + """Flush and close the file. + + May be called more than once without error. Once the file is + closed, any other operation on it will raise a ValueError. 
+ """ + with self._lock: + if self._mode == _MODE_CLOSED: + return + try: + if self._mode in (_MODE_READ, _MODE_READ_EOF): + self._decompressor = None + elif self._mode == _MODE_WRITE: + self._fp.write(self._compressor.flush()) + self._compressor = None + finally: + try: + if self._closefp: + self._fp.close() + finally: + self._fp = None + self._closefp = False + self._mode = _MODE_CLOSED + self._buffer = b"" + self._buffer_offset = 0 + + @property + def closed(self): + """True if this file is closed.""" + return self._mode == _MODE_CLOSED + + def fileno(self): + """Return the file descriptor for the underlying file.""" + self._check_not_closed() + return self._fp.fileno() + + def seekable(self): + """Return whether the file supports seeking.""" + return self.readable() and self._fp.seekable() + + def readable(self): + """Return whether the file was opened for reading.""" + self._check_not_closed() + return self._mode in (_MODE_READ, _MODE_READ_EOF) + + def writable(self): + """Return whether the file was opened for writing.""" + self._check_not_closed() + return self._mode == _MODE_WRITE + + # Mode-checking helper functions. + + def _check_not_closed(self): + if self.closed: + fname = getattr(self._fp, 'name', None) + msg = "I/O operation on closed file" + if fname is not None: + msg += " {}".format(fname) + msg += "." + raise ValueError(msg) + + def _check_can_read(self): + if self._mode not in (_MODE_READ, _MODE_READ_EOF): + self._check_not_closed() + raise io.UnsupportedOperation("File not open for reading") + + def _check_can_write(self): + if self._mode != _MODE_WRITE: + self._check_not_closed() + raise io.UnsupportedOperation("File not open for writing") + + def _check_can_seek(self): + if self._mode not in (_MODE_READ, _MODE_READ_EOF): + self._check_not_closed() + raise io.UnsupportedOperation("Seeking is only supported " + "on files open for reading") + if not self._fp.seekable(): + raise io.UnsupportedOperation("The underlying file object " + "does not support seeking") + + # Fill the readahead buffer if it is empty. Returns False on EOF. + def _fill_buffer(self): + if self._mode == _MODE_READ_EOF: + return False + # Depending on the input data, our call to the decompressor may not + # return any data. In this case, try again after reading another block. + while self._buffer_offset == len(self._buffer): + try: + rawblock = (self._decompressor.unused_data or + self._fp.read(_BUFFER_SIZE)) + if not rawblock: + raise EOFError + except EOFError: + # End-of-stream marker and end of file. We're good. + self._mode = _MODE_READ_EOF + self._size = self._pos + return False + else: + self._buffer = self._decompressor.decompress(rawblock) + self._buffer_offset = 0 + return True + + # Read data until EOF. + # If return_data is false, consume the data without returning it. + def _read_all(self, return_data=True): + # The loop assumes that _buffer_offset is 0. Ensure that this is true. + self._buffer = self._buffer[self._buffer_offset:] + self._buffer_offset = 0 + + blocks = [] + while self._fill_buffer(): + if return_data: + blocks.append(self._buffer) + self._pos += len(self._buffer) + self._buffer = b"" + if return_data: + return b"".join(blocks) + + # Read a block of up to n bytes. + # If return_data is false, consume the data without returning it. + def _read_block(self, n_bytes, return_data=True): + # If we have enough data buffered, return immediately. 
+ end = self._buffer_offset + n_bytes + if end <= len(self._buffer): + data = self._buffer[self._buffer_offset: end] + self._buffer_offset = end + self._pos += len(data) + return data if return_data else None + + # The loop assumes that _buffer_offset is 0. Ensure that this is true. + self._buffer = self._buffer[self._buffer_offset:] + self._buffer_offset = 0 + + blocks = [] + while n_bytes > 0 and self._fill_buffer(): + if n_bytes < len(self._buffer): + data = self._buffer[:n_bytes] + self._buffer_offset = n_bytes + else: + data = self._buffer + self._buffer = b"" + if return_data: + blocks.append(data) + self._pos += len(data) + n_bytes -= len(data) + if return_data: + return b"".join(blocks) + + def read(self, size=-1): + """Read up to size uncompressed bytes from the file. + + If size is negative or omitted, read until EOF is reached. + Returns b'' if the file is already at EOF. + """ + with self._lock: + self._check_can_read() + if size == 0: + return b"" + elif size < 0: + return self._read_all() + else: + return self._read_block(size) + + def readinto(self, b): + """Read up to len(b) bytes into b. + + Returns the number of bytes read (0 for EOF). + """ + with self._lock: + return io.BufferedIOBase.readinto(self, b) + + def write(self, data): + """Write a byte string to the file. + + Returns the number of uncompressed bytes written, which is + always len(data). Note that due to buffering, the file on disk + may not reflect the data written until close() is called. + """ + with self._lock: + self._check_can_write() + # Convert data type if called by io.BufferedWriter. + if isinstance(data, memoryview): + data = data.tobytes() + + compressed = self._compressor.compress(data) + self._fp.write(compressed) + self._pos += len(data) + return len(data) + + # Rewind the file to the beginning of the data stream. + def _rewind(self): + self._fp.seek(0, 0) + self._mode = _MODE_READ + self._pos = 0 + self._decompressor = zlib.decompressobj(self.wbits) + self._buffer = b"" + self._buffer_offset = 0 + + def seek(self, offset, whence=0): + """Change the file position. + + The new position is specified by offset, relative to the + position indicated by whence. Values for whence are: + + 0: start of stream (default); offset must not be negative + 1: current stream position + 2: end of stream; offset must not be positive + + Returns the new file position. + + Note that seeking is emulated, so depending on the parameters, + this operation may be extremely slow. + """ + with self._lock: + self._check_can_seek() + + # Recalculate offset as an absolute file position. + if whence == 0: + pass + elif whence == 1: + offset = self._pos + offset + elif whence == 2: + # Seeking relative to EOF - we need to know the file's size. + if self._size < 0: + self._read_all(return_data=False) + offset = self._size + offset + else: + raise ValueError("Invalid value for whence: %s" % (whence,)) + + # Make it so that offset is the number of bytes to skip forward. + if offset < self._pos: + self._rewind() + else: + offset -= self._pos + + # Read and discard data until we reach the desired position. 
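# (Editor's note, illustrative: because seeking is emulated, a backwards
#  seek rewinds to the start of the stream, and forward positioning
#  decompresses and discards the intervening bytes, e.g.
#
#      f.read(10_000_000)
#      f.seek(0)        # cheap: resets the underlying file and decompressor
#      f.seek(5_000)    # re-reads and discards 5000 decompressed bytes
#
#  so random access into large compressed streams can be slow.)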
+ self._read_block(offset, return_data=False) + + return self._pos + + def tell(self): + """Return the current file position.""" + with self._lock: + self._check_not_closed() + return self._pos + + +class ZlibCompressorWrapper(CompressorWrapper): + + def __init__(self): + CompressorWrapper.__init__(self, obj=BinaryZlibFile, + prefix=_ZLIB_PREFIX, extension='.z') + + +class BinaryGzipFile(BinaryZlibFile): + """A file object providing transparent gzip (de)compression. + + If filename is a str or bytes object, it gives the name + of the file to be opened. Otherwise, it should be a file object, + which will be used to read or write the compressed data. + + mode can be 'rb' for reading (default) or 'wb' for (over)writing + + If mode is 'wb', compresslevel can be a number between 1 + and 9 specifying the level of compression: 1 produces the least + compression, and 9 produces the most compression. 3 is the default. + """ + + wbits = 31 # zlib compressor/decompressor wbits value for gzip format. + + +class GzipCompressorWrapper(CompressorWrapper): + + def __init__(self): + CompressorWrapper.__init__(self, obj=BinaryGzipFile, + prefix=_GZIP_PREFIX, extension='.gz') diff --git a/minor_project/lib/python3.6/site-packages/joblib/disk.py b/minor_project/lib/python3.6/site-packages/joblib/disk.py new file mode 100644 index 0000000..3b2735d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/disk.py @@ -0,0 +1,136 @@ +""" +Disk management utilities. +""" + +# Authors: Gael Varoquaux +# Lars Buitinck +# Copyright (c) 2010 Gael Varoquaux +# License: BSD Style, 3 clauses. + + +import os +import sys +import time +import errno +import shutil + +from multiprocessing import util + + +try: + WindowsError +except NameError: + WindowsError = OSError + + +def disk_used(path): + """ Return the disk usage in a directory.""" + size = 0 + for file in os.listdir(path) + ['.']: + stat = os.stat(os.path.join(path, file)) + if hasattr(stat, 'st_blocks'): + size += stat.st_blocks * 512 + else: + # on some platform st_blocks is not available (e.g., Windows) + # approximate by rounding to next multiple of 512 + size += (stat.st_size // 512 + 1) * 512 + # We need to convert to int to avoid having longs on some systems (we + # don't want longs to avoid problems we SQLite) + return int(size / 1024.) + + +def memstr_to_bytes(text): + """ Convert a memory text to its value in bytes. + """ + kilo = 1024 + units = dict(K=kilo, M=kilo ** 2, G=kilo ** 3) + try: + size = int(units[text[-1]] * float(text[:-1])) + except (KeyError, ValueError) as e: + raise ValueError( + "Invalid literal for size give: %s (type %s) should be " + "alike '10G', '500M', '50K'." % (text, type(text))) from e + return size + + +def mkdirp(d): + """Ensure directory d exists (like mkdir -p on Unix) + No guarantee that the directory is writable. + """ + try: + os.makedirs(d) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + +# if a rmtree operation fails in rm_subdirs, wait for this much time (in secs), +# then retry up to RM_SUBDIRS_N_RETRY times. If it still fails, raise the +# exception. this mecanism ensures that the sub-process gc have the time to +# collect and close the memmaps before we fail. +RM_SUBDIRS_RETRY_TIME = 0.1 +RM_SUBDIRS_N_RETRY = 5 + + +def rm_subdirs(path, onerror=None): + """Remove all subdirectories in this path. + + The directory indicated by `path` is left in place, and its subdirectories + are erased. 
+ + If onerror is set, it is called to handle the error with arguments (func, + path, exc_info) where func is os.listdir, os.remove, or os.rmdir; + path is the argument to that function that caused it to fail; and + exc_info is a tuple returned by sys.exc_info(). If onerror is None, + an exception is raised. + """ + + # NOTE this code is adapted from the one in shutil.rmtree, and is + # just as fast + + names = [] + try: + names = os.listdir(path) + except os.error: + if onerror is not None: + onerror(os.listdir, path, sys.exc_info()) + else: + raise + + for name in names: + fullname = os.path.join(path, name) + delete_folder(fullname, onerror=onerror) + + +def delete_folder(folder_path, onerror=None, allow_non_empty=True): + """Utility function to cleanup a temporary folder if it still exists.""" + if os.path.isdir(folder_path): + if onerror is not None: + shutil.rmtree(folder_path, False, onerror) + else: + # allow the rmtree to fail once, wait and re-try. + # if the error is raised again, fail + err_count = 0 + while True: + files = os.listdir(folder_path) + try: + if len(files) == 0 or allow_non_empty: + shutil.rmtree( + folder_path, ignore_errors=False, onerror=None + ) + util.debug( + "Sucessfully deleted {}".format(folder_path)) + break + else: + raise OSError( + "Expected empty folder {} but got {} " + "files.".format(folder_path, len(files)) + ) + except (OSError, WindowsError): + err_count += 1 + if err_count > RM_SUBDIRS_N_RETRY: + # the folder cannot be deleted right now. It maybe + # because some temporary files have not been deleted + # yet. + raise + time.sleep(RM_SUBDIRS_RETRY_TIME) diff --git a/minor_project/lib/python3.6/site-packages/joblib/executor.py b/minor_project/lib/python3.6/site-packages/joblib/executor.py new file mode 100644 index 0000000..9273fed --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/executor.py @@ -0,0 +1,120 @@ +"""Utility function to construct a loky.ReusableExecutor with custom pickler. + +This module provides efficient ways of working with data stored in +shared memory with numpy.memmap arrays without inducing any memory +copy between the parent and child processes. +""" +# Author: Thomas Moreau +# Copyright: 2017, Thomas Moreau +# License: BSD 3 clause + +from ._memmapping_reducer import get_memmapping_reducers +from ._memmapping_reducer import TemporaryResourcesManager +from .externals.loky.reusable_executor import _ReusablePoolExecutor + + +_executor_args = None + + +def get_memmapping_executor(n_jobs, **kwargs): + return MemmappingExecutor.get_memmapping_executor(n_jobs, **kwargs) + + +class MemmappingExecutor(_ReusablePoolExecutor): + + @classmethod + def get_memmapping_executor(cls, n_jobs, timeout=300, initializer=None, + initargs=(), env=None, temp_folder=None, + context_id=None, **backend_args): + """Factory for ReusableExecutor with automatic memmapping for large numpy + arrays. + """ + global _executor_args + # Check if we can reuse the executor here instead of deferring the test + # to loky as the reducers are objects that changes at each call. + executor_args = backend_args.copy() + executor_args.update(env if env else {}) + executor_args.update(dict( + timeout=timeout, initializer=initializer, initargs=initargs)) + reuse = _executor_args is None or _executor_args == executor_args + _executor_args = executor_args + + manager = TemporaryResourcesManager(temp_folder) + + # reducers access the temporary folder in which to store temporary + # pickles through a call to manager.resolve_temp_folder_name. 
resolving + # the folder name dynamically is useful to use different folders across + # calls of a same reusable executor + job_reducers, result_reducers = get_memmapping_reducers( + unlink_on_gc_collect=True, + temp_folder_resolver=manager.resolve_temp_folder_name, + **backend_args) + _executor, executor_is_reused = super().get_reusable_executor( + n_jobs, job_reducers=job_reducers, result_reducers=result_reducers, + reuse=reuse, timeout=timeout, initializer=initializer, + initargs=initargs, env=env + ) + + if not executor_is_reused: + # Only set a _temp_folder_manager for new executors. Reused + # executors already have a _temporary_folder_manager that must not + # be re-assigned like that because it is referenced in various + # places in the reducing machinery of the executor. + _executor._temp_folder_manager = manager + + if context_id is not None: + # Only register the specified context once we know which manager + # the current executor is using, in order to not register an atexit + # finalizer twice for the same folder. + _executor._temp_folder_manager.register_new_context(context_id) + + return _executor + + def terminate(self, kill_workers=False): + self.shutdown(kill_workers=kill_workers) + if kill_workers: + # When workers are killed in such a brutal manner, they cannot + # execute the finalizer of their shared memmaps. The refcount of + # those memmaps may be off by an unknown number, so instead of + # decref'ing them, we delete the whole temporary folder, and + # unregister them. There is no risk of PermissionError at folder + # deletion because because at this point, all child processes are + # dead, so all references to temporary memmaps are closed. + + # unregister temporary resources from all contexts + with self._submit_resize_lock: + self._temp_folder_manager._unregister_temporary_resources() + self._temp_folder_manager._try_delete_folder( + allow_non_empty=True + ) + else: + self._temp_folder_manager._unlink_temporary_resources() + self._temp_folder_manager._try_delete_folder(allow_non_empty=True) + + @property + def _temp_folder(self): + # Legacy property in tests. could be removed if we refactored the + # memmapping tests. SHOULD ONLY BE USED IN TESTS! + # We cache this property because it is called late in the tests - at + # this point, all context have been unregistered, and + # resolve_temp_folder_name raises an error. + if getattr(self, '_cached_temp_folder', None) is not None: + return self._cached_temp_folder + else: + self._cached_temp_folder = self._temp_folder_manager.resolve_temp_folder_name() # noqa + return self._cached_temp_folder + + +class _TestingMemmappingExecutor(MemmappingExecutor): + """Wrapper around ReusableExecutor to ease memmapping testing with Pool + and Executor. This is only for testing purposes. 
+ + """ + def apply_async(self, func, args): + """Schedule a func to be run""" + future = self.submit(func, *args) + future.get = future.result + return future + + def map(self, f, *args): + return list(super().map(f, *args)) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/__init__.py b/minor_project/lib/python3.6/site-packages/joblib/externals/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..0481b6a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__init__.py b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__init__.py new file mode 100644 index 0000000..f461d65 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__init__.py @@ -0,0 +1,11 @@ +from __future__ import absolute_import + + +from .cloudpickle import * # noqa +from .cloudpickle_fast import CloudPickler, dumps, dump # noqa + +# Conform to the convention used by python serialization libraries, which +# expose their Pickler subclass at top-level under the "Pickler" name. +Pickler = CloudPickler + +__version__ = '1.6.0' diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..2bcc2cd Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/cloudpickle.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/cloudpickle.cpython-36.pyc new file mode 100644 index 0000000..90378c2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/cloudpickle.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/cloudpickle_fast.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/cloudpickle_fast.cpython-36.pyc new file mode 100644 index 0000000..d0a2b05 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/cloudpickle_fast.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/compat.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/compat.cpython-36.pyc new file mode 100644 index 0000000..11cde67 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/__pycache__/compat.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/cloudpickle.py b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/cloudpickle.py new file mode 100644 index 0000000..05d52af --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/cloudpickle.py @@ -0,0 +1,842 @@ +""" +This 
class is defined to override standard pickle functionality + +The goals of it follow: +-Serialize lambdas and nested functions to compiled byte code +-Deal with main module correctly +-Deal with other non-serializable objects + +It does not include an unpickler, as standard python unpickling suffices. + +This module was extracted from the `cloud` package, developed by `PiCloud, Inc. +`_. + +Copyright (c) 2012, Regents of the University of California. +Copyright (c) 2009 `PiCloud, Inc. `_. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the University of California, Berkeley nor the + names of its contributors may be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +from __future__ import print_function + +import builtins +import dis +import opcode +import platform +import sys +import types +import weakref +import uuid +import threading +import typing +import warnings + +from .compat import pickle +from typing import Generic, Union, Tuple, Callable +from pickle import _getattribute +from importlib._bootstrap import _find_spec + +try: # pragma: no branch + import typing_extensions as _typing_extensions + from typing_extensions import Literal, Final +except ImportError: + _typing_extensions = Literal = Final = None + +if sys.version_info >= (3, 5, 3): + from typing import ClassVar +else: # pragma: no cover + ClassVar = None + +if sys.version_info >= (3, 8): + from types import CellType +else: + def f(): + a = 1 + + def g(): + return a + return g + CellType = type(f().__closure__[0]) + + +# cloudpickle is meant for inter process communication: we expect all +# communicating processes to run the same Python version hence we favor +# communication speed over compatibility: +DEFAULT_PROTOCOL = pickle.HIGHEST_PROTOCOL + +# Track the provenance of reconstructed dynamic classes to make it possible to +# recontruct instances from the matching singleton class definition when +# appropriate and preserve the usual "isinstance" semantics of Python objects. 
+_DYNAMIC_CLASS_TRACKER_BY_CLASS = weakref.WeakKeyDictionary() +_DYNAMIC_CLASS_TRACKER_BY_ID = weakref.WeakValueDictionary() +_DYNAMIC_CLASS_TRACKER_LOCK = threading.Lock() + +PYPY = platform.python_implementation() == "PyPy" + +builtin_code_type = None +if PYPY: + # builtin-code objects only exist in pypy + builtin_code_type = type(float.__new__.__code__) + +_extract_code_globals_cache = weakref.WeakKeyDictionary() + + +def _get_or_create_tracker_id(class_def): + with _DYNAMIC_CLASS_TRACKER_LOCK: + class_tracker_id = _DYNAMIC_CLASS_TRACKER_BY_CLASS.get(class_def) + if class_tracker_id is None: + class_tracker_id = uuid.uuid4().hex + _DYNAMIC_CLASS_TRACKER_BY_CLASS[class_def] = class_tracker_id + _DYNAMIC_CLASS_TRACKER_BY_ID[class_tracker_id] = class_def + return class_tracker_id + + +def _lookup_class_or_track(class_tracker_id, class_def): + if class_tracker_id is not None: + with _DYNAMIC_CLASS_TRACKER_LOCK: + class_def = _DYNAMIC_CLASS_TRACKER_BY_ID.setdefault( + class_tracker_id, class_def) + _DYNAMIC_CLASS_TRACKER_BY_CLASS[class_def] = class_tracker_id + return class_def + + +def _whichmodule(obj, name): + """Find the module an object belongs to. + + This function differs from ``pickle.whichmodule`` in two ways: + - it does not mangle the cases where obj's module is __main__ and obj was + not found in any module. + - Errors arising during module introspection are ignored, as those errors + are considered unwanted side effects. + """ + if sys.version_info[:2] < (3, 7) and isinstance(obj, typing.TypeVar): # pragma: no branch # noqa + # Workaround bug in old Python versions: prior to Python 3.7, + # T.__module__ would always be set to "typing" even when the TypeVar T + # would be defined in a different module. + # + # For such older Python versions, we ignore the __module__ attribute of + # TypeVar instances and instead exhaustively lookup those instances in + # all currently imported modules. + module_name = None + else: + module_name = getattr(obj, '__module__', None) + + if module_name is not None: + return module_name + # Protect the iteration by using a copy of sys.modules against dynamic + # modules that trigger imports of other modules upon calls to getattr or + # other threads importing at the same time. + for module_name, module in sys.modules.copy().items(): + # Some modules such as coverage can inject non-module objects inside + # sys.modules + if ( + module_name == '__main__' or + module is None or + not isinstance(module, types.ModuleType) + ): + continue + try: + if _getattribute(module, name)[0] is obj: + return module_name + except Exception: + pass + return None + + +def _is_importable(obj, name=None): + """Dispatcher utility to test the importability of various constructs.""" + if isinstance(obj, types.FunctionType): + return _lookup_module_and_qualname(obj, name=name) is not None + elif issubclass(type(obj), type): + return _lookup_module_and_qualname(obj, name=name) is not None + elif isinstance(obj, types.ModuleType): + # We assume that sys.modules is primarily used as a cache mechanism for + # the Python import machinery. Checking if a module has been added in + # is sys.modules therefore a cheap and simple heuristic to tell us whether + # we can assume that a given module could be imported by name in + # another Python process. 
+ return obj.__name__ in sys.modules + else: + raise TypeError( + "cannot check importability of {} instances".format( + type(obj).__name__) + ) + + +def _lookup_module_and_qualname(obj, name=None): + if name is None: + name = getattr(obj, '__qualname__', None) + if name is None: # pragma: no cover + # This used to be needed for Python 2.7 support but is probably not + # needed anymore. However we keep the __name__ introspection in case + # users of cloudpickle rely on this old behavior for unknown reasons. + name = getattr(obj, '__name__', None) + + module_name = _whichmodule(obj, name) + + if module_name is None: + # In this case, obj.__module__ is None AND obj was not found in any + # imported module. obj is thus treated as dynamic. + return None + + if module_name == "__main__": + return None + + # Note: if module_name is in sys.modules, the corresponding module is + # assumed importable at unpickling time. See #357 + module = sys.modules.get(module_name, None) + if module is None: + # The main reason why obj's module would not be imported is that this + # module has been dynamically created, using for example + # types.ModuleType. The other possibility is that module was removed + # from sys.modules after obj was created/imported. But this case is not + # supported, as the standard pickle does not support it either. + return None + + try: + obj2, parent = _getattribute(module, name) + except AttributeError: + # obj was not found inside the module it points to + return None + if obj2 is not obj: + return None + return module, name + + +def _extract_code_globals(co): + """ + Find all globals names read or written to by codeblock co + """ + out_names = _extract_code_globals_cache.get(co) + if out_names is None: + names = co.co_names + out_names = {names[oparg] for _, oparg in _walk_global_ops(co)} + + # Declaring a function inside another one using the "def ..." + # syntax generates a constant code object corresonding to the one + # of the nested function's As the nested function may itself need + # global variables, we need to introspect its code, extract its + # globals, (look for code object in it's co_consts attribute..) and + # add the result to code_globals + if co.co_consts: + for const in co.co_consts: + if isinstance(const, types.CodeType): + out_names |= _extract_code_globals(const) + + _extract_code_globals_cache[co] = out_names + + return out_names + + +def _find_imported_submodules(code, top_level_dependencies): + """ + Find currently imported submodules used by a function. + + Submodules used by a function need to be detected and referenced for the + function to work correctly at depickling time. Because submodules can be + referenced as attribute of their parent package (``package.submodule``), we + need a special introspection technique that does not rely on GLOBAL-related + opcodes to find references of them in a code object. + + Example: + ``` + import concurrent.futures + import cloudpickle + def func(): + x = concurrent.futures.ThreadPoolExecutor + if __name__ == '__main__': + cloudpickle.dumps(func) + ``` + The globals extracted by cloudpickle in the function's state include the + concurrent package, but not its submodule (here, concurrent.futures), which + is the module used by func. Find_imported_submodules will detect the usage + of concurrent.futures. 
Saving this module alongside with func will ensure + that calling func once depickled does not fail due to concurrent.futures + not being imported + """ + + subimports = [] + # check if any known dependency is an imported package + for x in top_level_dependencies: + if (isinstance(x, types.ModuleType) and + hasattr(x, '__package__') and x.__package__): + # check if the package has any currently loaded sub-imports + prefix = x.__name__ + '.' + # A concurrent thread could mutate sys.modules, + # make sure we iterate over a copy to avoid exceptions + for name in list(sys.modules): + # Older versions of pytest will add a "None" module to + # sys.modules. + if name is not None and name.startswith(prefix): + # check whether the function can address the sub-module + tokens = set(name[len(prefix):].split('.')) + if not tokens - set(code.co_names): + subimports.append(sys.modules[name]) + return subimports + + +def cell_set(cell, value): + """Set the value of a closure cell. + + The point of this function is to set the cell_contents attribute of a cell + after its creation. This operation is necessary in case the cell contains a + reference to the function the cell belongs to, as when calling the + function's constructor + ``f = types.FunctionType(code, globals, name, argdefs, closure)``, + closure will not be able to contain the yet-to-be-created f. + + In Python3.7, cell_contents is writeable, so setting the contents of a cell + can be done simply using + >>> cell.cell_contents = value + + In earlier Python3 versions, the cell_contents attribute of a cell is read + only, but this limitation can be worked around by leveraging the Python 3 + ``nonlocal`` keyword. + + In Python2 however, this attribute is read only, and there is no + ``nonlocal`` keyword. For this reason, we need to come up with more + complicated hacks to set this attribute. + + The chosen approach is to create a function with a STORE_DEREF opcode, + which sets the content of a closure variable. Typically: + + >>> def inner(value): + ... lambda: cell # the lambda makes cell a closure + ... cell = value # cell is a closure, so this triggers a STORE_DEREF + + (Note that in Python2, A STORE_DEREF can never be triggered from an inner + function. The function g for example here + >>> def f(var): + ... def g(): + ... var += 1 + ... return g + + will not modify the closure variable ``var```inplace, but instead try to + load a local variable var and increment it. As g does not assign the local + variable ``var`` any initial value, calling f(1)() will fail at runtime.) + + Our objective is to set the value of a given cell ``cell``. So we need to + somewhat reference our ``cell`` object into the ``inner`` function so that + this object (and not the smoke cell of the lambda function) gets affected + by the STORE_DEREF operation. + + In inner, ``cell`` is referenced as a cell variable (an enclosing variable + that is referenced by the inner function). If we create a new function + cell_set with the exact same code as ``inner``, but with ``cell`` marked as + a free variable instead, the STORE_DEREF will be applied on its closure - + ``cell``, which we can specify explicitly during construction! The new + cell_set variable thus actually sets the contents of a specified cell! + + Note: we do not make use of the ``nonlocal`` keyword to set the contents of + a cell in early python3 versions to limit possible syntax errors in case + test and checker libraries decide to parse the whole file. 
+ """ + + if sys.version_info[:2] >= (3, 7): # pragma: no branch + cell.cell_contents = value + else: + _cell_set = types.FunctionType( + _cell_set_template_code, {}, '_cell_set', (), (cell,),) + _cell_set(value) + + +def _make_cell_set_template_code(): + def _cell_set_factory(value): + lambda: cell + cell = value + + co = _cell_set_factory.__code__ + + _cell_set_template_code = types.CodeType( + co.co_argcount, + co.co_kwonlyargcount, # Python 3 only argument + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + co.co_cellvars, # co_freevars is initialized with co_cellvars + (), # co_cellvars is made empty + ) + return _cell_set_template_code + + +if sys.version_info[:2] < (3, 7): + _cell_set_template_code = _make_cell_set_template_code() + +# relevant opcodes +STORE_GLOBAL = opcode.opmap['STORE_GLOBAL'] +DELETE_GLOBAL = opcode.opmap['DELETE_GLOBAL'] +LOAD_GLOBAL = opcode.opmap['LOAD_GLOBAL'] +GLOBAL_OPS = (STORE_GLOBAL, DELETE_GLOBAL, LOAD_GLOBAL) +HAVE_ARGUMENT = dis.HAVE_ARGUMENT +EXTENDED_ARG = dis.EXTENDED_ARG + + +_BUILTIN_TYPE_NAMES = {} +for k, v in types.__dict__.items(): + if type(v) is type: + _BUILTIN_TYPE_NAMES[v] = k + + +def _builtin_type(name): + if name == "ClassType": # pragma: no cover + # Backward compat to load pickle files generated with cloudpickle + # < 1.3 even if loading pickle files from older versions is not + # officially supported. + return type + return getattr(types, name) + + +def _walk_global_ops(code): + """ + Yield (opcode, argument number) tuples for all + global-referencing instructions in *code*. + """ + for instr in dis.get_instructions(code): + op = instr.opcode + if op in GLOBAL_OPS: + yield op, instr.arg + + +def _extract_class_dict(cls): + """Retrieve a copy of the dict of a class without the inherited methods""" + clsdict = dict(cls.__dict__) # copy dict proxy to a dict + if len(cls.__bases__) == 1: + inherited_dict = cls.__bases__[0].__dict__ + else: + inherited_dict = {} + for base in reversed(cls.__bases__): + inherited_dict.update(base.__dict__) + to_remove = [] + for name, value in clsdict.items(): + try: + base_value = inherited_dict[name] + if value is base_value: + to_remove.append(name) + except KeyError: + pass + for name in to_remove: + clsdict.pop(name) + return clsdict + + +if sys.version_info[:2] < (3, 7): # pragma: no branch + def _is_parametrized_type_hint(obj): + # This is very cheap but might generate false positives. 
+ # general typing Constructs + is_typing = getattr(obj, '__origin__', None) is not None + + # typing_extensions.Literal + is_litteral = getattr(obj, '__values__', None) is not None + + # typing_extensions.Final + is_final = getattr(obj, '__type__', None) is not None + + # typing.Union/Tuple for old Python 3.5 + is_union = getattr(obj, '__union_params__', None) is not None + is_tuple = getattr(obj, '__tuple_params__', None) is not None + is_callable = ( + getattr(obj, '__result__', None) is not None and + getattr(obj, '__args__', None) is not None + ) + return any((is_typing, is_litteral, is_final, is_union, is_tuple, + is_callable)) + + def _create_parametrized_type_hint(origin, args): + return origin[args] +else: + _is_parametrized_type_hint = None + _create_parametrized_type_hint = None + + +def parametrized_type_hint_getinitargs(obj): + # The distorted type check sematic for typing construct becomes: + # ``type(obj) is type(TypeHint)``, which means "obj is a + # parametrized TypeHint" + if type(obj) is type(Literal): # pragma: no branch + initargs = (Literal, obj.__values__) + elif type(obj) is type(Final): # pragma: no branch + initargs = (Final, obj.__type__) + elif type(obj) is type(ClassVar): + initargs = (ClassVar, obj.__type__) + elif type(obj) is type(Generic): + parameters = obj.__parameters__ + if len(obj.__parameters__) > 0: + # in early Python 3.5, __parameters__ was sometimes + # preferred to __args__ + initargs = (obj.__origin__, parameters) + + else: + initargs = (obj.__origin__, obj.__args__) + elif type(obj) is type(Union): + if sys.version_info < (3, 5, 3): # pragma: no cover + initargs = (Union, obj.__union_params__) + else: + initargs = (Union, obj.__args__) + elif type(obj) is type(Tuple): + if sys.version_info < (3, 5, 3): # pragma: no cover + initargs = (Tuple, obj.__tuple_params__) + else: + initargs = (Tuple, obj.__args__) + elif type(obj) is type(Callable): + if sys.version_info < (3, 5, 3): # pragma: no cover + args = obj.__args__ + result = obj.__result__ + if args != Ellipsis: + if isinstance(args, tuple): + args = list(args) + else: + args = [args] + else: + (*args, result) = obj.__args__ + if len(args) == 1 and args[0] is Ellipsis: + args = Ellipsis + else: + args = list(args) + initargs = (Callable, (args, result)) + else: # pragma: no cover + raise pickle.PicklingError( + "Cloudpickle Error: Unknown type {}".format(type(obj)) + ) + return initargs + + +# Tornado support + +def is_tornado_coroutine(func): + """ + Return whether *func* is a Tornado coroutine function. + Running coroutines are not supported. 
+ """ + if 'tornado.gen' not in sys.modules: + return False + gen = sys.modules['tornado.gen'] + if not hasattr(gen, "is_coroutine_function"): + # Tornado version is too old + return False + return gen.is_coroutine_function(func) + + +def _rebuild_tornado_coroutine(func): + from tornado import gen + return gen.coroutine(func) + + +# including pickles unloading functions in this namespace +load = pickle.load +loads = pickle.loads + + +# hack for __import__ not working as desired +def subimport(name): + __import__(name) + return sys.modules[name] + + +def dynamic_subimport(name, vars): + mod = types.ModuleType(name) + mod.__dict__.update(vars) + mod.__dict__['__builtins__'] = builtins.__dict__ + return mod + + +def _gen_ellipsis(): + return Ellipsis + + +def _gen_not_implemented(): + return NotImplemented + + +def _get_cell_contents(cell): + try: + return cell.cell_contents + except ValueError: + # sentinel used by ``_fill_function`` which will leave the cell empty + return _empty_cell_value + + +def instance(cls): + """Create a new instance of a class. + + Parameters + ---------- + cls : type + The class to create an instance of. + + Returns + ------- + instance : cls + A new instance of ``cls``. + """ + return cls() + + +@instance +class _empty_cell_value(object): + """sentinel for empty closures + """ + @classmethod + def __reduce__(cls): + return cls.__name__ + + +def _fill_function(*args): + """Fills in the rest of function data into the skeleton function object + + The skeleton itself is create by _make_skel_func(). + """ + if len(args) == 2: + func = args[0] + state = args[1] + elif len(args) == 5: + # Backwards compat for cloudpickle v0.4.0, after which the `module` + # argument was introduced + func = args[0] + keys = ['globals', 'defaults', 'dict', 'closure_values'] + state = dict(zip(keys, args[1:])) + elif len(args) == 6: + # Backwards compat for cloudpickle v0.4.1, after which the function + # state was passed as a dict to the _fill_function it-self. + func = args[0] + keys = ['globals', 'defaults', 'dict', 'module', 'closure_values'] + state = dict(zip(keys, args[1:])) + else: + raise ValueError('Unexpected _fill_value arguments: %r' % (args,)) + + # - At pickling time, any dynamic global variable used by func is + # serialized by value (in state['globals']). + # - At unpickling time, func's __globals__ attribute is initialized by + # first retrieving an empty isolated namespace that will be shared + # with other functions pickled from the same original module + # by the same CloudPickler instance and then updated with the + # content of state['globals'] to populate the shared isolated + # namespace with all the global variables that are specifically + # referenced for this function. + func.__globals__.update(state['globals']) + + func.__defaults__ = state['defaults'] + func.__dict__ = state['dict'] + if 'annotations' in state: + func.__annotations__ = state['annotations'] + if 'doc' in state: + func.__doc__ = state['doc'] + if 'name' in state: + func.__name__ = state['name'] + if 'module' in state: + func.__module__ = state['module'] + if 'qualname' in state: + func.__qualname__ = state['qualname'] + if 'kwdefaults' in state: + func.__kwdefaults__ = state['kwdefaults'] + # _cloudpickle_subimports is a set of submodules that must be loaded for + # the pickled function to work correctly at unpickling time. 
Now that these + # submodules are depickled (hence imported), they can be removed from the + # object's state (the object state only served as a reference holder to + # these submodules) + if '_cloudpickle_submodules' in state: + state.pop('_cloudpickle_submodules') + + cells = func.__closure__ + if cells is not None: + for cell, value in zip(cells, state['closure_values']): + if value is not _empty_cell_value: + cell_set(cell, value) + + return func + + +def _make_empty_cell(): + if False: + # trick the compiler into creating an empty cell in our lambda + cell = None + raise AssertionError('this route should not be executed') + + return (lambda: cell).__closure__[0] + + +def _make_cell(value=_empty_cell_value): + cell = _make_empty_cell() + if value is not _empty_cell_value: + cell_set(cell, value) + return cell + + +def _make_skel_func(code, cell_count, base_globals=None): + """ Creates a skeleton function object that contains just the provided + code and the correct number of cells in func_closure. All other + func attributes (e.g. func_globals) are empty. + """ + # This function is deprecated and should be removed in cloudpickle 1.7 + warnings.warn( + "A pickle file created using an old (<=1.4.1) version of cloudpicke " + "is currently being loaded. This is not supported by cloudpickle and " + "will break in cloudpickle 1.7", category=UserWarning + ) + # This is backward-compatibility code: for cloudpickle versions between + # 0.5.4 and 0.7, base_globals could be a string or None. base_globals + # should now always be a dictionary. + if base_globals is None or isinstance(base_globals, str): + base_globals = {} + + base_globals['__builtins__'] = __builtins__ + + closure = ( + tuple(_make_empty_cell() for _ in range(cell_count)) + if cell_count >= 0 else + None + ) + return types.FunctionType(code, base_globals, None, None, closure) + + +def _make_skeleton_class(type_constructor, name, bases, type_kwargs, + class_tracker_id, extra): + """Build dynamic class with an empty __dict__ to be filled once memoized + + If class_tracker_id is not None, try to lookup an existing class definition + matching that id. If none is found, track a newly reconstructed class + definition under that id so that other instances stemming from the same + class id will also reuse this class definition. + + The "extra" variable is meant to be a dict (or None) that can be used for + forward compatibility shall the need arise. + """ + skeleton_class = types.new_class( + name, bases, {'metaclass': type_constructor}, + lambda ns: ns.update(type_kwargs) + ) + return _lookup_class_or_track(class_tracker_id, skeleton_class) + + +def _rehydrate_skeleton_class(skeleton_class, class_dict): + """Put attributes from `class_dict` back on `skeleton_class`. + + See CloudPickler.save_dynamic_class for more info. + """ + registry = None + for attrname, attr in class_dict.items(): + if attrname == "_abc_impl": + registry = attr + else: + setattr(skeleton_class, attrname, attr) + if registry is not None: + for subclass in registry: + skeleton_class.register(subclass) + + return skeleton_class + + +def _make_skeleton_enum(bases, name, qualname, members, module, + class_tracker_id, extra): + """Build dynamic enum with an empty __dict__ to be filled once memoized + + The creation of the enum class is inspired by the code of + EnumMeta._create_. + + If class_tracker_id is not None, try to lookup an existing enum definition + matching that id. 
If none is found, track a newly reconstructed enum + definition under that id so that other instances stemming from the same + class id will also reuse this enum definition. + + The "extra" variable is meant to be a dict (or None) that can be used for + forward compatibility shall the need arise. + """ + # enums always inherit from their base Enum class at the last position in + # the list of base classes: + enum_base = bases[-1] + metacls = enum_base.__class__ + classdict = metacls.__prepare__(name, bases) + + for member_name, member_value in members.items(): + classdict[member_name] = member_value + enum_class = metacls.__new__(metacls, name, bases, classdict) + enum_class.__module__ = module + enum_class.__qualname__ = qualname + + return _lookup_class_or_track(class_tracker_id, enum_class) + + +def _make_typevar(name, bound, constraints, covariant, contravariant, + class_tracker_id): + tv = typing.TypeVar( + name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant + ) + if class_tracker_id is not None: + return _lookup_class_or_track(class_tracker_id, tv) + else: # pragma: nocover + # Only for Python 3.5.3 compat. + return tv + + +def _decompose_typevar(obj): + try: + class_tracker_id = _get_or_create_tracker_id(obj) + except TypeError: # pragma: nocover + # TypeVar instances are not weakref-able in Python 3.5.3 + class_tracker_id = None + return ( + obj.__name__, obj.__bound__, obj.__constraints__, + obj.__covariant__, obj.__contravariant__, + class_tracker_id, + ) + + +def _typevar_reduce(obj): + # TypeVar instances have no __qualname__ hence we pass the name explicitly. + module_and_name = _lookup_module_and_qualname(obj, name=obj.__name__) + if module_and_name is None: + return (_make_typevar, _decompose_typevar(obj)) + return (getattr, module_and_name) + + +def _get_bases(typ): + if hasattr(typ, '__orig_bases__'): + # For generic types (see PEP 560) + bases_attr = '__orig_bases__' + else: + # For regular class objects + bases_attr = '__bases__' + return getattr(typ, bases_attr) + + +def _make_dict_keys(obj): + return dict.fromkeys(obj).keys() + + +def _make_dict_values(obj): + return {i: _ for i, _ in enumerate(obj)}.values() + + +def _make_dict_items(obj): + return obj.items() diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/cloudpickle_fast.py b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/cloudpickle_fast.py new file mode 100644 index 0000000..fa8da0f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/cloudpickle_fast.py @@ -0,0 +1,770 @@ +""" +New, fast version of the CloudPickler. + +This new CloudPickler class can now extend the fast C Pickler instead of the +previous Python implementation of the Pickler class. Because this functionality +is only available for Python versions 3.8+, a lot of backward-compatibility +code is also removed. + +Note that the C Pickler sublassing API is CPython-specific. 
Therefore, some +guards present in cloudpickle.py that were written to handle PyPy specificities +are not present in cloudpickle_fast.py +""" +import _collections_abc +import abc +import copyreg +import io +import itertools +import logging +import sys +import struct +import types +import weakref +import typing + +from enum import Enum +from collections import ChainMap + +from .compat import pickle, Pickler +from .cloudpickle import ( + _extract_code_globals, _BUILTIN_TYPE_NAMES, DEFAULT_PROTOCOL, + _find_imported_submodules, _get_cell_contents, _is_importable, + _builtin_type, _get_or_create_tracker_id, _make_skeleton_class, + _make_skeleton_enum, _extract_class_dict, dynamic_subimport, subimport, + _typevar_reduce, _get_bases, _make_cell, _make_empty_cell, CellType, + _is_parametrized_type_hint, PYPY, cell_set, + parametrized_type_hint_getinitargs, _create_parametrized_type_hint, + builtin_code_type, + _make_dict_keys, _make_dict_values, _make_dict_items, +) + + +if pickle.HIGHEST_PROTOCOL >= 5 and not PYPY: + # Shorthands similar to pickle.dump/pickle.dumps + + def dump(obj, file, protocol=None, buffer_callback=None): + """Serialize obj as bytes streamed into file + + protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to + pickle.HIGHEST_PROTOCOL. This setting favors maximum communication + speed between processes running the same Python version. + + Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure + compatibility with older versions of Python. + """ + CloudPickler( + file, protocol=protocol, buffer_callback=buffer_callback + ).dump(obj) + + def dumps(obj, protocol=None, buffer_callback=None): + """Serialize obj as a string of bytes allocated in memory + + protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to + pickle.HIGHEST_PROTOCOL. This setting favors maximum communication + speed between processes running the same Python version. + + Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure + compatibility with older versions of Python. + """ + with io.BytesIO() as file: + cp = CloudPickler( + file, protocol=protocol, buffer_callback=buffer_callback + ) + cp.dump(obj) + return file.getvalue() + +else: + # Shorthands similar to pickle.dump/pickle.dumps + def dump(obj, file, protocol=None): + """Serialize obj as bytes streamed into file + + protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to + pickle.HIGHEST_PROTOCOL. This setting favors maximum communication + speed between processes running the same Python version. + + Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure + compatibility with older versions of Python. + """ + CloudPickler(file, protocol=protocol).dump(obj) + + def dumps(obj, protocol=None): + """Serialize obj as a string of bytes allocated in memory + + protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to + pickle.HIGHEST_PROTOCOL. This setting favors maximum communication + speed between processes running the same Python version. + + Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure + compatibility with older versions of Python. 
+ """ + with io.BytesIO() as file: + cp = CloudPickler(file, protocol=protocol) + cp.dump(obj) + return file.getvalue() + + +load, loads = pickle.load, pickle.loads + + +# COLLECTION OF OBJECTS __getnewargs__-LIKE METHODS +# ------------------------------------------------- + +def _class_getnewargs(obj): + type_kwargs = {} + if "__slots__" in obj.__dict__: + type_kwargs["__slots__"] = obj.__slots__ + + __dict__ = obj.__dict__.get('__dict__', None) + if isinstance(__dict__, property): + type_kwargs['__dict__'] = __dict__ + + return (type(obj), obj.__name__, _get_bases(obj), type_kwargs, + _get_or_create_tracker_id(obj), None) + + +def _enum_getnewargs(obj): + members = dict((e.name, e.value) for e in obj) + return (obj.__bases__, obj.__name__, obj.__qualname__, members, + obj.__module__, _get_or_create_tracker_id(obj), None) + + +# COLLECTION OF OBJECTS RECONSTRUCTORS +# ------------------------------------ +def _file_reconstructor(retval): + return retval + + +# COLLECTION OF OBJECTS STATE GETTERS +# ----------------------------------- +def _function_getstate(func): + # - Put func's dynamic attributes (stored in func.__dict__) in state. These + # attributes will be restored at unpickling time using + # f.__dict__.update(state) + # - Put func's members into slotstate. Such attributes will be restored at + # unpickling time by iterating over slotstate and calling setattr(func, + # slotname, slotvalue) + slotstate = { + "__name__": func.__name__, + "__qualname__": func.__qualname__, + "__annotations__": func.__annotations__, + "__kwdefaults__": func.__kwdefaults__, + "__defaults__": func.__defaults__, + "__module__": func.__module__, + "__doc__": func.__doc__, + "__closure__": func.__closure__, + } + + f_globals_ref = _extract_code_globals(func.__code__) + f_globals = {k: func.__globals__[k] for k in f_globals_ref if k in + func.__globals__} + + closure_values = ( + list(map(_get_cell_contents, func.__closure__)) + if func.__closure__ is not None else () + ) + + # Extract currently-imported submodules used by func. Storing these modules + # in a smoke _cloudpickle_subimports attribute of the object's state will + # trigger the side effect of importing these modules at unpickling time + # (which is necessary for func to work correctly once depickled) + slotstate["_cloudpickle_submodules"] = _find_imported_submodules( + func.__code__, itertools.chain(f_globals.values(), closure_values)) + slotstate["__globals__"] = f_globals + + state = func.__dict__ + return state, slotstate + + +def _class_getstate(obj): + clsdict = _extract_class_dict(obj) + clsdict.pop('__weakref__', None) + + if issubclass(type(obj), abc.ABCMeta): + # If obj is an instance of an ABCMeta subclass, dont pickle the + # cache/negative caches populated during isinstance/issubclass + # checks, but pickle the list of registered subclasses of obj. 
+ clsdict.pop('_abc_cache', None) + clsdict.pop('_abc_negative_cache', None) + clsdict.pop('_abc_negative_cache_version', None) + registry = clsdict.pop('_abc_registry', None) + if registry is None: + # in Python3.7+, the abc caches and registered subclasses of a + # class are bundled into the single _abc_impl attribute + clsdict.pop('_abc_impl', None) + (registry, _, _, _) = abc._get_dump(obj) + + clsdict["_abc_impl"] = [subclass_weakref() + for subclass_weakref in registry] + else: + # In the above if clause, registry is a set of weakrefs -- in + # this case, registry is a WeakSet + clsdict["_abc_impl"] = [type_ for type_ in registry] + + if "__slots__" in clsdict: + # pickle string length optimization: member descriptors of obj are + # created automatically from obj's __slots__ attribute, no need to + # save them in obj's state + if isinstance(obj.__slots__, str): + clsdict.pop(obj.__slots__) + else: + for k in obj.__slots__: + clsdict.pop(k, None) + + clsdict.pop('__dict__', None) # unpicklable property object + + return (clsdict, {}) + + +def _enum_getstate(obj): + clsdict, slotstate = _class_getstate(obj) + + members = dict((e.name, e.value) for e in obj) + # Cleanup the clsdict that will be passed to _rehydrate_skeleton_class: + # Those attributes are already handled by the metaclass. + for attrname in ["_generate_next_value_", "_member_names_", + "_member_map_", "_member_type_", + "_value2member_map_"]: + clsdict.pop(attrname, None) + for member in members: + clsdict.pop(member) + # Special handling of Enum subclasses + return clsdict, slotstate + + +# COLLECTIONS OF OBJECTS REDUCERS +# ------------------------------- +# A reducer is a function taking a single argument (obj), and that returns a +# tuple with all the necessary data to re-construct obj. Apart from a few +# exceptions (list, dict, bytes, int, etc.), a reducer is necessary to +# correctly pickle an object. +# While many built-in objects (Exceptions objects, instances of the "object" +# class, etc), are shipped with their own built-in reducer (invoked using +# obj.__reduce__), some do not. The following methods were created to "fill +# these holes". 
+ +def _code_reduce(obj): + """codeobject reducer""" + if hasattr(obj, "co_posonlyargcount"): # pragma: no branch + args = ( + obj.co_argcount, obj.co_posonlyargcount, + obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, + obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, + obj.co_varnames, obj.co_filename, obj.co_name, + obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, + obj.co_cellvars + ) + else: + args = ( + obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals, + obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, + obj.co_names, obj.co_varnames, obj.co_filename, + obj.co_name, obj.co_firstlineno, obj.co_lnotab, + obj.co_freevars, obj.co_cellvars + ) + return types.CodeType, args + + +def _cell_reduce(obj): + """Cell (containing values of a function's free variables) reducer""" + try: + obj.cell_contents + except ValueError: # cell is empty + return _make_empty_cell, () + else: + return _make_cell, (obj.cell_contents, ) + + +def _classmethod_reduce(obj): + orig_func = obj.__func__ + return type(obj), (orig_func,) + + +def _file_reduce(obj): + """Save a file""" + import io + + if not hasattr(obj, "name") or not hasattr(obj, "mode"): + raise pickle.PicklingError( + "Cannot pickle files that do not map to an actual file" + ) + if obj is sys.stdout: + return getattr, (sys, "stdout") + if obj is sys.stderr: + return getattr, (sys, "stderr") + if obj is sys.stdin: + raise pickle.PicklingError("Cannot pickle standard input") + if obj.closed: + raise pickle.PicklingError("Cannot pickle closed files") + if hasattr(obj, "isatty") and obj.isatty(): + raise pickle.PicklingError( + "Cannot pickle files that map to tty objects" + ) + if "r" not in obj.mode and "+" not in obj.mode: + raise pickle.PicklingError( + "Cannot pickle files that are not opened for reading: %s" + % obj.mode + ) + + name = obj.name + + retval = io.StringIO() + + try: + # Read the whole file + curloc = obj.tell() + obj.seek(0) + contents = obj.read() + obj.seek(curloc) + except IOError as e: + raise pickle.PicklingError( + "Cannot pickle file %s as it cannot be read" % name + ) from e + retval.write(contents) + retval.seek(curloc) + + retval.name = name + return _file_reconstructor, (retval,) + + +def _getset_descriptor_reduce(obj): + return getattr, (obj.__objclass__, obj.__name__) + + +def _mappingproxy_reduce(obj): + return types.MappingProxyType, (dict(obj),) + + +def _memoryview_reduce(obj): + return bytes, (obj.tobytes(),) + + +def _module_reduce(obj): + if _is_importable(obj): + return subimport, (obj.__name__,) + else: + obj.__dict__.pop('__builtins__', None) + return dynamic_subimport, (obj.__name__, vars(obj)) + + +def _method_reduce(obj): + return (types.MethodType, (obj.__func__, obj.__self__)) + + +def _logger_reduce(obj): + return logging.getLogger, (obj.name,) + + +def _root_logger_reduce(obj): + return logging.getLogger, () + + +def _property_reduce(obj): + return property, (obj.fget, obj.fset, obj.fdel, obj.__doc__) + + +def _weakset_reduce(obj): + return weakref.WeakSet, (list(obj),) + + +def _dynamic_class_reduce(obj): + """ + Save a class that can't be stored as module global. + + This method is used to serialize classes that are defined inside + functions, or that otherwise can't be serialized as attribute lookups + from global modules. 
+ """ + if Enum is not None and issubclass(obj, Enum): + return ( + _make_skeleton_enum, _enum_getnewargs(obj), _enum_getstate(obj), + None, None, _class_setstate + ) + else: + return ( + _make_skeleton_class, _class_getnewargs(obj), _class_getstate(obj), + None, None, _class_setstate + ) + + +def _class_reduce(obj): + """Select the reducer depending on the dynamic nature of the class obj""" + if obj is type(None): # noqa + return type, (None,) + elif obj is type(Ellipsis): + return type, (Ellipsis,) + elif obj is type(NotImplemented): + return type, (NotImplemented,) + elif obj in _BUILTIN_TYPE_NAMES: + return _builtin_type, (_BUILTIN_TYPE_NAMES[obj],) + elif not _is_importable(obj): + return _dynamic_class_reduce(obj) + return NotImplemented + + +def _dict_keys_reduce(obj): + # Safer not to ship the full dict as sending the rest might + # be unintended and could potentially cause leaking of + # sensitive information + return _make_dict_keys, (list(obj), ) + + +def _dict_values_reduce(obj): + # Safer not to ship the full dict as sending the rest might + # be unintended and could potentially cause leaking of + # sensitive information + return _make_dict_values, (list(obj), ) + + +def _dict_items_reduce(obj): + return _make_dict_items, (dict(obj), ) + + +# COLLECTIONS OF OBJECTS STATE SETTERS +# ------------------------------------ +# state setters are called at unpickling time, once the object is created and +# it has to be updated to how it was at unpickling time. + + +def _function_setstate(obj, state): + """Update the state of a dynaamic function. + + As __closure__ and __globals__ are readonly attributes of a function, we + cannot rely on the native setstate routine of pickle.load_build, that calls + setattr on items of the slotstate. Instead, we have to modify them inplace. + """ + state, slotstate = state + obj.__dict__.update(state) + + obj_globals = slotstate.pop("__globals__") + obj_closure = slotstate.pop("__closure__") + # _cloudpickle_subimports is a set of submodules that must be loaded for + # the pickled function to work correctly at unpickling time. 
Now that these + # submodules are depickled (hence imported), they can be removed from the + # object's state (the object state only served as a reference holder to + # these submodules) + slotstate.pop("_cloudpickle_submodules") + + obj.__globals__.update(obj_globals) + obj.__globals__["__builtins__"] = __builtins__ + + if obj_closure is not None: + for i, cell in enumerate(obj_closure): + try: + value = cell.cell_contents + except ValueError: # cell is empty + continue + cell_set(obj.__closure__[i], value) + + for k, v in slotstate.items(): + setattr(obj, k, v) + + +def _class_setstate(obj, state): + state, slotstate = state + registry = None + for attrname, attr in state.items(): + if attrname == "_abc_impl": + registry = attr + else: + setattr(obj, attrname, attr) + if registry is not None: + for subclass in registry: + obj.register(subclass) + + return obj + + +class CloudPickler(Pickler): + # set of reducers defined and used by cloudpickle (private) + _dispatch_table = {} + _dispatch_table[classmethod] = _classmethod_reduce + _dispatch_table[io.TextIOWrapper] = _file_reduce + _dispatch_table[logging.Logger] = _logger_reduce + _dispatch_table[logging.RootLogger] = _root_logger_reduce + _dispatch_table[memoryview] = _memoryview_reduce + _dispatch_table[property] = _property_reduce + _dispatch_table[staticmethod] = _classmethod_reduce + _dispatch_table[CellType] = _cell_reduce + _dispatch_table[types.CodeType] = _code_reduce + _dispatch_table[types.GetSetDescriptorType] = _getset_descriptor_reduce + _dispatch_table[types.ModuleType] = _module_reduce + _dispatch_table[types.MethodType] = _method_reduce + _dispatch_table[types.MappingProxyType] = _mappingproxy_reduce + _dispatch_table[weakref.WeakSet] = _weakset_reduce + _dispatch_table[typing.TypeVar] = _typevar_reduce + _dispatch_table[_collections_abc.dict_keys] = _dict_keys_reduce + _dispatch_table[_collections_abc.dict_values] = _dict_values_reduce + _dispatch_table[_collections_abc.dict_items] = _dict_items_reduce + + + dispatch_table = ChainMap(_dispatch_table, copyreg.dispatch_table) + + # function reducers are defined as instance methods of CloudPickler + # objects, as they rely on a CloudPickler attribute (globals_ref) + def _dynamic_function_reduce(self, func): + """Reduce a function that is not pickleable via attribute lookup.""" + newargs = self._function_getnewargs(func) + state = _function_getstate(func) + return (types.FunctionType, newargs, state, None, None, + _function_setstate) + + def _function_reduce(self, obj): + """Reducer for function objects. + + If obj is a top-level attribute of a file-backed module, this + reducer returns NotImplemented, making the CloudPickler fallback to + traditional _pickle.Pickler routines to save obj. Otherwise, it reduces + obj using a custom cloudpickle reducer designed specifically to handle + dynamic functions. + + As opposed to cloudpickle.py, There no special handling for builtin + pypy functions because cloudpickle_fast is CPython-specific. + """ + if _is_importable(obj): + return NotImplemented + else: + return self._dynamic_function_reduce(obj) + + def _function_getnewargs(self, func): + code = func.__code__ + + # base_globals represents the future global namespace of func at + # unpickling time. 
Looking it up and storing it in + # CloudpiPickler.globals_ref allow functions sharing the same globals + # at pickling time to also share them once unpickled, at one condition: + # since globals_ref is an attribute of a CloudPickler instance, and + # that a new CloudPickler is created each time pickle.dump or + # pickle.dumps is called, functions also need to be saved within the + # same invocation of cloudpickle.dump/cloudpickle.dumps (for example: + # cloudpickle.dumps([f1, f2])). There is no such limitation when using + # CloudPickler.dump, as long as the multiple invocations are bound to + # the same CloudPickler. + base_globals = self.globals_ref.setdefault(id(func.__globals__), {}) + + if base_globals == {}: + # Add module attributes used to resolve relative imports + # instructions inside func. + for k in ["__package__", "__name__", "__path__", "__file__"]: + if k in func.__globals__: + base_globals[k] = func.__globals__[k] + + # Do not bind the free variables before the function is created to + # avoid infinite recursion. + if func.__closure__ is None: + closure = None + else: + closure = tuple( + _make_empty_cell() for _ in range(len(code.co_freevars))) + + return code, base_globals, None, None, closure + + def dump(self, obj): + try: + return Pickler.dump(self, obj) + except RuntimeError as e: + if "recursion" in e.args[0]: + msg = ( + "Could not pickle object as excessively deep recursion " + "required." + ) + raise pickle.PicklingError(msg) from e + else: + raise + + if pickle.HIGHEST_PROTOCOL >= 5: + # `CloudPickler.dispatch` is only left for backward compatibility - note + # that when using protocol 5, `CloudPickler.dispatch` is not an + # extension of `Pickler.dispatch` dictionary, because CloudPickler + # subclasses the C-implemented Pickler, which does not expose a + # `dispatch` attribute. Earlier versions of the protocol 5 CloudPickler + # used `CloudPickler.dispatch` as a class-level attribute storing all + # reducers implemented by cloudpickle, but the attribute name was not a + # great choice given the meaning of `Cloudpickler.dispatch` when + # `CloudPickler` extends the pure-python pickler. + dispatch = dispatch_table + + # Implementation of the reducer_override callback, in order to + # efficiently serialize dynamic functions and classes by subclassing + # the C-implemented Pickler. + # TODO: decorrelate reducer_override (which is tied to CPython's + # implementation - would it make sense to backport it to pypy? - and + # pickle's protocol 5 which is implementation agnostic. Currently, the + # availability of both notions coincide on CPython's pickle and the + # pickle5 backport, but it may not be the case anymore when pypy + # implements protocol 5 + def __init__(self, file, protocol=None, buffer_callback=None): + if protocol is None: + protocol = DEFAULT_PROTOCOL + Pickler.__init__( + self, file, protocol=protocol, buffer_callback=buffer_callback + ) + # map functions __globals__ attribute ids, to ensure that functions + # sharing the same global namespace at pickling time also share + # their global namespace at unpickling time. + self.globals_ref = {} + self.proto = int(protocol) + + def reducer_override(self, obj): + """Type-agnostic reducing callback for function and classes. + + For performance reasons, subclasses of the C _pickle.Pickler class + cannot register custom reducers for functions and classes in the + dispatch_table. Reducer for such types must instead implemented in + the special reducer_override method. 
+ + Note that method will be called for any object except a few + builtin-types (int, lists, dicts etc.), which differs from reducers + in the Pickler's dispatch_table, each of them being invoked for + objects of a specific type only. + + This property comes in handy for classes: although most classes are + instances of the ``type`` metaclass, some of them can be instances + of other custom metaclasses (such as enum.EnumMeta for example). In + particular, the metaclass will likely not be known in advance, and + thus cannot be special-cased using an entry in the dispatch_table. + reducer_override, among other things, allows us to register a + reducer that will be called for any class, independently of its + type. + + + Notes: + + * reducer_override has the priority over dispatch_table-registered + reducers. + * reducer_override can be used to fix other limitations of + cloudpickle for other types that suffered from type-specific + reducers, such as Exceptions. See + https://github.com/cloudpipe/cloudpickle/issues/248 + """ + if sys.version_info[:2] < (3, 7) and _is_parametrized_type_hint(obj): # noqa # pragma: no branch + return ( + _create_parametrized_type_hint, + parametrized_type_hint_getinitargs(obj) + ) + t = type(obj) + try: + is_anyclass = issubclass(t, type) + except TypeError: # t is not a class (old Boost; see SF #502085) + is_anyclass = False + + if is_anyclass: + return _class_reduce(obj) + elif isinstance(obj, types.FunctionType): + return self._function_reduce(obj) + else: + # fallback to save_global, including the Pickler's + # distpatch_table + return NotImplemented + + else: + # When reducer_override is not available, hack the pure-Python + # Pickler's types.FunctionType and type savers. Note: the type saver + # must override Pickler.save_global, because pickle.py contains a + # hard-coded call to save_global when pickling meta-classes. + dispatch = Pickler.dispatch.copy() + + def __init__(self, file, protocol=None): + if protocol is None: + protocol = DEFAULT_PROTOCOL + Pickler.__init__(self, file, protocol=protocol) + # map functions __globals__ attribute ids, to ensure that functions + # sharing the same global namespace at pickling time also share + # their global namespace at unpickling time. + self.globals_ref = {} + assert hasattr(self, 'proto') + + def _save_reduce_pickle5(self, func, args, state=None, listitems=None, + dictitems=None, state_setter=None, obj=None): + save = self.save + write = self.write + self.save_reduce( + func, args, state=None, listitems=listitems, + dictitems=dictitems, obj=obj + ) + # backport of the Python 3.8 state_setter pickle operations + save(state_setter) + save(obj) # simple BINGET opcode as obj is already memoized. + save(state) + write(pickle.TUPLE2) + # Trigger a state_setter(obj, state) function call. + write(pickle.REDUCE) + # The purpose of state_setter is to carry-out an + # inplace modification of obj. We do not care about what the + # method might return, so its output is eventually removed from + # the stack. + write(pickle.POP) + + def save_global(self, obj, name=None, pack=struct.pack): + """ + Save a "global". + + The name of this method is somewhat misleading: all types get + dispatched here. 
+ """ + if obj is type(None): # noqa + return self.save_reduce(type, (None,), obj=obj) + elif obj is type(Ellipsis): + return self.save_reduce(type, (Ellipsis,), obj=obj) + elif obj is type(NotImplemented): + return self.save_reduce(type, (NotImplemented,), obj=obj) + elif obj in _BUILTIN_TYPE_NAMES: + return self.save_reduce( + _builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj) + + if sys.version_info[:2] < (3, 7) and _is_parametrized_type_hint(obj): # noqa # pragma: no branch + # Parametrized typing constructs in Python < 3.7 are not + # compatible with type checks and ``isinstance`` semantics. For + # this reason, it is easier to detect them using a + # duck-typing-based check (``_is_parametrized_type_hint``) than + # to populate the Pickler's dispatch with type-specific savers. + self.save_reduce( + _create_parametrized_type_hint, + parametrized_type_hint_getinitargs(obj), + obj=obj + ) + elif name is not None: + Pickler.save_global(self, obj, name=name) + elif not _is_importable(obj, name=name): + self._save_reduce_pickle5(*_dynamic_class_reduce(obj), obj=obj) + else: + Pickler.save_global(self, obj, name=name) + dispatch[type] = save_global + + def save_function(self, obj, name=None): + """ Registered with the dispatch to handle all function types. + + Determines what kind of function obj is (e.g. lambda, defined at + interactive prompt, etc) and handles the pickling appropriately. + """ + if _is_importable(obj, name=name): + return Pickler.save_global(self, obj, name=name) + elif PYPY and isinstance(obj.__code__, builtin_code_type): + return self.save_pypy_builtin_func(obj) + else: + return self._save_reduce_pickle5( + *self._dynamic_function_reduce(obj), obj=obj + ) + + def save_pypy_builtin_func(self, obj): + """Save pypy equivalent of builtin functions. + PyPy does not have the concept of builtin-functions. Instead, + builtin-functions are simple function instances, but with a + builtin-code attribute. + Most of the time, builtin functions should be pickled by attribute. + But PyPy has flaky support for __qualname__, so some builtin + functions such as float.__new__ will be classified as dynamic. For + this reason only, we created this special routine. Because + builtin-functions are not expected to have closure or globals, + there is no additional hack (compared the one already implemented + in pickle) to protect ourselves from reference cycles. A simple + (reconstructor, newargs, obj.__dict__) tuple is save_reduced. Note + also that PyPy improved their support for __qualname__ in v3.6, so + this routing should be removed when cloudpickle supports only PyPy + 3.6 and later. 
+ """ + rv = (types.FunctionType, (obj.__code__, {}, obj.__name__, + obj.__defaults__, obj.__closure__), + obj.__dict__) + self.save_reduce(*rv, obj=obj) + + dispatch[types.FunctionType] = save_function diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/compat.py b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/compat.py new file mode 100644 index 0000000..afa285f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/cloudpickle/compat.py @@ -0,0 +1,13 @@ +import sys + + +if sys.version_info < (3, 8): + try: + import pickle5 as pickle # noqa: F401 + from pickle5 import Pickler # noqa: F401 + except ImportError: + import pickle # noqa: F401 + from pickle import _Pickler as Pickler # noqa: F401 +else: + import pickle # noqa: F401 + from _pickle import Pickler # noqa: F401 diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__init__.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__init__.py new file mode 100644 index 0000000..21f3bb6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__init__.py @@ -0,0 +1,25 @@ +r"""The :mod:`loky` module manages a pool of worker that can be re-used across time. +It provides a robust and dynamic implementation os the +:class:`ProcessPoolExecutor` and a function :func:`get_reusable_executor` which +hide the pool management under the hood. +""" +from ._base import Executor, Future +from ._base import wait, as_completed +from ._base import TimeoutError, CancelledError +from ._base import ALL_COMPLETED, FIRST_COMPLETED, FIRST_EXCEPTION + +from .backend.context import cpu_count +from .backend.reduction import set_loky_pickler +from .reusable_executor import get_reusable_executor +from .cloudpickle_wrapper import wrap_non_picklable_objects +from .process_executor import BrokenProcessPool, ProcessPoolExecutor + + +__all__ = ["get_reusable_executor", "cpu_count", "wait", "as_completed", + "Future", "Executor", "ProcessPoolExecutor", + "BrokenProcessPool", "CancelledError", "TimeoutError", + "FIRST_COMPLETED", "FIRST_EXCEPTION", "ALL_COMPLETED", + "wrap_non_picklable_objects", "set_loky_pickler"] + + +__version__ = '2.9.0' diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..e19ee06 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/_base.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/_base.cpython-36.pyc new file mode 100644 index 0000000..d10d227 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/_base.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/cloudpickle_wrapper.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/cloudpickle_wrapper.cpython-36.pyc new file mode 100644 index 0000000..56726aa Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/cloudpickle_wrapper.cpython-36.pyc differ diff --git 
a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/process_executor.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/process_executor.cpython-36.pyc new file mode 100644 index 0000000..3dc51be Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/process_executor.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/reusable_executor.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/reusable_executor.cpython-36.pyc new file mode 100644 index 0000000..dced741 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/__pycache__/reusable_executor.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/_base.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/_base.py new file mode 100644 index 0000000..92422bb --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/_base.py @@ -0,0 +1,627 @@ +############################################################################### +# Backport concurrent.futures for python2.7/3.3 +# +# author: Thomas Moreau and Olivier Grisel +# +# adapted from concurrent/futures/_base.py (17/02/2017) +# * Do not use yield from +# * Use old super syntax +# +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +import sys +import time +import logging +import threading +import collections + + +if sys.version_info[:2] >= (3, 3): + + from concurrent.futures import wait, as_completed + from concurrent.futures import TimeoutError, CancelledError + from concurrent.futures import Executor, Future as _BaseFuture + + from concurrent.futures import FIRST_EXCEPTION + from concurrent.futures import ALL_COMPLETED, FIRST_COMPLETED + + from concurrent.futures._base import LOGGER + from concurrent.futures._base import PENDING, RUNNING, CANCELLED + from concurrent.futures._base import CANCELLED_AND_NOTIFIED, FINISHED +else: + + FIRST_COMPLETED = 'FIRST_COMPLETED' + FIRST_EXCEPTION = 'FIRST_EXCEPTION' + ALL_COMPLETED = 'ALL_COMPLETED' + _AS_COMPLETED = '_AS_COMPLETED' + + # Possible future states (for internal use by the futures package). + PENDING = 'PENDING' + RUNNING = 'RUNNING' + # The future was cancelled by the user... + CANCELLED = 'CANCELLED' + # ...and _Waiter.add_cancelled() was called by a worker. + CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED' + FINISHED = 'FINISHED' + + _FUTURE_STATES = [ + PENDING, + RUNNING, + CANCELLED, + CANCELLED_AND_NOTIFIED, + FINISHED + ] + + _STATE_TO_DESCRIPTION_MAP = { + PENDING: "pending", + RUNNING: "running", + CANCELLED: "cancelled", + CANCELLED_AND_NOTIFIED: "cancelled", + FINISHED: "finished" + } + + # Logger for internal use by the futures package. 
+ LOGGER = logging.getLogger("concurrent.futures") + + class Error(Exception): + """Base class for all future-related exceptions.""" + pass + + class CancelledError(Error): + """The Future was cancelled.""" + pass + + class TimeoutError(Error): + """The operation exceeded the given deadline.""" + pass + + class _Waiter(object): + """Provides the event that wait() and as_completed() block on.""" + def __init__(self): + self.event = threading.Event() + self.finished_futures = [] + + def add_result(self, future): + self.finished_futures.append(future) + + def add_exception(self, future): + self.finished_futures.append(future) + + def add_cancelled(self, future): + self.finished_futures.append(future) + + class _AsCompletedWaiter(_Waiter): + """Used by as_completed().""" + + def __init__(self): + super(_AsCompletedWaiter, self).__init__() + self.lock = threading.Lock() + + def add_result(self, future): + with self.lock: + super(_AsCompletedWaiter, self).add_result(future) + self.event.set() + + def add_exception(self, future): + with self.lock: + super(_AsCompletedWaiter, self).add_exception(future) + self.event.set() + + def add_cancelled(self, future): + with self.lock: + super(_AsCompletedWaiter, self).add_cancelled(future) + self.event.set() + + class _FirstCompletedWaiter(_Waiter): + """Used by wait(return_when=FIRST_COMPLETED).""" + + def add_result(self, future): + super(_FirstCompletedWaiter, self).add_result(future) + self.event.set() + + def add_exception(self, future): + super(_FirstCompletedWaiter, self).add_exception(future) + self.event.set() + + def add_cancelled(self, future): + super(_FirstCompletedWaiter, self).add_cancelled(future) + self.event.set() + + class _AllCompletedWaiter(_Waiter): + """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED).""" + + def __init__(self, num_pending_calls, stop_on_exception): + self.num_pending_calls = num_pending_calls + self.stop_on_exception = stop_on_exception + self.lock = threading.Lock() + super(_AllCompletedWaiter, self).__init__() + + def _decrement_pending_calls(self): + with self.lock: + self.num_pending_calls -= 1 + if not self.num_pending_calls: + self.event.set() + + def add_result(self, future): + super(_AllCompletedWaiter, self).add_result(future) + self._decrement_pending_calls() + + def add_exception(self, future): + super(_AllCompletedWaiter, self).add_exception(future) + if self.stop_on_exception: + self.event.set() + else: + self._decrement_pending_calls() + + def add_cancelled(self, future): + super(_AllCompletedWaiter, self).add_cancelled(future) + self._decrement_pending_calls() + + class _AcquireFutures(object): + """A context manager that does an ordered acquire of Future conditions. 
+ """ + + def __init__(self, futures): + self.futures = sorted(futures, key=id) + + def __enter__(self): + for future in self.futures: + future._condition.acquire() + + def __exit__(self, *args): + for future in self.futures: + future._condition.release() + + def _create_and_install_waiters(fs, return_when): + if return_when == _AS_COMPLETED: + waiter = _AsCompletedWaiter() + elif return_when == FIRST_COMPLETED: + waiter = _FirstCompletedWaiter() + else: + pending_count = sum( + f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] + for f in fs) + + if return_when == FIRST_EXCEPTION: + waiter = _AllCompletedWaiter(pending_count, + stop_on_exception=True) + elif return_when == ALL_COMPLETED: + waiter = _AllCompletedWaiter(pending_count, + stop_on_exception=False) + else: + raise ValueError("Invalid return condition: %r" % return_when) + + for f in fs: + f._waiters.append(waiter) + + return waiter + + def as_completed(fs, timeout=None): + """An iterator over the given futures that yields each as it completes. + + Args: + fs: The sequence of Futures (possibly created by different + Executors) to iterate over. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + + Returns: + An iterator that yields the given Futures as they complete + (finished or cancelled). If any given Futures are duplicated, they + will be returned once. + + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + """ + if timeout is not None: + end_time = timeout + time.time() + + fs = set(fs) + with _AcquireFutures(fs): + finished = set( + f for f in fs + if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) + pending = fs - finished + waiter = _create_and_install_waiters(fs, _AS_COMPLETED) + + try: + for future in finished: + yield future + + while pending: + if timeout is None: + wait_timeout = None + else: + wait_timeout = end_time - time.time() + if wait_timeout < 0: + raise TimeoutError('%d (of %d) futures unfinished' % ( + len(pending), len(fs))) + + waiter.event.wait(wait_timeout) + + with waiter.lock: + finished = waiter.finished_futures + waiter.finished_futures = [] + waiter.event.clear() + + for future in finished: + yield future + pending.remove(future) + + finally: + for f in fs: + with f._condition: + f._waiters.remove(waiter) + + DoneAndNotDoneFutures = collections.namedtuple( + 'DoneAndNotDoneFutures', 'done not_done') + + def wait(fs, timeout=None, return_when=ALL_COMPLETED): + """Wait for the futures in the given sequence to complete. + + Args: + fs: The sequence of Futures (possibly created by different + Executors) to wait upon. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + return_when: Indicates when this function should return. The + options are: + + FIRST_COMPLETED - Return when any future finishes or is + cancelled. + FIRST_EXCEPTION - Return when any future finishes by raising an + exception. If no future raises an exception + then it is equivalent to ALL_COMPLETED. + ALL_COMPLETED - Return when all futures finish or are + cancelled. + + Returns: + A named 2-tuple of sets. The first set, named 'done', contains the + futures that completed (is finished or cancelled) before the wait + completed. The second set, named 'not_done', contains uncompleted + futures. 
+ """ + with _AcquireFutures(fs): + done = set(f for f in fs + if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) + not_done = set(fs) - done + + if (return_when == FIRST_COMPLETED) and done: + return DoneAndNotDoneFutures(done, not_done) + elif (return_when == FIRST_EXCEPTION) and done: + if any(f for f in done + if not f.cancelled() and f.exception() is not None): + return DoneAndNotDoneFutures(done, not_done) + + if len(done) == len(fs): + return DoneAndNotDoneFutures(done, not_done) + + waiter = _create_and_install_waiters(fs, return_when) + + waiter.event.wait(timeout) + for f in fs: + with f._condition: + f._waiters.remove(waiter) + + done.update(waiter.finished_futures) + return DoneAndNotDoneFutures(done, set(fs) - done) + + class _BaseFuture(object): + """Represents the result of an asynchronous computation.""" + + def __init__(self): + """Initializes the future. Should not be called by clients.""" + self._condition = threading.Condition() + self._state = PENDING + self._result = None + self._exception = None + self._waiters = [] + self._done_callbacks = [] + + def __repr__(self): + with self._condition: + if self._state == FINISHED: + if self._exception: + return '<%s at %#x state=%s raised %s>' % ( + self.__class__.__name__, + id(self), + _STATE_TO_DESCRIPTION_MAP[self._state], + self._exception.__class__.__name__) + else: + return '<%s at %#x state=%s returned %s>' % ( + self.__class__.__name__, + id(self), + _STATE_TO_DESCRIPTION_MAP[self._state], + self._result.__class__.__name__) + return '<%s at %#x state=%s>' % ( + self.__class__.__name__, + id(self), + _STATE_TO_DESCRIPTION_MAP[self._state]) + + def cancel(self): + """Cancel the future if possible. + + Returns True if the future was cancelled, False otherwise. A future + cannot be cancelled if it is running or has already completed. + """ + with self._condition: + if self._state in [RUNNING, FINISHED]: + return False + + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + return True + + self._state = CANCELLED + self._condition.notify_all() + + self._invoke_callbacks() + return True + + def cancelled(self): + """Return True if the future was cancelled.""" + with self._condition: + return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED] + + def running(self): + """Return True if the future is currently executing.""" + with self._condition: + return self._state == RUNNING + + def done(self): + """Return True of the future was cancelled or finished executing. + """ + with self._condition: + return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, + FINISHED] + + def __get_result(self): + if self._exception: + raise self._exception + else: + return self._result + + def add_done_callback(self, fn): + """Attaches a callable that will be called when the future finishes. + + Args: + fn: A callable that will be called with this future as its only + argument when the future completes or is cancelled. The + callable will always be called by a thread in the same + process in which it was added. If the future has already + completed or been cancelled then the callable will be + called immediately. These callables are called in the order + that they were added. + """ + with self._condition: + if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, + FINISHED]: + self._done_callbacks.append(fn) + return + fn(self) + + def result(self, timeout=None): + """Return the result of the call that the future represents. + + Args: + timeout: The number of seconds to wait for the result if the + future isn't done. 
If None, then there is no limit on the + wait time. + + Returns: + The result of the call that the future represents. + + Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the + given timeout. + Exception: If the call raised then that exception will be + raised. + """ + with self._condition: + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self.__get_result() + + self._condition.wait(timeout) + + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self.__get_result() + else: + raise TimeoutError() + + def exception(self, timeout=None): + """Return the exception raised by the call that the future + represents. + + Args: + timeout: The number of seconds to wait for the exception if the + future isn't done. If None, then there is no limit on the + wait time. + + Returns: + The exception raised by the call that the future represents or + None if the call completed without raising. + + Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the + given timeout. + """ + + with self._condition: + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self._exception + + self._condition.wait(timeout) + + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self._exception + else: + raise TimeoutError() + + # The following methods should only be used by Executors and in tests. + def set_running_or_notify_cancel(self): + """Mark the future as running or process any cancel notifications. + + Should only be used by Executor implementations and unit tests. + + If the future has been cancelled (cancel() was called and returned + True) then any threads waiting on the future completing (though + calls to as_completed() or wait()) are notified and False is + returned. + + If the future was not cancelled then it is put in the running state + (future calls to running() will return True) and True is returned. + + This method should be called by Executor implementations before + executing the work associated with this future. If this method + returns False then the work should not be executed. + + Returns: + False if the Future was cancelled, True otherwise. + + Raises: + RuntimeError: if this method was already called or if + set_result() or set_exception() was called. + """ + with self._condition: + if self._state == CANCELLED: + self._state = CANCELLED_AND_NOTIFIED + for waiter in self._waiters: + waiter.add_cancelled(self) + # self._condition.notify_all() is not necessary because + # self.cancel() triggers a notification. + return False + elif self._state == PENDING: + self._state = RUNNING + return True + else: + LOGGER.critical('Future %s in unexpected state: %s', + id(self), + self._state) + raise RuntimeError('Future in unexpected state') + + def set_result(self, result): + """Sets the return value of work associated with the future. + + Should only be used by Executor implementations and unit tests. + """ + with self._condition: + self._result = result + self._state = FINISHED + for waiter in self._waiters: + waiter.add_result(self) + self._condition.notify_all() + self._invoke_callbacks() + + def set_exception(self, exception): + """Sets the result of the future as being the given exception. 
+ + Should only be used by Executor implementations and unit tests. + """ + with self._condition: + self._exception = exception + self._state = FINISHED + for waiter in self._waiters: + waiter.add_exception(self) + self._condition.notify_all() + self._invoke_callbacks() + + class Executor(object): + """This is an abstract base class for concrete asynchronous executors. + """ + + def submit(self, fn, *args, **kwargs): + """Submits a callable to be executed with the given arguments. + + Schedules the callable to be executed as fn(*args, **kwargs) and + returns a Future instance representing the execution of the + callable. + + Returns: + A Future representing the given call. + """ + raise NotImplementedError() + + def map(self, fn, *iterables, **kwargs): + """Returns an iterator equivalent to map(fn, iter). + + Args: + fn: A callable that will take as many arguments as there are + passed iterables. + timeout: The maximum number of seconds to wait. If None, then + there is no limit on the wait time. + chunksize: The size of the chunks the iterable will be broken + into before being passed to a child process. This argument + is only used by ProcessPoolExecutor; it is ignored by + ThreadPoolExecutor. + + Returns: + An iterator equivalent to: map(func, *iterables) but the calls + may be evaluated out-of-order. + + Raises: + TimeoutError: If the entire result iterator could not be + generated before the given timeout. + Exception: If fn(*args) raises for any values. + """ + timeout = kwargs.get('timeout') + if timeout is not None: + end_time = timeout + time.time() + + fs = [self.submit(fn, *args) for args in zip(*iterables)] + + # Yield must be hidden in closure so that the futures are submitted + # before the first iterator value is required. + def result_iterator(): + try: + for future in fs: + if timeout is None: + yield future.result() + else: + yield future.result(end_time - time.time()) + finally: + for future in fs: + future.cancel() + return result_iterator() + + def shutdown(self, wait=True): + """Clean-up the resources associated with the Executor. + + It is safe to call this method several times. Otherwise, no other + methods can be called after this one. + + Args: + wait: If True then shutdown will not return until all running + futures have finished executing and the resources used by + the executor have been reclaimed. + """ + pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.shutdown(wait=True) + return False + + +# To make loky._base.Future instances awaitable by concurrent.futures.wait, +# derive our custom Future class from _BaseFuture. _invoke_callback is the only +# modification made to this class in loky. 
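To show the Executor/Future surface defined above in use, here is a small sketch built on loky's get_reusable_executor (exported by the loky/__init__.py added earlier in this diff); the joblib.externals import path is the vendored location and is an assumption of this sketch, a standalone loky install exposes the same names.

from joblib.externals.loky import get_reusable_executor

def square(x):
    return x * x

if __name__ == "__main__":
    executor = get_reusable_executor(max_workers=2)

    future = executor.submit(square, 6)
    future.add_done_callback(lambda f: print("callback saw:", f.result()))
    print(future.result(timeout=30))              # 36

    # Executor.map keeps input order, even if calls finish out of order.
    print(list(executor.map(square, range(4))))   # [0, 1, 4, 9]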
+class Future(_BaseFuture): + def _invoke_callbacks(self): + for callback in self._done_callbacks: + try: + callback(self) + except BaseException: + LOGGER.exception('exception calling callback for %r', self) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__init__.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__init__.py new file mode 100644 index 0000000..a65ce0e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__init__.py @@ -0,0 +1,16 @@ +import os +import sys + +from .context import get_context + +if sys.version_info > (3, 4): + + def _make_name(): + name = '/loky-%i-%s' % (os.getpid(), next(synchronize.SemLock._rand)) + return name + + # monkey patch the name creation for multiprocessing + from multiprocessing import synchronize + synchronize.SemLock._make_name = staticmethod(_make_name) + +__all__ = ["get_context"] diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..2e48ae7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_posix_reduction.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_posix_reduction.cpython-36.pyc new file mode 100644 index 0000000..b8f08d9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_posix_reduction.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_posix_wait.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_posix_wait.cpython-36.pyc new file mode 100644 index 0000000..a740142 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_posix_wait.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-36.pyc new file mode 100644 index 0000000..83e04b9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_win_reduction.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_win_wait.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_win_wait.cpython-36.pyc new file mode 100644 index 0000000..5c53ce6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/_win_wait.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/compat.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/compat.cpython-36.pyc new file mode 100644 index 0000000..4726830 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/compat.cpython-36.pyc differ diff --git 
a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/compat_posix.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/compat_posix.cpython-36.pyc new file mode 100644 index 0000000..8229144 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/compat_posix.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/compat_win32.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/compat_win32.cpython-36.pyc new file mode 100644 index 0000000..8ce5930 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/compat_win32.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/context.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/context.cpython-36.pyc new file mode 100644 index 0000000..8b8508b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/context.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/fork_exec.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/fork_exec.cpython-36.pyc new file mode 100644 index 0000000..bbb8a00 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/fork_exec.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/managers.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/managers.cpython-36.pyc new file mode 100644 index 0000000..321b5c1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/managers.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/popen_loky_posix.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/popen_loky_posix.cpython-36.pyc new file mode 100644 index 0000000..03f6742 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/popen_loky_posix.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/popen_loky_win32.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/popen_loky_win32.cpython-36.pyc new file mode 100644 index 0000000..c885921 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/popen_loky_win32.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/process.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/process.cpython-36.pyc new file mode 100644 index 0000000..95c1897 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/process.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/queues.cpython-36.pyc 
b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/queues.cpython-36.pyc new file mode 100644 index 0000000..7e7e28b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/queues.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/reduction.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/reduction.cpython-36.pyc new file mode 100644 index 0000000..4ba51ee Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/reduction.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-36.pyc new file mode 100644 index 0000000..4044db5 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/resource_tracker.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/semlock.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/semlock.cpython-36.pyc new file mode 100644 index 0000000..e65316a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/semlock.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/spawn.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/spawn.cpython-36.pyc new file mode 100644 index 0000000..1c69d34 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/spawn.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/synchronize.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/synchronize.cpython-36.pyc new file mode 100644 index 0000000..1df8aa1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/synchronize.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/utils.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/utils.cpython-36.pyc new file mode 100644 index 0000000..f632ab6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/__pycache__/utils.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_posix_reduction.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_posix_reduction.py new file mode 100644 index 0000000..e0e394d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_posix_reduction.py @@ -0,0 +1,76 @@ +############################################################################### +# Extra reducers for Unix based system and connections objects +# +# author: Thomas Moreau and Olivier Grisel +# +# adapted from multiprocessing/reduction.py (17/02/2017) +# * Add adapted reduction for LokyProcesses and socket/Connection +# +import os +import sys +import 
socket +import _socket + +from .reduction import register +from .context import get_spawning_popen + +if sys.version_info >= (3, 3): + from multiprocessing.connection import Connection +else: + from _multiprocessing import Connection + + +HAVE_SEND_HANDLE = (hasattr(socket, 'CMSG_LEN') and + hasattr(socket, 'SCM_RIGHTS') and + hasattr(socket.socket, 'sendmsg')) + + +def _mk_inheritable(fd): + if sys.version_info[:2] > (3, 3): + os.set_inheritable(fd, True) + return fd + + +def DupFd(fd): + '''Return a wrapper for an fd.''' + popen_obj = get_spawning_popen() + if popen_obj is not None: + return popen_obj.DupFd(popen_obj.duplicate_for_child(fd)) + elif HAVE_SEND_HANDLE and sys.version_info[:2] > (3, 3): + from multiprocessing import resource_sharer + return resource_sharer.DupFd(fd) + else: + raise TypeError( + 'Cannot pickle connection object. This object can only be ' + 'passed when spawning a new process' + ) + + +if sys.version_info[:2] != (3, 3): + def _reduce_socket(s): + df = DupFd(s.fileno()) + return _rebuild_socket, (df, s.family, s.type, s.proto) + + def _rebuild_socket(df, family, type, proto): + fd = df.detach() + return socket.fromfd(fd, family, type, proto) +else: + from multiprocessing.reduction import reduce_socket as _reduce_socket + + +register(socket.socket, _reduce_socket) +register(_socket.socket, _reduce_socket) + + +if sys.version_info[:2] != (3, 3): + def reduce_connection(conn): + df = DupFd(conn.fileno()) + return rebuild_connection, (df, conn.readable, conn.writable) + + def rebuild_connection(df, readable, writable): + fd = df.detach() + return Connection(fd, readable, writable) +else: + from multiprocessing.reduction import reduce_connection + +register(Connection, reduce_connection) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_posix_wait.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_posix_wait.py new file mode 100644 index 0000000..d935882 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_posix_wait.py @@ -0,0 +1,105 @@ +############################################################################### +# Compat for wait function on UNIX based system +# +# author: Thomas Moreau and Olivier Grisel +# +# adapted from multiprocessing/connection.py (17/02/2017) +# * Backport wait function to python2.7 +# + +import platform +import select +import socket +import errno +SYSTEM = platform.system() + +try: + import ctypes +except ImportError: # pragma: no cover + ctypes = None # noqa + +if SYSTEM == 'Darwin' and ctypes is not None: + from ctypes.util import find_library + libSystem = ctypes.CDLL(find_library('libSystem.dylib')) + CoreServices = ctypes.CDLL(find_library('CoreServices'), + use_errno=True) + mach_absolute_time = libSystem.mach_absolute_time + mach_absolute_time.restype = ctypes.c_uint64 + absolute_to_nanoseconds = CoreServices.AbsoluteToNanoseconds + absolute_to_nanoseconds.restype = ctypes.c_uint64 + absolute_to_nanoseconds.argtypes = [ctypes.c_uint64] + + def monotonic(): + return absolute_to_nanoseconds(mach_absolute_time()) * 1e-9 + +elif SYSTEM == 'Linux' and ctypes is not None: + # from stackoverflow: + # questions/1205722/how-do-i-get-monotonic-time-durations-in-python + import ctypes + import os + + CLOCK_MONOTONIC = 1 # see + + class timespec(ctypes.Structure): + _fields_ = [ + ('tv_sec', ctypes.c_long), + ('tv_nsec', ctypes.c_long), + ] + + librt = ctypes.CDLL('librt.so.1', use_errno=True) + clock_gettime = librt.clock_gettime + 
clock_gettime.argtypes = [ + ctypes.c_int, ctypes.POINTER(timespec), + ] + + def monotonic(): # noqa + t = timespec() + if clock_gettime(CLOCK_MONOTONIC, ctypes.pointer(t)) != 0: + errno_ = ctypes.get_errno() + raise OSError(errno_, os.strerror(errno_)) + return t.tv_sec + t.tv_nsec * 1e-9 +else: # pragma: no cover + from time import time as monotonic + + +if hasattr(select, 'poll'): + def _poll(fds, timeout): + if timeout is not None: + timeout = int(timeout * 1000) # timeout is in milliseconds + fd_map = {} + pollster = select.poll() + for fd in fds: + pollster.register(fd, select.POLLIN) + if hasattr(fd, 'fileno'): + fd_map[fd.fileno()] = fd + else: + fd_map[fd] = fd + ls = [] + for fd, event in pollster.poll(timeout): + if event & select.POLLNVAL: # pragma: no cover + raise ValueError('invalid file descriptor %i' % fd) + ls.append(fd_map[fd]) + return ls +else: + def _poll(fds, timeout): + return select.select(fds, [], [], timeout)[0] + + +def wait(object_list, timeout=None): + ''' + Wait till an object in object_list is ready/readable. + Returns list of those objects which are ready/readable. + ''' + if timeout is not None: + if timeout <= 0: + return _poll(object_list, 0) + else: + deadline = monotonic() + timeout + while True: + try: + return _poll(object_list, timeout) + except (OSError, IOError, socket.error) as e: # pragma: no cover + if e.errno != errno.EINTR: + raise + if timeout is not None: + timeout = deadline - monotonic() diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_win_reduction.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_win_reduction.py new file mode 100644 index 0000000..142e6e7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_win_reduction.py @@ -0,0 +1,99 @@ +############################################################################### +# Extra reducers for Windows system and connections objects +# +# author: Thomas Moreau and Olivier Grisel +# +# adapted from multiprocessing/reduction.py (17/02/2017) +# * Add adapted reduction for LokyProcesses and socket/PipeConnection +# +import os +import sys +import socket +from .reduction import register + + +if sys.platform == 'win32': + if sys.version_info[:2] < (3, 3): + from _multiprocessing import PipeConnection + else: + import _winapi + from multiprocessing.connection import PipeConnection + + +if sys.version_info[:2] >= (3, 4) and sys.platform == 'win32': + class DupHandle(object): + def __init__(self, handle, access, pid=None): + # duplicate handle for process with given pid + if pid is None: + pid = os.getpid() + proc = _winapi.OpenProcess(_winapi.PROCESS_DUP_HANDLE, False, pid) + try: + self._handle = _winapi.DuplicateHandle( + _winapi.GetCurrentProcess(), + handle, proc, access, False, 0) + finally: + _winapi.CloseHandle(proc) + self._access = access + self._pid = pid + + def detach(self): + # retrieve handle from process which currently owns it + if self._pid == os.getpid(): + return self._handle + proc = _winapi.OpenProcess(_winapi.PROCESS_DUP_HANDLE, False, + self._pid) + try: + return _winapi.DuplicateHandle( + proc, self._handle, _winapi.GetCurrentProcess(), + self._access, False, _winapi.DUPLICATE_CLOSE_SOURCE) + finally: + _winapi.CloseHandle(proc) + + def reduce_pipe_connection(conn): + access = ((_winapi.FILE_GENERIC_READ if conn.readable else 0) | + (_winapi.FILE_GENERIC_WRITE if conn.writable else 0)) + dh = DupHandle(conn.fileno(), access) + return rebuild_pipe_connection, (dh, 
conn.readable, conn.writable) + + def rebuild_pipe_connection(dh, readable, writable): + from multiprocessing.connection import PipeConnection + handle = dh.detach() + return PipeConnection(handle, readable, writable) + register(PipeConnection, reduce_pipe_connection) + +elif sys.platform == 'win32': + # Older Python versions + from multiprocessing.reduction import reduce_pipe_connection + register(PipeConnection, reduce_pipe_connection) + + +if sys.version_info[:2] < (3, 3) and sys.platform == 'win32': + from _multiprocessing import win32 + from multiprocessing.reduction import reduce_handle, rebuild_handle + close = win32.CloseHandle + + def fromfd(handle, family, type_, proto=0): + s = socket.socket(family, type_, proto, fileno=handle) + if s.__class__ is not socket.socket: + s = socket.socket(_sock=s) + return s + + def reduce_socket(s): + if not hasattr(socket, "fromfd"): + raise TypeError("sockets cannot be pickled on this system.") + reduced_handle = reduce_handle(s.fileno()) + return _rebuild_socket, (reduced_handle, s.family, s.type, s.proto) + + def _rebuild_socket(reduced_handle, family, type_, proto): + handle = rebuild_handle(reduced_handle) + s = fromfd(handle, family, type_, proto) + close(handle) + return s + + register(socket.socket, reduce_socket) +elif sys.version_info[:2] < (3, 4): + from multiprocessing.reduction import reduce_socket + register(socket.socket, reduce_socket) +else: + from multiprocessing.reduction import _reduce_socket + register(socket.socket, _reduce_socket) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_win_wait.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_win_wait.py new file mode 100644 index 0000000..7327131 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/_win_wait.py @@ -0,0 +1,58 @@ +############################################################################### +# Compat for wait function on Windows system +# +# author: Thomas Moreau and Olivier Grisel +# +# adapted from multiprocessing/connection.py (17/02/2017) +# * Backport wait function to python2.7 +# + +import ctypes +import sys +from time import sleep + + +if sys.platform == 'win32' and sys.version_info[:2] < (3, 3): + from _subprocess import WaitForSingleObject, WAIT_OBJECT_0 + + try: + from time import monotonic + except ImportError: + # Backward old for crappy old Python that did not have cross-platform + # monotonic clock by default. + + # TODO: do we want to add support for cygwin at some point? See: + # https://github.com/atdt/monotonic/blob/master/monotonic.py + GetTickCount64 = ctypes.windll.kernel32.GetTickCount64 + GetTickCount64.restype = ctypes.c_ulonglong + + def monotonic(): + """Monotonic clock, cannot go backward.""" + return GetTickCount64() / 1000.0 + + def wait(handles, timeout=None): + """Backward compat for python2.7 + + This function wait for either: + * one connection is ready for read, + * one process handle has exited or got killed, + * timeout is reached. Note that this function has a precision of 2 + msec. 
+ """ + if timeout is not None: + deadline = monotonic() + timeout + + while True: + # We cannot use select as in windows it only support sockets + ready = [] + for h in handles: + if type(h) in [int, long]: + if WaitForSingleObject(h, 0) == WAIT_OBJECT_0: + ready += [h] + elif h.poll(0): + ready.append(h) + if len(ready) > 0: + return ready + sleep(.001) + if timeout is not None and deadline - monotonic() <= 0: + return [] diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/compat.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/compat.py new file mode 100644 index 0000000..aa406c6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/compat.py @@ -0,0 +1,41 @@ +############################################################################### +# Compat file to import the correct modules for each platform and python +# version. +# +# author: Thomas Moreau and Olivier grisel +# +import sys + +PY3 = sys.version_info[:2] >= (3, 3) + +if PY3: + import queue +else: + import Queue as queue + +if sys.version_info >= (3, 4): + from multiprocessing.process import BaseProcess +else: + from multiprocessing.process import Process as BaseProcess + +# Platform specific compat +if sys.platform == "win32": + from .compat_win32 import wait +else: + from .compat_posix import wait + + +def set_cause(exc, cause): + exc.__cause__ = cause + + if not PY3: + # Preformat message here. + if exc.__cause__ is not None: + exc.args = ("{}\n\nThis was caused directly by {}".format( + exc.args if len(exc.args) != 1 else exc.args[0], + str(exc.__cause__)),) + + return exc + + +__all__ = ["queue", "BaseProcess", "set_cause", "wait"] diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/compat_posix.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/compat_posix.py new file mode 100644 index 0000000..c8e4e4a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/compat_posix.py @@ -0,0 +1,13 @@ +# flake8: noqa +############################################################################### +# Compat file to load the correct wait function +# +# author: Thomas Moreau and Olivier grisel +# +import sys + +# Compat wait +if sys.version_info < (3, 3): + from ._posix_wait import wait +else: + from multiprocessing.connection import wait diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/compat_win32.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/compat_win32.py new file mode 100644 index 0000000..5df15f5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/compat_win32.py @@ -0,0 +1,46 @@ +# flake8: noqa: F401 +import sys +import numbers + +if sys.platform == "win32": + # Avoid import error by code introspection tools such as test runners + # trying to import this module while running on non-Windows systems. 
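The cross-platform wait helper that compat.py selects above (multiprocessing.connection.wait on Python 3.3+, the backports otherwise) is used like this; a minimal sketch on the standard library implementation:

import multiprocessing as mp
from multiprocessing.connection import wait   # what compat.py re-exports on 3.3+

def child(conn):
    conn.send("ready")
    conn.close()

if __name__ == "__main__":
    parent_end, child_end = mp.Pipe()
    proc = mp.Process(target=child, args=(child_end,))
    proc.start()

    # Blocks until the connection or the process sentinel becomes readable.
    for obj in wait([parent_end, proc.sentinel], timeout=5):
        if obj is parent_end:
            print(parent_end.recv())          # "ready"
    proc.join()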
+ + # Compat Popen + if sys.version_info[:2] >= (3, 4): + from multiprocessing.popen_spawn_win32 import Popen + else: + from multiprocessing.forking import Popen + + # wait compat + if sys.version_info[:2] < (3, 3): + from ._win_wait import wait + else: + from multiprocessing.connection import wait + + # Compat _winapi + if sys.version_info[:2] >= (3, 4): + import _winapi + else: + import os + import msvcrt + if sys.version_info[:2] < (3, 3): + import _subprocess as win_api + from _multiprocessing import win32 + else: + import _winapi as win_api + + class _winapi: + CreateProcess = win_api.CreateProcess + + @staticmethod + def CloseHandle(h): + if isinstance(h, numbers.Integral): + # Cast long to int for 64-bit Python 2.7 under Windows + h = int(h) + if sys.version_info[:2] < (3, 3): + if not isinstance(h, int): + h = h.Detach() + win32.CloseHandle(h) + else: + win_api.CloseHandle(h) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/context.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/context.py new file mode 100644 index 0000000..76f6520 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/context.py @@ -0,0 +1,367 @@ +############################################################################### +# Basic context management with LokyContext and provides +# compat for UNIX 2.7 and 3.3 +# +# author: Thomas Moreau and Olivier Grisel +# +# adapted from multiprocessing/context.py +# * Create a context ensuring loky uses only objects that are compatible +# * Add LokyContext to the list of context of multiprocessing so loky can be +# used with multiprocessing.set_start_method +# * Add some compat function for python2.7 and 3.3. +# +from __future__ import division + +import os +import sys +import subprocess +import traceback +import warnings +import multiprocessing as mp + + +from .process import LokyProcess, LokyInitMainProcess + +START_METHODS = ['loky', 'loky_init_main'] +_DEFAULT_START_METHOD = None + +# Cache for the number of physical cores to avoid repeating subprocess calls. +# It should not change during the lifetime of the program. +physical_cores_cache = None + +if sys.version_info[:2] >= (3, 4): + from multiprocessing import get_context as mp_get_context + from multiprocessing.context import assert_spawning, set_spawning_popen + from multiprocessing.context import get_spawning_popen, BaseContext + + START_METHODS += ['spawn'] + if sys.platform != 'win32': + START_METHODS += ['fork', 'forkserver'] + + def get_context(method=None): + # Try to overload the default context + method = method or _DEFAULT_START_METHOD or "loky" + if method == "fork": + # If 'fork' is explicitly requested, warn user about potential + # issues. + warnings.warn("`fork` start method should not be used with " + "`loky` as it does not respect POSIX. Try using " + "`spawn` or `loky` instead.", UserWarning) + try: + context = mp_get_context(method) + except ValueError: + raise ValueError("Unknown context '{}'. Value should be in {}." 
+ .format(method, START_METHODS)) + + return context + +else: + if sys.platform != 'win32': + import threading + # Mechanism to check that the current thread is spawning a process + _tls = threading.local() + popen_attr = 'spawning_popen' + else: + from multiprocessing.forking import Popen + _tls = Popen._tls + popen_attr = 'process_handle' + + BaseContext = object + + def get_spawning_popen(): + return getattr(_tls, popen_attr, None) + + def set_spawning_popen(popen): + setattr(_tls, popen_attr, popen) + + def assert_spawning(obj): + if get_spawning_popen() is None: + raise RuntimeError( + '%s objects should only be shared between processes' + ' through inheritance' % type(obj).__name__ + ) + + def get_context(method=None): + method = method or _DEFAULT_START_METHOD or 'loky' + if method == "loky": + return LokyContext() + elif method == "loky_init_main": + return LokyInitMainContext() + else: + raise ValueError("Unknown context '{}'. Value should be in {}." + .format(method, START_METHODS)) + + +def set_start_method(method, force=False): + global _DEFAULT_START_METHOD + if _DEFAULT_START_METHOD is not None and not force: + raise RuntimeError('context has already been set') + assert method is None or method in START_METHODS, ( + "'{}' is not a valid start_method. It should be in {}" + .format(method, START_METHODS)) + + _DEFAULT_START_METHOD = method + + +def get_start_method(): + return _DEFAULT_START_METHOD + + +def cpu_count(only_physical_cores=False): + """Return the number of CPUs the current process can use. + + The returned number of CPUs accounts for: + * the number of CPUs in the system, as given by + ``multiprocessing.cpu_count``; + * the CPU affinity settings of the current process + (available with Python 3.4+ on some Unix systems); + * CFS scheduler CPU bandwidth limit (available on Linux only, typically + set by docker and similar container orchestration systems); + * the value of the LOKY_MAX_CPU_COUNT environment variable if defined. + and is given as the minimum of these constraints. + + If ``only_physical_cores`` is True, return the number of physical cores + instead of the number of logical cores (hyperthreading / SMT). Note that + this option is not enforced if the number of usable cores is controlled in + any other way such as: process affinity, restricting CFS scheduler policy + or the LOKY_MAX_CPU_COUNT environment variable. If the number of physical + cores is not found, return the number of logical cores. + + It is also always larger or equal to 1. + """ + # TODO: use os.cpu_count when dropping python 2 support + try: + cpu_count_mp = mp.cpu_count() + except NotImplementedError: + cpu_count_mp = 1 + + cpu_count_user = _cpu_count_user(cpu_count_mp) + aggregate_cpu_count = min(cpu_count_mp, cpu_count_user) + + if only_physical_cores: + cpu_count_physical, exception = _count_physical_cores() + if cpu_count_user < cpu_count_mp: + # Respect user setting + cpu_count = max(cpu_count_user, 1) + elif cpu_count_physical == "not found": + # Fallback to default behavior + if exception is not None: + # warns only the first time + warnings.warn( + "Could not find the number of physical cores for the " + "following reason:\n" + str(exception) + "\n" + "Returning the number of logical cores instead. 
You can " + "silence this warning by setting LOKY_MAX_CPU_COUNT to " + "the number of cores you want to use.") + if sys.version_info >= (3, 5): + # TODO remove the version check when dropping py2 support + traceback.print_tb(exception.__traceback__) + + cpu_count = max(aggregate_cpu_count, 1) + else: + return cpu_count_physical + else: + cpu_count = max(aggregate_cpu_count, 1) + + return cpu_count + + +def _cpu_count_user(cpu_count_mp): + """Number of user defined available CPUs""" + import math + + # Number of available CPUs given affinity settings + cpu_count_affinity = cpu_count_mp + if hasattr(os, 'sched_getaffinity'): + try: + cpu_count_affinity = len(os.sched_getaffinity(0)) + except NotImplementedError: + pass + + # CFS scheduler CPU bandwidth limit + # available in Linux since 2.6 kernel + cpu_count_cfs = cpu_count_mp + cfs_quota_fname = "/sys/fs/cgroup/cpu/cpu.cfs_quota_us" + cfs_period_fname = "/sys/fs/cgroup/cpu/cpu.cfs_period_us" + if os.path.exists(cfs_quota_fname) and os.path.exists(cfs_period_fname): + with open(cfs_quota_fname, 'r') as fh: + cfs_quota_us = int(fh.read()) + with open(cfs_period_fname, 'r') as fh: + cfs_period_us = int(fh.read()) + + if cfs_quota_us > 0 and cfs_period_us > 0: + # Make sure this quantity is an int as math.ceil returns a + # float in python2.7. (See issue #165) + cpu_count_cfs = int(math.ceil(cfs_quota_us / cfs_period_us)) + + # User defined soft-limit passed as a loky specific environment variable. + cpu_count_loky = int(os.environ.get('LOKY_MAX_CPU_COUNT', cpu_count_mp)) + + return min(cpu_count_affinity, cpu_count_cfs, cpu_count_loky) + + +def _count_physical_cores(): + """Return a tuple (number of physical cores, exception) + + If the number of physical cores is found, exception is set to None. + If it has not been found, return ("not found", exception). + + The number of physical cores is cached to avoid repeating subprocess calls. 
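The constraints combined by cpu_count() above can be observed directly; a sketch (again assuming the vendored joblib.externals.loky import path added in this diff):

import os
from joblib.externals.loky import cpu_count

print(cpu_count())                           # min of system CPUs, affinity, CFS quota
print(cpu_count(only_physical_cores=True))   # physical cores when they can be detected

os.environ["LOKY_MAX_CPU_COUNT"] = "2"       # user-defined soft limit
print(cpu_count())                           # now capped at 2 (and never below 1)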
+ """ + exception = None + + # First check if the value is cached + global physical_cores_cache + if physical_cores_cache is not None: + return physical_cores_cache, exception + + # Not cached yet, find it + try: + if sys.platform == "linux": + cpu_info = subprocess.run( + "lscpu --parse=core".split(" "), capture_output=True) + cpu_info = cpu_info.stdout.decode("utf-8").splitlines() + cpu_info = {line for line in cpu_info if not line.startswith("#")} + cpu_count_physical = len(cpu_info) + elif sys.platform == "win32": + cpu_info = subprocess.run( + "wmic CPU Get NumberOfCores /Format:csv".split(" "), + capture_output=True) + cpu_info = cpu_info.stdout.decode('utf-8').splitlines() + cpu_info = [l.split(",")[1] for l in cpu_info + if (l and l != "Node,NumberOfCores")] + cpu_count_physical = sum(map(int, cpu_info)) + elif sys.platform == "darwin": + cpu_info = subprocess.run( + "sysctl -n hw.physicalcpu".split(" "), capture_output=True) + cpu_info = cpu_info.stdout.decode('utf-8') + cpu_count_physical = int(cpu_info) + else: + raise NotImplementedError( + "unsupported platform: {}".format(sys.platform)) + + # if cpu_count_physical < 1, we did not find a valid value + if cpu_count_physical < 1: + raise ValueError( + "found {} physical cores < 1".format(cpu_count_physical)) + + except Exception as e: + exception = e + cpu_count_physical = "not found" + + # Put the result in cache + physical_cores_cache = cpu_count_physical + + return cpu_count_physical, exception + + +class LokyContext(BaseContext): + """Context relying on the LokyProcess.""" + _name = 'loky' + Process = LokyProcess + cpu_count = staticmethod(cpu_count) + + def Queue(self, maxsize=0, reducers=None): + '''Returns a queue object''' + from .queues import Queue + return Queue(maxsize, reducers=reducers, + ctx=self.get_context()) + + def SimpleQueue(self, reducers=None): + '''Returns a queue object''' + from .queues import SimpleQueue + return SimpleQueue(reducers=reducers, ctx=self.get_context()) + + if sys.version_info[:2] < (3, 4): + """Compat for python2.7/3.3 for necessary methods in Context""" + def get_context(self): + return self + + def get_start_method(self): + return self._name + + def Pipe(self, duplex=True): + '''Returns two connection object connected by a pipe''' + return mp.Pipe(duplex) + + if sys.platform != "win32": + """Use the compat Manager for python2.7/3.3 on UNIX to avoid + relying on fork processes + """ + def Manager(self): + """Returns a manager object""" + from .managers import LokyManager + m = LokyManager() + m.start() + return m + else: + """Compat for context on Windows and python2.7/3.3. Using regular + multiprocessing objects as it does not rely on fork. + """ + from multiprocessing import synchronize + Semaphore = staticmethod(synchronize.Semaphore) + BoundedSemaphore = staticmethod(synchronize.BoundedSemaphore) + Lock = staticmethod(synchronize.Lock) + RLock = staticmethod(synchronize.RLock) + Condition = staticmethod(synchronize.Condition) + Event = staticmethod(synchronize.Event) + Manager = staticmethod(mp.Manager) + + if sys.platform != "win32": + """For Unix platform, use our custom implementation of synchronize + relying on ctypes to interface with pthread semaphores. 
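+
+        A minimal usage sketch (illustrative only, assuming a POSIX system):
+
+            ctx = get_context('loky')
+            lock = ctx.Lock()      # backed by loky's ctypes-based SemLock
+            lock.acquire()
+            try:
+                pass               # critical section
+            finally:
+                lock.release()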
+        """
+        def Semaphore(self, value=1):
+            """Returns a semaphore object"""
+            from .synchronize import Semaphore
+            return Semaphore(value=value)
+
+        def BoundedSemaphore(self, value):
+            """Returns a bounded semaphore object"""
+            from .synchronize import BoundedSemaphore
+            return BoundedSemaphore(value)
+
+        def Lock(self):
+            """Returns a lock object"""
+            from .synchronize import Lock
+            return Lock()
+
+        def RLock(self):
+            """Returns a recursive lock object"""
+            from .synchronize import RLock
+            return RLock()
+
+        def Condition(self, lock=None):
+            """Returns a condition object"""
+            from .synchronize import Condition
+            return Condition(lock)
+
+        def Event(self):
+            """Returns an event object"""
+            from .synchronize import Event
+            return Event()
+
+
+class LokyInitMainContext(LokyContext):
+    """Extra context with LokyProcess, which loads the main module
+
+    This context is used for compatibility when ``cloudpickle`` is not
+    present on the running system. It allows functions defined in the
+    ``main`` module to be loaded, provided proper safeguards are used: the
+    declaration of the ``executor`` should be protected by
+    ``if __name__ == "__main__":`` and the functions and variables used from
+    main should be outside this block.
+
+    This mimics the default behavior of multiprocessing under Windows and the
+    behavior of the ``spawn`` start method on a posix system for python3.4+.
+    For more details, see the end of the following section of the Python docs
+    https://docs.python.org/3/library/multiprocessing.html#multiprocessing-programming
+    """
+    _name = 'loky_init_main'
+    Process = LokyInitMainProcess
+
+
+if sys.version_info > (3, 4):
+    """Register loky context so it works with multiprocessing.get_context"""
+    ctx_loky = LokyContext()
+    mp.context._concrete_contexts['loky'] = ctx_loky
+    mp.context._concrete_contexts['loky_init_main'] = LokyInitMainContext()
diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/fork_exec.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/fork_exec.py
new file mode 100644
index 0000000..cfb68dc
--- /dev/null
+++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/fork_exec.py
@@ -0,0 +1,48 @@
+###############################################################################
+# Launch a subprocess using fork+exec and make sure only the needed fds are
+# shared between the two processes.
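+#
+# Illustrative call (hypothetical values; the real call sites are in
+# popen_loky_posix.py and resource_tracker.py):
+#
+#     pid = fork_exec(cmd=[sys.executable, '-m', 'some_module'],
+#                     keep_fds=[child_r, child_w],
+#                     env={'EXTRA_VAR': '1'})
+#
+# keep_fds lists the file descriptors that must stay open in the child;
+# close_fds() closes every other fd except stdout/stderr (fds 1 and 2).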
+# +# author: Thomas Moreau and Olivier Grisel +# +import os +import sys + +if sys.platform == "darwin" and sys.version_info < (3, 3): + FileNotFoundError = OSError + + +def close_fds(keep_fds): # pragma: no cover + """Close all the file descriptors except those in keep_fds.""" + + # Make sure to keep stdout and stderr open for logging purpose + keep_fds = set(keep_fds).union([1, 2]) + + # We try to retrieve all the open fds + try: + open_fds = set(int(fd) for fd in os.listdir('/proc/self/fd')) + except FileNotFoundError: + import resource + max_nfds = resource.getrlimit(resource.RLIMIT_NOFILE)[0] + open_fds = set(fd for fd in range(3, max_nfds)) + open_fds.add(0) + + for i in open_fds - keep_fds: + try: + os.close(i) + except OSError: + pass + + +def fork_exec(cmd, keep_fds, env=None): + + # copy the environment variables to set in the child process + env = {} if env is None else env + child_env = os.environ.copy() + child_env.update(env) + + pid = os.fork() + if pid == 0: # pragma: no cover + close_fds(keep_fds) + os.execve(sys.executable, cmd, child_env) + else: + return pid diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/managers.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/managers.py new file mode 100644 index 0000000..081f897 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/managers.py @@ -0,0 +1,51 @@ +############################################################################### +# compat for UNIX 2.7 and 3.3 +# Manager with LokyContext server. +# This avoids having a Manager using fork and breaks the fd. +# +# author: Thomas Moreau and Olivier Grisel +# +# based on multiprocessing/managers.py (17/02/2017) +# * Overload the start method to use LokyContext and launch a loky subprocess +# + +import multiprocessing as mp +from multiprocessing.managers import SyncManager, State +from .process import LokyProcess as Process + + +class LokyManager(SyncManager): + def start(self, initializer=None, initargs=()): + '''Spawn a server process for this manager object''' + assert self._state.value == State.INITIAL + + if (initializer is not None + and not hasattr(initializer, '__call__')): + raise TypeError('initializer must be a callable') + + # pipe over which we will retrieve address of server + reader, writer = mp.Pipe(duplex=False) + + # spawn process which runs a server + self._process = Process( + target=type(self)._run_server, + args=(self._registry, self._address, bytes(self._authkey), + self._serializer, writer, initializer, initargs), + ) + ident = ':'.join(str(i) for i in self._process._identity) + self._process.name = type(self).__name__ + '-' + ident + self._process.start() + + # get address of server + writer.close() + self._address = reader.recv() + reader.close() + + # register a finalizer + self._state.value = State.STARTED + self.shutdown = mp.util.Finalize( + self, type(self)._finalize_manager, + args=(self._process, self._address, self._authkey, + self._state, self._Client), + exitpriority=0 + ) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/popen_loky_posix.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/popen_loky_posix.py new file mode 100644 index 0000000..970dead --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/popen_loky_posix.py @@ -0,0 +1,215 @@ +############################################################################### +# Popen for 
LokyProcess. +# +# author: Thomas Moreau and Olivier Grisel +# +import os +import sys +import signal +import pickle +from io import BytesIO + +from . import reduction, spawn +from .context import get_spawning_popen, set_spawning_popen +from multiprocessing import util, process + +if sys.version_info[:2] < (3, 3): + ProcessLookupError = OSError + +if sys.platform != "win32": + from . import resource_tracker + + +__all__ = [] + +if sys.platform != "win32": + # + # Wrapper for an fd used while launching a process + # + + class _DupFd(object): + def __init__(self, fd): + self.fd = reduction._mk_inheritable(fd) + + def detach(self): + return self.fd + + # + # Start child process using subprocess.Popen + # + + __all__.append('Popen') + + class Popen(object): + method = 'loky' + DupFd = _DupFd + + def __init__(self, process_obj): + sys.stdout.flush() + sys.stderr.flush() + self.returncode = None + self._fds = [] + self._launch(process_obj) + + if sys.version_info < (3, 4): + @classmethod + def duplicate_for_child(cls, fd): + popen = get_spawning_popen() + popen._fds.append(fd) + return reduction._mk_inheritable(fd) + + else: + def duplicate_for_child(self, fd): + self._fds.append(fd) + return reduction._mk_inheritable(fd) + + def poll(self, flag=os.WNOHANG): + if self.returncode is None: + while True: + try: + pid, sts = os.waitpid(self.pid, flag) + except OSError: + # Child process not yet created. See #1731717 + # e.errno == errno.ECHILD == 10 + return None + else: + break + if pid == self.pid: + if os.WIFSIGNALED(sts): + self.returncode = -os.WTERMSIG(sts) + else: + assert os.WIFEXITED(sts) + self.returncode = os.WEXITSTATUS(sts) + return self.returncode + + def wait(self, timeout=None): + if sys.version_info < (3, 3): + import time + if timeout is None: + return self.poll(0) + deadline = time.time() + timeout + delay = 0.0005 + while 1: + res = self.poll() + if res is not None: + break + remaining = deadline - time.time() + if remaining <= 0: + break + delay = min(delay * 2, remaining, 0.05) + time.sleep(delay) + return res + + if self.returncode is None: + if timeout is not None: + from multiprocessing.connection import wait + if not wait([self.sentinel], timeout): + return None + # This shouldn't block if wait() returned successfully. 
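+                # timeout == 0.0 requests a purely non-blocking check
+                # (os.WNOHANG); otherwise a blocking waitpid is intended,
+                # either because no timeout was given or because the sentinel
+                # was already reported ready above.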
+ return self.poll(os.WNOHANG if timeout == 0.0 else 0) + return self.returncode + + def terminate(self): + if self.returncode is None: + try: + os.kill(self.pid, signal.SIGTERM) + except ProcessLookupError: + pass + except OSError: + if self.wait(timeout=0.1) is None: + raise + + def _launch(self, process_obj): + + tracker_fd = resource_tracker._resource_tracker.getfd() + + fp = BytesIO() + set_spawning_popen(self) + try: + prep_data = spawn.get_preparation_data( + process_obj._name, + getattr(process_obj, "init_main_module", True)) + reduction.dump(prep_data, fp) + reduction.dump(process_obj, fp) + + finally: + set_spawning_popen(None) + + try: + parent_r, child_w = os.pipe() + child_r, parent_w = os.pipe() + # for fd in self._fds: + # _mk_inheritable(fd) + + cmd_python = [sys.executable] + cmd_python += ['-m', self.__module__] + cmd_python += ['--process-name', str(process_obj.name)] + cmd_python += ['--pipe', + str(reduction._mk_inheritable(child_r))] + reduction._mk_inheritable(child_w) + reduction._mk_inheritable(tracker_fd) + self._fds.extend([child_r, child_w, tracker_fd]) + if sys.version_info >= (3, 8) and os.name == 'posix': + mp_tracker_fd = prep_data['mp_tracker_args']['fd'] + self.duplicate_for_child(mp_tracker_fd) + + from .fork_exec import fork_exec + pid = fork_exec(cmd_python, self._fds, env=process_obj.env) + util.debug("launched python with pid {} and cmd:\n{}" + .format(pid, cmd_python)) + self.sentinel = parent_r + + method = 'getbuffer' + if not hasattr(fp, method): + method = 'getvalue' + with os.fdopen(parent_w, 'wb') as f: + f.write(getattr(fp, method)()) + self.pid = pid + finally: + if parent_r is not None: + util.Finalize(self, os.close, (parent_r,)) + for fd in (child_r, child_w): + if fd is not None: + os.close(fd) + + @staticmethod + def thread_is_spawning(): + return True + + +if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser('Command line parser') + parser.add_argument('--pipe', type=int, required=True, + help='File handle for the pipe') + parser.add_argument('--process-name', type=str, default=None, + help='Identifier for debugging purpose') + + args = parser.parse_args() + + info = dict() + + exitcode = 1 + try: + with os.fdopen(args.pipe, 'rb') as from_parent: + process.current_process()._inheriting = True + try: + prep_data = pickle.load(from_parent) + spawn.prepare(prep_data) + process_obj = pickle.load(from_parent) + finally: + del process.current_process()._inheriting + + exitcode = process_obj._bootstrap() + except Exception: + print('\n\n' + '-' * 80) + print('{} failed with traceback: '.format(args.process_name)) + print('-' * 80) + import traceback + print(traceback.format_exc()) + print('\n' + '-' * 80) + finally: + if from_parent is not None: + from_parent.close() + + sys.exit(exitcode) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/popen_loky_win32.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/popen_loky_win32.py new file mode 100644 index 0000000..523bd07 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/popen_loky_win32.py @@ -0,0 +1,173 @@ +import os +import sys +from pickle import load +from multiprocessing import process, util + +from . import spawn +from . 
import reduction +from .context import get_spawning_popen, set_spawning_popen + +if sys.platform == "win32": + # Avoid import error by code introspection tools such as test runners + # trying to import this module while running on non-Windows systems. + import msvcrt + from .compat_win32 import _winapi + from .compat_win32 import Popen as _Popen + from .reduction import duplicate +else: + _Popen = object + +if sys.version_info[:2] < (3, 3): + from os import fdopen as open + +__all__ = ['Popen'] + +# +# +# + +TERMINATE = 0x10000 +WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False)) +WINSERVICE = sys.executable.lower().endswith("pythonservice.exe") + + +def _path_eq(p1, p2): + return p1 == p2 or os.path.normcase(p1) == os.path.normcase(p2) + + +WINENV = (hasattr(sys, "_base_executable") + and not _path_eq(sys.executable, sys._base_executable)) + +# +# We define a Popen class similar to the one from subprocess, but +# whose constructor takes a process object as its argument. +# + + +class Popen(_Popen): + ''' + Start a subprocess to run the code of a process object + ''' + method = 'loky' + + def __init__(self, process_obj): + prep_data = spawn.get_preparation_data( + process_obj._name, getattr(process_obj, "init_main_module", True)) + + # read end of pipe will be "stolen" by the child process + # -- see spawn_main() in spawn.py. + rfd, wfd = os.pipe() + rhandle = duplicate(msvcrt.get_osfhandle(rfd), inheritable=True) + os.close(rfd) + + cmd = get_command_line(parent_pid=os.getpid(), pipe_handle=rhandle) + cmd = ' '.join('"%s"' % x for x in cmd) + + python_exe = spawn.get_executable() + + # copy the environment variables to set in the child process + child_env = os.environ.copy() + child_env.update(process_obj.env) + + # bpo-35797: When running in a venv, we bypass the redirect + # executor and launch our base Python. + if WINENV and _path_eq(python_exe, sys.executable): + python_exe = sys._base_executable + child_env["__PYVENV_LAUNCHER__"] = sys.executable + + try: + with open(wfd, 'wb') as to_child: + # start process + try: + # This flag allows to pass inheritable handles from the + # parent to the child process in a python2-3 compatible way + # (see + # https://github.com/tomMoral/loky/pull/204#discussion_r290719629 + # for more detail). When support for Python 2 is dropped, + # the cleaner multiprocessing.reduction.steal_handle should + # be used instead. + inherit = True + hp, ht, pid, tid = _winapi.CreateProcess( + python_exe, cmd, + None, None, inherit, 0, + child_env, None, None) + _winapi.CloseHandle(ht) + except BaseException: + _winapi.CloseHandle(rhandle) + raise + + # set attributes of self + self.pid = pid + self.returncode = None + self._handle = hp + self.sentinel = int(hp) + util.Finalize(self, _winapi.CloseHandle, (self.sentinel,)) + + # send information to child + set_spawning_popen(self) + if sys.version_info[:2] < (3, 4): + Popen._tls.process_handle = int(hp) + try: + reduction.dump(prep_data, to_child) + reduction.dump(process_obj, to_child) + finally: + set_spawning_popen(None) + if sys.version_info[:2] < (3, 4): + del Popen._tls.process_handle + except IOError as exc: + # IOError 22 happens when the launched subprocess terminated before + # wfd.close is called. Thus we can safely ignore it. 
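+            # (errno 22 corresponds to errno.EINVAL; it presumably surfaces
+            # here because the child, which holds the read end of the pipe,
+            # has already exited.)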
+ if exc.errno != 22: + raise + util.debug("While starting {}, ignored a IOError 22" + .format(process_obj._name)) + + def duplicate_for_child(self, handle): + assert self is get_spawning_popen() + return duplicate(handle, self.sentinel) + + +def get_command_line(pipe_handle, **kwds): + ''' + Returns prefix of command line used for spawning a child process + ''' + if getattr(sys, 'frozen', False): + return ([sys.executable, '--multiprocessing-fork', pipe_handle]) + else: + prog = 'from joblib.externals.loky.backend.popen_loky_win32 import main; main()' + opts = util._args_from_interpreter_flags() + return [spawn.get_executable()] + opts + [ + '-c', prog, '--multiprocessing-fork', pipe_handle] + + +def is_forking(argv): + ''' + Return whether commandline indicates we are forking + ''' + if len(argv) >= 2 and argv[1] == '--multiprocessing-fork': + assert len(argv) == 3 + return True + else: + return False + + +def main(): + ''' + Run code specified by data received over pipe + ''' + assert is_forking(sys.argv) + + handle = int(sys.argv[-1]) + fd = msvcrt.open_osfhandle(handle, os.O_RDONLY) + from_parent = os.fdopen(fd, 'rb') + + process.current_process()._inheriting = True + preparation_data = load(from_parent) + spawn.prepare(preparation_data) + self = load(from_parent) + process.current_process()._inheriting = False + + from_parent.close() + + exitcode = self._bootstrap() + exit(exitcode) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/process.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/process.py new file mode 100644 index 0000000..30a20c0 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/process.py @@ -0,0 +1,108 @@ +############################################################################### +# LokyProcess implementation +# +# authors: Thomas Moreau and Olivier Grisel +# +# based on multiprocessing/process.py (17/02/2017) +# * Add some compatibility function for python2.7 and 3.3 +# +import os +import sys +from .compat import BaseProcess + + +class LokyProcess(BaseProcess): + _start_method = 'loky' + + def __init__(self, group=None, target=None, name=None, args=(), + kwargs={}, daemon=None, init_main_module=False, + env=None): + if sys.version_info < (3, 3): + super(LokyProcess, self).__init__( + group=group, target=target, name=name, args=args, + kwargs=kwargs) + self.daemon = daemon + else: + super(LokyProcess, self).__init__( + group=group, target=target, name=name, args=args, + kwargs=kwargs, daemon=daemon) + self.env = {} if env is None else env + self.authkey = self.authkey + self.init_main_module = init_main_module + + @staticmethod + def _Popen(process_obj): + if sys.platform == "win32": + from .popen_loky_win32 import Popen + else: + from .popen_loky_posix import Popen + return Popen(process_obj) + + if sys.version_info < (3, 3): + def start(self): + ''' + Start child process + ''' + from multiprocessing.process import _current_process, _cleanup + assert self._popen is None, 'cannot start a process twice' + assert self._parent_pid == os.getpid(), \ + 'can only start a process object created by current process' + _cleanup() + self._popen = self._Popen(self) + self._sentinel = self._popen.sentinel + _current_process._children.add(self) + + @property + def sentinel(self): + ''' + Return a file descriptor (Unix) or handle (Windows) suitable for + waiting for process termination. 
+ ''' + try: + return self._sentinel + except AttributeError: + raise ValueError("process not started") + + if sys.version_info < (3, 4): + @property + def authkey(self): + return self._authkey + + @authkey.setter + def authkey(self, authkey): + ''' + Set authorization key of process + ''' + self._authkey = AuthenticationKey(authkey) + + def _bootstrap(self): + from .context import set_start_method + set_start_method(self._start_method) + super(LokyProcess, self)._bootstrap() + + +class LokyInitMainProcess(LokyProcess): + _start_method = 'loky_init_main' + + def __init__(self, group=None, target=None, name=None, args=(), + kwargs={}, daemon=None): + super(LokyInitMainProcess, self).__init__( + group=group, target=target, name=name, args=args, kwargs=kwargs, + daemon=daemon, init_main_module=True) + + +# +# We subclass bytes to avoid accidental transmission of auth keys over network +# + +class AuthenticationKey(bytes): + def __reduce__(self): + from .context import assert_spawning + try: + assert_spawning(self) + except RuntimeError: + raise TypeError( + 'Pickling an AuthenticationKey object is ' + 'disallowed for security reasons' + ) + return AuthenticationKey, (bytes(self),) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/queues.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/queues.py new file mode 100644 index 0000000..62735db --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/queues.py @@ -0,0 +1,247 @@ +############################################################################### +# Queue and SimpleQueue implementation for loky +# +# authors: Thomas Moreau, Olivier Grisel +# +# based on multiprocessing/queues.py (16/02/2017) +# * Add some compatibility function for python2.7 and 3.3 and makes sure +# it uses the right synchronization primitive. +# * Add some custom reducers for the Queues/SimpleQueue to tweak the +# pickling process. 
(overload Queue._feed/SimpleQueue.put) +# +import os +import sys +import errno +import weakref +import threading + +from multiprocessing import util +from multiprocessing import connection +from multiprocessing.synchronize import SEM_VALUE_MAX +from multiprocessing.queues import Full +from multiprocessing.queues import _sentinel, Queue as mp_Queue +from multiprocessing.queues import SimpleQueue as mp_SimpleQueue + +from .reduction import loads, dumps +from .context import assert_spawning, get_context + + +__all__ = ['Queue', 'SimpleQueue', 'Full'] + + +class Queue(mp_Queue): + + def __init__(self, maxsize=0, reducers=None, ctx=None): + + if sys.version_info[:2] >= (3, 4): + super().__init__(maxsize=maxsize, ctx=ctx) + else: + if maxsize <= 0: + # Can raise ImportError (see issues #3770 and #23400) + maxsize = SEM_VALUE_MAX + if ctx is None: + ctx = get_context() + self._maxsize = maxsize + self._reader, self._writer = connection.Pipe(duplex=False) + self._rlock = ctx.Lock() + self._opid = os.getpid() + if sys.platform == 'win32': + self._wlock = None + else: + self._wlock = ctx.Lock() + self._sem = ctx.BoundedSemaphore(maxsize) + + # For use by concurrent.futures + self._ignore_epipe = False + + self._after_fork() + + if sys.platform != 'win32': + util.register_after_fork(self, Queue._after_fork) + + self._reducers = reducers + + # Use custom queue set/get state to be able to reduce the custom reducers + def __getstate__(self): + assert_spawning(self) + return (self._ignore_epipe, self._maxsize, self._reader, self._writer, + self._reducers, self._rlock, self._wlock, self._sem, + self._opid) + + def __setstate__(self, state): + (self._ignore_epipe, self._maxsize, self._reader, self._writer, + self._reducers, self._rlock, self._wlock, self._sem, + self._opid) = state + if sys.version_info >= (3, 9): + self._reset() + else: + self._after_fork() + + # Overload _start_thread to correctly call our custom _feed + def _start_thread(self): + util.debug('Queue._start_thread()') + + # Start thread which transfers data from buffer to pipe + self._buffer.clear() + self._thread = threading.Thread( + target=Queue._feed, + args=(self._buffer, self._notempty, self._send_bytes, + self._wlock, self._writer.close, self._reducers, + self._ignore_epipe, self._on_queue_feeder_error, self._sem), + name='QueueFeederThread' + ) + self._thread.daemon = True + + util.debug('doing self._thread.start()') + self._thread.start() + util.debug('... done self._thread.start()') + + # On process exit we will wait for data to be flushed to pipe. + # + # However, if this process created the queue then all + # processes which use the queue will be descendants of this + # process. Therefore waiting for the queue to be flushed + # is pointless once all the child processes have been joined. + created_by_this_process = (self._opid == os.getpid()) + if not self._joincancelled and not created_by_this_process: + self._jointhread = util.Finalize( + self._thread, Queue._finalize_join, + [weakref.ref(self._thread)], + exitpriority=-5 + ) + + # Send sentinel to the thread queue object when garbage collected + self._close = util.Finalize( + self, Queue._finalize_close, + [self._buffer, self._notempty], + exitpriority=10 + ) + + # Overload the _feed methods to use our custom pickling strategy. 
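+    # A reducer maps a type to a function returning a copyreg-style
+    # (callable, args) tuple.  Hypothetical sketch (Payload and
+    # reduce_payload are illustrative names, not part of loky):
+    #
+    #     class Payload:
+    #         def __init__(self, data):
+    #             self.data = data
+    #
+    #     def reduce_payload(p):
+    #         return (Payload, (p.data,))
+    #
+    #     q = Queue(reducers={Payload: reduce_payload},
+    #               ctx=get_context('loky'))
+    #
+    # The feeder thread below serializes each enqueued object with
+    # dumps(obj, reducers=...) before writing it to the pipe.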
+ @staticmethod + def _feed(buffer, notempty, send_bytes, writelock, close, reducers, + ignore_epipe, onerror, queue_sem): + util.debug('starting thread to feed data to pipe') + nacquire = notempty.acquire + nrelease = notempty.release + nwait = notempty.wait + bpopleft = buffer.popleft + sentinel = _sentinel + if sys.platform != 'win32': + wacquire = writelock.acquire + wrelease = writelock.release + else: + wacquire = None + + while 1: + try: + nacquire() + try: + if not buffer: + nwait() + finally: + nrelease() + try: + while 1: + obj = bpopleft() + if obj is sentinel: + util.debug('feeder thread got sentinel -- exiting') + close() + return + + # serialize the data before acquiring the lock + obj_ = dumps(obj, reducers=reducers) + if wacquire is None: + send_bytes(obj_) + else: + wacquire() + try: + send_bytes(obj_) + finally: + wrelease() + # Remove references early to avoid leaking memory + del obj, obj_ + except IndexError: + pass + except BaseException as e: + if ignore_epipe and getattr(e, 'errno', 0) == errno.EPIPE: + return + # Since this runs in a daemon thread the resources it uses + # may be become unusable while the process is cleaning up. + # We ignore errors which happen after the process has + # started to cleanup. + if util.is_exiting(): + util.info('error in queue thread: %s', e) + return + else: + queue_sem.release() + onerror(e, obj) + + def _on_queue_feeder_error(self, e, obj): + """ + Private API hook called when feeding data in the background thread + raises an exception. For overriding by concurrent.futures. + """ + import traceback + traceback.print_exc() + + if sys.version_info[:2] < (3, 4): + # Compat for python2.7/3.3 that use _send instead of _send_bytes + def _after_fork(self): + super(Queue, self)._after_fork() + self._send_bytes = self._writer.send_bytes + + +class SimpleQueue(mp_SimpleQueue): + + def __init__(self, reducers=None, ctx=None): + if sys.version_info[:2] >= (3, 4): + super().__init__(ctx=ctx) + else: + # Use the context to create the sync objects for python2.7/3.3 + if ctx is None: + ctx = get_context() + self._reader, self._writer = connection.Pipe(duplex=False) + self._rlock = ctx.Lock() + self._poll = self._reader.poll + if sys.platform == 'win32': + self._wlock = None + else: + self._wlock = ctx.Lock() + + # Add possiblity to use custom reducers + self._reducers = reducers + + def close(self): + self._reader.close() + self._writer.close() + + # Use custom queue set/get state to be able to reduce the custom reducers + def __getstate__(self): + assert_spawning(self) + return (self._reader, self._writer, self._reducers, self._rlock, + self._wlock) + + def __setstate__(self, state): + (self._reader, self._writer, self._reducers, self._rlock, + self._wlock) = state + + if sys.version_info[:2] < (3, 4): + # For python2.7/3.3, overload get to avoid creating deadlocks with + # unpickling errors. 
+ def get(self): + with self._rlock: + res = self._reader.recv_bytes() + # unserialize the data after having released the lock + return loads(res) + + # Overload put to use our customizable reducer + def put(self, obj): + # serialize the data before acquiring the lock + obj = dumps(obj, reducers=self._reducers) + if self._wlock is None: + # writes to a message oriented win32 pipe are atomic + self._writer.send_bytes(obj) + else: + with self._wlock: + self._writer.send_bytes(obj) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/reduction.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/reduction.py new file mode 100644 index 0000000..4a2407c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/reduction.py @@ -0,0 +1,280 @@ +############################################################################### +# Customizable Pickler with some basic reducers +# +# author: Thomas Moreau +# +# adapted from multiprocessing/reduction.py (17/02/2017) +# * Replace the ForkingPickler with a similar _LokyPickler, +# * Add CustomizableLokyPickler to allow customizing pickling process +# on the fly. +# +import io +import os +import sys +import functools +from multiprocessing import util +import types +try: + # Python 2 compat + from cPickle import loads as pickle_loads +except ImportError: + from pickle import loads as pickle_loads + import copyreg + +from pickle import HIGHEST_PROTOCOL + +if sys.platform == "win32": + if sys.version_info[:2] > (3, 3): + from multiprocessing.reduction import duplicate + else: + from multiprocessing.forking import duplicate + + +############################################################################### +# Enable custom pickling in Loky. +# To allow instance customization of the pickling process, we use 2 classes. +# _ReducerRegistry gives module level customization and CustomizablePickler +# permits to use instance base custom reducers. Only CustomizablePickler +# should be used. + +class _ReducerRegistry(object): + """Registry for custom reducers. + + HIGHEST_PROTOCOL is selected by default as this pickler is used + to pickle ephemeral datastructures for interprocess communication + hence no backward compatibility is required. + + """ + + # We override the pure Python pickler as its the only way to be able to + # customize the dispatch table without side effects in Python 2.6 + # to 3.2. For Python 3.3+ leverage the new dispatch_table + # feature from http://bugs.python.org/issue14166 that makes it possible + # to use the C implementation of the Pickler which is faster. + + dispatch_table = {} + + @classmethod + def register(cls, type, reduce_func): + """Attach a reducer function to a given type in the dispatch table.""" + if sys.version_info < (3,): + # Python 2 pickler dispatching is not explicitly customizable. + # Let us use a closure to workaround this limitation. 
+ def dispatcher(cls, obj): + reduced = reduce_func(obj) + cls.save_reduce(obj=obj, *reduced) + cls.dispatch_table[type] = dispatcher + else: + cls.dispatch_table[type] = reduce_func + + +############################################################################### +# Registers extra pickling routines to improve picklization for loky + +register = _ReducerRegistry.register + + +# make methods picklable +def _reduce_method(m): + if m.__self__ is None: + return getattr, (m.__class__, m.__func__.__name__) + else: + return getattr, (m.__self__, m.__func__.__name__) + + +class _C: + def f(self): + pass + + @classmethod + def h(cls): + pass + + +register(type(_C().f), _reduce_method) +register(type(_C.h), _reduce_method) + + +if not hasattr(sys, "pypy_version_info"): + # PyPy uses functions instead of method_descriptors and wrapper_descriptors + def _reduce_method_descriptor(m): + return getattr, (m.__objclass__, m.__name__) + + register(type(list.append), _reduce_method_descriptor) + register(type(int.__add__), _reduce_method_descriptor) + + +# Make partial func pickable +def _reduce_partial(p): + return _rebuild_partial, (p.func, p.args, p.keywords or {}) + + +def _rebuild_partial(func, args, keywords): + return functools.partial(func, *args, **keywords) + + +register(functools.partial, _reduce_partial) + +if sys.platform != "win32": + from ._posix_reduction import _mk_inheritable # noqa: F401 +else: + from . import _win_reduction # noqa: F401 + +# global variable to change the pickler behavior +try: + from joblib.externals import cloudpickle # noqa: F401 + DEFAULT_ENV = "cloudpickle" +except ImportError: + # If cloudpickle is not present, fallback to pickle + DEFAULT_ENV = "pickle" + +ENV_LOKY_PICKLER = os.environ.get("LOKY_PICKLER", DEFAULT_ENV) +_LokyPickler = None +_loky_pickler_name = None + + +def set_loky_pickler(loky_pickler=None): + global _LokyPickler, _loky_pickler_name + + if loky_pickler is None: + loky_pickler = ENV_LOKY_PICKLER + + loky_pickler_cls = None + + # The default loky_pickler is cloudpickle + if loky_pickler in ["", None]: + loky_pickler = "cloudpickle" + + if loky_pickler == _loky_pickler_name: + return + + if loky_pickler == "cloudpickle": + from joblib.externals.cloudpickle import CloudPickler as loky_pickler_cls + else: + try: + from importlib import import_module + module_pickle = import_module(loky_pickler) + loky_pickler_cls = module_pickle.Pickler + except (ImportError, AttributeError) as e: + extra_info = ("\nThis error occurred while setting loky_pickler to" + " '{}', as required by the env variable LOKY_PICKLER" + " or the function set_loky_pickler." + .format(loky_pickler)) + e.args = (e.args[0] + extra_info,) + e.args[1:] + e.msg = e.args[0] + raise e + + util.debug("Using '{}' for serialization." + .format(loky_pickler if loky_pickler else "cloudpickle")) + + class CustomizablePickler(loky_pickler_cls): + _loky_pickler_cls = loky_pickler_cls + + def _set_dispatch_table(self, dispatch_table): + for ancestor_class in self._loky_pickler_cls.mro(): + dt_attribute = getattr(ancestor_class, "dispatch_table", None) + if isinstance(dt_attribute, types.MemberDescriptorType): + # Ancestor class (typically _pickle.Pickler) has a + # member_descriptor for its "dispatch_table" attribute. 
+                    # Use it to set the dispatch_table as a member instead
+                    # of a dynamic attribute in the __dict__ of the instance,
+                    # otherwise it will not be taken into account by the C
+                    # implementation of the dump method if a subclass defines
+                    # a class-level dispatch_table attribute as was done in
+                    # cloudpickle 1.6.0:
+                    # https://github.com/joblib/loky/pull/260
+                    dt_attribute.__set__(self, dispatch_table)
+                    break
+
+            # On top of member descriptor set, also use setattr such that code
+            # that directly accesses self.dispatch_table gets a consistent
+            # view of the same table.
+            self.dispatch_table = dispatch_table
+
+        def __init__(self, writer, reducers=None, protocol=HIGHEST_PROTOCOL):
+            loky_pickler_cls.__init__(self, writer, protocol=protocol)
+            if reducers is None:
+                reducers = {}
+            if sys.version_info < (3,):
+                self.dispatch = loky_pickler_cls.dispatch.copy()
+                self.dispatch.update(_ReducerRegistry.dispatch_table)
+            else:
+                if hasattr(self, "dispatch_table"):
+                    # Force a copy that we will update without mutating any
+                    # class-level defined dispatch_table.
+                    loky_dt = dict(self.dispatch_table)
+                else:
+                    # Use standard reducers as bases
+                    loky_dt = copyreg.dispatch_table.copy()
+
+                # Register loky specific reducers
+                loky_dt.update(_ReducerRegistry.dispatch_table)
+
+                # Set the new dispatch table, taking care of the fact that we
+                # need to use the member_descriptor when we inherit from a
+                # subclass of the C implementation of the Pickler base class
+                # with a class-level dispatch_table attribute.
+                self._set_dispatch_table(loky_dt)
+
+            # Register custom reducers
+            for type, reduce_func in reducers.items():
+                self.register(type, reduce_func)
+
+        def register(self, type, reduce_func):
+            """Attach a reducer function to a given type in the dispatch table.
+            """
+            if sys.version_info < (3,):
+                # Python 2 pickler dispatching is not explicitly customizable.
+                # Let us use a closure to work around this limitation.
+ def dispatcher(self, obj): + reduced = reduce_func(obj) + self.save_reduce(obj=obj, *reduced) + self.dispatch[type] = dispatcher + else: + self.dispatch_table[type] = reduce_func + + _LokyPickler = CustomizablePickler + _loky_pickler_name = loky_pickler + + +def get_loky_pickler_name(): + global _loky_pickler_name + return _loky_pickler_name + + +def get_loky_pickler(): + global _LokyPickler + return _LokyPickler + + +# Set it to its default value +set_loky_pickler() + + +def loads(buf): + # Compat for python2.7 version + if sys.version_info < (3, 3) and isinstance(buf, io.BytesIO): + buf = buf.getvalue() + return pickle_loads(buf) + + +def dump(obj, file, reducers=None, protocol=None): + '''Replacement for pickle.dump() using _LokyPickler.''' + global _LokyPickler + _LokyPickler(file, reducers=reducers, protocol=protocol).dump(obj) + + +def dumps(obj, reducers=None, protocol=None): + global _LokyPickler + + buf = io.BytesIO() + dump(obj, buf, reducers=reducers, protocol=protocol) + if sys.version_info < (3, 3): + return buf.getvalue() + return buf.getbuffer() + + +__all__ = ["dump", "dumps", "loads", "register", "set_loky_pickler"] + +if sys.platform == "win32": + __all__ += ["duplicate"] diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/resource_tracker.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/resource_tracker.py new file mode 100644 index 0000000..95dff35 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/resource_tracker.py @@ -0,0 +1,380 @@ +############################################################################### +# Server process to keep track of unlinked resources, like folders and +# semaphores and clean them. +# +# author: Thomas Moreau +# +# adapted from multiprocessing/semaphore_tracker.py (17/02/2017) +# * include custom spawnv_passfds to start the process +# * use custom unlink from our own SemLock implementation +# * add some VERBOSE logging +# + +# +# On Unix we run a server process which keeps track of unlinked +# resources. The server ignores SIGINT and SIGTERM and reads from a +# pipe. The resource_tracker implements a reference counting scheme: each time +# a Python process anticipates the shared usage of a resource by another +# process, it signals the resource_tracker of this shared usage, and in return, +# the resource_tracker increments the resource's reference count by 1. +# Similarly, when access to a resource is closed by a Python process, the +# process notifies the resource_tracker by asking it to decrement the +# resource's reference count by 1. When the reference count drops to 0, the +# resource_tracker attempts to clean up the underlying resource. + +# Finally, every other process connected to the resource tracker has a copy of +# the writable end of the pipe used to communicate with it, so the resource +# tracker gets EOF when all other processes have exited. Then the +# resource_tracker process unlinks any remaining leaked resources (with +# reference count above 0) + +# For semaphores, this is important because the system only supports a limited +# number of named semaphores, and they will not be automatically removed till +# the next reboot. Without this resource tracker process, "killall python" +# would probably leave unlinked semaphores. + +# Note that this behavior differs from CPython's resource_tracker, which only +# implements list of shared resources, and not a proper refcounting scheme. 
+# Also, CPython's resource tracker will only attempt to cleanup those shared +# resources once all procsses connected to the resouce tracker have exited. + + +import os +import shutil +import sys +import signal +import warnings +import threading + +from . import spawn +from multiprocessing import util + +if sys.platform == "win32": + from .compat_win32 import _winapi + from .reduction import duplicate + import msvcrt + +try: + from _multiprocessing import sem_unlink +except ImportError: + from .semlock import sem_unlink + +if sys.version_info < (3,): + BrokenPipeError = OSError + from os import fdopen as open + +__all__ = ['ensure_running', 'register', 'unregister'] + +_HAVE_SIGMASK = hasattr(signal, 'pthread_sigmask') +_IGNORED_SIGNALS = (signal.SIGINT, signal.SIGTERM) + +_CLEANUP_FUNCS = { + 'folder': shutil.rmtree, + 'file': os.unlink +} + +if os.name == "posix": + _CLEANUP_FUNCS['semlock'] = sem_unlink + + +VERBOSE = False + + +class ResourceTracker(object): + + def __init__(self): + self._lock = threading.Lock() + self._fd = None + self._pid = None + + def getfd(self): + self.ensure_running() + return self._fd + + def ensure_running(self): + '''Make sure that resource tracker process is running. + + This can be run from any process. Usually a child process will use + the resource created by its parent.''' + with self._lock: + if self._fd is not None: + # resource tracker was launched before, is it still running? + if self._check_alive(): + # => still alive + return + # => dead, launch it again + os.close(self._fd) + if os.name == "posix": + try: + # At this point, the resource_tracker process has been + # killed or crashed. Let's remove the process entry + # from the process table to avoid zombie processes. + os.waitpid(self._pid, 0) + except OSError: + # The process was terminated or is a child from an + # ancestor of the current process. + pass + self._fd = None + self._pid = None + + warnings.warn('resource_tracker: process died unexpectedly, ' + 'relaunching. Some folders/sempahores might ' + 'leak.') + + fds_to_pass = [] + try: + fds_to_pass.append(sys.stderr.fileno()) + except Exception: + pass + + r, w = os.pipe() + if sys.platform == "win32": + _r = duplicate(msvcrt.get_osfhandle(r), inheritable=True) + os.close(r) + r = _r + + cmd = 'from {} import main; main({}, {})'.format( + main.__module__, r, VERBOSE) + try: + fds_to_pass.append(r) + # process will out live us, so no need to wait on pid + exe = spawn.get_executable() + args = [exe] + util._args_from_interpreter_flags() + # In python 3.3, there is a bug which put `-RRRRR..` instead of + # `-R` in args. Replace it to get the correct flags. + # See https://github.com/python/cpython/blob/3.3/Lib/subprocess.py#L488 + if sys.version_info[:2] <= (3, 3): + import re + for i in range(1, len(args)): + args[i] = re.sub("-R+", "-R", args[i]) + args += ['-c', cmd] + util.debug("launching resource tracker: {}".format(args)) + # bpo-33613: Register a signal mask that will block the + # signals. This signal mask will be inherited by the child + # that is going to be spawned and will protect the child from a + # race condition that can make the child die before it + # registers signal handlers for SIGINT and SIGTERM. The mask is + # unregistered after spawning the child. 
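+            # (The launch below goes through spawnv_passfds(), defined at the
+            # end of this module, so that on POSIX only the fds in
+            # fds_to_pass, plus stdout/stderr, stay open in the tracker.)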
+ try: + if _HAVE_SIGMASK: + signal.pthread_sigmask(signal.SIG_BLOCK, + _IGNORED_SIGNALS) + pid = spawnv_passfds(exe, args, fds_to_pass) + finally: + if _HAVE_SIGMASK: + signal.pthread_sigmask(signal.SIG_UNBLOCK, + _IGNORED_SIGNALS) + except BaseException: + os.close(w) + raise + else: + self._fd = w + self._pid = pid + finally: + if sys.platform == "win32": + _winapi.CloseHandle(r) + else: + os.close(r) + + def _check_alive(self): + '''Check for the existence of the resource tracker process.''' + try: + self._send('PROBE', '', '') + except BrokenPipeError: + return False + else: + return True + + def register(self, name, rtype): + '''Register a named resource, and increment its refcount.''' + self.ensure_running() + self._send('REGISTER', name, rtype) + + def unregister(self, name, rtype): + '''Unregister a named resource with resource tracker.''' + self.ensure_running() + self._send('UNREGISTER', name, rtype) + + def maybe_unlink(self, name, rtype): + '''Decrement the refcount of a resource, and delete it if it hits 0''' + self.ensure_running() + self._send("MAYBE_UNLINK", name, rtype) + + def _send(self, cmd, name, rtype): + msg = '{0}:{1}:{2}\n'.format(cmd, name, rtype).encode('ascii') + if len(name) > 512: + # posix guarantees that writes to a pipe of less than PIPE_BUF + # bytes are atomic, and that PIPE_BUF >= 512 + raise ValueError('name too long') + nbytes = os.write(self._fd, msg) + assert nbytes == len(msg) + + +_resource_tracker = ResourceTracker() +ensure_running = _resource_tracker.ensure_running +register = _resource_tracker.register +maybe_unlink = _resource_tracker.maybe_unlink +unregister = _resource_tracker.unregister +getfd = _resource_tracker.getfd + + +def main(fd, verbose=0): + '''Run resource tracker.''' + # protect the process from ^C and "killall python" etc + if verbose: + util.log_to_stderr(level=util.DEBUG) + + signal.signal(signal.SIGINT, signal.SIG_IGN) + signal.signal(signal.SIGTERM, signal.SIG_IGN) + + if _HAVE_SIGMASK: + signal.pthread_sigmask(signal.SIG_UNBLOCK, _IGNORED_SIGNALS) + + for f in (sys.stdin, sys.stdout): + try: + f.close() + except Exception: + pass + + if verbose: + util.debug("Main resource tracker is running") + + registry = {rtype: dict() for rtype in _CLEANUP_FUNCS.keys()} + try: + # keep track of registered/unregistered resources + if sys.platform == "win32": + fd = msvcrt.open_osfhandle(fd, os.O_RDONLY) + with open(fd, 'rb') as f: + while True: + line = f.readline() + if line == b'': # EOF + break + try: + splitted = line.strip().decode('ascii').split(':') + # name can potentially contain separator symbols (for + # instance folders on Windows) + cmd, name, rtype = ( + splitted[0], ':'.join(splitted[1:-1]), splitted[-1]) + + if cmd == 'PROBE': + continue + + if rtype not in _CLEANUP_FUNCS: + raise ValueError( + 'Cannot register {} for automatic cleanup: ' + 'unknown resource type ({}). 
Resource type should ' + 'be one of the following: {}'.format( + name, rtype, list(_CLEANUP_FUNCS.keys()))) + + if cmd == 'REGISTER': + if name not in registry[rtype]: + registry[rtype][name] = 1 + else: + registry[rtype][name] += 1 + + if verbose: + util.debug( + "[ResourceTracker] incremented refcount of {} " + "{} (current {})".format( + rtype, name, registry[rtype][name])) + elif cmd == 'UNREGISTER': + del registry[rtype][name] + if verbose: + util.debug( + "[ResourceTracker] unregister {} {}: " + "registry({})".format(name, rtype, len(registry))) + elif cmd == 'MAYBE_UNLINK': + registry[rtype][name] -= 1 + if verbose: + util.debug( + "[ResourceTracker] decremented refcount of {} " + "{} (current {})".format( + rtype, name, registry[rtype][name])) + + if registry[rtype][name] == 0: + del registry[rtype][name] + try: + if verbose: + util.debug( + "[ResourceTracker] unlink {}" + .format(name)) + _CLEANUP_FUNCS[rtype](name) + except Exception as e: + warnings.warn( + 'resource_tracker: %s: %r' % (name, e)) + + else: + raise RuntimeError('unrecognized command %r' % cmd) + except BaseException: + try: + sys.excepthook(*sys.exc_info()) + except BaseException: + pass + finally: + # all processes have terminated; cleanup any remaining resources + def _unlink_resources(rtype_registry, rtype): + if rtype_registry: + try: + warnings.warn('resource_tracker: There appear to be %d ' + 'leaked %s objects to clean up at shutdown' % + (len(rtype_registry), rtype)) + except Exception: + pass + for name in rtype_registry: + # For some reason the process which created and registered this + # resource has failed to unregister it. Presumably it has + # died. We therefore clean it up. + try: + _CLEANUP_FUNCS[rtype](name) + if verbose: + util.debug("[ResourceTracker] unlink {}" + .format(name)) + except Exception as e: + warnings.warn('resource_tracker: %s: %r' % (name, e)) + + for rtype, rtype_registry in registry.items(): + if rtype == "folder": + continue + else: + _unlink_resources(rtype_registry, rtype) + + # The default cleanup routine for folders deletes everything inside + # those folders recursively, which can include other resources tracked + # by the resource tracker). To limit the risk of the resource tracker + # attempting to delete twice a resource (once as part of a tracked + # folder, and once as a resource), we delete the folders after all + # other resource types. 
+ if "folder" in registry: + _unlink_resources(registry["folder"], "folder") + + if verbose: + util.debug("resource tracker shut down") + + +# +# Start a program with only specified fds kept open +# + +def spawnv_passfds(path, args, passfds): + passfds = sorted(passfds) + if sys.platform != "win32": + errpipe_read, errpipe_write = os.pipe() + try: + from .reduction import _mk_inheritable + _pass = [] + for fd in passfds: + _pass += [_mk_inheritable(fd)] + from .fork_exec import fork_exec + return fork_exec(args, _pass) + finally: + os.close(errpipe_read) + os.close(errpipe_write) + else: + cmd = ' '.join('"%s"' % x for x in args) + try: + hp, ht, pid, tid = _winapi.CreateProcess( + path, cmd, None, None, True, 0, None, None, None) + _winapi.CloseHandle(ht) + except BaseException: + pass + return pid diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/semlock.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/semlock.py new file mode 100644 index 0000000..2d35f6a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/semlock.py @@ -0,0 +1,274 @@ +############################################################################### +# Ctypes implementation for posix semaphore. +# +# author: Thomas Moreau and Olivier Grisel +# +# adapted from cpython/Modules/_multiprocessing/semaphore.c (17/02/2017) +# * use ctypes to access pthread semaphores and provide a full python +# semaphore management. +# * For OSX, as no sem_getvalue is not implemented, Semaphore with value > 1 +# are not guaranteed to work. +# * Only work with LokyProcess on posix +# +import os +import sys +import time +import errno +import ctypes +import tempfile +import threading +from ctypes.util import find_library + +# As we need to use ctypes return types for semlock object, failure value +# needs to be cast to proper python value. Unix failure convention is to +# return 0, whereas OSX returns -1 +SEM_FAILURE = ctypes.c_void_p(0).value +if sys.platform == 'darwin': + SEM_FAILURE = ctypes.c_void_p(-1).value + +# Semaphore types +RECURSIVE_MUTEX = 0 +SEMAPHORE = 1 + +# Semaphore constants +SEM_OFLAG = ctypes.c_int(os.O_CREAT | os.O_EXCL) +SEM_PERM = ctypes.c_int(384) + + +class timespec(ctypes.Structure): + _fields_ = [("tv_sec", ctypes.c_long), ("tv_nsec", ctypes.c_long)] + + +if sys.platform != 'win32': + pthread = ctypes.CDLL(find_library('pthread'), use_errno=True) + pthread.sem_open.restype = ctypes.c_void_p + pthread.sem_close.argtypes = [ctypes.c_void_p] + pthread.sem_wait.argtypes = [ctypes.c_void_p] + pthread.sem_trywait.argtypes = [ctypes.c_void_p] + pthread.sem_post.argtypes = [ctypes.c_void_p] + pthread.sem_getvalue.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + pthread.sem_unlink.argtypes = [ctypes.c_char_p] + if sys.platform != "darwin": + pthread.sem_timedwait.argtypes = [ctypes.c_void_p, + ctypes.POINTER(timespec)] + +try: + from threading import get_ident +except ImportError: + def get_ident(): + return threading.current_thread().ident + + +if sys.version_info[:2] < (3, 3): + class FileExistsError(OSError): + pass + + class FileNotFoundError(OSError): + pass + + +def sem_unlink(name): + if pthread.sem_unlink(name.encode('ascii')) < 0: + raiseFromErrno() + + +def _sem_open(name, value=None): + """ Construct or retrieve a semaphore with the given name + + If value is None, try to retrieve an existing named semaphore. 
+ Else create a new semaphore with the given value + """ + if value is None: + handle = pthread.sem_open(ctypes.c_char_p(name), 0) + else: + handle = pthread.sem_open(ctypes.c_char_p(name), SEM_OFLAG, SEM_PERM, + ctypes.c_int(value)) + + if handle == SEM_FAILURE: + e = ctypes.get_errno() + if e == errno.EEXIST: + raise FileExistsError("a semaphore named %s already exists" % name) + elif e == errno.ENOENT: + raise FileNotFoundError('cannot find semaphore named %s' % name) + elif e == errno.ENOSYS: + raise NotImplementedError('No semaphore implementation on this ' + 'system') + else: + raiseFromErrno() + + return handle + + +def _sem_timedwait(handle, timeout): + t_start = time.time() + if sys.platform != "darwin": + sec = int(timeout) + tv_sec = int(t_start) + nsec = int(1e9 * (timeout - sec) + .5) + tv_nsec = int(1e9 * (t_start - tv_sec) + .5) + deadline = timespec(sec+tv_sec, nsec+tv_nsec) + deadline.tv_sec += int(deadline.tv_nsec / 1000000000) + deadline.tv_nsec %= 1000000000 + return pthread.sem_timedwait(handle, ctypes.pointer(deadline)) + + # PERFORMANCE WARNING + # No sem_timedwait on OSX so we implement our own method. This method can + # degrade performances has the wait can have a latency up to 20 msecs + deadline = t_start + timeout + delay = 0 + now = time.time() + while True: + # Poll the sem file + res = pthread.sem_trywait(handle) + if res == 0: + return 0 + else: + e = ctypes.get_errno() + if e != errno.EAGAIN: + raiseFromErrno() + + # check for timeout + now = time.time() + if now > deadline: + ctypes.set_errno(errno.ETIMEDOUT) + return -1 + + # calculate how much time left and check the delay is not too long + # -- maximum is 20 msecs + difference = (deadline - now) + delay = min(delay, 20e-3, difference) + + # Sleep and increase delay + time.sleep(delay) + delay += 1e-3 + + +class SemLock(object): + """ctypes wrapper to the unix semaphore""" + + _rand = tempfile._RandomNameSequence() + + def __init__(self, kind, value, maxvalue, name=None, unlink_now=False): + self.count = 0 + self.ident = 0 + self.kind = kind + self.maxvalue = maxvalue + self.name = name + self.handle = _sem_open(self.name.encode('ascii'), value) + + def __del__(self): + try: + res = pthread.sem_close(self.handle) + assert res == 0, "Issue while closing semaphores" + except AttributeError: + pass + + def _is_mine(self): + return self.count > 0 and get_ident() == self.ident + + def acquire(self, block=True, timeout=None): + if self.kind == RECURSIVE_MUTEX and self._is_mine(): + self.count += 1 + return True + + if block and timeout is None: + res = pthread.sem_wait(self.handle) + elif not block or timeout <= 0: + res = pthread.sem_trywait(self.handle) + else: + res = _sem_timedwait(self.handle, timeout) + if res < 0: + e = ctypes.get_errno() + if e == errno.EINTR: + return None + elif e in [errno.EAGAIN, errno.ETIMEDOUT]: + return False + raiseFromErrno() + self.count += 1 + self.ident = get_ident() + return True + + def release(self): + if self.kind == RECURSIVE_MUTEX: + assert self._is_mine(), ( + "attempt to release recursive lock not owned by thread") + if self.count > 1: + self.count -= 1 + return + assert self.count == 1 + else: + if sys.platform == 'darwin': + # Handle broken get_value for mac ==> only Lock will work + # as sem_get_value do not work properly + if self.maxvalue == 1: + if pthread.sem_trywait(self.handle) < 0: + e = ctypes.get_errno() + if e != errno.EAGAIN: + raise OSError(e, errno.errorcode[e]) + else: + if pthread.sem_post(self.handle) < 0: + raiseFromErrno() + else: + raise 
ValueError( + "semaphore or lock released too many times") + else: + import warnings + warnings.warn("semaphore are broken on OSX, release might " + "increase its maximal value", RuntimeWarning) + else: + value = self._get_value() + if value >= self.maxvalue: + raise ValueError( + "semaphore or lock released too many times") + + if pthread.sem_post(self.handle) < 0: + raiseFromErrno() + + self.count -= 1 + + def _get_value(self): + value = ctypes.pointer(ctypes.c_int(-1)) + if pthread.sem_getvalue(self.handle, value) < 0: + raiseFromErrno() + return value.contents.value + + def _count(self): + return self.count + + def _is_zero(self): + if sys.platform == 'darwin': + # Handle broken get_value for mac ==> only Lock will work + # as sem_get_value do not work properly + if pthread.sem_trywait(self.handle) < 0: + e = ctypes.get_errno() + if e == errno.EAGAIN: + return True + raise OSError(e, errno.errorcode[e]) + else: + if pthread.sem_post(self.handle) < 0: + raiseFromErrno() + return False + else: + value = ctypes.pointer(ctypes.c_int(-1)) + if pthread.sem_getvalue(self.handle, value) < 0: + raiseFromErrno() + return value.contents.value == 0 + + def _after_fork(self): + self.count = 0 + + @staticmethod + def _rebuild(handle, kind, maxvalue, name): + self = SemLock.__new__(SemLock) + self.count = 0 + self.ident = 0 + self.kind = kind + self.maxvalue = maxvalue + self.name = name + self.handle = _sem_open(name.encode('ascii')) + return self + + +def raiseFromErrno(): + e = ctypes.get_errno() + raise OSError(e, errno.errorcode[e]) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/spawn.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/spawn.py new file mode 100644 index 0000000..2a16c84 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/spawn.py @@ -0,0 +1,258 @@ +############################################################################### +# Prepares and processes the data to setup the new process environment +# +# author: Thomas Moreau and Olivier Grisel +# +# adapted from multiprocessing/spawn.py (17/02/2017) +# * Improve logging data +# +import os +import sys +import runpy +import types +from multiprocessing import process, util + + +if sys.platform != 'win32': + WINEXE = False + WINSERVICE = False +else: + import msvcrt + from .reduction import duplicate + WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False)) + WINSERVICE = sys.executable.lower().endswith("pythonservice.exe") + +if WINSERVICE: + _python_exe = os.path.join(sys.exec_prefix, 'python.exe') +else: + _python_exe = sys.executable + + +def get_executable(): + return _python_exe + + +def _check_not_importing_main(): + if getattr(process.current_process(), '_inheriting', False): + raise RuntimeError(''' + An attempt has been made to start a new process before the + current process has finished its bootstrapping phase. + + This probably means that you are not using fork to start your + child processes and you have forgotten to use the proper idiom + in the main module: + + if __name__ == '__main__': + freeze_support() + ... 
+ + The "freeze_support()" line can be omitted if the program + is not going to be frozen to produce an executable.''') + + +def get_preparation_data(name, init_main_module=True): + ''' + Return info about parent needed by child to unpickle process object + ''' + _check_not_importing_main() + d = dict( + log_to_stderr=util._log_to_stderr, + authkey=bytes(process.current_process().authkey), + name=name, + sys_argv=sys.argv, + orig_dir=process.ORIGINAL_DIR, + dir=os.getcwd() + ) + + # Send sys_path and make sure the current directory will not be changed + sys_path = [p for p in sys.path] + try: + i = sys_path.index('') + except ValueError: + pass + else: + sys_path[i] = process.ORIGINAL_DIR + d['sys_path'] = sys_path + + # Make sure to pass the information if the multiprocessing logger is active + if util._logger is not None: + d['log_level'] = util._logger.getEffectiveLevel() + if len(util._logger.handlers) > 0: + h = util._logger.handlers[0] + d['log_fmt'] = h.formatter._fmt + + # Tell the child how to communicate with the resource_tracker + from .resource_tracker import _resource_tracker + _resource_tracker.ensure_running() + d["tracker_args"] = {"pid": _resource_tracker._pid} + if sys.platform == "win32": + child_w = duplicate( + msvcrt.get_osfhandle(_resource_tracker._fd), inheritable=True) + d["tracker_args"]["fh"] = child_w + else: + d["tracker_args"]["fd"] = _resource_tracker._fd + + if sys.version_info >= (3, 8) and os.name == 'posix': + # joblib/loky#242: allow loky processes to retrieve the resource + # tracker of their parent in case the child processes depickles + # shared_memory objects, that are still tracked by multiprocessing's + # resource_tracker by default. + # XXX: this is a workaround that may be error prone: in the future, it + # would be better to have loky subclass multiprocessing's shared_memory + # to force registration of shared_memory segments via loky's + # resource_tracker. 
+ from multiprocessing.resource_tracker import ( + _resource_tracker as mp_resource_tracker + ) + # multiprocessing's resource_tracker must be running before loky + # process is created (othewise the child won't be able to use it if it + # is created later on) + mp_resource_tracker.ensure_running() + d["mp_tracker_args"] = { + 'fd': mp_resource_tracker._fd, 'pid': mp_resource_tracker._pid + } + + # Figure out whether to initialise main in the subprocess as a module + # or through direct execution (or to leave it alone entirely) + if init_main_module: + main_module = sys.modules['__main__'] + try: + main_mod_name = getattr(main_module.__spec__, "name", None) + except BaseException: + main_mod_name = None + if main_mod_name is not None: + d['init_main_from_name'] = main_mod_name + elif sys.platform != 'win32' or (not WINEXE and not WINSERVICE): + main_path = getattr(main_module, '__file__', None) + if main_path is not None: + if (not os.path.isabs(main_path) and + process.ORIGINAL_DIR is not None): + main_path = os.path.join(process.ORIGINAL_DIR, main_path) + d['init_main_from_path'] = os.path.normpath(main_path) + # Compat for python2.7 + d['main_path'] = d['init_main_from_path'] + + return d + + +# +# Prepare current process +# +old_main_modules = [] + + +def prepare(data): + ''' + Try to get current process ready to unpickle process object + ''' + if 'name' in data: + process.current_process().name = data['name'] + + if 'authkey' in data: + process.current_process().authkey = data['authkey'] + + if 'log_to_stderr' in data and data['log_to_stderr']: + util.log_to_stderr() + + if 'log_level' in data: + util.get_logger().setLevel(data['log_level']) + + if 'log_fmt' in data: + import logging + util.get_logger().handlers[0].setFormatter( + logging.Formatter(data['log_fmt']) + ) + + if 'sys_path' in data: + sys.path = data['sys_path'] + + if 'sys_argv' in data: + sys.argv = data['sys_argv'] + + if 'dir' in data: + os.chdir(data['dir']) + + if 'orig_dir' in data: + process.ORIGINAL_DIR = data['orig_dir'] + + if 'mp_tracker_args' in data: + from multiprocessing.resource_tracker import ( + _resource_tracker as mp_resource_tracker + ) + mp_resource_tracker._fd = data['mp_tracker_args']['fd'] + mp_resource_tracker._pid = data['mp_tracker_args']['pid'] + if 'tracker_args' in data: + from .resource_tracker import _resource_tracker + _resource_tracker._pid = data["tracker_args"]['pid'] + if sys.platform == 'win32': + handle = data["tracker_args"]["fh"] + _resource_tracker._fd = msvcrt.open_osfhandle(handle, 0) + else: + _resource_tracker._fd = data["tracker_args"]["fd"] + + if 'init_main_from_name' in data: + _fixup_main_from_name(data['init_main_from_name']) + elif 'init_main_from_path' in data: + _fixup_main_from_path(data['init_main_from_path']) + + +# Multiprocessing module helpers to fix up the main module in +# spawned subprocesses +def _fixup_main_from_name(mod_name): + # __main__.py files for packages, directories, zip archives, etc, run + # their "main only" code unconditionally, so we don't even try to + # populate anything in __main__, nor do we make any changes to + # __main__ attributes + current_main = sys.modules['__main__'] + if mod_name == "__main__" or mod_name.endswith(".__main__"): + return + + # If this process was forked, __main__ may already be populated + if getattr(current_main.__spec__, "name", None) == mod_name: + return + + # Otherwise, __main__ may contain some non-main code where we need to + # support unpickling it properly. 
We rerun it as __mp_main__ and make + # the normal __main__ an alias to that + old_main_modules.append(current_main) + main_module = types.ModuleType("__mp_main__") + main_content = runpy.run_module(mod_name, + run_name="__mp_main__", + alter_sys=True) + main_module.__dict__.update(main_content) + sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module + + +def _fixup_main_from_path(main_path): + # If this process was forked, __main__ may already be populated + current_main = sys.modules['__main__'] + + # Unfortunately, the main ipython launch script historically had no + # "if __name__ == '__main__'" guard, so we work around that + # by treating it like a __main__.py file + # See https://github.com/ipython/ipython/issues/4698 + main_name = os.path.splitext(os.path.basename(main_path))[0] + if main_name == 'ipython': + return + + # Otherwise, if __file__ already has the setting we expect, + # there's nothing more to do + if getattr(current_main, '__file__', None) == main_path: + return + + # If the parent process has sent a path through rather than a module + # name we assume it is an executable script that may contain + # non-main code that needs to be executed + old_main_modules.append(current_main) + main_module = types.ModuleType("__mp_main__") + main_content = runpy.run_path(main_path, + run_name="__mp_main__") + main_module.__dict__.update(main_content) + sys.modules['__main__'] = sys.modules['__mp_main__'] = main_module + + +def import_main_path(main_path): + ''' + Set sys.modules['__main__'] to module at main_path + ''' + _fixup_main_from_path(main_path) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/synchronize.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/synchronize.py new file mode 100644 index 0000000..592de3c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/synchronize.py @@ -0,0 +1,381 @@ +############################################################################### +# Synchronization primitives based on our SemLock implementation +# +# author: Thomas Moreau and Olivier Grisel +# +# adapted from multiprocessing/synchronize.py (17/02/2017) +# * Remove ctx argument for compatibility reason +# * Implementation of Condition/Event are necessary for compatibility +# with python2.7/3.3, Barrier should be reimplemented to for those +# version (but it is not used in loky). +# + +import os +import sys +import tempfile +import threading +import _multiprocessing +from time import time as _time + +from .context import assert_spawning +from . import resource_tracker +from multiprocessing import process +from multiprocessing import util + +__all__ = [ + 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event' + ] +# Try to import the mp.synchronize module cleanly, if it fails +# raise ImportError for platforms lacking a working sem_open implementation. 
+# See issue 3770 +try: + if sys.version_info < (3, 4): + from .semlock import SemLock as _SemLock + from .semlock import sem_unlink + else: + from _multiprocessing import SemLock as _SemLock + from _multiprocessing import sem_unlink +except (ImportError): + raise ImportError("This platform lacks a functioning sem_open" + + " implementation, therefore, the required" + + " synchronization primitives needed will not" + + " function, see issue 3770.") + +if sys.version_info[:2] < (3, 3): + FileExistsError = OSError + +# +# Constants +# + +RECURSIVE_MUTEX, SEMAPHORE = list(range(2)) +SEM_VALUE_MAX = _multiprocessing.SemLock.SEM_VALUE_MAX + + +# +# Base class for semaphores and mutexes; wraps `_multiprocessing.SemLock` +# + +class SemLock(object): + + _rand = tempfile._RandomNameSequence() + + def __init__(self, kind, value, maxvalue): + # unlink_now is only used on win32 or when we are using fork. + unlink_now = False + for i in range(100): + try: + self._semlock = _SemLock( + kind, value, maxvalue, SemLock._make_name(), + unlink_now) + except FileExistsError: # pragma: no cover + pass + else: + break + else: # pragma: no cover + raise FileExistsError('cannot find name for semaphore') + + util.debug('created semlock with handle %s and name "%s"' + % (self._semlock.handle, self._semlock.name)) + + self._make_methods() + + def _after_fork(obj): + obj._semlock._after_fork() + + util.register_after_fork(self, _after_fork) + + # When the object is garbage collected or the + # process shuts down we unlink the semaphore name + resource_tracker.register(self._semlock.name, "semlock") + util.Finalize(self, SemLock._cleanup, (self._semlock.name,), + exitpriority=0) + + @staticmethod + def _cleanup(name): + sem_unlink(name) + resource_tracker.unregister(name, "semlock") + + def _make_methods(self): + self.acquire = self._semlock.acquire + self.release = self._semlock.release + + def __enter__(self): + return self._semlock.acquire() + + def __exit__(self, *args): + return self._semlock.release() + + def __getstate__(self): + assert_spawning(self) + sl = self._semlock + h = sl.handle + return (h, sl.kind, sl.maxvalue, sl.name) + + def __setstate__(self, state): + self._semlock = _SemLock._rebuild(*state) + util.debug('recreated blocker with handle %r and name "%s"' + % (state[0], state[3])) + self._make_methods() + + @staticmethod + def _make_name(): + # OSX does not support long names for semaphores + return '/loky-%i-%s' % (os.getpid(), next(SemLock._rand)) + + +# +# Semaphore +# + +class Semaphore(SemLock): + + def __init__(self, value=1): + SemLock.__init__(self, SEMAPHORE, value, SEM_VALUE_MAX) + + def get_value(self): + if sys.platform == 'darwin': + raise NotImplementedError("OSX does not implement sem_getvalue") + return self._semlock._get_value() + + def __repr__(self): + try: + value = self._semlock._get_value() + except Exception: + value = 'unknown' + return '<%s(value=%s)>' % (self.__class__.__name__, value) + + +# +# Bounded semaphore +# + +class BoundedSemaphore(Semaphore): + + def __init__(self, value=1): + SemLock.__init__(self, SEMAPHORE, value, value) + + def __repr__(self): + try: + value = self._semlock._get_value() + except Exception: + value = 'unknown' + return '<%s(value=%s, maxvalue=%s)>' % \ + (self.__class__.__name__, value, self._semlock.maxvalue) + + +# +# Non-recursive lock +# + +class Lock(SemLock): + + def __init__(self): + super(Lock, self).__init__(SEMAPHORE, 1, 1) + + def __repr__(self): + try: + if self._semlock._is_mine(): + name = process.current_process().name + if 
threading.current_thread().name != 'MainThread': + name += '|' + threading.current_thread().name + elif self._semlock._get_value() == 1: + name = 'None' + elif self._semlock._count() > 0: + name = 'SomeOtherThread' + else: + name = 'SomeOtherProcess' + except Exception: + name = 'unknown' + return '<%s(owner=%s)>' % (self.__class__.__name__, name) + + +# +# Recursive lock +# + +class RLock(SemLock): + + def __init__(self): + super(RLock, self).__init__(RECURSIVE_MUTEX, 1, 1) + + def __repr__(self): + try: + if self._semlock._is_mine(): + name = process.current_process().name + if threading.current_thread().name != 'MainThread': + name += '|' + threading.current_thread().name + count = self._semlock._count() + elif self._semlock._get_value() == 1: + name, count = 'None', 0 + elif self._semlock._count() > 0: + name, count = 'SomeOtherThread', 'nonzero' + else: + name, count = 'SomeOtherProcess', 'nonzero' + except Exception: + name, count = 'unknown', 'unknown' + return '<%s(%s, %s)>' % (self.__class__.__name__, name, count) + + +# +# Condition variable +# + +class Condition(object): + + def __init__(self, lock=None): + self._lock = lock or RLock() + self._sleeping_count = Semaphore(0) + self._woken_count = Semaphore(0) + self._wait_semaphore = Semaphore(0) + self._make_methods() + + def __getstate__(self): + assert_spawning(self) + return (self._lock, self._sleeping_count, + self._woken_count, self._wait_semaphore) + + def __setstate__(self, state): + (self._lock, self._sleeping_count, + self._woken_count, self._wait_semaphore) = state + self._make_methods() + + def __enter__(self): + return self._lock.__enter__() + + def __exit__(self, *args): + return self._lock.__exit__(*args) + + def _make_methods(self): + self.acquire = self._lock.acquire + self.release = self._lock.release + + def __repr__(self): + try: + num_waiters = (self._sleeping_count._semlock._get_value() - + self._woken_count._semlock._get_value()) + except Exception: + num_waiters = 'unknown' + return '<%s(%s, %s)>' % (self.__class__.__name__, + self._lock, num_waiters) + + def wait(self, timeout=None): + assert self._lock._semlock._is_mine(), \ + 'must acquire() condition before using wait()' + + # indicate that this thread is going to sleep + self._sleeping_count.release() + + # release lock + count = self._lock._semlock._count() + for i in range(count): + self._lock.release() + + try: + # wait for notification or timeout + return self._wait_semaphore.acquire(True, timeout) + finally: + # indicate that this thread has woken + self._woken_count.release() + + # reacquire lock + for i in range(count): + self._lock.acquire() + + def notify(self): + assert self._lock._semlock._is_mine(), 'lock is not owned' + assert not self._wait_semaphore.acquire(False) + + # to take account of timeouts since last notify() we subtract + # woken_count from sleeping_count and rezero woken_count + while self._woken_count.acquire(False): + res = self._sleeping_count.acquire(False) + assert res + + if self._sleeping_count.acquire(False): # try grabbing a sleeper + self._wait_semaphore.release() # wake up one sleeper + self._woken_count.acquire() # wait for the sleeper to wake + + # rezero _wait_semaphore in case a timeout just happened + self._wait_semaphore.acquire(False) + + def notify_all(self): + assert self._lock._semlock._is_mine(), 'lock is not owned' + assert not self._wait_semaphore.acquire(False) + + # to take account of timeouts since last notify*() we subtract + # woken_count from sleeping_count and rezero woken_count + while 
self._woken_count.acquire(False): + res = self._sleeping_count.acquire(False) + assert res + + sleepers = 0 + while self._sleeping_count.acquire(False): + self._wait_semaphore.release() # wake up one sleeper + sleepers += 1 + + if sleepers: + for i in range(sleepers): + self._woken_count.acquire() # wait for a sleeper to wake + + # rezero wait_semaphore in case some timeouts just happened + while self._wait_semaphore.acquire(False): + pass + + def wait_for(self, predicate, timeout=None): + result = predicate() + if result: + return result + if timeout is not None: + endtime = _time() + timeout + else: + endtime = None + waittime = None + while not result: + if endtime is not None: + waittime = endtime - _time() + if waittime <= 0: + break + self.wait(waittime) + result = predicate() + return result + + +# +# Event +# + +class Event(object): + + def __init__(self): + self._cond = Condition(Lock()) + self._flag = Semaphore(0) + + def is_set(self): + with self._cond: + if self._flag.acquire(False): + self._flag.release() + return True + return False + + def set(self): + with self._cond: + self._flag.acquire(False) + self._flag.release() + self._cond.notify_all() + + def clear(self): + with self._cond: + self._flag.acquire(False) + + def wait(self, timeout=None): + with self._cond: + if self._flag.acquire(False): + self._flag.release() + else: + self._cond.wait(timeout) + + if self._flag.acquire(False): + self._flag.release() + return True + return False diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/utils.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/utils.py new file mode 100644 index 0000000..dc1b82a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/backend/utils.py @@ -0,0 +1,172 @@ +import os +import sys +import time +import errno +import signal +import warnings +import threading +import subprocess +try: + import psutil +except ImportError: + psutil = None + + +WIN32 = sys.platform == "win32" + + +def _flag_current_thread_clean_exit(): + """Put a ``_clean_exit`` flag on the current thread""" + thread = threading.current_thread() + thread._clean_exit = True + + +def recursive_terminate(process, use_psutil=True): + if use_psutil and psutil is not None: + _recursive_terminate_with_psutil(process) + else: + _recursive_terminate_without_psutil(process) + + +def _recursive_terminate_with_psutil(process, retries=5): + try: + children = psutil.Process(process.pid).children(recursive=True) + except psutil.NoSuchProcess: + return + + # Kill the children in reverse order to avoid killing the parents before + # the children in cases where there are more processes nested. + for child in children[::-1]: + try: + child.kill() + except psutil.NoSuchProcess: + pass + + process.terminate() + process.join() + + +def _recursive_terminate_without_psutil(process): + """Terminate a process and its descendants. + """ + try: + _recursive_terminate(process.pid) + except OSError as e: + warnings.warn("Failed to kill subprocesses on this platform. Please" + "install psutil: https://github.com/giampaolo/psutil") + # In case we cannot introspect the children, we fall back to the + # classic Process.terminate. + process.terminate() + process.join() + + +def _recursive_terminate(pid): + """Recursively kill the descendants of a process before killing it. + """ + + if sys.platform == "win32": + # On windows, the taskkill function with option `/T` terminate a given + # process pid and its children. 
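# --- Editor's illustrative sketch (not part of the vendored module) ---------
# A standalone equivalent of the psutil-based code path above
# (_recursive_terminate_with_psutil): kill a process tree, children first,
# silently skipping processes that have already exited. Assumes psutil is
# installed; `pid` is a hypothetical process id.
def kill_process_tree(pid):
    import psutil
    try:
        parent = psutil.Process(pid)
    except psutil.NoSuchProcess:
        return
    # Kill children in reverse order, mirroring the helper above, so that
    # deeper descendants go first.
    for child in reversed(parent.children(recursive=True)):
        try:
            child.kill()
        except psutil.NoSuchProcess:
            pass
    parent.kill()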
+ try: + subprocess.check_output( + ["taskkill", "/F", "/T", "/PID", str(pid)], + stderr=None) + except subprocess.CalledProcessError as e: + # In windows, taskkill return 1 for permission denied and 128, 255 + # for no process found. + if e.returncode not in [1, 128, 255]: + raise + elif e.returncode == 1: + # Try to kill the process without its descendants if taskkill + # was denied permission. If this fails too, with an error + # different from process not found, let the top level function + # raise a warning and retry to kill the process. + try: + os.kill(pid, signal.SIGTERM) + except OSError as e: + if e.errno != errno.ESRCH: + raise + + else: + try: + children_pids = subprocess.check_output( + ["pgrep", "-P", str(pid)], + stderr=None + ) + except subprocess.CalledProcessError as e: + # `ps` returns 1 when no child process has been found + if e.returncode == 1: + children_pids = b'' + else: + raise + + # Decode the result, split the cpid and remove the trailing line + children_pids = children_pids.decode().split('\n')[:-1] + for cpid in children_pids: + cpid = int(cpid) + _recursive_terminate(cpid) + + try: + os.kill(pid, signal.SIGTERM) + except OSError as e: + # if OSError is raised with [Errno 3] no such process, the process + # is already terminated, else, raise the error and let the top + # level function raise a warning and retry to kill the process. + if e.errno != errno.ESRCH: + raise + + +def get_exitcodes_terminated_worker(processes): + """Return a formated string with the exitcodes of terminated workers. + + If necessary, wait (up to .25s) for the system to correctly set the + exitcode of one terminated worker. + """ + patience = 5 + + # Catch the exitcode of the terminated workers. There should at least be + # one. If not, wait a bit for the system to correctly set the exitcode of + # the terminated worker. + exitcodes = [p.exitcode for p in list(processes.values()) + if p.exitcode is not None] + while len(exitcodes) == 0 and patience > 0: + patience -= 1 + exitcodes = [p.exitcode for p in list(processes.values()) + if p.exitcode is not None] + time.sleep(.05) + + return _format_exitcodes(exitcodes) + + +def _format_exitcodes(exitcodes): + """Format a list of exit code with names of the signals if possible""" + str_exitcodes = ["{}({})".format(_get_exitcode_name(e), e) + for e in exitcodes if e is not None] + return "{" + ", ".join(str_exitcodes) + "}" + + +def _get_exitcode_name(exitcode): + if sys.platform == "win32": + # The exitcode are unreliable on windows (see bpo-31863). + # For this case, return UNKNOWN + return "UNKNOWN" + + if exitcode < 0: + try: + import signal + if sys.version_info > (3, 5): + return signal.Signals(-exitcode).name + + # construct an inverse lookup table + for v, k in signal.__dict__.items(): + if (v.startswith('SIG') and not v.startswith('SIG_') and + k == -exitcode): + return v + except ValueError: + return "UNKNOWN" + elif exitcode != 255: + # The exitcode are unreliable on forkserver were 255 is always returned + # (see bpo-30589). 
For this case, return UNKNOWN + return "EXIT" + + return "UNKNOWN" diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/cloudpickle_wrapper.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/cloudpickle_wrapper.py new file mode 100644 index 0000000..1bf41a3 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/cloudpickle_wrapper.py @@ -0,0 +1,113 @@ +import inspect +from functools import partial + +try: + from joblib.externals.cloudpickle import dumps, loads + cloudpickle = True +except ImportError: + cloudpickle = False + + +WRAP_CACHE = dict() + + +class CloudpickledObjectWrapper(object): + def __init__(self, obj, keep_wrapper=False): + self._obj = obj + self._keep_wrapper = keep_wrapper + + def __reduce__(self): + _pickled_object = dumps(self._obj) + if not self._keep_wrapper: + return loads, (_pickled_object,) + + return _reconstruct_wrapper, (_pickled_object, self._keep_wrapper) + + def __getattr__(self, attr): + # Ensure that the wrapped object can be used seemlessly as the + # previous object. + if attr not in ['_obj', '_keep_wrapper']: + return getattr(self._obj, attr) + return getattr(self, attr) + + +# Make sure the wrapped object conserves the callable property +class CallableObjectWrapper(CloudpickledObjectWrapper): + + def __call__(self, *args, **kwargs): + return self._obj(*args, **kwargs) + + +def _wrap_non_picklable_objects(obj, keep_wrapper): + if callable(obj): + return CallableObjectWrapper(obj, keep_wrapper=keep_wrapper) + return CloudpickledObjectWrapper(obj, keep_wrapper=keep_wrapper) + + +def _reconstruct_wrapper(_pickled_object, keep_wrapper): + obj = loads(_pickled_object) + return _wrap_non_picklable_objects(obj, keep_wrapper) + + +def _wrap_objects_when_needed(obj): + # Function to introspect an object and decide if it should be wrapped or + # not. + if not cloudpickle: + return obj + + need_wrap = "__main__" in getattr(obj, "__module__", "") + if isinstance(obj, partial): + return partial( + _wrap_objects_when_needed(obj.func), + *[_wrap_objects_when_needed(a) for a in obj.args], + **{k: _wrap_objects_when_needed(v) + for k, v in obj.keywords.items()} + ) + if callable(obj): + # Need wrap if the object is a function defined in a local scope of + # another function. + func_code = getattr(obj, "__code__", "") + need_wrap |= getattr(func_code, "co_flags", 0) & inspect.CO_NESTED + + # Need wrap if the obj is a lambda expression + func_name = getattr(obj, "__name__", "") + need_wrap |= "" in func_name + + if not need_wrap: + return obj + + wrapped_obj = WRAP_CACHE.get(obj) + if wrapped_obj is None: + wrapped_obj = _wrap_non_picklable_objects(obj, keep_wrapper=False) + WRAP_CACHE[obj] = wrapped_obj + return wrapped_obj + + +def wrap_non_picklable_objects(obj, keep_wrapper=True): + """Wrapper for non-picklable object to use cloudpickle to serialize them. + + Note that this wrapper tends to slow down the serialization process as it + is done with cloudpickle which is typically slower compared to pickle. The + proper way to solve serialization issues is to avoid defining functions and + objects in the main scripts and to implement __reduce__ functions for + complex classes. + """ + if not cloudpickle: + raise ImportError("could not from joblib.externals import cloudpickle. Please install " + "cloudpickle to allow extended serialization. 
" + "(`pip install cloudpickle`).") + + # If obj is a class, create a CloudpickledClassWrapper which instantiates + # the object internally and wrap it directly in a CloudpickledObjectWrapper + if inspect.isclass(obj): + class CloudpickledClassWrapper(CloudpickledObjectWrapper): + def __init__(self, *args, **kwargs): + self._obj = obj(*args, **kwargs) + self._keep_wrapper = keep_wrapper + + CloudpickledClassWrapper.__name__ = obj.__name__ + return CloudpickledClassWrapper + + # If obj is an instance of a class, just wrap it in a regular + # CloudpickledObjectWrapper + return _wrap_non_picklable_objects(obj, keep_wrapper=keep_wrapper) diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/process_executor.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/process_executor.py new file mode 100644 index 0000000..41e4a2b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/process_executor.py @@ -0,0 +1,1181 @@ +############################################################################### +# Re-implementation of the ProcessPoolExecutor more robust to faults +# +# author: Thomas Moreau and Olivier Grisel +# +# adapted from concurrent/futures/process_pool_executor.py (17/02/2017) +# * Backport for python2.7/3.3, +# * Add an extra management thread to detect executor_manager_thread failures, +# * Improve the shutdown process to avoid deadlocks, +# * Add timeout for workers, +# * More robust pickling process. +# +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Implements ProcessPoolExecutor. + +The follow diagram and text describe the data-flow through the system: + +|======================= In-process =====================|== Out-of-process ==| + ++----------+ +----------+ +--------+ +-----------+ +---------+ +| | => | Work Ids | | | | Call Q | | Process | +| | +----------+ | | +-----------+ | Pool | +| | | ... | | | | ... | +---------+ +| | | 6 | => | | => | 5, call() | => | | +| | | 7 | | | | ... | | | +| Process | | ... | | Local | +-----------+ | Process | +| Pool | +----------+ | Worker | | #1..n | +| Executor | | Thread | | | +| | +----------- + | | +-----------+ | | +| | <=> | Work Items | <=> | | <= | Result Q | <= | | +| | +------------+ | | +-----------+ | | +| | | 6: call() | | | | ... | | | +| | | future | +--------+ | 4, result | | | +| | | ... | | 3, except | | | ++----------+ +------------+ +-----------+ +---------+ + +Executor.submit() called: +- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict +- adds the id of the _WorkItem to the "Work Ids" queue + +Local worker thread: +- reads work ids from the "Work Ids" queue and looks up the corresponding + WorkItem from the "Work Items" dict: if the work item has been cancelled then + it is simply removed from the dict, otherwise it is repackaged as a + _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q" + until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because + calls placed in the "Call Q" can no longer be cancelled with Future.cancel(). 
+- reads _ResultItems from "Result Q", updates the future stored in the + "Work Items" dict and deletes the dict entry + +Process #1..n: +- reads _CallItems from "Call Q", executes the calls, and puts the resulting + _ResultItems in "Result Q" +""" + + +__author__ = 'Thomas Moreau (thomas.moreau.2010@gmail.com)' + + +import os +import gc +import sys +import struct +import weakref +import warnings +import itertools +import traceback +import threading +from time import time +import multiprocessing as mp +from functools import partial +from pickle import PicklingError + +from . import _base +from .backend import get_context +from .backend.compat import queue +from .backend.compat import wait +from .backend.compat import set_cause +from .backend.context import cpu_count +from .backend.queues import Queue, SimpleQueue +from .backend.reduction import set_loky_pickler, get_loky_pickler_name +from .backend.utils import recursive_terminate, get_exitcodes_terminated_worker + +try: + from concurrent.futures.process import BrokenProcessPool as _BPPException +except ImportError: + _BPPException = RuntimeError + + +# Compatibility for python2.7 +if sys.version_info[0] == 2: + ProcessLookupError = OSError + + +# Mechanism to prevent infinite process spawning. When a worker of a +# ProcessPoolExecutor nested in MAX_DEPTH Executor tries to create a new +# Executor, a LokyRecursionError is raised +MAX_DEPTH = int(os.environ.get("LOKY_MAX_DEPTH", 10)) +_CURRENT_DEPTH = 0 + +# Minimum time interval between two consecutive memory leak protection checks. +_MEMORY_LEAK_CHECK_DELAY = 1. + +# Number of bytes of memory usage allowed over the reference process size. +_MAX_MEMORY_LEAK_SIZE = int(3e8) + + +try: + from psutil import Process + _USE_PSUTIL = True + + def _get_memory_usage(pid, force_gc=False): + if force_gc: + gc.collect() + + return Process(pid).memory_info().rss + +except ImportError: + _USE_PSUTIL = False + + +class _ThreadWakeup: + def __init__(self): + self._closed = False + self._reader, self._writer = mp.Pipe(duplex=False) + + def close(self): + if not self._closed: + self._closed = True + self._writer.close() + self._reader.close() + + def wakeup(self): + if not self._closed: + if sys.platform == "win32" and sys.version_info[:2] < (3, 4): + # Compat for python2.7 on windows, where poll return false for + # b"" messages. Use the slightly larger message b"0". + self._writer.send_bytes(b"0") + else: + self._writer.send_bytes(b"") + + def clear(self): + if not self._closed: + while self._reader.poll(): + self._reader.recv_bytes() + + +class _ExecutorFlags(object): + """necessary references to maintain executor states without preventing gc + + It permits to keep the information needed by executor_manager_thread + and crash_detection_thread to maintain the pool without preventing the + garbage collection of unreferenced executors. + """ + def __init__(self, shutdown_lock): + + self.shutdown = False + self.broken = None + self.kill_workers = False + self.shutdown_lock = shutdown_lock + + def flag_as_shutting_down(self, kill_workers=None): + with self.shutdown_lock: + self.shutdown = True + if kill_workers is not None: + self.kill_workers = kill_workers + + def flag_as_broken(self, broken): + with self.shutdown_lock: + self.shutdown = True + self.broken = broken + + +# Prior to 3.9, executor_manager_thread is created as daemon thread. This means +# that it is not joined automatically when the interpreter is shutting down. 
+# To work around this problem, an exit handler is installed to tell the +# thread to exit when the interpreter is shutting down and then waits until +# it finishes. The thread needs to be daemonized because the atexit hooks are +# called after all non daemonized threads are joined. +# +# Starting 3.9, there exists a specific atexit hook to be called before joining +# the threads so the executor_manager_thread does not need to be daemonized +# anymore. +# +# The atexit hooks are registered when starting the first ProcessPoolExecutor +# to avoid import having an effect on the interpreter. + +_threads_wakeups = weakref.WeakKeyDictionary() +_global_shutdown = False + + +def _python_exit(): + global _global_shutdown + _global_shutdown = True + items = list(_threads_wakeups.items()) + mp.util.debug("Interpreter shutting down. Waking up " + "executor_manager_thread {}".format(items)) + for _, (shutdown_lock, thread_wakeup) in items: + with shutdown_lock: + thread_wakeup.wakeup() + for thread, _ in items: + thread.join() + + +# With the fork context, _thread_wakeups is propagated to children. +# Clear it after fork to avoid some situation that can cause some +# freeze when joining the workers. +mp.util.register_after_fork(_threads_wakeups, lambda obj: obj.clear()) + + +# Module variable to register the at_exit call +process_pool_executor_at_exit = None + +# Controls how many more calls than processes will be queued in the call queue. +# A smaller number will mean that processes spend more time idle waiting for +# work while a larger number will make Future.cancel() succeed less frequently +# (Futures in the call queue cannot be cancelled). +EXTRA_QUEUED_CALLS = 1 + + +class _RemoteTraceback(Exception): + """Embed stringification of remote traceback in local traceback + """ + def __init__(self, tb=None): + self.tb = '\n"""\n{}"""'.format(tb) + + def __str__(self): + return self.tb + + +class _ExceptionWithTraceback(BaseException): + + def __init__(self, exc): + tb = getattr(exc, "__traceback__", None) + if tb is None: + _, _, tb = sys.exc_info() + tb = traceback.format_exception(type(exc), exc, tb) + tb = ''.join(tb) + self.exc = exc + self.tb = tb + + def __reduce__(self): + return _rebuild_exc, (self.exc, self.tb) + + +def _rebuild_exc(exc, tb): + exc = set_cause(exc, _RemoteTraceback(tb)) + return exc + + +class _WorkItem(object): + + __slots__ = ["future", "fn", "args", "kwargs"] + + def __init__(self, future, fn, args, kwargs): + self.future = future + self.fn = fn + self.args = args + self.kwargs = kwargs + + +class _ResultItem(object): + + def __init__(self, work_id, exception=None, result=None): + self.work_id = work_id + self.exception = exception + self.result = result + + +class _CallItem(object): + + def __init__(self, work_id, fn, args, kwargs): + self.work_id = work_id + self.fn = fn + self.args = args + self.kwargs = kwargs + + # Store the current loky_pickler so it is correctly set in the worker + self.loky_pickler = get_loky_pickler_name() + + def __call__(self): + set_loky_pickler(self.loky_pickler) + return self.fn(*self.args, **self.kwargs) + + def __repr__(self): + return "CallItem({}, {}, {}, {})".format( + self.work_id, self.fn, self.args, self.kwargs) + + +class _SafeQueue(Queue): + """Safe Queue set exception to the future object linked to a job""" + def __init__(self, max_size=0, ctx=None, pending_work_items=None, + running_work_items=None, thread_wakeup=None, reducers=None): + self.thread_wakeup = thread_wakeup + self.pending_work_items = pending_work_items + 
self.running_work_items = running_work_items + super(_SafeQueue, self).__init__(max_size, reducers=reducers, ctx=ctx) + + def _on_queue_feeder_error(self, e, obj): + if isinstance(obj, _CallItem): + # format traceback only works on python3 + if isinstance(e, struct.error): + raised_error = RuntimeError( + "The task could not be sent to the workers as it is too " + "large for `send_bytes`.") + else: + raised_error = PicklingError( + "Could not pickle the task to send it to the workers.") + tb = traceback.format_exception( + type(e), e, getattr(e, "__traceback__", None)) + raised_error = set_cause(raised_error, + _RemoteTraceback(''.join(tb))) + work_item = self.pending_work_items.pop(obj.work_id, None) + self.running_work_items.remove(obj.work_id) + # work_item can be None if another process terminated. In this + # case, the executor_manager_thread fails all work_items with + # BrokenProcessPool + if work_item is not None: + work_item.future.set_exception(raised_error) + del work_item + self.thread_wakeup.wakeup() + else: + super(_SafeQueue, self)._on_queue_feeder_error(e, obj) + + +def _get_chunks(chunksize, *iterables): + """Iterates over zip()ed iterables in chunks. """ + if sys.version_info < (3, 3): + it = itertools.izip(*iterables) + else: + it = zip(*iterables) + while True: + chunk = tuple(itertools.islice(it, chunksize)) + if not chunk: + return + yield chunk + + +def _process_chunk(fn, chunk): + """Processes a chunk of an iterable passed to map. + + Runs the function passed to map() on a chunk of the + iterable passed to map. + + This function is run in a separate process. + + """ + return [fn(*args) for args in chunk] + + +def _sendback_result(result_queue, work_id, result=None, exception=None): + """Safely send back the given result or exception""" + try: + result_queue.put(_ResultItem(work_id, result=result, + exception=exception)) + except BaseException as e: + exc = _ExceptionWithTraceback(e) + result_queue.put(_ResultItem(work_id, exception=exc)) + + +def _process_worker(call_queue, result_queue, initializer, initargs, + processes_management_lock, timeout, worker_exit_lock, + current_depth): + """Evaluates calls from call_queue and places the results in result_queue. + + This worker is run in a separate process. + + Args: + call_queue: A ctx.Queue of _CallItems that will be read and + evaluated by the worker. + result_queue: A ctx.Queue of _ResultItems that will written + to by the worker. + initializer: A callable initializer, or None + initargs: A tuple of args for the initializer + process_management_lock: A ctx.Lock avoiding worker timeout while some + workers are being spawned. + timeout: maximum time to wait for a new item in the call_queue. If that + time is expired, the worker will shutdown. + worker_exit_lock: Lock to avoid flagging the executor as broken on + workers timeout. + current_depth: Nested parallelism level, to avoid infinite spawning. 
+ """ + if initializer is not None: + try: + initializer(*initargs) + except BaseException: + _base.LOGGER.critical('Exception in initializer:', exc_info=True) + # The parent will notice that the process stopped and + # mark the pool broken + return + + # set the global _CURRENT_DEPTH mechanism to limit recursive call + global _CURRENT_DEPTH + _CURRENT_DEPTH = current_depth + _process_reference_size = None + _last_memory_leak_check = None + pid = os.getpid() + + mp.util.debug('Worker started with timeout=%s' % timeout) + while True: + try: + call_item = call_queue.get(block=True, timeout=timeout) + if call_item is None: + mp.util.info("Shutting down worker on sentinel") + except queue.Empty: + mp.util.info("Shutting down worker after timeout %0.3fs" + % timeout) + if processes_management_lock.acquire(block=False): + processes_management_lock.release() + call_item = None + else: + mp.util.info("Could not acquire processes_management_lock") + continue + except BaseException: + previous_tb = traceback.format_exc() + try: + result_queue.put(_RemoteTraceback(previous_tb)) + except BaseException: + # If we cannot format correctly the exception, at least print + # the traceback. + print(previous_tb) + sys.exit(1) + if call_item is None: + # Notify queue management thread about clean worker shutdown + result_queue.put(pid) + with worker_exit_lock: + return + try: + r = call_item() + except BaseException as e: + exc = _ExceptionWithTraceback(e) + result_queue.put(_ResultItem(call_item.work_id, exception=exc)) + else: + _sendback_result(result_queue, call_item.work_id, result=r) + del r + + # Free the resource as soon as possible, to avoid holding onto + # open files or shared memory that is not needed anymore + del call_item + + if _USE_PSUTIL: + if _process_reference_size is None: + # Make reference measurement after the first call + _process_reference_size = _get_memory_usage(pid, force_gc=True) + _last_memory_leak_check = time() + continue + if time() - _last_memory_leak_check > _MEMORY_LEAK_CHECK_DELAY: + mem_usage = _get_memory_usage(pid) + _last_memory_leak_check = time() + if mem_usage - _process_reference_size < _MAX_MEMORY_LEAK_SIZE: + # Memory usage stays within bounds: everything is fine. + continue + + # Check again memory usage; this time take the measurement + # after a forced garbage collection to break any reference + # cycles. + mem_usage = _get_memory_usage(pid, force_gc=True) + _last_memory_leak_check = time() + if mem_usage - _process_reference_size < _MAX_MEMORY_LEAK_SIZE: + # The GC managed to free the memory: everything is fine. + continue + + # The process is leaking memory: let the master process + # know that we need to start a new worker. + mp.util.info("Memory leak detected: shutting down worker") + result_queue.put(pid) + with worker_exit_lock: + return + else: + # if psutil is not installed, trigger gc.collect events + # regularly to limit potential memory leaks due to reference cycles + if ((_last_memory_leak_check is None) or + (time() - _last_memory_leak_check > + _MEMORY_LEAK_CHECK_DELAY)): + gc.collect() + _last_memory_leak_check = time() + + +class _ExecutorManagerThread(threading.Thread): + """Manages the communication between this process and the worker processes. + + The manager is run in a local thread. + + Args: + executor: A reference to the ProcessPoolExecutor that owns + this thread. A weakref will be own by the manager as well as + references to internal objects used to introspect the state of + the executor. 
+ """ + + def __init__(self, executor): + # Store references to necessary internals of the executor. + + # A _ThreadWakeup to allow waking up the executor_manager_thread from + # the main Thread and avoid deadlocks caused by permanently + # locked queues. + self.thread_wakeup = executor._executor_manager_thread_wakeup + self.shutdown_lock = executor._shutdown_lock + + # A weakref.ref to the ProcessPoolExecutor that owns this thread. Used + # to determine if the ProcessPoolExecutor has been garbage collected + # and that the manager can exit. + # When the executor gets garbage collected, the weakref callback + # will wake up the queue management thread so that it can terminate + # if there is no pending work item. + def weakref_cb(_, + thread_wakeup=self.thread_wakeup, + shutdown_lock=self.shutdown_lock): + mp.util.debug('Executor collected: triggering callback for' + ' QueueManager wakeup') + with shutdown_lock: + thread_wakeup.wakeup() + + self.executor_reference = weakref.ref(executor, weakref_cb) + + # The flags of the executor + self.executor_flags = executor._flags + + # A list of the ctx.Process instances used as workers. + self.processes = executor._processes + + # A ctx.Queue that will be filled with _CallItems derived from + # _WorkItems for processing by the process workers. + self.call_queue = executor._call_queue + + # A ctx.SimpleQueue of _ResultItems generated by the process workers. + self.result_queue = executor._result_queue + + # A queue.Queue of work ids e.g. Queue([5, 6, ...]). + self.work_ids_queue = executor._work_ids + + # A dict mapping work ids to _WorkItems e.g. + # {5: <_WorkItem...>, 6: <_WorkItem...>, ...} + self.pending_work_items = executor._pending_work_items + + # A list of the work_ids that are currently running + self.running_work_items = executor._running_work_items + + # A lock to avoid concurrent shutdown of workers on timeout and spawn + # of new processes or shut down + self.processes_management_lock = executor._processes_management_lock + + super(_ExecutorManagerThread, self).__init__() + if sys.version_info < (3, 9): + self.daemon = True + + def run(self): + # Main loop for the executor manager thread. + + while True: + self.add_call_item_to_queue() + + result_item, is_broken, bpe = self.wait_result_broken_or_wakeup() + + if is_broken: + self.terminate_broken(bpe) + return + if result_item is not None: + self.process_result_item(result_item) + # Delete reference to result_item to avoid keeping references + # while waiting on new results. + del result_item + + if self.is_shutting_down(): + self.flag_executor_shutting_down() + + # Since no new work items can be added, it is safe to shutdown + # this thread if there are no pending work items. + if not self.pending_work_items: + self.join_executor_internals() + return + + def add_call_item_to_queue(self): + # Fills call_queue with _WorkItems from pending_work_items. + # This function never blocks. 
+ while True: + if self.call_queue.full(): + return + try: + work_id = self.work_ids_queue.get(block=False) + except queue.Empty: + return + else: + work_item = self.pending_work_items[work_id] + + if work_item.future.set_running_or_notify_cancel(): + self.running_work_items += [work_id] + self.call_queue.put(_CallItem(work_id, + work_item.fn, + work_item.args, + work_item.kwargs), + block=True) + else: + del self.pending_work_items[work_id] + continue + + def wait_result_broken_or_wakeup(self): + # Wait for a result to be ready in the result_queue while checking + # that all worker processes are still running, or for a wake up + # signal send. The wake up signals come either from new tasks being + # submitted, from the executor being shutdown/gc-ed, or from the + # shutdown of the python interpreter. + result_reader = self.result_queue._reader + wakeup_reader = self.thread_wakeup._reader + readers = [result_reader, wakeup_reader] + worker_sentinels = [p.sentinel for p in list(self.processes.values())] + ready = wait(readers + worker_sentinels) + + bpe = None + is_broken = True + result_item = None + if result_reader in ready: + try: + result_item = result_reader.recv() + if isinstance(result_item, _RemoteTraceback): + bpe = BrokenProcessPool( + "A task has failed to un-serialize. Please ensure that" + " the arguments of the function are all picklable." + ) + set_cause(bpe, result_item) + else: + is_broken = False + except BaseException as e: + bpe = BrokenProcessPool( + "A result has failed to un-serialize. Please ensure that " + "the objects returned by the function are always " + "picklable." + ) + tb = traceback.format_exception( + type(e), e, getattr(e, "__traceback__", None)) + set_cause(bpe, _RemoteTraceback(''.join(tb))) + + elif wakeup_reader in ready: + # This is simply a wake-up event that might either trigger putting + # more tasks in the queue or trigger the clean up of resources. + is_broken = False + else: + # A worker has terminated and we don't know why, set the state of + # the executor as broken + exit_codes = '' + if sys.platform != "win32": + # In Windows, introspecting terminated workers exitcodes seems + # unstable, therefore they are not appended in the exception + # message. + exit_codes = "\nThe exit codes of the workers are {}".format( + get_exitcodes_terminated_worker(self.processes)) + bpe = TerminatedWorkerError( + "A worker process managed by the executor was unexpectedly " + "terminated. This could be caused by a segmentation fault " + "while calling the function or by an excessive memory usage " + "causing the Operating System to kill the worker.\n" + "{}".format(exit_codes) + ) + + self.thread_wakeup.clear() + + return result_item, is_broken, bpe + + def process_result_item(self, result_item): + # Process the received a result_item. This can be either the PID of a + # worker that exited gracefully or a _ResultItem + + if isinstance(result_item, int): + # Clean shutdown of a worker using its PID, either on request + # by the executor.shutdown method or by the timeout of the worker + # itself: we should not mark the executor as broken. + with self.processes_management_lock: + p = self.processes.pop(result_item, None) + + # p can be None is the executor is concurrently shutting down. + if p is not None: + p._worker_exit_lock.release() + p.join() + del p + + # Make sure the executor have the right number of worker, even if a + # worker timeout while some jobs were submitted. 
If some work is + # pending or there is less processes than running items, we need to + # start a new Process and raise a warning. + n_pending = len(self.pending_work_items) + n_running = len(self.running_work_items) + if (n_pending - n_running > 0 or n_running > len(self.processes)): + executor = self.executor_reference() + if (executor is not None + and len(self.processes) < executor._max_workers): + warnings.warn( + "A worker stopped while some jobs were given to the " + "executor. This can be caused by a too short worker " + "timeout or by a memory leak.", UserWarning + ) + executor._adjust_process_count() + executor = None + else: + # Received a _ResultItem so mark the future as completed. + work_item = self.pending_work_items.pop(result_item.work_id, None) + # work_item can be None if another process terminated (see above) + if work_item is not None: + if result_item.exception: + work_item.future.set_exception(result_item.exception) + else: + work_item.future.set_result(result_item.result) + self.running_work_items.remove(result_item.work_id) + + def is_shutting_down(self): + # Check whether we should start shutting down the executor. + executor = self.executor_reference() + # No more work items can be added if: + # - The interpreter is shutting down OR + # - The executor that owns this thread is not broken AND + # * The executor that owns this worker has been collected OR + # * The executor that owns this worker has been shutdown. + # If the executor is broken, it should be detected in the next loop. + return (_global_shutdown or + ((executor is None or self.executor_flags.shutdown) + and not self.executor_flags.broken)) + + def terminate_broken(self, bpe): + # Terminate the executor because it is in a broken state. The bpe + # argument can be used to display more information on the error that + # lead the executor into becoming broken. + + # Mark the process pool broken so that submits fail right now. + self.executor_flags.flag_as_broken(bpe) + + # Mark pending tasks as failed. + for work_id, work_item in self.pending_work_items.items(): + work_item.future.set_exception(bpe) + # Delete references to object. See issue16284 + del work_item + self.pending_work_items.clear() + + # Terminate remaining workers forcibly: the queues or their + # locks may be in a dirty state and block forever. + self.kill_workers() + + # clean up resources + self.join_executor_internals() + + def flag_executor_shutting_down(self): + # Flag the executor as shutting down and cancel remaining tasks if + # requested as early as possible if it is not gc-ed yet. + self.executor_flags.flag_as_shutting_down() + + # Cancel pending work items if requested. + if self.executor_flags.kill_workers: + while self.pending_work_items: + _, work_item = self.pending_work_items.popitem() + work_item.future.set_exception(ShutdownExecutorError( + "The Executor was shutdown with `kill_workers=True` " + "before this job could complete.")) + del work_item + + # Kill the remaining worker forcibly to no waste time joining them + self.kill_workers() + + def kill_workers(self): + # Terminate the remaining workers using SIGKILL. This function also + # terminates descendant workers of the children in case there is some + # nested parallelism. 
+ while self.processes: + _, p = self.processes.popitem() + mp.util.debug('terminate process {}'.format(p.name)) + try: + recursive_terminate(p) + except ProcessLookupError: # pragma: no cover + pass + + def shutdown_workers(self): + # shutdown all workers in self.processes + + # Create a list to avoid RuntimeError due to concurrent modification of + # processes. nb_children_alive is thus an upper bound. Also release the + # processes' _worker_exit_lock to accelerate the shutdown procedure, as + # there is no need for hand-shake here. + with self.processes_management_lock: + n_children_to_stop = 0 + for p in list(self.processes.values()): + p._worker_exit_lock.release() + n_children_to_stop += 1 + + # Send the right number of sentinels, to make sure all children are + # properly terminated. Do it with a mechanism that avoid hanging on + # Full queue when all workers have already been shutdown. + n_sentinels_sent = 0 + while (n_sentinels_sent < n_children_to_stop + and self.get_n_children_alive() > 0): + for i in range(n_children_to_stop - n_sentinels_sent): + try: + self.call_queue.put_nowait(None) + n_sentinels_sent += 1 + except queue.Full: + break + + def join_executor_internals(self): + self.shutdown_workers() + + # Release the queue's resources as soon as possible. Flag the feeder + # thread for clean exit to avoid having the crash detection thread flag + # the Executor as broken during the shutdown. This is safe as either: + # * We don't need to communicate with the workers anymore + # * There is nothing left in the Queue buffer except None sentinels + mp.util.debug("closing call_queue") + self.call_queue.close() + self.call_queue.join_thread() + + # Closing result_queue + mp.util.debug("closing result_queue") + self.result_queue.close() + + mp.util.debug("closing thread_wakeup") + with self.shutdown_lock: + self.thread_wakeup.close() + + # If .join() is not called on the created processes then + # some ctx.Queue methods may deadlock on Mac OS X. + mp.util.debug("joining processes") + for p in list(self.processes.values()): + p.join() + + mp.util.debug("executor management thread clean shutdown of worker " + "processes: {}".format(list(self.processes))) + + def get_n_children_alive(self): + # This is an upper bound on the number of children alive. + with self.processes_management_lock: + return sum(p.is_alive() for p in list(self.processes.values())) + + +_system_limits_checked = False +_system_limited = None + + +def _check_system_limits(): + global _system_limits_checked, _system_limited + if _system_limits_checked: + if _system_limited: + raise NotImplementedError(_system_limited) + _system_limits_checked = True + try: + nsems_max = os.sysconf("SC_SEM_NSEMS_MAX") + except (AttributeError, ValueError): + # sysconf not available or setting not available + return + if nsems_max == -1: + # undetermined limit, assume that limit is determined + # by available memory only + return + if nsems_max >= 256: + # minimum number of semaphores available + # according to POSIX + return + _system_limited = ("system provides too few semaphores (%d available, " + "256 necessary)" % nsems_max) + raise NotImplementedError(_system_limited) + + +def _chain_from_iterable_of_lists(iterable): + """ + Specialized implementation of itertools.chain.from_iterable. + Each item in *iterable* should be a list. This function is + careful not to keep references to yielded objects. 
+ """ + for element in iterable: + element.reverse() + while element: + yield element.pop() + + +def _check_max_depth(context): + # Limit the maxmal recursion level + global _CURRENT_DEPTH + if context.get_start_method() == "fork" and _CURRENT_DEPTH > 0: + raise LokyRecursionError( + "Could not spawn extra nested processes at depth superior to " + "MAX_DEPTH=1. It is not possible to increase this limit when " + "using the 'fork' start method.") + + if 0 < MAX_DEPTH and _CURRENT_DEPTH + 1 > MAX_DEPTH: + raise LokyRecursionError( + "Could not spawn extra nested processes at depth superior to " + "MAX_DEPTH={}. If this is intendend, you can change this limit " + "with the LOKY_MAX_DEPTH environment variable.".format(MAX_DEPTH)) + + +class LokyRecursionError(RuntimeError): + """Raised when a process try to spawn too many levels of nested processes. + """ + + +class BrokenProcessPool(_BPPException): + """ + Raised when the executor is broken while a future was in the running state. + The cause can an error raised when unpickling the task in the worker + process or when unpickling the result value in the parent process. It can + also be caused by a worker process being terminated unexpectedly. + """ + + +class TerminatedWorkerError(BrokenProcessPool): + """ + Raised when a process in a ProcessPoolExecutor terminated abruptly + while a future was in the running state. + """ + + +# Alias for backward compat (for code written for loky 1.1.4 and earlier). Do +# not use in new code. +BrokenExecutor = BrokenProcessPool + + +class ShutdownExecutorError(RuntimeError): + + """ + Raised when a ProcessPoolExecutor is shutdown while a future was in the + running or pending state. + """ + + +class ProcessPoolExecutor(_base.Executor): + + _at_exit = None + + def __init__(self, max_workers=None, job_reducers=None, + result_reducers=None, timeout=None, context=None, + initializer=None, initargs=(), env=None): + """Initializes a new ProcessPoolExecutor instance. + + Args: + max_workers: int, optional (default: cpu_count()) + The maximum number of processes that can be used to execute the + given calls. If None or not given then as many worker processes + will be created as the number of CPUs the current process + can use. + job_reducers, result_reducers: dict(type: reducer_func) + Custom reducer for pickling the jobs and the results from the + Executor. If only `job_reducers` is provided, `result_reducer` + will use the same reducers + timeout: int, optional (default: None) + Idle workers exit after timeout seconds. If a new job is + submitted after the timeout, the executor will start enough + new Python processes to make sure the pool of workers is full. + context: A multiprocessing context to launch the workers. This + object should provide SimpleQueue, Queue and Process. + initializer: An callable used to initialize worker processes. + initargs: A tuple of arguments to pass to the initializer. + env: A dict of environment variable to overwrite in the child + process. The environment variables are set before any module is + loaded. Note that this only works with the loky context and it + is unreliable under windows with Python < 3.6. 
+ """ + _check_system_limits() + + if max_workers is None: + self._max_workers = cpu_count() + else: + if max_workers <= 0: + raise ValueError("max_workers must be greater than 0") + self._max_workers = max_workers + + if context is None: + context = get_context() + self._context = context + self._env = env + + if initializer is not None and not callable(initializer): + raise TypeError("initializer must be a callable") + self._initializer = initializer + self._initargs = initargs + + _check_max_depth(self._context) + + if result_reducers is None: + result_reducers = job_reducers + + # Timeout + self._timeout = timeout + + # Management thread + self._executor_manager_thread = None + + # Map of pids to processes + self._processes = {} + + # Internal variables of the ProcessPoolExecutor + self._processes = {} + self._queue_count = 0 + self._pending_work_items = {} + self._running_work_items = [] + self._work_ids = queue.Queue() + self._processes_management_lock = self._context.Lock() + self._executor_manager_thread = None + self._shutdown_lock = threading.Lock() + + # _ThreadWakeup is a communication channel used to interrupt the wait + # of the main loop of executor_manager_thread from another thread (e.g. + # when calling executor.submit or executor.shutdown). We do not use the + # _result_queue to send wakeup signals to the executor_manager_thread + # as it could result in a deadlock if a worker process dies with the + # _result_queue write lock still acquired. + # + # _shutdown_lock must be locked to access _ThreadWakeup.wakeup. + self._executor_manager_thread_wakeup = _ThreadWakeup() + + # Flag to hold the state of the Executor. This permits to introspect + # the Executor state even once it has been garbage collected. + self._flags = _ExecutorFlags(self._shutdown_lock) + + # Finally setup the queues for interprocess communication + self._setup_queues(job_reducers, result_reducers) + + mp.util.debug('ProcessPoolExecutor is setup') + + def _setup_queues(self, job_reducers, result_reducers, queue_size=None): + # Make the call queue slightly larger than the number of processes to + # prevent the worker processes from idling. But don't make it too big + # because futures in the call queue cannot be cancelled. + if queue_size is None: + queue_size = 2 * self._max_workers + EXTRA_QUEUED_CALLS + self._call_queue = _SafeQueue( + max_size=queue_size, pending_work_items=self._pending_work_items, + running_work_items=self._running_work_items, + thread_wakeup=self._executor_manager_thread_wakeup, + reducers=job_reducers, ctx=self._context) + # Killed worker processes can produce spurious "broken pipe" + # tracebacks in the queue's own worker thread. But we detect killed + # processes anyway, so silence the tracebacks. + self._call_queue._ignore_epipe = True + + self._result_queue = SimpleQueue(reducers=result_reducers, + ctx=self._context) + + def _start_executor_manager_thread(self): + if self._executor_manager_thread is None: + mp.util.debug('_start_executor_manager_thread called') + + # When the executor gets garbarge collected, the weakref callback + # will wake up the queue management thread so that it can terminate + # if there is no pending work item. + def weakref_cb( + _, thread_wakeup=self._executor_manager_thread_wakeup, + shutdown_lock=self._shutdown_lock): + mp.util.debug('Executor collected: triggering callback for' + ' QueueManager wakeup') + with self._shutdown_lock: + thread_wakeup.wakeup() + + # Start the processes so that their sentinels are known. 
+ self._executor_manager_thread = _ExecutorManagerThread(self) + self._executor_manager_thread.start() + + # register this executor in a mechanism that ensures it will wakeup + # when the interpreter is exiting. + _threads_wakeups[self._executor_manager_thread] = \ + (self._shutdown_lock, + self._executor_manager_thread_wakeup) + + global process_pool_executor_at_exit + if process_pool_executor_at_exit is None: + # Ensure that the _python_exit function will be called before + # the multiprocessing.Queue._close finalizers which have an + # exitpriority of 10. + + if sys.version_info < (3, 9): + process_pool_executor_at_exit = mp.util.Finalize( + None, _python_exit, exitpriority=20) + else: + process_pool_executor_at_exit = threading._register_atexit( + _python_exit) + + def _adjust_process_count(self): + for _ in range(len(self._processes), self._max_workers): + worker_exit_lock = self._context.BoundedSemaphore(1) + args = (self._call_queue, self._result_queue, self._initializer, + self._initargs, self._processes_management_lock, + self._timeout, worker_exit_lock, _CURRENT_DEPTH + 1) + worker_exit_lock.acquire() + try: + # Try to spawn the process with some environment variable to + # overwrite but it only works with the loky context for now. + p = self._context.Process(target=_process_worker, args=args, + env=self._env) + except TypeError: + p = self._context.Process(target=_process_worker, args=args) + p._worker_exit_lock = worker_exit_lock + p.start() + self._processes[p.pid] = p + mp.util.debug('Adjust process count : {}'.format(self._processes)) + + def _ensure_executor_running(self): + """ensures all workers and management thread are running + """ + with self._processes_management_lock: + if len(self._processes) != self._max_workers: + self._adjust_process_count() + self._start_executor_manager_thread() + + def submit(self, fn, *args, **kwargs): + with self._flags.shutdown_lock: + if self._flags.broken is not None: + raise self._flags.broken + if self._flags.shutdown: + raise ShutdownExecutorError( + 'cannot schedule new futures after shutdown') + + # Cannot submit a new calls once the interpreter is shutting down. + # This check avoids spawning new processes at exit. + if _global_shutdown: + raise RuntimeError('cannot schedule new futures after ' + 'interpreter shutdown') + + f = _base.Future() + w = _WorkItem(f, fn, args, kwargs) + + self._pending_work_items[self._queue_count] = w + self._work_ids.put(self._queue_count) + self._queue_count += 1 + # Wake up queue management thread + self._executor_manager_thread_wakeup.wakeup() + + self._ensure_executor_running() + return f + submit.__doc__ = _base.Executor.submit.__doc__ + + def map(self, fn, *iterables, **kwargs): + """Returns an iterator equivalent to map(fn, iter). + + Args: + fn: A callable that will take as many arguments as there are + passed iterables. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + chunksize: If greater than one, the iterables will be chopped into + chunks of size chunksize and submitted to the process pool. + If set to one, the items in the list will be sent one at a + time. + + Returns: + An iterator equivalent to: map(func, *iterables) but the calls may + be evaluated out-of-order. + + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + Exception: If fn(*args) raises for any values. 
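A hedged sketch of map with chunksize, assuming the vendored import path:

from joblib.externals.loky.process_executor import ProcessPoolExecutor

if __name__ == "__main__":
    with ProcessPoolExecutor(max_workers=2) as executor:
        # Arguments are shipped to the workers in chunks of 4 to amortize
        # the inter-process communication overhead.
        squares = list(executor.map(pow, range(8), [2] * 8, chunksize=4))
    print(squares)  # [0, 1, 4, 9, 16, 25, 36, 49]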
+ """ + timeout = kwargs.get('timeout', None) + chunksize = kwargs.get('chunksize', 1) + if chunksize < 1: + raise ValueError("chunksize must be >= 1.") + + results = super(ProcessPoolExecutor, self).map( + partial(_process_chunk, fn), _get_chunks(chunksize, *iterables), + timeout=timeout) + return _chain_from_iterable_of_lists(results) + + def shutdown(self, wait=True, kill_workers=False): + mp.util.debug('shutting down executor %s' % self) + + self._flags.flag_as_shutting_down(kill_workers) + executor_manager_thread = self._executor_manager_thread + executor_manager_thread_wakeup = self._executor_manager_thread_wakeup + + if executor_manager_thread_wakeup is not None: + # Wake up queue management thread + with self._shutdown_lock: + self._executor_manager_thread_wakeup.wakeup() + + if executor_manager_thread is not None and wait: + executor_manager_thread.join() + + # To reduce the risk of opening too many files, remove references to + # objects that use file descriptors. + self._executor_manager_thread = None + self._executor_manager_thread_wakeup = None + self._call_queue = None + self._result_queue = None + self._processes_management_lock = None + + shutdown.__doc__ = _base.Executor.shutdown.__doc__ diff --git a/minor_project/lib/python3.6/site-packages/joblib/externals/loky/reusable_executor.py b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/reusable_executor.py new file mode 100644 index 0000000..9a8e73f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/externals/loky/reusable_executor.py @@ -0,0 +1,232 @@ +############################################################################### +# Reusable ProcessPoolExecutor +# +# author: Thomas Moreau and Olivier Grisel +# +import time +import warnings +import threading +import multiprocessing as mp + +from .process_executor import ProcessPoolExecutor, EXTRA_QUEUED_CALLS +from .backend.context import cpu_count +from .backend import get_context + +__all__ = ['get_reusable_executor'] + +# Python 2 compat helper +STRING_TYPE = type("") + +# Singleton executor and id management +_executor_lock = threading.RLock() +_next_executor_id = 0 +_executor = None +_executor_kwargs = None + + +def _get_next_executor_id(): + """Ensure that each successive executor instance has a unique, monotonic id. + + The purpose of this monotonic id is to help debug and test automated + instance creation. + """ + global _next_executor_id + with _executor_lock: + executor_id = _next_executor_id + _next_executor_id += 1 + return executor_id + + +def get_reusable_executor(max_workers=None, context=None, timeout=10, + kill_workers=False, reuse="auto", + job_reducers=None, result_reducers=None, + initializer=None, initargs=(), env=None): + """Return the current ReusableExectutor instance. + + Start a new instance if it has not been started already or if the previous + instance was left in a broken state. + + If the previous instance does not have the requested number of workers, the + executor is dynamically resized to adjust the number of workers prior to + returning. + + Reusing a singleton instance spares the overhead of starting new worker + processes and importing common python packages each time. + + ``max_workers`` controls the maximum number of tasks that can be running in + parallel in worker processes. By default this is set to the number of + CPUs on the host. + + Setting ``timeout`` (in seconds) makes idle workers automatically shutdown + so as to release system resources. 
New workers are respawn upon submission + of new tasks so that ``max_workers`` are available to accept the newly + submitted tasks. Setting ``timeout`` to around 100 times the time required + to spawn new processes and import packages in them (on the order of 100ms) + ensures that the overhead of spawning workers is negligible. + + Setting ``kill_workers=True`` makes it possible to forcibly interrupt + previously spawned jobs to get a new instance of the reusable executor + with new constructor argument values. + + The ``job_reducers`` and ``result_reducers`` are used to customize the + pickling of tasks and results send to the executor. + + When provided, the ``initializer`` is run first in newly spawned + processes with argument ``initargs``. + + The environment variable in the child process are a copy of the values in + the main process. One can provide a dict ``{ENV: VAL}`` where ``ENV`` and + ``VAR`` are string literals to overwrite the environment variable ``ENV`` + in the child processes to value ``VAL``. The environment variables are set + in the children before any module is loaded. This only works with with the + ``loky`` context and it is unreliable on Windows with Python < 3.6. + """ + _executor, _ = _ReusablePoolExecutor.get_reusable_executor( + max_workers=max_workers, context=context, timeout=timeout, + kill_workers=kill_workers, reuse=reuse, job_reducers=job_reducers, + result_reducers=result_reducers, initializer=initializer, + initargs=initargs, env=env + ) + return _executor + + +class _ReusablePoolExecutor(ProcessPoolExecutor): + def __init__(self, submit_resize_lock, max_workers=None, context=None, + timeout=None, executor_id=0, job_reducers=None, + result_reducers=None, initializer=None, initargs=(), + env=None): + super(_ReusablePoolExecutor, self).__init__( + max_workers=max_workers, context=context, timeout=timeout, + job_reducers=job_reducers, result_reducers=result_reducers, + initializer=initializer, initargs=initargs, env=env) + self.executor_id = executor_id + self._submit_resize_lock = submit_resize_lock + + @classmethod + def get_reusable_executor(cls, max_workers=None, context=None, timeout=10, + kill_workers=False, reuse="auto", + job_reducers=None, result_reducers=None, + initializer=None, initargs=(), env=None): + with _executor_lock: + global _executor, _executor_kwargs + executor = _executor + + if max_workers is None: + if reuse is True and executor is not None: + max_workers = executor._max_workers + else: + max_workers = cpu_count() + elif max_workers <= 0: + raise ValueError( + "max_workers must be greater than 0, got {}." + .format(max_workers)) + + if isinstance(context, STRING_TYPE): + context = get_context(context) + if context is not None and context.get_start_method() == "fork": + raise ValueError( + "Cannot use reusable executor with the 'fork' context" + ) + + kwargs = dict(context=context, timeout=timeout, + job_reducers=job_reducers, + result_reducers=result_reducers, + initializer=initializer, initargs=initargs, + env=env) + if executor is None: + is_reused = False + mp.util.debug("Create a executor with max_workers={}." 
+ .format(max_workers)) + executor_id = _get_next_executor_id() + _executor_kwargs = kwargs + _executor = executor = cls( + _executor_lock, max_workers=max_workers, + executor_id=executor_id, **kwargs) + else: + if reuse == 'auto': + reuse = kwargs == _executor_kwargs + if (executor._flags.broken or executor._flags.shutdown + or not reuse): + if executor._flags.broken: + reason = "broken" + elif executor._flags.shutdown: + reason = "shutdown" + else: + reason = "arguments have changed" + mp.util.debug( + "Creating a new executor with max_workers={} as the " + "previous instance cannot be reused ({})." + .format(max_workers, reason)) + executor.shutdown(wait=True, kill_workers=kill_workers) + _executor = executor = _executor_kwargs = None + # Recursive call to build a new instance + return cls.get_reusable_executor(max_workers=max_workers, + **kwargs) + else: + mp.util.debug( + "Reusing existing executor with max_workers={}." + .format(executor._max_workers) + ) + is_reused = True + executor._resize(max_workers) + + return executor, is_reused + + def submit(self, fn, *args, **kwargs): + with self._submit_resize_lock: + return super(_ReusablePoolExecutor, self).submit( + fn, *args, **kwargs) + + def _resize(self, max_workers): + with self._submit_resize_lock: + if max_workers is None: + raise ValueError("Trying to resize with max_workers=None") + elif max_workers == self._max_workers: + return + + if self._executor_manager_thread is None: + # If the executor_manager_thread has not been started + # then no processes have been spawned and we can just + # update _max_workers and return + self._max_workers = max_workers + return + + self._wait_job_completion() + + # Some process might have returned due to timeout so check how many + # children are still alive. Use the _process_management_lock to + # ensure that no process are spawned or timeout during the resize. + with self._processes_management_lock: + processes = list(self._processes.values()) + nb_children_alive = sum(p.is_alive() for p in processes) + self._max_workers = max_workers + for _ in range(max_workers, nb_children_alive): + self._call_queue.put(None) + while (len(self._processes) > max_workers + and not self._flags.broken): + time.sleep(1e-3) + + self._adjust_process_count() + processes = list(self._processes.values()) + while not all([p.is_alive() for p in processes]): + time.sleep(1e-3) + + def _wait_job_completion(self): + """Wait for the cache to be empty before resizing the pool.""" + # Issue a warning to the user about the bad effect of this usage. 
+ if len(self._pending_work_items) > 0: + warnings.warn("Trying to resize an executor with running jobs: " + "waiting for jobs completion before resizing.", + UserWarning) + mp.util.debug("Executor {} waiting for jobs completion before" + " resizing".format(self.executor_id)) + # Wait for the completion of the jobs + while len(self._pending_work_items) > 0: + time.sleep(1e-3) + + def _setup_queues(self, job_reducers, result_reducers): + # As this executor can be resized, use a large queue size to avoid + # underestimating capacity and introducing overhead + queue_size = 2 * cpu_count() + EXTRA_QUEUED_CALLS + super(_ReusablePoolExecutor, self)._setup_queues( + job_reducers, result_reducers, queue_size=queue_size) diff --git a/minor_project/lib/python3.6/site-packages/joblib/format_stack.py b/minor_project/lib/python3.6/site-packages/joblib/format_stack.py new file mode 100644 index 0000000..b324919 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/format_stack.py @@ -0,0 +1,36 @@ +from sys import version_info + +from warnings import warn + +""" +Represent an exception with a lot of information. + +Provides 2 useful functions: + +format_exc: format an exception into a complete traceback, with full + debugging instruction. + +format_outer_frames: format the current position in the stack call. + +Adapted from IPython's VerboseTB. + +This module is deprecated and will be removed in joblib 0.16. +""" +from joblib import _deprecated_format_stack + +_deprecated_names = [ + name for name in dir(_deprecated_format_stack) if + not name.startswith("__") # special attributes +] + + +if version_info[:2] >= (3, 7): + def __getattr__(name): + if not name.startswith("__") and name in _deprecated_names: + warn("{} is deprecated and will be removed from joblib " + "in 0.16".format(name), DeprecationWarning) + return getattr(_deprecated_format_stack, name) + raise AttributeError +else: + for name in _deprecated_names: + globals()[name] = getattr(_deprecated_format_stack, name) diff --git a/minor_project/lib/python3.6/site-packages/joblib/func_inspect.py b/minor_project/lib/python3.6/site-packages/joblib/func_inspect.py new file mode 100644 index 0000000..ec6bb4a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/func_inspect.py @@ -0,0 +1,345 @@ +""" +My own variation on function-specific inspect-like features. +""" + +# Author: Gael Varoquaux +# Copyright (c) 2009 Gael Varoquaux +# License: BSD Style, 3 clauses. + +import inspect +import warnings +import re +import os +import collections + +from itertools import islice +from tokenize import open as open_py_source + +from .logger import pformat + +full_argspec_fields = ('args varargs varkw defaults kwonlyargs ' + 'kwonlydefaults annotations') +full_argspec_type = collections.namedtuple('FullArgSpec', full_argspec_fields) + + +def get_func_code(func): + """ Attempts to retrieve a reliable function code hash. + + The reason we don't use inspect.getsource is that it caches the + source, whereas we want this to be modified on the fly when the + function is modified. + + Returns + ------- + func_code: string + The function code + source_file: string + The path to the file in which the function is defined. + first_line: int + The first line of the code in the source file. + + Notes + ------ + This function does a bit more magic than inspect, and is thus + more robust. 
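An illustrative sketch of what get_func_code returns for a function defined in a regular source file (assumes the snippet is run as a script, not interactively):

from joblib.func_inspect import get_func_code


def add(a, b):
    return a + b


if __name__ == "__main__":
    code, source_file, first_line = get_func_code(add)
    print(source_file, first_line)  # path of this script and the line of 'def add'
    print(code)                     # the source block of 'add'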
+ """ + source_file = None + try: + code = func.__code__ + source_file = code.co_filename + if not os.path.exists(source_file): + # Use inspect for lambda functions and functions defined in an + # interactive shell, or in doctests + source_code = ''.join(inspect.getsourcelines(func)[0]) + line_no = 1 + if source_file.startswith('', source_file).groups() + line_no = int(line_no) + source_file = '' % source_file + return source_code, source_file, line_no + # Try to retrieve the source code. + with open_py_source(source_file) as source_file_obj: + first_line = code.co_firstlineno + # All the lines after the function definition: + source_lines = list(islice(source_file_obj, first_line - 1, None)) + return ''.join(inspect.getblock(source_lines)), source_file, first_line + except: + # If the source code fails, we use the hash. This is fragile and + # might change from one session to another. + if hasattr(func, '__code__'): + # Python 3.X + return str(func.__code__.__hash__()), source_file, -1 + else: + # Weird objects like numpy ufunc don't have __code__ + # This is fragile, as quite often the id of the object is + # in the repr, so it might not persist across sessions, + # however it will work for ufuncs. + return repr(func), source_file, -1 + + +def _clean_win_chars(string): + """Windows cannot encode some characters in filename.""" + import urllib + if hasattr(urllib, 'quote'): + quote = urllib.quote + else: + # In Python 3, quote is elsewhere + import urllib.parse + quote = urllib.parse.quote + for char in ('<', '>', '!', ':', '\\'): + string = string.replace(char, quote(char)) + return string + + +def get_func_name(func, resolv_alias=True, win_characters=True): + """ Return the function import path (as a list of module names), and + a name for the function. + + Parameters + ---------- + func: callable + The func to inspect + resolv_alias: boolean, optional + If true, possible local aliases are indicated. + win_characters: boolean, optional + If true, substitute special characters using urllib.quote + This is useful in Windows, as it cannot encode some filenames + """ + if hasattr(func, '__module__'): + module = func.__module__ + else: + try: + module = inspect.getmodule(func) + except TypeError: + if hasattr(func, '__class__'): + module = func.__class__.__module__ + else: + module = 'unknown' + if module is None: + # Happens in doctests, eg + module = '' + if module == '__main__': + try: + filename = os.path.abspath(inspect.getsourcefile(func)) + except: + filename = None + if filename is not None: + # mangling of full path to filename + parts = filename.split(os.sep) + if parts[-1].startswith(', where: + # - N is the cell number where the function was defined + # - XYZ is a hash representing the function's code (and name). + # It will be consistent across sessions and kernel restarts, + # and will change if the function's code/name changes + # We remove N so that cache is properly hit if the cell where + # the func is defined is re-exectuted. 
+ # The XYZ hash should avoid collisions between functions with + # the same name, both within the same notebook but also across + # notebooks + splitted = parts[-1].split('-') + parts[-1] = '-'.join(splitted[:2] + splitted[3:]) + filename = '-'.join(parts) + if filename.endswith('.py'): + filename = filename[:-3] + module = module + '-' + filename + module = module.split('.') + if hasattr(func, 'func_name'): + name = func.func_name + elif hasattr(func, '__name__'): + name = func.__name__ + else: + name = 'unknown' + # Hack to detect functions not defined at the module-level + if resolv_alias: + # TODO: Maybe add a warning here? + if hasattr(func, 'func_globals') and name in func.func_globals: + if not func.func_globals[name] is func: + name = '%s-alias' % name + if inspect.ismethod(func): + # We need to add the name of the class + if hasattr(func, 'im_class'): + klass = func.im_class + module.append(klass.__name__) + if os.name == 'nt' and win_characters: + # Windows can't encode certain characters in filenames + name = _clean_win_chars(name) + module = [_clean_win_chars(s) for s in module] + return module, name + + +def _signature_str(function_name, arg_spec): + """Helper function to output a function signature""" + arg_spec_str = inspect.formatargspec(*arg_spec) + return '{}{}'.format(function_name, arg_spec_str) + + +def _function_called_str(function_name, args, kwargs): + """Helper function to output a function call""" + template_str = '{0}({1}, {2})' + + args_str = repr(args)[1:-1] + kwargs_str = ', '.join('%s=%s' % (k, v) + for k, v in kwargs.items()) + return template_str.format(function_name, args_str, + kwargs_str) + + +def filter_args(func, ignore_lst, args=(), kwargs=dict()): + """ Filters the given args and kwargs using a list of arguments to + ignore, and a function specification. + + Parameters + ---------- + func: callable + Function giving the argument specification + ignore_lst: list of strings + List of arguments to ignore (either a name of an argument + in the function spec, or '*', or '**') + *args: list + Positional arguments passed to the function. + **kwargs: dict + Keyword arguments passed to the function + + Returns + ------- + filtered_args: list + List of filtered positional and keyword arguments. + """ + args = list(args) + if isinstance(ignore_lst, str): + # Catch a common mistake + raise ValueError( + 'ignore_lst must be a list of parameters to ignore ' + '%s (type %s) was given' % (ignore_lst, type(ignore_lst))) + # Special case for functools.partial objects + if (not inspect.ismethod(func) and not inspect.isfunction(func)): + if ignore_lst: + warnings.warn('Cannot inspect object %s, ignore list will ' + 'not work.' % func, stacklevel=2) + return {'*': args, '**': kwargs} + arg_spec = inspect.getfullargspec(func) + arg_names = arg_spec.args + arg_spec.kwonlyargs + arg_defaults = arg_spec.defaults or () + if arg_spec.kwonlydefaults: + arg_defaults = arg_defaults + tuple(arg_spec.kwonlydefaults[k] + for k in arg_spec.kwonlyargs + if k in arg_spec.kwonlydefaults) + arg_varargs = arg_spec.varargs + arg_varkw = arg_spec.varkw + + if inspect.ismethod(func): + # First argument is 'self', it has been removed by Python + # we need to add it back: + args = [func.__self__, ] + args + # XXX: Maybe I need an inspect.isbuiltin to detect C-level methods, such + # as on ndarrays. 
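A hedged example of the filtering performed by filter_args; the function f and the ignore list are illustrative:

from joblib.func_inspect import filter_args


def f(x, y=0, *, z=1):
    return x + y + z


# 'y' is dropped from the dict that will be hashed into the cache key.
filtered = filter_args(f, ['y'], args=(10,), kwargs=dict(z=3))
print(filtered)  # {'x': 10, 'z': 3}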
+ + _, name = get_func_name(func, resolv_alias=False) + arg_dict = dict() + arg_position = -1 + for arg_position, arg_name in enumerate(arg_names): + if arg_position < len(args): + # Positional argument or keyword argument given as positional + if arg_name not in arg_spec.kwonlyargs: + arg_dict[arg_name] = args[arg_position] + else: + raise ValueError( + "Keyword-only parameter '%s' was passed as " + 'positional parameter for %s:\n' + ' %s was called.' + % (arg_name, + _signature_str(name, arg_spec), + _function_called_str(name, args, kwargs)) + ) + + else: + position = arg_position - len(arg_names) + if arg_name in kwargs: + arg_dict[arg_name] = kwargs[arg_name] + else: + try: + arg_dict[arg_name] = arg_defaults[position] + except (IndexError, KeyError) as e: + # Missing argument + raise ValueError( + 'Wrong number of arguments for %s:\n' + ' %s was called.' + % (_signature_str(name, arg_spec), + _function_called_str(name, args, kwargs)) + ) from e + + varkwargs = dict() + for arg_name, arg_value in sorted(kwargs.items()): + if arg_name in arg_dict: + arg_dict[arg_name] = arg_value + elif arg_varkw is not None: + varkwargs[arg_name] = arg_value + else: + raise TypeError("Ignore list for %s() contains an unexpected " + "keyword argument '%s'" % (name, arg_name)) + + if arg_varkw is not None: + arg_dict['**'] = varkwargs + if arg_varargs is not None: + varargs = args[arg_position + 1:] + arg_dict['*'] = varargs + + # Now remove the arguments to be ignored + for item in ignore_lst: + if item in arg_dict: + arg_dict.pop(item) + else: + raise ValueError("Ignore list: argument '%s' is not defined for " + "function %s" + % (item, + _signature_str(name, arg_spec)) + ) + # XXX: Return a sorted list of pairs? + return arg_dict + + +def _format_arg(arg): + formatted_arg = pformat(arg, indent=2) + if len(formatted_arg) > 1500: + formatted_arg = '%s...' % formatted_arg[:700] + return formatted_arg + + +def format_signature(func, *args, **kwargs): + # XXX: Should this use inspect.formatargvalues/formatargspec? + module, name = get_func_name(func) + module = [m for m in module if m] + if module: + module.append(name) + module_path = '.'.join(module) + else: + module_path = name + arg_str = list() + previous_length = 0 + for arg in args: + formatted_arg = _format_arg(arg) + if previous_length > 80: + formatted_arg = '\n%s' % formatted_arg + previous_length = len(formatted_arg) + arg_str.append(formatted_arg) + arg_str.extend(['%s=%s' % (v, _format_arg(i)) for v, i in kwargs.items()]) + arg_str = ', '.join(arg_str) + + signature = '%s(%s)' % (name, arg_str) + return module_path, signature + + +def format_call(func, args, kwargs, object_name="Memory"): + """ Returns a nicely formatted statement displaying the function + call with the given arguments. + """ + path, signature = format_signature(func, *args, **kwargs) + msg = '%s\n[%s] Calling %s...\n%s' % (80 * '_', object_name, + path, signature) + return msg + # XXX: Not using logging framework + # self.debug(msg) diff --git a/minor_project/lib/python3.6/site-packages/joblib/hashing.py b/minor_project/lib/python3.6/site-packages/joblib/hashing.py new file mode 100644 index 0000000..24aeb55 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/hashing.py @@ -0,0 +1,266 @@ +""" +Fast cryptographic hash of Python objects, with a special case for fast +hashing of numpy arrays. +""" + +# Author: Gael Varoquaux +# Copyright (c) 2009 Gael Varoquaux +# License: BSD Style, 3 clauses. 
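A brief usage sketch of the public joblib.hash helper built on this module; digests are omitted because they depend on the pickled payload:

from joblib import hash as joblib_hash

# Equal containers hash identically even when built in a different order,
# because dict keys and set items are sorted before pickling (see
# _batch_setitems and _ConsistentSet below).
print(joblib_hash({'a': [1, 2, 3], 'b': 'text'}) ==
      joblib_hash({'b': 'text', 'a': [1, 2, 3]}))  # True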
+ +import pickle +import hashlib +import sys +import types +import struct +import io +import decimal + + +Pickler = pickle._Pickler + + +class _ConsistentSet(object): + """ Class used to ensure the hash of Sets is preserved + whatever the order of its items. + """ + def __init__(self, set_sequence): + # Forces order of elements in set to ensure consistent hash. + try: + # Trying first to order the set assuming the type of elements is + # consistent and orderable. + # This fails on python 3 when elements are unorderable + # but we keep it in a try as it's faster. + self._sequence = sorted(set_sequence) + except (TypeError, decimal.InvalidOperation): + # If elements are unorderable, sorting them using their hash. + # This is slower but works in any case. + self._sequence = sorted((hash(e) for e in set_sequence)) + + +class _MyHash(object): + """ Class used to hash objects that won't normally pickle """ + + def __init__(self, *args): + self.args = args + + +class Hasher(Pickler): + """ A subclass of pickler, to do cryptographic hashing, rather than + pickling. + """ + + def __init__(self, hash_name='md5'): + self.stream = io.BytesIO() + # By default we want a pickle protocol that only changes with + # the major python version and not the minor one + protocol = 3 + Pickler.__init__(self, self.stream, protocol=protocol) + # Initialise the hash obj + self._hash = hashlib.new(hash_name) + + def hash(self, obj, return_digest=True): + try: + self.dump(obj) + except pickle.PicklingError as e: + e.args += ('PicklingError while hashing %r: %r' % (obj, e),) + raise + dumps = self.stream.getvalue() + self._hash.update(dumps) + if return_digest: + return self._hash.hexdigest() + + def save(self, obj): + if isinstance(obj, (types.MethodType, type({}.pop))): + # the Pickler cannot pickle instance methods; here we decompose + # them into components that make them uniquely identifiable + if hasattr(obj, '__func__'): + func_name = obj.__func__.__name__ + else: + func_name = obj.__name__ + inst = obj.__self__ + if type(inst) == type(pickle): + obj = _MyHash(func_name, inst.__name__) + elif inst is None: + # type(None) or type(module) do not pickle + obj = _MyHash(func_name, inst) + else: + cls = obj.__self__.__class__ + obj = _MyHash(func_name, inst, cls) + Pickler.save(self, obj) + + def memoize(self, obj): + # We want hashing to be sensitive to value instead of reference. + # For example we want ['aa', 'aa'] and ['aa', 'aaZ'[:2]] + # to hash to the same value and that's why we disable memoization + # for strings + if isinstance(obj, (bytes, str)): + return + Pickler.memoize(self, obj) + + # The dispatch table of the pickler is not accessible in Python + # 3, as these lines are only bugware for IPython, we skip them. 
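A small sketch of the value-based behaviour that the memoize override above provides; the strings are illustrative:

from joblib import hash as joblib_hash

s1, s2 = 'aa', 'aaZ'[:2]
# Distinct-but-equal strings yield the same digest because string
# memoization is disabled in Hasher.memoize.
print(joblib_hash(['aa', s1]) == joblib_hash(['aa', s2]))  # True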
+ def save_global(self, obj, name=None, pack=struct.pack): + # We have to override this method in order to deal with objects + # defined interactively in IPython that are not injected in + # __main__ + kwargs = dict(name=name, pack=pack) + del kwargs['pack'] + try: + Pickler.save_global(self, obj, **kwargs) + except pickle.PicklingError: + Pickler.save_global(self, obj, **kwargs) + module = getattr(obj, "__module__", None) + if module == '__main__': + my_name = name + if my_name is None: + my_name = obj.__name__ + mod = sys.modules[module] + if not hasattr(mod, my_name): + # IPython doesn't inject the variables define + # interactively in __main__ + setattr(mod, my_name, obj) + + dispatch = Pickler.dispatch.copy() + # builtin + dispatch[type(len)] = save_global + # type + dispatch[type(object)] = save_global + # classobj + dispatch[type(Pickler)] = save_global + # function + dispatch[type(pickle.dump)] = save_global + + def _batch_setitems(self, items): + # forces order of keys in dict to ensure consistent hash. + try: + # Trying first to compare dict assuming the type of keys is + # consistent and orderable. + # This fails on python 3 when keys are unorderable + # but we keep it in a try as it's faster. + Pickler._batch_setitems(self, iter(sorted(items))) + except TypeError: + # If keys are unorderable, sorting them using their hash. This is + # slower but works in any case. + Pickler._batch_setitems(self, iter(sorted((hash(k), v) + for k, v in items))) + + def save_set(self, set_items): + # forces order of items in Set to ensure consistent hash + Pickler.save(self, _ConsistentSet(set_items)) + + dispatch[type(set())] = save_set + + +class NumpyHasher(Hasher): + """ Special case the hasher for when numpy is loaded. + """ + + def __init__(self, hash_name='md5', coerce_mmap=False): + """ + Parameters + ---------- + hash_name: string + The hash algorithm to be used + coerce_mmap: boolean + Make no difference between np.memmap and np.ndarray + objects. + """ + self.coerce_mmap = coerce_mmap + Hasher.__init__(self, hash_name=hash_name) + # delayed import of numpy, to avoid tight coupling + import numpy as np + self.np = np + if hasattr(np, 'getbuffer'): + self._getbuffer = np.getbuffer + else: + self._getbuffer = memoryview + + def save(self, obj): + """ Subclass the save method, to hash ndarray subclass, rather + than pickling them. Off course, this is a total abuse of + the Pickler class. + """ + if isinstance(obj, self.np.ndarray) and not obj.dtype.hasobject: + # Compute a hash of the object + # The update function of the hash requires a c_contiguous buffer. + if obj.shape == (): + # 0d arrays need to be flattened because viewing them as bytes + # raises a ValueError exception. + obj_c_contiguous = obj.flatten() + elif obj.flags.c_contiguous: + obj_c_contiguous = obj + elif obj.flags.f_contiguous: + obj_c_contiguous = obj.T + else: + # Cater for non-single-segment arrays: this creates a + # copy, and thus aleviates this issue. + # XXX: There might be a more efficient way of doing this + obj_c_contiguous = obj.flatten() + + # memoryview is not supported for some dtypes, e.g. datetime64, see + # https://github.com/numpy/numpy/issues/4983. The + # workaround is to view the array as bytes before + # taking the memoryview. + self._hash.update( + self._getbuffer(obj_c_contiguous.view(self.np.uint8))) + + # We store the class, to be able to distinguish between + # Objects with the same binary content, but different + # classes. 
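A short sketch of the metadata-versus-value distinction described in these comments (requires numpy; illustrative only):

import numpy as np
from joblib import hash as joblib_hash

a = np.arange(4, dtype=np.int64)
b = a.view(np.float64)   # same raw bytes, different dtype
c = a.copy()             # different object, same values and metadata

print(joblib_hash(a) == joblib_hash(b))  # False: dtype and shape are hashed too
print(joblib_hash(a) == joblib_hash(c))  # True: hashing is value based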
+ if self.coerce_mmap and isinstance(obj, self.np.memmap): + # We don't make the difference between memmap and + # normal ndarrays, to be able to reload previously + # computed results with memmap. + klass = self.np.ndarray + else: + klass = obj.__class__ + # We also return the dtype and the shape, to distinguish + # different views on the same data with different dtypes. + + # The object will be pickled by the pickler hashed at the end. + obj = (klass, ('HASHED', obj.dtype, obj.shape, obj.strides)) + elif isinstance(obj, self.np.dtype): + # numpy.dtype consistent hashing is tricky to get right. This comes + # from the fact that atomic np.dtype objects are interned: + # ``np.dtype('f4') is np.dtype('f4')``. The situation is + # complicated by the fact that this interning does not resist a + # simple pickle.load/dump roundtrip: + # ``pickle.loads(pickle.dumps(np.dtype('f4'))) is not + # np.dtype('f4') Because pickle relies on memoization during + # pickling, it is easy to + # produce different hashes for seemingly identical objects, such as + # ``[np.dtype('f4'), np.dtype('f4')]`` + # and ``[np.dtype('f4'), pickle.loads(pickle.dumps('f4'))]``. + # To prevent memoization from interfering with hashing, we isolate + # the serialization (and thus the pickle memoization) of each dtype + # using each time a different ``pickle.dumps`` call unrelated to + # the current Hasher instance. + self._hash.update("_HASHED_DTYPE".encode('utf-8')) + self._hash.update(pickle.dumps(obj)) + return + Hasher.save(self, obj) + + +def hash(obj, hash_name='md5', coerce_mmap=False): + """ Quick calculation of a hash to identify uniquely Python objects + containing numpy arrays. + + + Parameters + ----------- + hash_name: 'md5' or 'sha1' + Hashing algorithm used. sha1 is supposedly safer, but md5 is + faster. + coerce_mmap: boolean + Make no difference between np.memmap and np.ndarray + """ + valid_hash_names = ('md5', 'sha1') + if hash_name not in valid_hash_names: + raise ValueError("Valid options for 'hash_name' are {}. " + "Got hash_name={!r} instead." + .format(valid_hash_names, hash_name)) + if 'numpy' in sys.modules: + hasher = NumpyHasher(hash_name=hash_name, coerce_mmap=coerce_mmap) + else: + hasher = Hasher(hash_name=hash_name) + return hasher.hash(obj) diff --git a/minor_project/lib/python3.6/site-packages/joblib/logger.py b/minor_project/lib/python3.6/site-packages/joblib/logger.py new file mode 100644 index 0000000..f30efef --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/logger.py @@ -0,0 +1,156 @@ +""" +Helpers for logging. + +This module needs much love to become useful. +""" + +# Author: Gael Varoquaux +# Copyright (c) 2008 Gael Varoquaux +# License: BSD Style, 3 clauses. + +from __future__ import print_function + +import time +import sys +import os +import shutil +import logging +import pprint + +from .disk import mkdirp + + +def _squeeze_time(t): + """Remove .1s to the time under Windows: this is the time it take to + stat files. This is needed to make results similar to timings under + Unix, for tests + """ + if sys.platform.startswith('win'): + return max(0, t - .1) + else: + return t + + +def format_time(t): + t = _squeeze_time(t) + return "%.1fs, %.1fmin" % (t, t / 60.) + + +def short_format_time(t): + t = _squeeze_time(t) + if t > 60: + return "%4.1fmin" % (t / 60.) 
+ else: + return " %5.1fs" % (t) + + +def pformat(obj, indent=0, depth=3): + if 'numpy' in sys.modules: + import numpy as np + print_options = np.get_printoptions() + np.set_printoptions(precision=6, threshold=64, edgeitems=1) + else: + print_options = None + out = pprint.pformat(obj, depth=depth, indent=indent) + if print_options: + np.set_printoptions(**print_options) + return out + + +############################################################################### +# class `Logger` +############################################################################### +class Logger(object): + """ Base class for logging messages. + """ + + def __init__(self, depth=3): + """ + Parameters + ---------- + depth: int, optional + The depth of objects printed. + """ + self.depth = depth + + def warn(self, msg): + logging.warning("[%s]: %s" % (self, msg)) + + def debug(self, msg): + # XXX: This conflicts with the debug flag used in children class + logging.debug("[%s]: %s" % (self, msg)) + + def format(self, obj, indent=0): + """Return the formatted representation of the object.""" + return pformat(obj, indent=indent, depth=self.depth) + + +############################################################################### +# class `PrintTime` +############################################################################### +class PrintTime(object): + """ Print and log messages while keeping track of time. + """ + + def __init__(self, logfile=None, logdir=None): + if logfile is not None and logdir is not None: + raise ValueError('Cannot specify both logfile and logdir') + # XXX: Need argument docstring + self.last_time = time.time() + self.start_time = self.last_time + if logdir is not None: + logfile = os.path.join(logdir, 'joblib.log') + self.logfile = logfile + if logfile is not None: + mkdirp(os.path.dirname(logfile)) + if os.path.exists(logfile): + # Rotate the logs + for i in range(1, 9): + try: + shutil.move(logfile + '.%i' % i, + logfile + '.%i' % (i + 1)) + except: + "No reason failing here" + # Use a copy rather than a move, so that a process + # monitoring this file does not get lost. + try: + shutil.copy(logfile, logfile + '.1') + except: + "No reason failing here" + try: + with open(logfile, 'w') as logfile: + logfile.write('\nLogging joblib python script\n') + logfile.write('\n---%s---\n' % time.ctime(self.last_time)) + except: + """ Multiprocessing writing to files can create race + conditions. Rather fail silently than crash the + computation. + """ + # XXX: We actually need a debug flag to disable this + # silent failure. + + def __call__(self, msg='', total=False): + """ Print the time elapsed between the last call and the current + call, with an optional message. + """ + if not total: + time_lapse = time.time() - self.last_time + full_msg = "%s: %s" % (msg, format_time(time_lapse)) + else: + # FIXME: Too much logic duplicated + time_lapse = time.time() - self.start_time + full_msg = "%s: %.2fs, %.1f min" % (msg, time_lapse, + time_lapse / 60) + print(full_msg, file=sys.stderr) + if self.logfile is not None: + try: + with open(self.logfile, 'a') as f: + print(full_msg, file=f) + except: + """ Multiprocessing writing to files can create race + conditions. Rather fail silently than crash the + calculation. + """ + # XXX: We actually need a debug flag to disable this + # silent failure. 
+ self.last_time = time.time() diff --git a/minor_project/lib/python3.6/site-packages/joblib/memory.py b/minor_project/lib/python3.6/site-packages/joblib/memory.py new file mode 100644 index 0000000..c3dc76f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/memory.py @@ -0,0 +1,1037 @@ +""" +A context object for caching a function's return value each time it +is called with the same input arguments. + +""" + +# Author: Gael Varoquaux +# Copyright (c) 2009 Gael Varoquaux +# License: BSD Style, 3 clauses. + + +from __future__ import with_statement +import os +import time +import pathlib +import pydoc +import re +import functools +import traceback +import warnings +import inspect +import sys +import weakref + +from tokenize import open as open_py_source + +# Local imports +from . import hashing +from .func_inspect import get_func_code, get_func_name, filter_args +from .func_inspect import format_call +from .func_inspect import format_signature +from .logger import Logger, format_time, pformat +from ._store_backends import StoreBackendBase, FileSystemStoreBackend + + + +FIRST_LINE_TEXT = "# first line:" + +# TODO: The following object should have a data store object as a sub +# object, and the interface to persist and query should be separated in +# the data store. +# +# This would enable creating 'Memory' objects with a different logic for +# pickling that would simply span a MemorizedFunc with the same +# store (or do we want to copy it to avoid cross-talks?), for instance to +# implement HDF5 pickling. + +# TODO: Same remark for the logger, and probably use the Python logging +# mechanism. + + +def extract_first_line(func_code): + """ Extract the first line information from the function code + text if available. + """ + if func_code.startswith(FIRST_LINE_TEXT): + func_code = func_code.split('\n') + first_line = int(func_code[0][len(FIRST_LINE_TEXT):]) + func_code = '\n'.join(func_code[1:]) + else: + first_line = -1 + return func_code, first_line + + +class JobLibCollisionWarning(UserWarning): + """ Warn that there might be a collision between names of functions. + """ + + +_STORE_BACKENDS = {'local': FileSystemStoreBackend} + + +def register_store_backend(backend_name, backend): + """Extend available store backends. + + The Memory, MemorizeResult and MemorizeFunc objects are designed to be + agnostic to the type of store used behind. By default, the local file + system is used but this function gives the possibility to extend joblib's + memory pattern with other types of storage such as cloud storage (S3, GCS, + OpenStack, HadoopFS, etc) or blob DBs. + + Parameters + ---------- + backend_name: str + The name identifying the store backend being registered. For example, + 'local' is used with FileSystemStoreBackend. + backend: StoreBackendBase subclass + The name of a class that implements the StoreBackendBase interface. 
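A hedged sketch of registering a custom backend; MyStoreBackend is a hypothetical subclass that simply reuses the local filesystem implementation, and './cachedir' is an arbitrary location:

from joblib import Memory
from joblib.memory import register_store_backend
from joblib._store_backends import FileSystemStoreBackend


class MyStoreBackend(FileSystemStoreBackend):
    """Hypothetical backend: same behaviour as 'local', different name."""


register_store_backend('my_backend', MyStoreBackend)
memory = Memory('./cachedir', backend='my_backend', verbose=0)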
+ + """ + if not isinstance(backend_name, str): + raise ValueError("Store backend name should be a string, " + "'{0}' given.".format(backend_name)) + if backend is None or not issubclass(backend, StoreBackendBase): + raise ValueError("Store backend should inherit " + "StoreBackendBase, " + "'{0}' given.".format(backend)) + + _STORE_BACKENDS[backend_name] = backend + + +def _store_backend_factory(backend, location, verbose=0, backend_options=None): + """Return the correct store object for the given location.""" + if backend_options is None: + backend_options = {} + + if isinstance(location, pathlib.Path): + location = str(location) + + if isinstance(location, StoreBackendBase): + return location + elif isinstance(location, str): + obj = None + location = os.path.expanduser(location) + # The location is not a local file system, we look in the + # registered backends if there's one matching the given backend + # name. + for backend_key, backend_obj in _STORE_BACKENDS.items(): + if backend == backend_key: + obj = backend_obj() + + # By default, we assume the FileSystemStoreBackend can be used if no + # matching backend could be found. + if obj is None: + raise TypeError('Unknown location {0} or backend {1}'.format( + location, backend)) + + # The store backend is configured with the extra named parameters, + # some of them are specific to the underlying store backend. + obj.configure(location, verbose=verbose, + backend_options=backend_options) + return obj + elif location is not None: + warnings.warn( + "Instanciating a backend using a {} as a location is not " + "supported by joblib. Returning None instead.".format( + location.__class__.__name__), UserWarning) + + + return None + + +def _get_func_fullname(func): + """Compute the part of part associated with a function.""" + modules, funcname = get_func_name(func) + modules.append(funcname) + return os.path.join(*modules) + + +def _build_func_identifier(func): + """Build a roughly unique identifier for the cached function.""" + parts = [] + if isinstance(func, str): + parts.append(func) + else: + parts.append(_get_func_fullname(func)) + + # We reuse historical fs-like way of building a function identifier + return os.path.join(*parts) + + +def _format_load_msg(func_id, args_id, timestamp=None, metadata=None): + """ Helper function to format the message when loading the results. + """ + signature = "" + try: + if metadata is not None: + args = ", ".join(['%s=%s' % (name, value) + for name, value + in metadata['input_args'].items()]) + signature = "%s(%s)" % (os.path.basename(func_id), args) + else: + signature = os.path.basename(func_id) + except KeyError: + pass + + if timestamp is not None: + ts_string = "{0: <16}".format(format_time(time.time() - timestamp)) + else: + ts_string = "" + return '[Memory]{0}: Loading {1}'.format(ts_string, str(signature)) + + +# An in-memory store to avoid looking at the disk-based function +# source code to check if a function definition has changed +_FUNCTION_HASHES = weakref.WeakKeyDictionary() + + +############################################################################### +# class `MemorizedResult` +############################################################################### +class MemorizedResult(Logger): + """Object representing a cached value. + + Attributes + ---------- + location: str + The location of joblib cache. Depends on the store backend used. + + func: function or str + function whose output is cached. 
The string case is intended only for + instanciation based on the output of repr() on another instance. + (namely eval(repr(memorized_instance)) works). + + argument_hash: str + hash of the function arguments. + + backend: str + Type of store backend for reading/writing cache files. + Default is 'local'. + + mmap_mode: {None, 'r+', 'r', 'w+', 'c'} + The memmapping mode used when loading from cache numpy arrays. See + numpy.load for the meaning of the different values. + + verbose: int + verbosity level (0 means no message). + + timestamp, metadata: string + for internal use only. + """ + def __init__(self, location, func, args_id, backend='local', + mmap_mode=None, verbose=0, timestamp=None, metadata=None): + Logger.__init__(self) + self.func_id = _build_func_identifier(func) + if isinstance(func, str): + self.func = func + else: + self.func = self.func_id + self.args_id = args_id + self.store_backend = _store_backend_factory(backend, location, + verbose=verbose) + self.mmap_mode = mmap_mode + + if metadata is not None: + self.metadata = metadata + else: + self.metadata = self.store_backend.get_metadata( + [self.func_id, self.args_id]) + + self.duration = self.metadata.get('duration', None) + self.verbose = verbose + self.timestamp = timestamp + + @property + def argument_hash(self): + warnings.warn( + "The 'argument_hash' attribute has been deprecated in version " + "0.12 and will be removed in version 0.14.\n" + "Use `args_id` attribute instead.", + DeprecationWarning, stacklevel=2) + return self.args_id + + def get(self): + """Read value from cache and return it.""" + if self.verbose: + msg = _format_load_msg(self.func_id, self.args_id, + timestamp=self.timestamp, + metadata=self.metadata) + else: + msg = None + + try: + return self.store_backend.load_item( + [self.func_id, self.args_id], msg=msg, verbose=self.verbose) + except ValueError as exc: + new_exc = KeyError( + "Error while trying to load a MemorizedResult's value. " + "It seems that this folder is corrupted : {}".format( + os.path.join( + self.store_backend.location, self.func_id, + self.args_id) + )) + raise new_exc from exc + + def clear(self): + """Clear value from cache""" + self.store_backend.clear_item([self.func_id, self.args_id]) + + def __repr__(self): + return ('{class_name}(location="{location}", func="{func}", ' + 'args_id="{args_id}")' + .format(class_name=self.__class__.__name__, + location=self.store_backend.location, + func=self.func, + args_id=self.args_id + )) + + def __getstate__(self): + state = self.__dict__.copy() + state['timestamp'] = None + return state + + +class NotMemorizedResult(object): + """Class representing an arbitrary value. + + This class is a replacement for MemorizedResult when there is no cache. 
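An illustrative use of MemorizedResult through the public call_and_shelve API; the cache directory name is arbitrary:

from joblib import Memory

memory = Memory('./cachedir', verbose=0)


@memory.cache
def square(x):
    return x ** 2


ref = square.call_and_shelve(4)  # a small, picklable MemorizedResult reference
print(ref.get())                 # 16, read back from the store
ref.clear()                      # drop this single cached value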
+ """ + __slots__ = ('value', 'valid') + + def __init__(self, value): + self.value = value + self.valid = True + + def get(self): + if self.valid: + return self.value + else: + raise KeyError("No value stored.") + + def clear(self): + self.valid = False + self.value = None + + def __repr__(self): + if self.valid: + return ('{class_name}({value})' + .format(class_name=self.__class__.__name__, + value=pformat(self.value))) + else: + return self.__class__.__name__ + ' with no value' + + # __getstate__ and __setstate__ are required because of __slots__ + def __getstate__(self): + return {"valid": self.valid, "value": self.value} + + def __setstate__(self, state): + self.valid = state["valid"] + self.value = state["value"] + + +############################################################################### +# class `NotMemorizedFunc` +############################################################################### +class NotMemorizedFunc(object): + """No-op object decorating a function. + + This class replaces MemorizedFunc when there is no cache. It provides an + identical API but does not write anything on disk. + + Attributes + ---------- + func: callable + Original undecorated function. + """ + # Should be a light as possible (for speed) + def __init__(self, func): + self.func = func + + def __call__(self, *args, **kwargs): + return self.func(*args, **kwargs) + + def call_and_shelve(self, *args, **kwargs): + return NotMemorizedResult(self.func(*args, **kwargs)) + + def __repr__(self): + return '{0}(func={1})'.format(self.__class__.__name__, self.func) + + def clear(self, warn=True): + # Argument "warn" is for compatibility with MemorizedFunc.clear + pass + + +############################################################################### +# class `MemorizedFunc` +############################################################################### +class MemorizedFunc(Logger): + """Callable object decorating a function for caching its return value + each time it is called. + + Methods are provided to inspect the cache or clean it. + + Attributes + ---------- + func: callable + The original, undecorated, function. + + location: string + The location of joblib cache. Depends on the store backend used. + + backend: str + Type of store backend for reading/writing cache files. + Default is 'local', in which case the location is the path to a + disk storage. + + ignore: list or None + List of variable names to ignore when choosing whether to + recompute. + + mmap_mode: {None, 'r+', 'r', 'w+', 'c'} + The memmapping mode used when loading from cache + numpy arrays. See numpy.load for the meaning of the different + values. + + compress: boolean, or integer + Whether to zip the stored data on disk. If an integer is + given, it should be between 1 and 9, and sets the amount + of compression. Note that compressed arrays cannot be + read by memmapping. + + verbose: int, optional + The verbosity flag, controls messages that are issued as + the function is evaluated. 
+ """ + # ------------------------------------------------------------------------ + # Public interface + # ------------------------------------------------------------------------ + + def __init__(self, func, location, backend='local', ignore=None, + mmap_mode=None, compress=False, verbose=1, timestamp=None): + Logger.__init__(self) + self.mmap_mode = mmap_mode + self.compress = compress + self.func = func + + if ignore is None: + ignore = [] + self.ignore = ignore + self._verbose = verbose + + # retrieve store object from backend type and location. + self.store_backend = _store_backend_factory(backend, location, + verbose=verbose, + backend_options=dict( + compress=compress, + mmap_mode=mmap_mode), + ) + if self.store_backend is not None: + # Create func directory on demand. + self.store_backend.\ + store_cached_func_code([_build_func_identifier(self.func)]) + + if timestamp is None: + timestamp = time.time() + self.timestamp = timestamp + try: + functools.update_wrapper(self, func) + except: + " Objects like ufunc don't like that " + if inspect.isfunction(func): + doc = pydoc.TextDoc().document(func) + # Remove blank line + doc = doc.replace('\n', '\n\n', 1) + # Strip backspace-overprints for compatibility with autodoc + doc = re.sub('\x08.', '', doc) + else: + # Pydoc does a poor job on other objects + doc = func.__doc__ + self.__doc__ = 'Memoized version of %s' % doc + + self._func_code_info = None + self._func_code_id = None + + def _cached_call(self, args, kwargs, shelving=False): + """Call wrapped function and cache result, or read cache if available. + + This function returns the wrapped function output and some metadata. + + Arguments: + ---------- + + args, kwargs: list and dict + input arguments for wrapped function + + shelving: bool + True when called via the call_and_shelve function. + + + Returns + ------- + output: value or tuple or None + Output of the wrapped function. + If shelving is True and the call has been already cached, + output is None. + + argument_hash: string + Hash of function arguments. + + metadata: dict + Some metadata about wrapped function call (see _persist_input()). + """ + func_id, args_id = self._get_output_identifiers(*args, **kwargs) + metadata = None + msg = None + + # Wether or not the memorized function must be called + must_call = False + + # FIXME: The statements below should be try/excepted + # Compare the function code with the previous to see if the + # function code has changed + if not (self._check_previous_func_code(stacklevel=4) and + self.store_backend.contains_item([func_id, args_id])): + if self._verbose > 10: + _, name = get_func_name(self.func) + self.warn('Computing func {0}, argument hash {1} ' + 'in location {2}' + .format(name, args_id, + self.store_backend. 
+                                  get_cached_func_info([func_id])['location']))
+            must_call = True
+        else:
+            try:
+                t0 = time.time()
+                if self._verbose:
+                    msg = _format_load_msg(func_id, args_id,
+                                           timestamp=self.timestamp,
+                                           metadata=metadata)
+
+                if not shelving:
+                    # When shelving, we do not need to load the output
+                    out = self.store_backend.load_item(
+                        [func_id, args_id],
+                        msg=msg,
+                        verbose=self._verbose)
+                else:
+                    out = None
+
+                if self._verbose > 4:
+                    t = time.time() - t0
+                    _, name = get_func_name(self.func)
+                    msg = '%s cache loaded - %s' % (name, format_time(t))
+                    print(max(0, (80 - len(msg))) * '_' + msg)
+            except Exception:
+                # XXX: Should use an exception logger
+                _, signature = format_signature(self.func, *args, **kwargs)
+                self.warn('Exception while loading results for '
+                          '{}\n {}'.format(signature, traceback.format_exc()))
+
+                must_call = True
+
+        if must_call:
+            out, metadata = self.call(*args, **kwargs)
+            if self.mmap_mode is not None:
+                # Memmap the output at the first call to be consistent with
+                # later calls
+                if self._verbose:
+                    msg = _format_load_msg(func_id, args_id,
+                                           timestamp=self.timestamp,
+                                           metadata=metadata)
+                out = self.store_backend.load_item([func_id, args_id], msg=msg,
+                                                   verbose=self._verbose)
+
+        return (out, args_id, metadata)
+
+    @property
+    def func_code_info(self):
+        # 3-tuple property containing: the function source code, source file,
+        # and first line of the code inside the source file
+        if hasattr(self.func, '__code__'):
+            if self._func_code_id is None:
+                self._func_code_id = id(self.func.__code__)
+            elif id(self.func.__code__) != self._func_code_id:
+                # Be robust to dynamic reassignments of self.func.__code__
+                self._func_code_info = None
+
+        if self._func_code_info is None:
+            # Cache the source code of self.func. Provided that get_func_code
+            # (which should be called once on self) gets called in the process
+            # in which self.func was defined, this caching mechanism prevents
+            # undesired cache clearing when the cached function is called in
+            # an environment where the introspection utilities that
+            # get_func_code relies on do not work (typically, in joblib child
+            # processes). See #1035 for more info.
+            # TODO (pierreglaser): do the same with get_func_name?
+            self._func_code_info = get_func_code(self.func)
+        return self._func_code_info
+
+    def call_and_shelve(self, *args, **kwargs):
+        """Call wrapped function, cache result and return a reference.
+
+        This method returns a reference to the cached result instead of the
+        result itself. The reference object is small and picklable, allowing
+        it to be sent or stored easily. Call .get() on the reference object
+        to get the result.
+
+        Returns
+        -------
+        cached_result: MemorizedResult or NotMemorizedResult
+            reference to the value returned by the wrapped function. The
+            class "NotMemorizedResult" is used when there is no cache
+            activated (e.g. location=None in Memory).
+        """
+        _, args_id, metadata = self._cached_call(args, kwargs, shelving=True)
+        return MemorizedResult(self.store_backend, self.func, args_id,
+                               metadata=metadata, verbose=self._verbose - 1,
+                               timestamp=self.timestamp)
+
+    def __call__(self, *args, **kwargs):
+        return self._cached_call(args, kwargs)[0]
+
+    def __getstate__(self):
+        # Make sure self.func's source is introspected prior to being pickled:
+        # code introspection utilities typically do not work inside child
+        # processes.
+        _ = self.func_code_info
+
+        # We don't store the timestamp when pickling, to avoid the hash
+        # depending on it.
+ state = self.__dict__.copy() + state['timestamp'] = None + + # Invalidate the code id as id(obj) will be different in the child + state['_func_code_id'] = None + + return state + + # ------------------------------------------------------------------------ + # Private interface + # ------------------------------------------------------------------------ + + def _get_argument_hash(self, *args, **kwargs): + return hashing.hash(filter_args(self.func, self.ignore, args, kwargs), + coerce_mmap=(self.mmap_mode is not None)) + + def _get_output_identifiers(self, *args, **kwargs): + """Return the func identifier and input parameter hash of a result.""" + func_id = _build_func_identifier(self.func) + argument_hash = self._get_argument_hash(*args, **kwargs) + return func_id, argument_hash + + def _hash_func(self): + """Hash a function to key the online cache""" + func_code_h = hash(getattr(self.func, '__code__', None)) + return id(self.func), hash(self.func), func_code_h + + def _write_func_code(self, func_code, first_line): + """ Write the function code and the filename to a file. + """ + # We store the first line because the filename and the function + # name is not always enough to identify a function: people + # sometimes have several functions named the same way in a + # file. This is bad practice, but joblib should be robust to bad + # practice. + func_id = _build_func_identifier(self.func) + func_code = u'%s %i\n%s' % (FIRST_LINE_TEXT, first_line, func_code) + self.store_backend.store_cached_func_code([func_id], func_code) + + # Also store in the in-memory store of function hashes + is_named_callable = False + is_named_callable = (hasattr(self.func, '__name__') and + self.func.__name__ != '') + if is_named_callable: + # Don't do this for lambda functions or strange callable + # objects, as it ends up being too fragile + func_hash = self._hash_func() + try: + _FUNCTION_HASHES[self.func] = func_hash + except TypeError: + # Some callable are not hashable + pass + + def _check_previous_func_code(self, stacklevel=2): + """ + stacklevel is the depth a which this function is called, to + issue useful warnings to the user. + """ + # First check if our function is in the in-memory store. + # Using the in-memory store not only makes things faster, but it + # also renders us robust to variations of the files when the + # in-memory version of the code does not vary + try: + if self.func in _FUNCTION_HASHES: + # We use as an identifier the id of the function and its + # hash. This is more likely to falsely change than have hash + # collisions, thus we are on the safe side. + func_hash = self._hash_func() + if func_hash == _FUNCTION_HASHES[self.func]: + return True + except TypeError: + # Some callables are not hashable + pass + + # Here, we go through some effort to be robust to dynamically + # changing code and collision. We cannot inspect.getsource + # because it is not reliable when using IPython's magic "%run". + func_code, source_file, first_line = self.func_code_info + func_id = _build_func_identifier(self.func) + + try: + old_func_code, old_first_line =\ + extract_first_line( + self.store_backend.get_cached_func_code([func_id])) + except (IOError, OSError): # some backend can also raise OSError + self._write_func_code(func_code, first_line) + return False + if old_func_code == func_code: + return True + + # We have differing code, is this because we are referring to + # different functions, or because the function we are referring to has + # changed? 
+
+        _, func_name = get_func_name(self.func, resolv_alias=False,
+                                     win_characters=False)
+        if old_first_line == first_line == -1 or func_name == '<lambda>':
+            if not first_line == -1:
+                func_description = ("{0} ({1}:{2})"
+                                    .format(func_name, source_file,
+                                            first_line))
+            else:
+                func_description = func_name
+            warnings.warn(JobLibCollisionWarning(
+                "Cannot detect name collisions for function '{0}'"
+                .format(func_description)), stacklevel=stacklevel)
+
+        # Fetch the code at the old location and compare it. If it is the
+        # same as the stored code, we have a collision: the code in the
+        # file has not changed, but the name we have is pointing to a new
+        # code block.
+        if not old_first_line == first_line and source_file is not None:
+            possible_collision = False
+            if os.path.exists(source_file):
+                _, func_name = get_func_name(self.func, resolv_alias=False)
+                num_lines = len(func_code.split('\n'))
+                with open_py_source(source_file) as f:
+                    on_disk_func_code = f.readlines()[
+                        old_first_line - 1:old_first_line - 1 + num_lines - 1]
+                on_disk_func_code = ''.join(on_disk_func_code)
+                possible_collision = (on_disk_func_code.rstrip() ==
+                                      old_func_code.rstrip())
+            else:
+                possible_collision = source_file.startswith('<doctest ')
+            if possible_collision:
+                warnings.warn(JobLibCollisionWarning(
+                    'Possible name collisions between functions '
+                    "'%s' (%s:%i) and '%s' (%s:%i)" %
+                    (func_name, source_file, old_first_line,
+                     func_name, source_file, first_line)),
+                    stacklevel=stacklevel)
+
+        # The function has changed, wipe the cache directory.
+        # XXX: Should be using warnings, and giving stacklevel
+        if self._verbose > 10:
+            _, func_name = get_func_name(self.func, resolv_alias=False)
+            self.warn("Function {0} (identified by {1}) has changed"
+                      ".".format(func_name, func_id))
+        self.clear(warn=True)
+        return False
+
+    def clear(self, warn=True):
+        """Empty the function's cache."""
+        func_id = _build_func_identifier(self.func)
+
+        if self._verbose > 0 and warn:
+            self.warn("Clearing function cache identified by %s" % func_id)
+        self.store_backend.clear_path([func_id, ])
+
+        func_code, _, first_line = self.func_code_info
+        self._write_func_code(func_code, first_line)
+
+    def call(self, *args, **kwargs):
+        """ Force the execution of the function with the given arguments and
+            persist the output values.
+        """
+        start_time = time.time()
+        func_id, args_id = self._get_output_identifiers(*args, **kwargs)
+        if self._verbose > 0:
+            print(format_call(self.func, args, kwargs))
+        output = self.func(*args, **kwargs)
+        self.store_backend.dump_item(
+            [func_id, args_id], output, verbose=self._verbose)
+
+        duration = time.time() - start_time
+        metadata = self._persist_input(duration, args, kwargs)
+
+        if self._verbose > 0:
+            _, name = get_func_name(self.func)
+            msg = '%s - %s' % (name, format_time(duration))
+            print(max(0, (80 - len(msg))) * '_' + msg)
+        return output, metadata
+
+    def _persist_input(self, duration, args, kwargs, this_duration_limit=0.5):
+        """ Save a small summary of the call using json format in the
+            output directory.
+
+            output_dir: string
+                directory where to write metadata.
+
+            duration: float
+                time taken by hashing input arguments, calling the wrapped
+                function and persisting its output.
+
+            args, kwargs: list and dict
+                input arguments for wrapped function
+
+            this_duration_limit: float
+                Max execution time for this function before issuing a warning.
+ """ + start_time = time.time() + argument_dict = filter_args(self.func, self.ignore, + args, kwargs) + + input_repr = dict((k, repr(v)) for k, v in argument_dict.items()) + # This can fail due to race-conditions with multiple + # concurrent joblibs removing the file or the directory + metadata = {"duration": duration, "input_args": input_repr} + + func_id, args_id = self._get_output_identifiers(*args, **kwargs) + self.store_backend.store_metadata([func_id, args_id], metadata) + + this_duration = time.time() - start_time + if this_duration > this_duration_limit: + # This persistence should be fast. It will not be if repr() takes + # time and its output is large, because json.dump will have to + # write a large file. This should not be an issue with numpy arrays + # for which repr() always output a short representation, but can + # be with complex dictionaries. Fixing the problem should be a + # matter of replacing repr() above by something smarter. + warnings.warn("Persisting input arguments took %.2fs to run.\n" + "If this happens often in your code, it can cause " + "performance problems \n" + "(results will be correct in all cases). \n" + "The reason for this is probably some large input " + "arguments for a wrapped\n" + " function (e.g. large strings).\n" + "THIS IS A JOBLIB ISSUE. If you can, kindly provide " + "the joblib's team with an\n" + " example so that they can fix the problem." + % this_duration, stacklevel=5) + return metadata + + # XXX: Need a method to check if results are available. + + # ------------------------------------------------------------------------ + # Private `object` interface + # ------------------------------------------------------------------------ + + def __repr__(self): + return '{class_name}(func={func}, location={location})'.format( + class_name=self.__class__.__name__, + func=self.func, + location=self.store_backend.location,) + + +############################################################################### +# class `Memory` +############################################################################### +class Memory(Logger): + """ A context object for caching a function's return value each time it + is called with the same input arguments. + + All values are cached on the filesystem, in a deep directory + structure. + + Read more in the :ref:`User Guide `. + + Parameters + ---------- + location: str or None + The path of the base directory to use as a data store + or None. If None is given, no caching is done and + the Memory object is completely transparent. This option + replaces cachedir since version 0.12. + + backend: str, optional + Type of store backend for reading/writing cache files. + Default: 'local'. + The 'local' backend is using regular filesystem operations to + manipulate data (open, mv, etc) in the backend. + + cachedir: str or None, optional + + .. deprecated: 0.12 + 'cachedir' has been deprecated in 0.12 and will be + removed in 0.14. Use the 'location' parameter instead. + + mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional + The memmapping mode used when loading from cache + numpy arrays. See numpy.load for the meaning of the + arguments. + + compress: boolean, or integer, optional + Whether to zip the stored data on disk. If an integer is + given, it should be between 1 and 9, and sets the amount + of compression. Note that compressed arrays cannot be + read by memmapping. + + verbose: int, optional + Verbosity flag, controls the debug messages that are issued + as functions are evaluated. 
+ + bytes_limit: int, optional + Limit in bytes of the size of the cache. + + backend_options: dict, optional + Contains a dictionnary of named parameters used to configure + the store backend. + """ + # ------------------------------------------------------------------------ + # Public interface + # ------------------------------------------------------------------------ + + def __init__(self, location=None, backend='local', cachedir=None, + mmap_mode=None, compress=False, verbose=1, bytes_limit=None, + backend_options=None): + # XXX: Bad explanation of the None value of cachedir + Logger.__init__(self) + self._verbose = verbose + self.mmap_mode = mmap_mode + self.timestamp = time.time() + self.bytes_limit = bytes_limit + self.backend = backend + self.compress = compress + if backend_options is None: + backend_options = {} + self.backend_options = backend_options + + if compress and mmap_mode is not None: + warnings.warn('Compressed results cannot be memmapped', + stacklevel=2) + if cachedir is not None: + if location is not None: + raise ValueError( + 'You set both "location={0!r} and "cachedir={1!r}". ' + "'cachedir' has been deprecated in version " + "0.12 and will be removed in version 0.14.\n" + 'Please only set "location={0!r}"'.format( + location, cachedir)) + + warnings.warn( + "The 'cachedir' parameter has been deprecated in version " + "0.12 and will be removed in version 0.14.\n" + 'You provided "cachedir={0!r}", ' + 'use "location={0!r}" instead.'.format(cachedir), + DeprecationWarning, stacklevel=2) + location = cachedir + + self.location = location + if isinstance(location, str): + location = os.path.join(location, 'joblib') + + self.store_backend = _store_backend_factory( + backend, location, verbose=self._verbose, + backend_options=dict(compress=compress, mmap_mode=mmap_mode, + **backend_options)) + + @property + def cachedir(self): + warnings.warn( + "The 'cachedir' attribute has been deprecated in version 0.12 " + "and will be removed in version 0.14.\n" + "Use os.path.join(memory.location, 'joblib') attribute instead.", + DeprecationWarning, stacklevel=2) + if self.location is None: + return None + return os.path.join(self.location, 'joblib') + + def cache(self, func=None, ignore=None, verbose=None, mmap_mode=False): + """ Decorates the given function func to only compute its return + value for input arguments not cached on disk. + + Parameters + ---------- + func: callable, optional + The function to be decorated + ignore: list of strings + A list of arguments name to ignore in the hashing + verbose: integer, optional + The verbosity mode of the function. By default that + of the memory object is used. + mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional + The memmapping mode used when loading from cache + numpy arrays. See numpy.load for the meaning of the + arguments. By default that of the memory object is used. + + Returns + ------- + decorated_func: MemorizedFunc object + The returned object is a MemorizedFunc object, that is + callable (behaves like a function), but offers extra + methods for cache lookup and management. See the + documentation for :class:`joblib.memory.MemorizedFunc`. 
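+
+        Examples
+        --------
+        A minimal sketch of decorating a function (``add_slow`` and
+        ``./cachedir`` are illustrative placeholders): the second call below
+        hits the cache because ``verbose`` is excluded from the argument hash
+        via ``ignore``.
+
+        >>> from joblib import Memory                        # doctest: +SKIP
+        >>> memory = Memory('./cachedir', verbose=0)         # doctest: +SKIP
+        >>> @memory.cache(ignore=['verbose'])                # doctest: +SKIP
+        ... def add_slow(a, b, verbose=0):
+        ...     return a + b
+        >>> add_slow(1, 2)                                   # doctest: +SKIP
+        3
+        >>> add_slow(1, 2, verbose=1)                        # doctest: +SKIP
+        3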
+ """ + if func is None: + # Partial application, to be able to specify extra keyword + # arguments in decorators + return functools.partial(self.cache, ignore=ignore, + verbose=verbose, mmap_mode=mmap_mode) + if self.store_backend is None: + return NotMemorizedFunc(func) + if verbose is None: + verbose = self._verbose + if mmap_mode is False: + mmap_mode = self.mmap_mode + if isinstance(func, MemorizedFunc): + func = func.func + return MemorizedFunc(func, location=self.store_backend, + backend=self.backend, + ignore=ignore, mmap_mode=mmap_mode, + compress=self.compress, + verbose=verbose, timestamp=self.timestamp) + + def clear(self, warn=True): + """ Erase the complete cache directory. + """ + if warn: + self.warn('Flushing completely the cache') + if self.store_backend is not None: + self.store_backend.clear() + + def reduce_size(self): + """Remove cache elements to make cache size fit in ``bytes_limit``.""" + if self.bytes_limit is not None and self.store_backend is not None: + self.store_backend.reduce_store_size(self.bytes_limit) + + def eval(self, func, *args, **kwargs): + """ Eval function func with arguments `*args` and `**kwargs`, + in the context of the memory. + + This method works similarly to the builtin `apply`, except + that the function is called only if the cache is not + up to date. + + """ + if self.store_backend is None: + return func(*args, **kwargs) + return self.cache(func)(*args, **kwargs) + + # ------------------------------------------------------------------------ + # Private `object` interface + # ------------------------------------------------------------------------ + + def __repr__(self): + return '{class_name}(location={location})'.format( + class_name=self.__class__.__name__, + location=(None if self.store_backend is None + else self.store_backend.location)) + + def __getstate__(self): + """ We don't store the timestamp when pickling, to avoid the hash + depending from it. + """ + state = self.__dict__.copy() + state['timestamp'] = None + return state diff --git a/minor_project/lib/python3.6/site-packages/joblib/my_exceptions.py b/minor_project/lib/python3.6/site-packages/joblib/my_exceptions.py new file mode 100644 index 0000000..d379bb2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/my_exceptions.py @@ -0,0 +1,35 @@ +from sys import version_info +from warnings import warn +from . import _deprecated_my_exceptions + +""" +Exceptions +""" +# Author: Gael Varoquaux < gael dot varoquaux at normalesup dot org > +# Copyright: 2010, Gael Varoquaux +# License: BSD 3 clause + +_deprecated_names = [ + name for name in dir(_deprecated_my_exceptions) if + not name.startswith("__") +] + + +if version_info[:2] >= (3, 7): + def __getattr__(name): + if not name.startswith("__") and name in _deprecated_names: + warn("{} is deprecated and will be removed from joblib " + "in 0.16".format(name), DeprecationWarning) + return getattr(_deprecated_my_exceptions, name) + raise AttributeError +else: + for name in _deprecated_names: + globals()[name] = getattr(_deprecated_my_exceptions, name) + + +class WorkerInterrupt(Exception): + """ An exception that is not KeyboardInterrupt to allow subprocesses + to be interrupted. 
+ """ + + pass diff --git a/minor_project/lib/python3.6/site-packages/joblib/numpy_pickle.py b/minor_project/lib/python3.6/site-packages/joblib/numpy_pickle.py new file mode 100644 index 0000000..93e5537 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/numpy_pickle.py @@ -0,0 +1,586 @@ +"""Utilities for fast persistence of big data, with optional compression.""" + +# Author: Gael Varoquaux +# Copyright (c) 2009 Gael Varoquaux +# License: BSD Style, 3 clauses. + +import pickle +import os +import warnings +try: + from pathlib import Path +except ImportError: + Path = None + +from .compressor import lz4, LZ4_NOT_INSTALLED_ERROR +from .compressor import _COMPRESSORS, register_compressor, BinaryZlibFile +from .compressor import (ZlibCompressorWrapper, GzipCompressorWrapper, + BZ2CompressorWrapper, LZMACompressorWrapper, + XZCompressorWrapper, LZ4CompressorWrapper) +from .numpy_pickle_utils import Unpickler, Pickler +from .numpy_pickle_utils import _read_fileobject, _write_fileobject +from .numpy_pickle_utils import _read_bytes, BUFFER_SIZE +from .numpy_pickle_compat import load_compatibility +from .numpy_pickle_compat import NDArrayWrapper +# For compatibility with old versions of joblib, we need ZNDArrayWrapper +# to be visible in the current namespace. +# Explicitly skipping next line from flake8 as it triggers an F401 warning +# which we don't care. +from .numpy_pickle_compat import ZNDArrayWrapper # noqa +from .backports import make_memmap + +# Register supported compressors +register_compressor('zlib', ZlibCompressorWrapper()) +register_compressor('gzip', GzipCompressorWrapper()) +register_compressor('bz2', BZ2CompressorWrapper()) +register_compressor('lzma', LZMACompressorWrapper()) +register_compressor('xz', XZCompressorWrapper()) +register_compressor('lz4', LZ4CompressorWrapper()) + + +############################################################################### +# Utility objects for persistence. + + +class NumpyArrayWrapper(object): + """An object to be persisted instead of numpy arrays. + + This object is used to hack into the pickle machinery and read numpy + array data from our custom persistence format. + More precisely, this object is used for: + * carrying the information of the persisted array: subclass, shape, order, + dtype. Those ndarray metadata are used to correctly reconstruct the array + with low level numpy functions. + * determining if memmap is allowed on the array. + * reading the array bytes from a file. + * reading the array using memorymap from a file. + * writing the array bytes to a file. + + Attributes + ---------- + subclass: numpy.ndarray subclass + Determine the subclass of the wrapped array. + shape: numpy.ndarray shape + Determine the shape of the wrapped array. + order: {'C', 'F'} + Determine the order of wrapped array data. 'C' is for C order, 'F' is + for fortran order. + dtype: numpy.ndarray dtype + Determine the data type of the wrapped array. + allow_mmap: bool + Determine if memory mapping is allowed on the wrapped array. + Default: False. + """ + + def __init__(self, subclass, shape, order, dtype, allow_mmap=False): + """Constructor. Store the useful information for later.""" + self.subclass = subclass + self.shape = shape + self.order = order + self.dtype = dtype + self.allow_mmap = allow_mmap + + def write_array(self, array, pickler): + """Write array bytes to pickler file handle. + + This function is an adaptation of the numpy write_array function + available in version 1.10.1 in numpy/lib/format.py. 
+ """ + # Set buffer size to 16 MiB to hide the Python loop overhead. + buffersize = max(16 * 1024 ** 2 // array.itemsize, 1) + if array.dtype.hasobject: + # We contain Python objects so we cannot write out the data + # directly. Instead, we will pickle it out with version 2 of the + # pickle protocol. + pickle.dump(array, pickler.file_handle, protocol=2) + else: + for chunk in pickler.np.nditer(array, + flags=['external_loop', + 'buffered', + 'zerosize_ok'], + buffersize=buffersize, + order=self.order): + pickler.file_handle.write(chunk.tobytes('C')) + + def read_array(self, unpickler): + """Read array from unpickler file handle. + + This function is an adaptation of the numpy read_array function + available in version 1.10.1 in numpy/lib/format.py. + """ + if len(self.shape) == 0: + count = 1 + else: + # joblib issue #859: we cast the elements of self.shape to int64 to + # prevent a potential overflow when computing their product. + shape_int64 = [unpickler.np.int64(x) for x in self.shape] + count = unpickler.np.multiply.reduce(shape_int64) + # Now read the actual data. + if self.dtype.hasobject: + # The array contained Python objects. We need to unpickle the data. + array = pickle.load(unpickler.file_handle) + else: + # This is not a real file. We have to read it the + # memory-intensive way. + # crc32 module fails on reads greater than 2 ** 32 bytes, + # breaking large reads from gzip streams. Chunk reads to + # BUFFER_SIZE bytes to avoid issue and reduce memory overhead + # of the read. In non-chunked case count < max_read_count, so + # only one read is performed. + max_read_count = BUFFER_SIZE // min(BUFFER_SIZE, + self.dtype.itemsize) + + array = unpickler.np.empty(count, dtype=self.dtype) + for i in range(0, count, max_read_count): + read_count = min(max_read_count, count - i) + read_size = int(read_count * self.dtype.itemsize) + data = _read_bytes(unpickler.file_handle, + read_size, "array data") + array[i:i + read_count] = \ + unpickler.np.frombuffer(data, dtype=self.dtype, + count=read_count) + del data + + if self.order == 'F': + array.shape = self.shape[::-1] + array = array.transpose() + else: + array.shape = self.shape + + return array + + def read_mmap(self, unpickler): + """Read an array using numpy memmap.""" + offset = unpickler.file_handle.tell() + if unpickler.mmap_mode == 'w+': + unpickler.mmap_mode = 'r+' + + marray = make_memmap(unpickler.filename, + dtype=self.dtype, + shape=self.shape, + order=self.order, + mode=unpickler.mmap_mode, + offset=offset) + # update the offset so that it corresponds to the end of the read array + unpickler.file_handle.seek(offset + marray.nbytes) + + return marray + + def read(self, unpickler): + """Read the array corresponding to this wrapper. + + Use the unpickler to get all information to correctly read the array. + + Parameters + ---------- + unpickler: NumpyUnpickler + + Returns + ------- + array: numpy.ndarray + + """ + # When requested, only use memmap mode if allowed. 
+ if unpickler.mmap_mode is not None and self.allow_mmap: + array = self.read_mmap(unpickler) + else: + array = self.read_array(unpickler) + + # Manage array subclass case + if (hasattr(array, '__array_prepare__') and + self.subclass not in (unpickler.np.ndarray, + unpickler.np.memmap)): + # We need to reconstruct another subclass + new_array = unpickler.np.core.multiarray._reconstruct( + self.subclass, (0,), 'b') + return new_array.__array_prepare__(array) + else: + return array + +############################################################################### +# Pickler classes + + +class NumpyPickler(Pickler): + """A pickler to persist big data efficiently. + + The main features of this object are: + * persistence of numpy arrays in a single file. + * optional compression with a special care on avoiding memory copies. + + Attributes + ---------- + fp: file + File object handle used for serializing the input object. + protocol: int, optional + Pickle protocol used. Default is pickle.DEFAULT_PROTOCOL. + """ + + dispatch = Pickler.dispatch.copy() + + def __init__(self, fp, protocol=None): + self.file_handle = fp + self.buffered = isinstance(self.file_handle, BinaryZlibFile) + + # By default we want a pickle protocol that only changes with + # the major python version and not the minor one + if protocol is None: + protocol = pickle.DEFAULT_PROTOCOL + + Pickler.__init__(self, self.file_handle, protocol=protocol) + # delayed import of numpy, to avoid tight coupling + try: + import numpy as np + except ImportError: + np = None + self.np = np + + def _create_array_wrapper(self, array): + """Create and returns a numpy array wrapper from a numpy array.""" + order = 'F' if (array.flags.f_contiguous and + not array.flags.c_contiguous) else 'C' + allow_mmap = not self.buffered and not array.dtype.hasobject + wrapper = NumpyArrayWrapper(type(array), + array.shape, order, array.dtype, + allow_mmap=allow_mmap) + + return wrapper + + def save(self, obj): + """Subclass the Pickler `save` method. + + This is a total abuse of the Pickler class in order to use the numpy + persistence function `save` instead of the default pickle + implementation. The numpy array is replaced by a custom wrapper in the + pickle persistence stack and the serialized array is written right + after in the file. Warning: the file produced does not follow the + pickle format. As such it can not be read with `pickle.load`. + """ + if self.np is not None and type(obj) in (self.np.ndarray, + self.np.matrix, + self.np.memmap): + if type(obj) is self.np.memmap: + # Pickling doesn't work with memmapped arrays + obj = self.np.asanyarray(obj) + + # The array wrapper is pickled instead of the real array. + wrapper = self._create_array_wrapper(obj) + Pickler.save(self, wrapper) + + # A framer was introduced with pickle protocol 4 and we want to + # ensure the wrapper object is written before the numpy array + # buffer in the pickle file. + # See https://www.python.org/dev/peps/pep-3154/#framing to get + # more information on the framer behavior. + if self.proto >= 4: + self.framer.commit_frame(force=True) + + # And then array bytes are written right after the wrapper. + wrapper.write_array(obj, self) + return + + return Pickler.save(self, obj) + + +class NumpyUnpickler(Unpickler): + """A subclass of the Unpickler to unpickle our numpy pickles. + + Attributes + ---------- + mmap_mode: str + The memorymap mode to use for reading numpy arrays. + file_handle: file_like + File object to unpickle from. 
+ filename: str + Name of the file to unpickle from. It should correspond to file_handle. + This parameter is required when using mmap_mode. + np: module + Reference to numpy module if numpy is installed else None. + + """ + + dispatch = Unpickler.dispatch.copy() + + def __init__(self, filename, file_handle, mmap_mode=None): + # The next line is for backward compatibility with pickle generated + # with joblib versions less than 0.10. + self._dirname = os.path.dirname(filename) + + self.mmap_mode = mmap_mode + self.file_handle = file_handle + # filename is required for numpy mmap mode. + self.filename = filename + self.compat_mode = False + Unpickler.__init__(self, self.file_handle) + try: + import numpy as np + except ImportError: + np = None + self.np = np + + def load_build(self): + """Called to set the state of a newly created object. + + We capture it to replace our place-holder objects, NDArrayWrapper or + NumpyArrayWrapper, by the array we are interested in. We + replace them directly in the stack of pickler. + NDArrayWrapper is used for backward compatibility with joblib <= 0.9. + """ + Unpickler.load_build(self) + + # For backward compatibility, we support NDArrayWrapper objects. + if isinstance(self.stack[-1], (NDArrayWrapper, NumpyArrayWrapper)): + if self.np is None: + raise ImportError("Trying to unpickle an ndarray, " + "but numpy didn't import correctly") + array_wrapper = self.stack.pop() + # If any NDArrayWrapper is found, we switch to compatibility mode, + # this will be used to raise a DeprecationWarning to the user at + # the end of the unpickling. + if isinstance(array_wrapper, NDArrayWrapper): + self.compat_mode = True + self.stack.append(array_wrapper.read(self)) + + # Be careful to register our new method. + dispatch[pickle.BUILD[0]] = load_build + + +############################################################################### +# Utility functions + +def dump(value, filename, compress=0, protocol=None, cache_size=None): + """Persist an arbitrary Python object into one file. + + Read more in the :ref:`User Guide `. + + Parameters + ----------- + value: any Python object + The object to store to disk. + filename: str, pathlib.Path, or file object. + The file object or path of the file in which it is to be stored. + The compression method corresponding to one of the supported filename + extensions ('.z', '.gz', '.bz2', '.xz' or '.lzma') will be used + automatically. + compress: int from 0 to 9 or bool or 2-tuple, optional + Optional compression level for the data. 0 or False is no compression. + Higher value means more compression, but also slower read and + write times. Using a value of 3 is often a good compromise. + See the notes for more details. + If compress is True, the compression level used is 3. + If compress is a 2-tuple, the first element must correspond to a string + between supported compressors (e.g 'zlib', 'gzip', 'bz2', 'lzma' + 'xz'), the second element must be an integer from 0 to 9, corresponding + to the compression level. + protocol: int, optional + Pickle protocol, see pickle.dump documentation for more details. + cache_size: positive int, optional + This option is deprecated in 0.10 and has no effect. + + Returns + ------- + filenames: list of strings + The list of file names in which the data is stored. If + compress is false, each array is stored in a different file. + + See Also + -------- + joblib.load : corresponding loader + + Notes + ----- + Memmapping on load cannot be used for compressed files. 
Thus + using compression can significantly slow down loading. In + addition, compressed files take extra extra memory during + dump and load. + + """ + + if Path is not None and isinstance(filename, Path): + filename = str(filename) + + is_filename = isinstance(filename, str) + is_fileobj = hasattr(filename, "write") + + compress_method = 'zlib' # zlib is the default compression method. + if compress is True: + # By default, if compress is enabled, we want the default compress + # level of the compressor. + compress_level = None + elif isinstance(compress, tuple): + # a 2-tuple was set in compress + if len(compress) != 2: + raise ValueError( + 'Compress argument tuple should contain exactly 2 elements: ' + '(compress method, compress level), you passed {}' + .format(compress)) + compress_method, compress_level = compress + elif isinstance(compress, str): + compress_method = compress + compress_level = None # Use default compress level + compress = (compress_method, compress_level) + else: + compress_level = compress + + if compress_method == 'lz4' and lz4 is None: + raise ValueError(LZ4_NOT_INSTALLED_ERROR) + + if (compress_level is not None and + compress_level is not False and + compress_level not in range(10)): + # Raising an error if a non valid compress level is given. + raise ValueError( + 'Non valid compress level given: "{}". Possible values are ' + '{}.'.format(compress_level, list(range(10)))) + + if compress_method not in _COMPRESSORS: + # Raising an error if an unsupported compression method is given. + raise ValueError( + 'Non valid compression method given: "{}". Possible values are ' + '{}.'.format(compress_method, _COMPRESSORS)) + + if not is_filename and not is_fileobj: + # People keep inverting arguments, and the resulting error is + # incomprehensible + raise ValueError( + 'Second argument should be a filename or a file-like object, ' + '%s (type %s) was given.' + % (filename, type(filename)) + ) + + if is_filename and not isinstance(compress, tuple): + # In case no explicit compression was requested using both compression + # method and level in a tuple and the filename has an explicit + # extension, we select the corresponding compressor. + + # unset the variable to be sure no compression level is set afterwards. + compress_method = None + for name, compressor in _COMPRESSORS.items(): + if filename.endswith(compressor.extension): + compress_method = name + + if compress_method in _COMPRESSORS and compress_level == 0: + # we choose the default compress_level in case it was not given + # as an argument (using compress). + compress_level = None + + if cache_size is not None: + # Cache size is deprecated starting from version 0.10 + warnings.warn("Please do not set 'cache_size' in joblib.dump, " + "this parameter has no effect and will be removed. " + "You used 'cache_size={}'".format(cache_size), + DeprecationWarning, stacklevel=2) + + if compress_level != 0: + with _write_fileobject(filename, compress=(compress_method, + compress_level)) as f: + NumpyPickler(f, protocol=protocol).dump(value) + elif is_filename: + with open(filename, 'wb') as f: + NumpyPickler(f, protocol=protocol).dump(value) + else: + NumpyPickler(filename, protocol=protocol).dump(value) + + # If the target container is a file object, nothing is returned. + if is_fileobj: + return + + # For compatibility, the list of created filenames (e.g with one element + # after 0.10.0) is returned by default. 
+ return [filename] + + +def _unpickle(fobj, filename="", mmap_mode=None): + """Internal unpickling function.""" + # We are careful to open the file handle early and keep it open to + # avoid race-conditions on renames. + # That said, if data is stored in companion files, which can be + # the case with the old persistence format, moving the directory + # will create a race when joblib tries to access the companion + # files. + unpickler = NumpyUnpickler(filename, fobj, mmap_mode=mmap_mode) + obj = None + try: + obj = unpickler.load() + if unpickler.compat_mode: + warnings.warn("The file '%s' has been generated with a " + "joblib version less than 0.10. " + "Please regenerate this pickle file." + % filename, + DeprecationWarning, stacklevel=3) + except UnicodeDecodeError as exc: + # More user-friendly error message + new_exc = ValueError( + 'You may be trying to read with ' + 'python 3 a joblib pickle generated with python 2. ' + 'This feature is not supported by joblib.') + new_exc.__cause__ = exc + raise new_exc + return obj + + +def load_temporary_memmap(filename, mmap_mode, unlink_on_gc_collect): + from ._memmapping_reducer import JOBLIB_MMAPS, add_maybe_unlink_finalizer + obj = load(filename, mmap_mode) + JOBLIB_MMAPS.add(obj.filename) + if unlink_on_gc_collect: + add_maybe_unlink_finalizer(obj) + return obj + + +def load(filename, mmap_mode=None): + """Reconstruct a Python object from a file persisted with joblib.dump. + + Read more in the :ref:`User Guide `. + + WARNING: joblib.load relies on the pickle module and can therefore + execute arbitrary Python code. It should therefore never be used + to load files from untrusted sources. + + Parameters + ----------- + filename: str, pathlib.Path, or file object. + The file object or path of the file from which to load the object + mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional + If not None, the arrays are memory-mapped from the disk. This + mode has no effect for compressed files. Note that in this + case the reconstructed object might no longer match exactly + the originally pickled object. + + Returns + ------- + result: any Python object + The object stored in the file. + + See Also + -------- + joblib.dump : function to save an object + + Notes + ----- + + This function can load numpy array files saved separately during the + dump. If the mmap_mode argument is given, it is passed to np.load and + arrays are loaded as memmaps. As a consequence, the reconstructed + object might not match the original pickled object. Note that if the + file was saved with compression, the arrays cannot be memmapped. + """ + if Path is not None and isinstance(filename, Path): + filename = str(filename) + + if hasattr(filename, "read"): + fobj = filename + filename = getattr(fobj, 'name', '') + with _read_fileobject(fobj, filename, mmap_mode) as fobj: + obj = _unpickle(fobj) + else: + with open(filename, 'rb') as f: + with _read_fileobject(f, filename, mmap_mode) as fobj: + if isinstance(fobj, str): + # if the returned file object is a string, this means we + # try to load a pickle file generated with an version of + # Joblib so we load it with joblib compatibility function. 
+ return load_compatibility(fobj) + + obj = _unpickle(fobj, filename, mmap_mode) + return obj diff --git a/minor_project/lib/python3.6/site-packages/joblib/numpy_pickle_compat.py b/minor_project/lib/python3.6/site-packages/joblib/numpy_pickle_compat.py new file mode 100644 index 0000000..6541a06 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/numpy_pickle_compat.py @@ -0,0 +1,240 @@ +"""Numpy pickle compatibility functions.""" + +import pickle +import os +import zlib +import inspect + +from io import BytesIO + +from .numpy_pickle_utils import _ZFILE_PREFIX +from .numpy_pickle_utils import Unpickler + + +def hex_str(an_int): + """Convert an int to an hexadecimal string.""" + return '{:#x}'.format(an_int) + + +def asbytes(s): + if isinstance(s, bytes): + return s + return s.encode('latin1') + + +_MAX_LEN = len(hex_str(2 ** 64)) +_CHUNK_SIZE = 64 * 1024 + + +def read_zfile(file_handle): + """Read the z-file and return the content as a string. + + Z-files are raw data compressed with zlib used internally by joblib + for persistence. Backward compatibility is not guaranteed. Do not + use for external purposes. + """ + file_handle.seek(0) + header_length = len(_ZFILE_PREFIX) + _MAX_LEN + length = file_handle.read(header_length) + length = length[len(_ZFILE_PREFIX):] + length = int(length, 16) + + # With python2 and joblib version <= 0.8.4 compressed pickle header is one + # character wider so we need to ignore an additional space if present. + # Note: the first byte of the zlib data is guaranteed not to be a + # space according to + # https://tools.ietf.org/html/rfc6713#section-2.1 + next_byte = file_handle.read(1) + if next_byte != b' ': + # The zlib compressed data has started and we need to go back + # one byte + file_handle.seek(header_length) + + # We use the known length of the data to tell Zlib the size of the + # buffer to allocate. + data = zlib.decompress(file_handle.read(), 15, length) + assert len(data) == length, ( + "Incorrect data length while decompressing %s." + "The file could be corrupted." % file_handle) + return data + + +def write_zfile(file_handle, data, compress=1): + """Write the data in the given file as a Z-file. + + Z-files are raw data compressed with zlib used internally by joblib + for persistence. Backward compatibility is not guarantied. Do not + use for external purposes. + """ + file_handle.write(_ZFILE_PREFIX) + length = hex_str(len(data)) + # Store the length of the data + file_handle.write(asbytes(length.ljust(_MAX_LEN))) + file_handle.write(zlib.compress(asbytes(data), compress)) + +############################################################################### +# Utility objects for persistence. + + +class NDArrayWrapper(object): + """An object to be persisted instead of numpy arrays. + + The only thing this object does, is to carry the filename in which + the array has been persisted, and the array subclass. + """ + + def __init__(self, filename, subclass, allow_mmap=True): + """Constructor. 
Store the useful information for later.""" + self.filename = filename + self.subclass = subclass + self.allow_mmap = allow_mmap + + def read(self, unpickler): + """Reconstruct the array.""" + filename = os.path.join(unpickler._dirname, self.filename) + # Load the array from the disk + # use getattr instead of self.allow_mmap to ensure backward compat + # with NDArrayWrapper instances pickled with joblib < 0.9.0 + allow_mmap = getattr(self, 'allow_mmap', True) + kwargs = {} + if allow_mmap: + kwargs['mmap_mode'] = unpickler.mmap_mode + if "allow_pickle" in inspect.signature(unpickler.np.load).parameters: + # Required in numpy 1.16.3 and later to aknowledge the security + # risk. + kwargs["allow_pickle"] = True + array = unpickler.np.load(filename, **kwargs) + + # Reconstruct subclasses. This does not work with old + # versions of numpy + if (hasattr(array, '__array_prepare__') and + self.subclass not in (unpickler.np.ndarray, + unpickler.np.memmap)): + # We need to reconstruct another subclass + new_array = unpickler.np.core.multiarray._reconstruct( + self.subclass, (0,), 'b') + return new_array.__array_prepare__(array) + else: + return array + + +class ZNDArrayWrapper(NDArrayWrapper): + """An object to be persisted instead of numpy arrays. + + This object store the Zfile filename in which + the data array has been persisted, and the meta information to + retrieve it. + The reason that we store the raw buffer data of the array and + the meta information, rather than array representation routine + (tobytes) is that it enables us to use completely the strided + model to avoid memory copies (a and a.T store as fast). In + addition saving the heavy information separately can avoid + creating large temporary buffers when unpickling data with + large arrays. + """ + + def __init__(self, filename, init_args, state): + """Constructor. Store the useful information for later.""" + self.filename = filename + self.state = state + self.init_args = init_args + + def read(self, unpickler): + """Reconstruct the array from the meta-information and the z-file.""" + # Here we a simply reproducing the unpickling mechanism for numpy + # arrays + filename = os.path.join(unpickler._dirname, self.filename) + array = unpickler.np.core.multiarray._reconstruct(*self.init_args) + with open(filename, 'rb') as f: + data = read_zfile(f) + state = self.state + (data,) + array.__setstate__(state) + return array + + +class ZipNumpyUnpickler(Unpickler): + """A subclass of the Unpickler to unpickle our numpy pickles.""" + + dispatch = Unpickler.dispatch.copy() + + def __init__(self, filename, file_handle, mmap_mode=None): + """Constructor.""" + self._filename = os.path.basename(filename) + self._dirname = os.path.dirname(filename) + self.mmap_mode = mmap_mode + self.file_handle = self._open_pickle(file_handle) + Unpickler.__init__(self, self.file_handle) + try: + import numpy as np + except ImportError: + np = None + self.np = np + + def _open_pickle(self, file_handle): + return BytesIO(read_zfile(file_handle)) + + def load_build(self): + """Set the state of a newly created object. + + We capture it to replace our place-holder objects, + NDArrayWrapper, by the array we are interested in. We + replace them directly in the stack of pickler. 
+ """ + Unpickler.load_build(self) + if isinstance(self.stack[-1], NDArrayWrapper): + if self.np is None: + raise ImportError("Trying to unpickle an ndarray, " + "but numpy didn't import correctly") + nd_array_wrapper = self.stack.pop() + array = nd_array_wrapper.read(self) + self.stack.append(array) + + dispatch[pickle.BUILD[0]] = load_build + + +def load_compatibility(filename): + """Reconstruct a Python object from a file persisted with joblib.dump. + + This function ensures the compatibility with joblib old persistence format + (<= 0.9.3). + + Parameters + ----------- + filename: string + The name of the file from which to load the object + + Returns + ------- + result: any Python object + The object stored in the file. + + See Also + -------- + joblib.dump : function to save an object + + Notes + ----- + + This function can load numpy array files saved separately during the + dump. + """ + with open(filename, 'rb') as file_handle: + # We are careful to open the file handle early and keep it open to + # avoid race-conditions on renames. That said, if data is stored in + # companion files, moving the directory will create a race when + # joblib tries to access the companion files. + unpickler = ZipNumpyUnpickler(filename, file_handle=file_handle) + try: + obj = unpickler.load() + except UnicodeDecodeError as exc: + # More user-friendly error message + new_exc = ValueError( + 'You may be trying to read with ' + 'python 3 a joblib pickle generated with python 2. ' + 'This feature is not supported by joblib.') + new_exc.__cause__ = exc + raise new_exc + finally: + if hasattr(unpickler, 'file_handle'): + unpickler.file_handle.close() + return obj diff --git a/minor_project/lib/python3.6/site-packages/joblib/numpy_pickle_utils.py b/minor_project/lib/python3.6/site-packages/joblib/numpy_pickle_utils.py new file mode 100644 index 0000000..a501055 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/numpy_pickle_utils.py @@ -0,0 +1,228 @@ +"""Utilities for fast persistence of big data, with optional compression.""" + +# Author: Gael Varoquaux +# Copyright (c) 2009 Gael Varoquaux +# License: BSD Style, 3 clauses. + +import pickle +import io +import warnings +import contextlib + +from .compressor import _ZFILE_PREFIX +from .compressor import _COMPRESSORS + +try: + import numpy as np +except ImportError: + np = None + +Unpickler = pickle._Unpickler +Pickler = pickle._Pickler +xrange = range + + +try: + # The python standard library can be built without bz2 so we make bz2 + # usage optional. + # see https://github.com/scikit-learn/scikit-learn/issues/7526 for more + # details. + import bz2 +except ImportError: + bz2 = None + +# Buffer size used in io.BufferedReader and io.BufferedWriter +_IO_BUFFER_SIZE = 1024 ** 2 + + +def _is_raw_file(fileobj): + """Check if fileobj is a raw file object, e.g created with open.""" + fileobj = getattr(fileobj, 'raw', fileobj) + return isinstance(fileobj, io.FileIO) + + +def _get_prefixes_max_len(): + # Compute the max prefix len of registered compressors. + prefixes = [len(compressor.prefix) for compressor in _COMPRESSORS.values()] + prefixes += [len(_ZFILE_PREFIX)] + return max(prefixes) + + +############################################################################### +# Cache file utilities +def _detect_compressor(fileobj): + """Return the compressor matching fileobj. 
+
+    Parameters
+    ----------
+    fileobj: file object
+
+    Returns
+    -------
+    str in {'zlib', 'gzip', 'bz2', 'lzma', 'xz', 'compat', 'not-compressed'}
+    """
+    # Read the magic number in the first bytes of the file.
+    max_prefix_len = _get_prefixes_max_len()
+    if hasattr(fileobj, 'peek'):
+        # Peek allows reading those bytes without moving the cursor in the
+        # file.
+        first_bytes = fileobj.peek(max_prefix_len)
+    else:
+        # Fall back to seek if the file object is not peekable.
+        first_bytes = fileobj.read(max_prefix_len)
+        fileobj.seek(0)
+
+    if first_bytes.startswith(_ZFILE_PREFIX):
+        return "compat"
+    else:
+        for name, compressor in _COMPRESSORS.items():
+            if first_bytes.startswith(compressor.prefix):
+                return name
+
+    return "not-compressed"
+
+
+def _buffered_read_file(fobj):
+    """Return a buffered version of a read file object."""
+    return io.BufferedReader(fobj, buffer_size=_IO_BUFFER_SIZE)
+
+
+def _buffered_write_file(fobj):
+    """Return a buffered version of a write file object."""
+    return io.BufferedWriter(fobj, buffer_size=_IO_BUFFER_SIZE)
+
+
+@contextlib.contextmanager
+def _read_fileobject(fileobj, filename, mmap_mode=None):
+    """Utility function opening the right fileobject from a filename.
+
+    The magic number is used to choose the type of file object to open:
+    * regular file object (default)
+    * zlib file object
+    * gzip file object
+    * bz2 file object
+    * lzma file object (for xz and lzma compressors)
+
+    Parameters
+    ----------
+    fileobj: file object
+    filename: str
+        filename path corresponding to the fileobj parameter.
+    mmap_mode: str
+        memory map mode that should be used to open the pickle file. This
+        parameter is useful to verify that the user is not trying to use
+        memory mapping on a compressed file. Default: None.
+
+    Returns
+    -------
+    a file like object
+
+    """
+    # Detect if the fileobj contains compressed data.
+    compressor = _detect_compressor(fileobj)
+
+    if compressor == 'compat':
+        # Compatibility with old pickle mode: simply return the input
+        # filename "as-is" and let the compatibility function be called by the
+        # caller.
+        warnings.warn("The file '%s' has been generated with a joblib "
+                      "version less than 0.10. "
+                      "Please regenerate this pickle file." % filename,
+                      DeprecationWarning, stacklevel=2)
+        yield filename
+    else:
+        if compressor in _COMPRESSORS:
+            # Based on the compressor detected in the file, we open the
+            # correct decompressor file object, wrapped in a buffer.
+            compressor_wrapper = _COMPRESSORS[compressor]
+            inst = compressor_wrapper.decompressor_file(fileobj)
+            fileobj = _buffered_read_file(inst)
+
+        # Check for load parameters that are incompatible with the type of
+        # file: mmap_mode cannot be used with compressed files or in-memory
+        # buffers such as io.BytesIO.
+        if mmap_mode is not None:
+            if isinstance(fileobj, io.BytesIO):
+                warnings.warn('In memory persistence is not compatible with '
+                              'mmap_mode "%(mmap_mode)s" flag passed. '
+                              'mmap_mode option will be ignored.'
+                              % locals(), stacklevel=2)
+            elif compressor != 'not-compressed':
+                warnings.warn('mmap_mode "%(mmap_mode)s" is not compatible '
+                              'with compressed file %(filename)s. '
+                              '"%(mmap_mode)s" flag will be ignored.'
+                              % locals(), stacklevel=2)
+            elif not _is_raw_file(fileobj):
+                warnings.warn('"%(fileobj)r" is not a raw file, mmap_mode '
+                              '"%(mmap_mode)s" flag will be ignored.'
+ % locals(), stacklevel=2) + + yield fileobj + + +def _write_fileobject(filename, compress=("zlib", 3)): + """Return the right compressor file object in write mode.""" + compressmethod = compress[0] + compresslevel = compress[1] + + if compressmethod in _COMPRESSORS.keys(): + file_instance = _COMPRESSORS[compressmethod].compressor_file( + filename, compresslevel=compresslevel) + return _buffered_write_file(file_instance) + else: + file_instance = _COMPRESSORS['zlib'].compressor_file( + filename, compresslevel=compresslevel) + return _buffered_write_file(file_instance) + + +# Utility functions/variables from numpy required for writing arrays. +# We need at least the functions introduced in version 1.9 of numpy. Here, +# we use the ones from numpy 1.10.2. +BUFFER_SIZE = 2 ** 18 # size of buffer for reading npz files in bytes + + +def _read_bytes(fp, size, error_template="ran out of data"): + """Read from file-like object until size bytes are read. + + TODO python2_drop: is it still needed? The docstring mentions python 2.6 + and it looks like this can be at least simplified ... + + Raises ValueError if not EOF is encountered before size bytes are read. + Non-blocking objects only supported if they derive from io objects. + + Required as e.g. ZipExtFile in python 2.6 can return less data than + requested. + + This function was taken from numpy/lib/format.py in version 1.10.2. + + Parameters + ---------- + fp: file-like object + size: int + error_template: str + + Returns + ------- + a bytes object + The data read in bytes. + + """ + data = bytes() + while True: + # io files (default in python3) return None or raise on + # would-block, python2 file will truncate, probably nothing can be + # done about that. note that regular files can't be non-blocking + try: + r = fp.read(size - len(data)) + data += r + if len(r) == 0 or len(data) == size: + break + except io.BlockingIOError: + pass + if len(data) != size: + msg = "EOF: reading %s, expected %d bytes got %d" + raise ValueError(msg % (error_template, size, len(data))) + else: + return data diff --git a/minor_project/lib/python3.6/site-packages/joblib/parallel.py b/minor_project/lib/python3.6/site-packages/joblib/parallel.py new file mode 100644 index 0000000..17a9f23 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/parallel.py @@ -0,0 +1,1072 @@ +""" +Helpers for embarrassingly parallel code. +""" +# Author: Gael Varoquaux < gael dot varoquaux at normalesup dot org > +# Copyright: 2010, Gael Varoquaux +# License: BSD 3 clause + +from __future__ import division + +import os +import sys +from math import sqrt +import functools +import time +import threading +import itertools +from uuid import uuid4 +from numbers import Integral +import warnings +import queue + +from ._multiprocessing_helpers import mp + +from .logger import Logger, short_format_time +from .disk import memstr_to_bytes +from ._parallel_backends import (FallbackToBackend, MultiprocessingBackend, + ThreadingBackend, SequentialBackend, + LokyBackend) +from .externals.cloudpickle import dumps, loads +from .externals import loky + +# Make sure that those two classes are part of the public joblib.parallel API +# so that 3rd party backend implementers can import them from here. 
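+# For reference, registering a third-party backend typically looks like the
+# sketch below (``MyCustomBackend`` is a hypothetical example, not part of
+# joblib):
+#
+#     from joblib import register_parallel_backend
+#     from joblib.parallel import ParallelBackendBase
+#
+#     class MyCustomBackend(ParallelBackendBase):
+#         ...  # implement e.g. effective_n_jobs() and apply_async()
+#
+#     register_parallel_backend('custom', MyCustomBackend)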
+from ._parallel_backends import AutoBatchingMixin # noqa +from ._parallel_backends import ParallelBackendBase # noqa + + +BACKENDS = { + 'multiprocessing': MultiprocessingBackend, + 'threading': ThreadingBackend, + 'sequential': SequentialBackend, + 'loky': LokyBackend, +} +# name of the backend used by default by Parallel outside of any context +# managed by ``parallel_backend``. +DEFAULT_BACKEND = 'loky' +DEFAULT_N_JOBS = 1 +DEFAULT_THREAD_BACKEND = 'threading' + +# Thread local value that can be overridden by the ``parallel_backend`` context +# manager +_backend = threading.local() + +VALID_BACKEND_HINTS = ('processes', 'threads', None) +VALID_BACKEND_CONSTRAINTS = ('sharedmem', None) + + +def _register_dask(): + """ Register Dask Backend if called with parallel_backend("dask") """ + try: + from ._dask import DaskDistributedBackend + register_parallel_backend('dask', DaskDistributedBackend) + except ImportError as e: + msg = ("To use the dask.distributed backend you must install both " + "the `dask` and distributed modules.\n\n" + "See https://dask.pydata.org/en/latest/install.html for more " + "information.") + raise ImportError(msg) from e + + +EXTERNAL_BACKENDS = { + 'dask': _register_dask, +} + + +def get_active_backend(prefer=None, require=None, verbose=0): + """Return the active default backend""" + if prefer not in VALID_BACKEND_HINTS: + raise ValueError("prefer=%r is not a valid backend hint, " + "expected one of %r" % (prefer, VALID_BACKEND_HINTS)) + if require not in VALID_BACKEND_CONSTRAINTS: + raise ValueError("require=%r is not a valid backend constraint, " + "expected one of %r" + % (require, VALID_BACKEND_CONSTRAINTS)) + + if prefer == 'processes' and require == 'sharedmem': + raise ValueError("prefer == 'processes' and require == 'sharedmem'" + " are inconsistent settings") + backend_and_jobs = getattr(_backend, 'backend_and_jobs', None) + if backend_and_jobs is not None: + # Try to use the backend set by the user with the context manager. + backend, n_jobs = backend_and_jobs + nesting_level = backend.nesting_level + supports_sharedmem = getattr(backend, 'supports_sharedmem', False) + if require == 'sharedmem' and not supports_sharedmem: + # This backend does not match the shared memory constraint: + # fallback to the default thead-based backend. + sharedmem_backend = BACKENDS[DEFAULT_THREAD_BACKEND]( + nesting_level=nesting_level) + if verbose >= 10: + print("Using %s as joblib.Parallel backend instead of %s " + "as the latter does not provide shared memory semantics." + % (sharedmem_backend.__class__.__name__, + backend.__class__.__name__)) + return sharedmem_backend, DEFAULT_N_JOBS + else: + return backend_and_jobs + + # We are outside of the scope of any parallel_backend context manager, + # create the default backend instance now. + backend = BACKENDS[DEFAULT_BACKEND](nesting_level=0) + supports_sharedmem = getattr(backend, 'supports_sharedmem', False) + uses_threads = getattr(backend, 'uses_threads', False) + if ((require == 'sharedmem' and not supports_sharedmem) or + (prefer == 'threads' and not uses_threads)): + # Make sure the selected default backend match the soft hints and + # hard constraints: + backend = BACKENDS[DEFAULT_THREAD_BACKEND](nesting_level=0) + return backend, DEFAULT_N_JOBS + + +class parallel_backend(object): + """Change the default backend used by Parallel inside a with block. + + If ``backend`` is a string it must match a previously registered + implementation using the ``register_parallel_backend`` function. 
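Returning to get_active_backend above, a small sketch of how the prefer hint falls back to the thread-based backend when no parallel_backend context is active (default joblib configuration assumed):

>>> from joblib.parallel import get_active_backend  # doctest: +SKIP
>>> backend, n_jobs = get_active_backend(prefer='threads')
>>> type(backend).__name__, n_jobs
('ThreadingBackend', 1)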
+ + By default the following backends are available: + + - 'loky': single-host, process-based parallelism (used by default), + - 'threading': single-host, thread-based parallelism, + - 'multiprocessing': legacy single-host, process-based parallelism. + + 'loky' is recommended to run functions that manipulate Python objects. + 'threading' is a low-overhead alternative that is most efficient for + functions that release the Global Interpreter Lock: e.g. I/O-bound code or + CPU-bound code in a few calls to native code that explicitly releases the + GIL. + + In addition, if the `dask` and `distributed` Python packages are installed, + it is possible to use the 'dask' backend for better scheduling of nested + parallel calls without over-subscription and potentially distribute + parallel calls over a networked cluster of several hosts. + + It is also possible to use the distributed 'ray' backend for distributing + the workload to a cluster of nodes. To use the 'ray' joblib backend add + the following lines:: + + >>> from ray.util.joblib import register_ray # doctest: +SKIP + >>> register_ray() # doctest: +SKIP + >>> with parallel_backend("ray"): # doctest: +SKIP + ... print(Parallel()(delayed(neg)(i + 1) for i in range(5))) + [-1, -2, -3, -4, -5] + + Alternatively the backend can be passed directly as an instance. + + By default all available workers will be used (``n_jobs=-1``) unless the + caller passes an explicit value for the ``n_jobs`` parameter. + + This is an alternative to passing a ``backend='backend_name'`` argument to + the ``Parallel`` class constructor. It is particularly useful when calling + into library code that uses joblib internally but does not expose the + backend argument in its own API. + + >>> from operator import neg + >>> with parallel_backend('threading'): + ... print(Parallel()(delayed(neg)(i + 1) for i in range(5))) + ... + [-1, -2, -3, -4, -5] + + Warning: this function is experimental and subject to change in a future + version of joblib. + + Joblib also tries to limit the oversubscription by limiting the number of + threads usable in some third-party library threadpools like OpenBLAS, MKL + or OpenMP. The default limit in each worker is set to + ``max(cpu_count() // effective_n_jobs, 1)`` but this limit can be + overwritten with the ``inner_max_num_threads`` argument which will be used + to set this limit in the child processes. + + .. 
versionadded:: 0.10 + + """ + def __init__(self, backend, n_jobs=-1, inner_max_num_threads=None, + **backend_params): + if isinstance(backend, str): + if backend not in BACKENDS and backend in EXTERNAL_BACKENDS: + register = EXTERNAL_BACKENDS[backend] + register() + + backend = BACKENDS[backend](**backend_params) + + if inner_max_num_threads is not None: + msg = ("{} does not accept setting the inner_max_num_threads " + "argument.".format(backend.__class__.__name__)) + assert backend.supports_inner_max_num_threads, msg + backend.inner_max_num_threads = inner_max_num_threads + + # If the nesting_level of the backend is not set previously, use the + # nesting level from the previous active_backend to set it + current_backend_and_jobs = getattr(_backend, 'backend_and_jobs', None) + if backend.nesting_level is None: + if current_backend_and_jobs is None: + nesting_level = 0 + else: + nesting_level = current_backend_and_jobs[0].nesting_level + + backend.nesting_level = nesting_level + + # Save the backends info and set the active backend + self.old_backend_and_jobs = current_backend_and_jobs + self.new_backend_and_jobs = (backend, n_jobs) + + _backend.backend_and_jobs = (backend, n_jobs) + + def __enter__(self): + return self.new_backend_and_jobs + + def __exit__(self, type, value, traceback): + self.unregister() + + def unregister(self): + if self.old_backend_and_jobs is None: + if getattr(_backend, 'backend_and_jobs', None) is not None: + del _backend.backend_and_jobs + else: + _backend.backend_and_jobs = self.old_backend_and_jobs + + +# Under Linux or OS X the default start method of multiprocessing +# can cause third party libraries to crash. Under Python 3.4+ it is possible +# to set an environment variable to switch the default start method from +# 'fork' to 'forkserver' or 'spawn' to avoid this issue albeit at the cost +# of causing semantic changes and some additional pool instantiation overhead. +DEFAULT_MP_CONTEXT = None +if hasattr(mp, 'get_context'): + method = os.environ.get('JOBLIB_START_METHOD', '').strip() or None + if method is not None: + DEFAULT_MP_CONTEXT = mp.get_context(method=method) + + +class BatchedCalls(object): + """Wrap a sequence of (func, args, kwargs) tuples as a single callable""" + + def __init__(self, iterator_slice, backend_and_jobs, reducer_callback=None, + pickle_cache=None): + self.items = list(iterator_slice) + self._size = len(self.items) + self._reducer_callback = reducer_callback + if isinstance(backend_and_jobs, tuple): + self._backend, self._n_jobs = backend_and_jobs + else: + # this is for backward compatibility purposes. Before 0.12.6, + # nested backends were returned without n_jobs indications. + self._backend, self._n_jobs = backend_and_jobs, None + self._pickle_cache = pickle_cache if pickle_cache is not None else {} + + def __call__(self): + # Set the default nested backend to self._backend but do not set the + # change the default number of processes to -1 + with parallel_backend(self._backend, n_jobs=self._n_jobs): + return [func(*args, **kwargs) + for func, args, kwargs in self.items] + + def __reduce__(self): + if self._reducer_callback is not None: + self._reducer_callback() + # no need pickle the callback. 
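As a usage sketch of the inner_max_num_threads handling in parallel_backend above (the 'loky' backend supports this argument; the values here are illustrative):

>>> from joblib import Parallel, delayed, parallel_backend  # doctest: +SKIP
>>> with parallel_backend('loky', n_jobs=2, inner_max_num_threads=1):
...     Parallel()(delayed(sum)([i, i]) for i in range(3))
[0, 2, 4]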
+ return ( + BatchedCalls, + (self.items, (self._backend, self._n_jobs), None, + self._pickle_cache) + ) + + def __len__(self): + return self._size + + +############################################################################### +# CPU count that works also when multiprocessing has been disabled via +# the JOBLIB_MULTIPROCESSING environment variable +def cpu_count(only_physical_cores=False): + """Return the number of CPUs. + + This delegates to loky.cpu_count that takes into account additional + constraints such as Linux CFS scheduler quotas (typically set by container + runtimes such as docker) and CPU affinity (for instance using the taskset + command on Linux). + + If only_physical_cores is True, do not take hyperthreading / SMT logical + cores into account. + """ + if mp is None: + return 1 + + return loky.cpu_count(only_physical_cores=only_physical_cores) + + +############################################################################### +# For verbosity + +def _verbosity_filter(index, verbose): + """ Returns False for indices increasingly apart, the distance + depending on the value of verbose. + + We use a lag increasing as the square of index + """ + if not verbose: + return True + elif verbose > 10: + return False + if index == 0: + return False + verbose = .5 * (11 - verbose) ** 2 + scale = sqrt(index / verbose) + next_scale = sqrt((index + 1) / verbose) + return (int(next_scale) == int(scale)) + + +############################################################################### +def delayed(function): + """Decorator used to capture the arguments of a function.""" + + def delayed_function(*args, **kwargs): + return function, args, kwargs + try: + delayed_function = functools.wraps(function)(delayed_function) + except AttributeError: + " functools.wraps fails on some callable objects " + return delayed_function + + +############################################################################### +class BatchCompletionCallBack(object): + """Callback used by joblib.Parallel's multiprocessing backend. + + This callable is executed by the parent process whenever a worker process + has returned the results of a batch of tasks. + + It is used for progress reporting, to update estimate of the batch + processing duration and to schedule the next batch of tasks to be + processed. + + """ + def __init__(self, dispatch_timestamp, batch_size, parallel): + self.dispatch_timestamp = dispatch_timestamp + self.batch_size = batch_size + self.parallel = parallel + + def __call__(self, out): + self.parallel.n_completed_tasks += self.batch_size + this_batch_duration = time.time() - self.dispatch_timestamp + + self.parallel._backend.batch_completed(self.batch_size, + this_batch_duration) + self.parallel.print_progress() + with self.parallel._lock: + if self.parallel._original_iterator is not None: + self.parallel.dispatch_next() + + +############################################################################### +def register_parallel_backend(name, factory, make_default=False): + """Register a new Parallel backend factory. + + The new backend can then be selected by passing its name as the backend + argument to the Parallel class. Moreover, the default backend can be + overwritten globally by setting make_default=True. + + The factory can be any callable that takes no argument and return an + instance of ``ParallelBackendBase``. + + Warning: this function is experimental and subject to change in a future + version of joblib. + + .. 
versionadded:: 0.10 + + """ + BACKENDS[name] = factory + if make_default: + global DEFAULT_BACKEND + DEFAULT_BACKEND = name + + +def effective_n_jobs(n_jobs=-1): + """Determine the number of jobs that can actually run in parallel + + n_jobs is the number of workers requested by the callers. Passing n_jobs=-1 + means requesting all available workers for instance matching the number of + CPU cores on the worker host(s). + + This method should return a guesstimate of the number of workers that can + actually perform work concurrently with the currently enabled default + backend. The primary use case is to make it possible for the caller to know + in how many chunks to slice the work. + + In general working on larger data chunks is more efficient (less scheduling + overhead and better use of CPU cache prefetching heuristics) as long as all + the workers have enough work to do. + + Warning: this function is experimental and subject to change in a future + version of joblib. + + .. versionadded:: 0.10 + + """ + backend, backend_n_jobs = get_active_backend() + if n_jobs is None: + n_jobs = backend_n_jobs + return backend.effective_n_jobs(n_jobs=n_jobs) + + +############################################################################### +class Parallel(Logger): + ''' Helper class for readable parallel mapping. + + Read more in the :ref:`User Guide `. + + Parameters + ----------- + n_jobs: int, default: None + The maximum number of concurrently running jobs, such as the number + of Python worker processes when backend="multiprocessing" + or the size of the thread-pool when backend="threading". + If -1 all CPUs are used. If 1 is given, no parallel computing code + is used at all, which is useful for debugging. For n_jobs below -1, + (n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all + CPUs but one are used. + None is a marker for 'unset' that will be interpreted as n_jobs=1 + (sequential execution) unless the call is performed under a + parallel_backend context manager that sets another value for + n_jobs. + backend: str, ParallelBackendBase instance or None, default: 'loky' + Specify the parallelization backend implementation. + Supported backends are: + + - "loky" used by default, can induce some + communication and memory overhead when exchanging input and + output data with the worker Python processes. + - "multiprocessing" previous process-based backend based on + `multiprocessing.Pool`. Less robust than `loky`. + - "threading" is a very low-overhead backend but it suffers + from the Python Global Interpreter Lock if the called function + relies a lot on Python objects. "threading" is mostly useful + when the execution bottleneck is a compiled extension that + explicitly releases the GIL (for instance a Cython loop wrapped + in a "with nogil" block or an expensive call to a library such + as NumPy). + - finally, you can register backends by calling + register_parallel_backend. This will allow you to implement + a backend of your liking. + + It is not recommended to hard-code the backend name in a call to + Parallel in a library. Instead it is recommended to set soft hints + (prefer) or hard constraints (require) so as to make it possible + for library users to change the backend from the outside using the + parallel_backend context manager. + prefer: str in {'processes', 'threads'} or None, default: None + Soft hint to choose the default backend if no specific backend + was selected with the parallel_backend context manager. 
The + default process-based backend is 'loky' and the default + thread-based backend is 'threading'. Ignored if the ``backend`` + parameter is specified. + require: 'sharedmem' or None, default None + Hard constraint to select the backend. If set to 'sharedmem', + the selected backend will be single-host and thread-based even + if the user asked for a non-thread based backend with + parallel_backend. + verbose: int, optional + The verbosity level: if non zero, progress messages are + printed. Above 50, the output is sent to stdout. + The frequency of the messages increases with the verbosity level. + If it more than 10, all iterations are reported. + timeout: float, optional + Timeout limit for each task to complete. If any task takes longer + a TimeOutError will be raised. Only applied when n_jobs != 1 + pre_dispatch: {'all', integer, or expression, as in '3*n_jobs'} + The number of batches (of tasks) to be pre-dispatched. + Default is '2*n_jobs'. When batch_size="auto" this is reasonable + default and the workers should never starve. + batch_size: int or 'auto', default: 'auto' + The number of atomic tasks to dispatch at once to each + worker. When individual evaluations are very fast, dispatching + calls to workers can be slower than sequential computation because + of the overhead. Batching fast computations together can mitigate + this. + The ``'auto'`` strategy keeps track of the time it takes for a batch + to complete, and dynamically adjusts the batch size to keep the time + on the order of half a second, using a heuristic. The initial batch + size is 1. + ``batch_size="auto"`` with ``backend="threading"`` will dispatch + batches of a single task at a time as the threading backend has + very little overhead and using larger batch size has not proved to + bring any gain in that case. + temp_folder: str, optional + Folder to be used by the pool for memmapping large arrays + for sharing memory with worker processes. If None, this will try in + order: + + - a folder pointed by the JOBLIB_TEMP_FOLDER environment + variable, + - /dev/shm if the folder exists and is writable: this is a + RAM disk filesystem available by default on modern Linux + distributions, + - the default system temporary folder that can be + overridden with TMP, TMPDIR or TEMP environment + variables, typically /tmp under Unix operating systems. + + Only active when backend="loky" or "multiprocessing". + max_nbytes int, str, or None, optional, 1M by default + Threshold on the size of arrays passed to the workers that + triggers automated memory mapping in temp_folder. Can be an int + in Bytes, or a human-readable string, e.g., '1M' for 1 megabyte. + Use None to disable memmapping of large arrays. + Only active when backend="loky" or "multiprocessing". + mmap_mode: {None, 'r+', 'r', 'w+', 'c'} + Memmapping mode for numpy arrays passed to workers. + See 'max_nbytes' parameter documentation for more details. + + Notes + ----- + + This object uses workers to compute in parallel the application of a + function to many different arguments. The main functionality it brings + in addition to using the raw multiprocessing or concurrent.futures API + are (see examples for details): + + * More readable code, in particular since it avoids + constructing list of arguments. 
+ + * Easier debugging: + - informative tracebacks even when the error happens on + the client side + - using 'n_jobs=1' enables to turn off parallel computing + for debugging without changing the codepath + - early capture of pickling errors + + * An optional progress meter. + + * Interruption of multiprocesses jobs with 'Ctrl-C' + + * Flexible pickling control for the communication to and from + the worker processes. + + * Ability to use shared memory efficiently with worker + processes for large numpy-based datastructures. + + Examples + -------- + + A simple example: + + >>> from math import sqrt + >>> from joblib import Parallel, delayed + >>> Parallel(n_jobs=1)(delayed(sqrt)(i**2) for i in range(10)) + [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0] + + Reshaping the output when the function has several return + values: + + >>> from math import modf + >>> from joblib import Parallel, delayed + >>> r = Parallel(n_jobs=1)(delayed(modf)(i/2.) for i in range(10)) + >>> res, i = zip(*r) + >>> res + (0.0, 0.5, 0.0, 0.5, 0.0, 0.5, 0.0, 0.5, 0.0, 0.5) + >>> i + (0.0, 0.0, 1.0, 1.0, 2.0, 2.0, 3.0, 3.0, 4.0, 4.0) + + The progress meter: the higher the value of `verbose`, the more + messages: + + >>> from time import sleep + >>> from joblib import Parallel, delayed + >>> r = Parallel(n_jobs=2, verbose=10)(delayed(sleep)(.2) for _ in range(10)) #doctest: +SKIP + [Parallel(n_jobs=2)]: Done 1 tasks | elapsed: 0.6s + [Parallel(n_jobs=2)]: Done 4 tasks | elapsed: 0.8s + [Parallel(n_jobs=2)]: Done 10 out of 10 | elapsed: 1.4s finished + + Traceback example, note how the line of the error is indicated + as well as the values of the parameter passed to the function that + triggered the exception, even though the traceback happens in the + child process: + + >>> from heapq import nlargest + >>> from joblib import Parallel, delayed + >>> Parallel(n_jobs=2)(delayed(nlargest)(2, n) for n in (range(4), 'abcde', 3)) #doctest: +SKIP + #... + --------------------------------------------------------------------------- + Sub-process traceback: + --------------------------------------------------------------------------- + TypeError Mon Nov 12 11:37:46 2012 + PID: 12934 Python 2.7.3: /usr/bin/python + ........................................................................... + /usr/lib/python2.7/heapq.pyc in nlargest(n=2, iterable=3, key=None) + 419 if n >= size: + 420 return sorted(iterable, key=key, reverse=True)[:n] + 421 + 422 # When key is none, use simpler decoration + 423 if key is None: + --> 424 it = izip(iterable, count(0,-1)) # decorate + 425 result = _nlargest(n, it) + 426 return map(itemgetter(0), result) # undecorate + 427 + 428 # General case, slowest method + TypeError: izip argument #1 must support iteration + ___________________________________________________________________________ + + + Using pre_dispatch in a producer/consumer situation, where the + data is generated on the fly. Note how the producer is first + called 3 times before the parallel loop is initiated, and then + called to generate new data on the fly: + + >>> from math import sqrt + >>> from joblib import Parallel, delayed + >>> def producer(): + ... for i in range(6): + ... print('Produced %s' % i) + ... yield i + >>> out = Parallel(n_jobs=2, verbose=100, pre_dispatch='1.5*n_jobs')( + ... 
delayed(sqrt)(i) for i in producer()) #doctest: +SKIP + Produced 0 + Produced 1 + Produced 2 + [Parallel(n_jobs=2)]: Done 1 jobs | elapsed: 0.0s + Produced 3 + [Parallel(n_jobs=2)]: Done 2 jobs | elapsed: 0.0s + Produced 4 + [Parallel(n_jobs=2)]: Done 3 jobs | elapsed: 0.0s + Produced 5 + [Parallel(n_jobs=2)]: Done 4 jobs | elapsed: 0.0s + [Parallel(n_jobs=2)]: Done 6 out of 6 | elapsed: 0.0s remaining: 0.0s + [Parallel(n_jobs=2)]: Done 6 out of 6 | elapsed: 0.0s finished + + ''' + def __init__(self, n_jobs=None, backend=None, verbose=0, timeout=None, + pre_dispatch='2 * n_jobs', batch_size='auto', + temp_folder=None, max_nbytes='1M', mmap_mode='r', + prefer=None, require=None): + active_backend, context_n_jobs = get_active_backend( + prefer=prefer, require=require, verbose=verbose) + nesting_level = active_backend.nesting_level + if backend is None and n_jobs is None: + # If we are under a parallel_backend context manager, look up + # the default number of jobs and use that instead: + n_jobs = context_n_jobs + if n_jobs is None: + # No specific context override and no specific value request: + # default to 1. + n_jobs = 1 + self.n_jobs = n_jobs + self.verbose = verbose + self.timeout = timeout + self.pre_dispatch = pre_dispatch + self._ready_batches = queue.Queue() + self._id = uuid4().hex + self._reducer_callback = None + + if isinstance(max_nbytes, str): + max_nbytes = memstr_to_bytes(max_nbytes) + + self._backend_args = dict( + max_nbytes=max_nbytes, + mmap_mode=mmap_mode, + temp_folder=temp_folder, + prefer=prefer, + require=require, + verbose=max(0, self.verbose - 50), + ) + if DEFAULT_MP_CONTEXT is not None: + self._backend_args['context'] = DEFAULT_MP_CONTEXT + elif hasattr(mp, "get_context"): + self._backend_args['context'] = mp.get_context() + + if backend is None: + backend = active_backend + + elif isinstance(backend, ParallelBackendBase): + # Use provided backend as is, with the current nesting_level if it + # is not set yet. + if backend.nesting_level is None: + backend.nesting_level = nesting_level + + elif hasattr(backend, 'Pool') and hasattr(backend, 'Lock'): + # Make it possible to pass a custom multiprocessing context as + # backend to change the start method to forkserver or spawn or + # preload modules on the forkserver helper process. + self._backend_args['context'] = backend + backend = MultiprocessingBackend(nesting_level=nesting_level) + else: + try: + backend_factory = BACKENDS[backend] + except KeyError as e: + raise ValueError("Invalid backend: %s, expected one of %r" + % (backend, sorted(BACKENDS.keys()))) from e + backend = backend_factory(nesting_level=nesting_level) + + if (require == 'sharedmem' and + not getattr(backend, 'supports_sharedmem', False)): + raise ValueError("Backend %s does not support shared memory" + % backend) + + if (batch_size == 'auto' or isinstance(batch_size, Integral) and + batch_size > 0): + self.batch_size = batch_size + else: + raise ValueError( + "batch_size must be 'auto' or a positive integer, got: %r" + % batch_size) + + self._backend = backend + self._output = None + self._jobs = list() + self._managed_backend = False + + # This lock is used coordinate the main thread of this process with + # the async callback thread of our the pool. 
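A short sketch of the prefer soft hint resolved by the constructor above (no explicit backend is passed, so the thread-based default is selected):

>>> from joblib import Parallel, delayed  # doctest: +SKIP
>>> Parallel(n_jobs=2, prefer='threads')(delayed(len)('ab' * i) for i in range(4))
[0, 2, 4, 6]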
+ self._lock = threading.RLock() + + def __enter__(self): + self._managed_backend = True + self._initialize_backend() + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._terminate_backend() + self._managed_backend = False + + def _initialize_backend(self): + """Build a process or thread pool and return the number of workers""" + try: + n_jobs = self._backend.configure(n_jobs=self.n_jobs, parallel=self, + **self._backend_args) + if self.timeout is not None and not self._backend.supports_timeout: + warnings.warn( + 'The backend class {!r} does not support timeout. ' + "You have set 'timeout={}' in Parallel but " + "the 'timeout' parameter will not be used.".format( + self._backend.__class__.__name__, + self.timeout)) + + except FallbackToBackend as e: + # Recursively initialize the backend in case of requested fallback. + self._backend = e.backend + n_jobs = self._initialize_backend() + + return n_jobs + + def _effective_n_jobs(self): + if self._backend: + return self._backend.effective_n_jobs(self.n_jobs) + return 1 + + def _terminate_backend(self): + if self._backend is not None: + self._backend.terminate() + + def _dispatch(self, batch): + """Queue the batch for computing, with or without multiprocessing + + WARNING: this method is not thread-safe: it should be only called + indirectly via dispatch_one_batch. + + """ + # If job.get() catches an exception, it closes the queue: + if self._aborting: + return + + self.n_dispatched_tasks += len(batch) + self.n_dispatched_batches += 1 + + dispatch_timestamp = time.time() + cb = BatchCompletionCallBack(dispatch_timestamp, len(batch), self) + with self._lock: + job_idx = len(self._jobs) + job = self._backend.apply_async(batch, callback=cb) + # A job can complete so quickly than its callback is + # called before we get here, causing self._jobs to + # grow. To ensure correct results ordering, .insert is + # used (rather than .append) in the following line + self._jobs.insert(job_idx, job) + + def dispatch_next(self): + """Dispatch more data for parallel processing + + This method is meant to be called concurrently by the multiprocessing + callback. We rely on the thread-safety of dispatch_one_batch to protect + against concurrent consumption of the unprotected iterator. + + """ + if not self.dispatch_one_batch(self._original_iterator): + self._iterating = False + self._original_iterator = None + + def dispatch_one_batch(self, iterator): + """Prefetch the tasks for the next batch and dispatch them. + + The effective size of the batch is computed here. + If there are no more jobs to dispatch, return False, else return True. + + The iterator consumption and dispatching is protected by the same + lock so calling this function should be thread safe. + + """ + if self.batch_size == 'auto': + batch_size = self._backend.compute_batch_size() + else: + # Fixed batch size strategy + batch_size = self.batch_size + + with self._lock: + # to ensure an even distribution of the workolad between workers, + # we look ahead in the original iterators more than batch_size + # tasks - However, we keep consuming only one batch at each + # dispatch_one_batch call. The extra tasks are stored in a local + # queue, _ready_batches, that is looked-up prior to re-consuming + # tasks from the origal iterator. + try: + tasks = self._ready_batches.get(block=False) + except queue.Empty: + # slice the iterator n_jobs * batchsize items at a time. 
If the + # slice returns less than that, then the current batchsize puts + # too much weight on a subset of workers, while other may end + # up starving. So in this case, re-scale the batch size + # accordingly to distribute evenly the last items between all + # workers. + n_jobs = self._cached_effective_n_jobs + big_batch_size = batch_size * n_jobs + + islice = list(itertools.islice(iterator, big_batch_size)) + if len(islice) == 0: + return False + elif (iterator is self._original_iterator + and len(islice) < big_batch_size): + # We reached the end of the original iterator (unless + # iterator is the ``pre_dispatch``-long initial slice of + # the original iterator) -- decrease the batch size to + # account for potential variance in the batches running + # time. + final_batch_size = max(1, len(islice) // (10 * n_jobs)) + else: + final_batch_size = max(1, len(islice) // n_jobs) + + # enqueue n_jobs batches in a local queue + for i in range(0, len(islice), final_batch_size): + tasks = BatchedCalls(islice[i:i + final_batch_size], + self._backend.get_nested_backend(), + self._reducer_callback, + self._pickle_cache) + self._ready_batches.put(tasks) + + # finally, get one task. + tasks = self._ready_batches.get(block=False) + if len(tasks) == 0: + # No more tasks available in the iterator: tell caller to stop. + return False + else: + self._dispatch(tasks) + return True + + def _print(self, msg, msg_args): + """Display the message on stout or stderr depending on verbosity""" + # XXX: Not using the logger framework: need to + # learn to use logger better. + if not self.verbose: + return + if self.verbose < 50: + writer = sys.stderr.write + else: + writer = sys.stdout.write + msg = msg % msg_args + writer('[%s]: %s\n' % (self, msg)) + + def print_progress(self): + """Display the process of the parallel execution only a fraction + of time, controlled by self.verbose. + """ + if not self.verbose: + return + elapsed_time = time.time() - self._start_time + + # Original job iterator becomes None once it has been fully + # consumed : at this point we know the total number of jobs and we are + # able to display an estimation of the remaining time based on already + # completed jobs. Otherwise, we simply display the number of completed + # tasks. 
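The batching logic above can also be pinned to a fixed size instead of the 'auto' heuristic; a sketch with an arbitrary batch_size:

>>> from joblib import Parallel, delayed  # doctest: +SKIP
>>> Parallel(n_jobs=2, batch_size=5)(delayed(abs)(-i) for i in range(10))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]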
+ if self._original_iterator is not None: + if _verbosity_filter(self.n_dispatched_batches, self.verbose): + return + self._print('Done %3i tasks | elapsed: %s', + (self.n_completed_tasks, + short_format_time(elapsed_time), )) + else: + index = self.n_completed_tasks + # We are finished dispatching + total_tasks = self.n_dispatched_tasks + # We always display the first loop + if not index == 0: + # Display depending on the number of remaining items + # A message as soon as we finish dispatching, cursor is 0 + cursor = (total_tasks - index + 1 - + self._pre_dispatch_amount) + frequency = (total_tasks // self.verbose) + 1 + is_last_item = (index + 1 == total_tasks) + if (is_last_item or cursor % frequency): + return + remaining_time = (elapsed_time / index) * \ + (self.n_dispatched_tasks - index * 1.0) + # only display status if remaining time is greater or equal to 0 + self._print('Done %3i out of %3i | elapsed: %s remaining: %s', + (index, + total_tasks, + short_format_time(elapsed_time), + short_format_time(remaining_time), + )) + + def retrieve(self): + self._output = list() + while self._iterating or len(self._jobs) > 0: + if len(self._jobs) == 0: + # Wait for an async callback to dispatch new jobs + time.sleep(0.01) + continue + # We need to be careful: the job list can be filling up as + # we empty it and Python list are not thread-safe by default hence + # the use of the lock + with self._lock: + job = self._jobs.pop(0) + + try: + if getattr(self._backend, 'supports_timeout', False): + self._output.extend(job.get(timeout=self.timeout)) + else: + self._output.extend(job.get()) + + except BaseException as exception: + # Note: we catch any BaseException instead of just Exception + # instances to also include KeyboardInterrupt. + + # Stop dispatching any new job in the async callback thread + self._aborting = True + + # If the backend allows it, cancel or kill remaining running + # tasks without waiting for the results as we will raise + # the exception we got back to the caller instead of returning + # any result. + backend = self._backend + if (backend is not None and + hasattr(backend, 'abort_everything')): + # If the backend is managed externally we need to make sure + # to leave it in a working state to allow for future jobs + # scheduling. + ensure_ready = self._managed_backend + backend.abort_everything(ensure_ready=ensure_ready) + raise + + def __call__(self, iterable): + if self._jobs: + raise ValueError('This Parallel instance is already running') + # A flag used to abort the dispatching of jobs in case an + # exception is found + self._aborting = False + + if not self._managed_backend: + n_jobs = self._initialize_backend() + else: + n_jobs = self._effective_n_jobs() + + if isinstance(self._backend, LokyBackend): + # For the loky backend, we add a callback executed when reducing + # BatchCalls, that makes the loky executor use a temporary folder + # specific to this Parallel object when pickling temporary memmaps. + # This callback is necessary to ensure that several Parallel + # objects using the same resuable executor don't use the same + # temporary resources. + + def _batched_calls_reducer_callback(): + # Relevant implementation detail: the following lines, called + # when reducing BatchedCalls, are called in a thread-safe + # situation, meaning that the context of the temporary folder + # manager will not be changed in between the callback execution + # and the end of the BatchedCalls pickling. 
The reason is that + # pickling (the only place where set_current_context is used) + # is done from a single thread (the queue_feeder_thread). + self._backend._workers._temp_folder_manager.set_current_context( # noqa + self._id + ) + self._reducer_callback = _batched_calls_reducer_callback + + # self._effective_n_jobs should be called in the Parallel.__call__ + # thread only -- store its value in an attribute for further queries. + self._cached_effective_n_jobs = n_jobs + + backend_name = self._backend.__class__.__name__ + if n_jobs == 0: + raise RuntimeError("%s has no active worker." % backend_name) + + self._print("Using backend %s with %d concurrent workers.", + (backend_name, n_jobs)) + if hasattr(self._backend, 'start_call'): + self._backend.start_call() + iterator = iter(iterable) + pre_dispatch = self.pre_dispatch + + if pre_dispatch == 'all' or n_jobs == 1: + # prevent further dispatch via multiprocessing callback thread + self._original_iterator = None + self._pre_dispatch_amount = 0 + else: + self._original_iterator = iterator + if hasattr(pre_dispatch, 'endswith'): + pre_dispatch = eval(pre_dispatch) + self._pre_dispatch_amount = pre_dispatch = int(pre_dispatch) + + # The main thread will consume the first pre_dispatch items and + # the remaining items will later be lazily dispatched by async + # callbacks upon task completions. + + # TODO: this iterator should be batch_size * n_jobs + iterator = itertools.islice(iterator, self._pre_dispatch_amount) + + self._start_time = time.time() + self.n_dispatched_batches = 0 + self.n_dispatched_tasks = 0 + self.n_completed_tasks = 0 + # Use a caching dict for callables that are pickled with cloudpickle to + # improve performances. This cache is used only in the case of + # functions that are defined in the __main__ module, functions that are + # defined locally (inside another function) and lambda expressions. + self._pickle_cache = dict() + try: + # Only set self._iterating to True if at least a batch + # was dispatched. In particular this covers the edge + # case of Parallel used with an exhausted iterator. If + # self._original_iterator is None, then this means either + # that pre_dispatch == "all", n_jobs == 1 or that the first batch + # was very quick and its callback already dispatched all the + # remaining jobs. + self._iterating = False + if self.dispatch_one_batch(iterator): + self._iterating = self._original_iterator is not None + + while self.dispatch_one_batch(iterator): + pass + + if pre_dispatch == "all" or n_jobs == 1: + # The iterable was consumed all at once by the above for loop. + # No need to wait for async callbacks to trigger to + # consumption. 
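When the same Parallel object is reused for several calls, the managed-backend path above (entered via the context manager) keeps the worker pool alive between calls; a sketch:

>>> from math import sqrt
>>> from joblib import Parallel, delayed  # doctest: +SKIP
>>> with Parallel(n_jobs=2) as parallel:
...     first = parallel(delayed(sqrt)(i) for i in range(3))
...     second = parallel(delayed(sqrt)(i) for i in range(3, 6))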
+ self._iterating = False + + with self._backend.retrieval_context(): + self.retrieve() + # Make sure that we get a last message telling us we are done + elapsed_time = time.time() - self._start_time + self._print('Done %3i out of %3i | elapsed: %s finished', + (len(self._output), len(self._output), + short_format_time(elapsed_time))) + finally: + if hasattr(self._backend, 'stop_call'): + self._backend.stop_call() + if not self._managed_backend: + self._terminate_backend() + self._jobs = list() + self._pickle_cache = None + output = self._output + self._output = None + return output + + def __repr__(self): + return '%s(n_jobs=%s)' % (self.__class__.__name__, self.n_jobs) diff --git a/minor_project/lib/python3.6/site-packages/joblib/pool.py b/minor_project/lib/python3.6/site-packages/joblib/pool.py new file mode 100644 index 0000000..8443899 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/pool.py @@ -0,0 +1,352 @@ +"""Custom implementation of multiprocessing.Pool with custom pickler. + +This module provides efficient ways of working with data stored in +shared memory with numpy.memmap arrays without inducing any memory +copy between the parent and child processes. + +This module should not be imported if multiprocessing is not +available as it implements subclasses of multiprocessing Pool +that uses a custom alternative to SimpleQueue. + +""" +# Author: Olivier Grisel +# Copyright: 2012, Olivier Grisel +# License: BSD 3 clause + +import copyreg +import sys +import warnings +from time import sleep + +try: + WindowsError +except NameError: + WindowsError = type(None) + +from pickle import Pickler + +from pickle import HIGHEST_PROTOCOL +from io import BytesIO + +from ._memmapping_reducer import get_memmapping_reducers +from ._memmapping_reducer import TemporaryResourcesManager +from ._multiprocessing_helpers import mp, assert_spawning + +# We need the class definition to derive from it, not the multiprocessing.Pool +# factory function +from multiprocessing.pool import Pool + +try: + import numpy as np +except ImportError: + np = None + + +############################################################################### +# Enable custom pickling in Pool queues + +class CustomizablePickler(Pickler): + """Pickler that accepts custom reducers. + + TODO python2_drop : can this be simplified ? + + HIGHEST_PROTOCOL is selected by default as this pickler is used + to pickle ephemeral datastructures for interprocess communication + hence no backward compatibility is required. + + `reducers` is expected to be a dictionary with key/values + being `(type, callable)` pairs where `callable` is a function that + give an instance of `type` will return a tuple `(constructor, + tuple_of_objects)` to rebuild an instance out of the pickled + `tuple_of_objects` as would return a `__reduce__` method. See the + standard library documentation on pickling for more details. + + """ + + # We override the pure Python pickler as its the only way to be able to + # customize the dispatch table without side effects in Python 2.7 + # to 3.2. For Python 3.3+ leverage the new dispatch_table + # feature from https://bugs.python.org/issue14166 that makes it possible + # to use the C implementation of the Pickler which is faster. 
+ + def __init__(self, writer, reducers=None, protocol=HIGHEST_PROTOCOL): + Pickler.__init__(self, writer, protocol=protocol) + if reducers is None: + reducers = {} + if hasattr(Pickler, 'dispatch'): + # Make the dispatch registry an instance level attribute instead of + # a reference to the class dictionary under Python 2 + self.dispatch = Pickler.dispatch.copy() + else: + # Under Python 3 initialize the dispatch table with a copy of the + # default registry + self.dispatch_table = copyreg.dispatch_table.copy() + for type, reduce_func in reducers.items(): + self.register(type, reduce_func) + + def register(self, type, reduce_func): + """Attach a reducer function to a given type in the dispatch table.""" + if hasattr(Pickler, 'dispatch'): + # Python 2 pickler dispatching is not explicitly customizable. + # Let us use a closure to workaround this limitation. + def dispatcher(self, obj): + reduced = reduce_func(obj) + self.save_reduce(obj=obj, *reduced) + self.dispatch[type] = dispatcher + else: + self.dispatch_table[type] = reduce_func + + +class CustomizablePicklingQueue(object): + """Locked Pipe implementation that uses a customizable pickler. + + This class is an alternative to the multiprocessing implementation + of SimpleQueue in order to make it possible to pass custom + pickling reducers, for instance to avoid memory copy when passing + memory mapped datastructures. + + `reducers` is expected to be a dict with key / values being + `(type, callable)` pairs where `callable` is a function that, given an + instance of `type`, will return a tuple `(constructor, tuple_of_objects)` + to rebuild an instance out of the pickled `tuple_of_objects` as would + return a `__reduce__` method. + + See the standard library documentation on pickling for more details. + """ + + def __init__(self, context, reducers=None): + self._reducers = reducers + self._reader, self._writer = context.Pipe(duplex=False) + self._rlock = context.Lock() + if sys.platform == 'win32': + self._wlock = None + else: + self._wlock = context.Lock() + self._make_methods() + + def __getstate__(self): + assert_spawning(self) + return (self._reader, self._writer, self._rlock, self._wlock, + self._reducers) + + def __setstate__(self, state): + (self._reader, self._writer, self._rlock, self._wlock, + self._reducers) = state + self._make_methods() + + def empty(self): + return not self._reader.poll() + + def _make_methods(self): + self._recv = recv = self._reader.recv + racquire, rrelease = self._rlock.acquire, self._rlock.release + + def get(): + racquire() + try: + return recv() + finally: + rrelease() + + self.get = get + + if self._reducers: + def send(obj): + buffer = BytesIO() + CustomizablePickler(buffer, self._reducers).dump(obj) + self._writer.send_bytes(buffer.getvalue()) + self._send = send + else: + self._send = send = self._writer.send + if self._wlock is None: + # writes to a message oriented win32 pipe are atomic + self.put = send + else: + wlock_acquire, wlock_release = ( + self._wlock.acquire, self._wlock.release) + + def put(obj): + wlock_acquire() + try: + return send(obj) + finally: + wlock_release() + + self.put = put + + +class PicklingPool(Pool): + """Pool implementation with customizable pickling reducers. + + This is useful to control how data is shipped between processes + and makes it possible to use shared memory without useless + copies induces by the default pickling methods of the original + objects passed as arguments to dispatch. 
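A minimal sketch of plugging a custom reducer into the CustomizablePickler defined above (the complex reducer is purely illustrative):

>>> import io, pickle  # doctest: +SKIP
>>> def reduce_complex(z):
...     return (complex, (z.real, z.imag))
>>> buffer = io.BytesIO()
>>> pickler = CustomizablePickler(buffer, reducers={complex: reduce_complex})
>>> pickler.dump(1 + 2j)
>>> pickle.loads(buffer.getvalue())
(1+2j)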
+ + `forward_reducers` and `backward_reducers` are expected to be + dictionaries with key/values being `(type, callable)` pairs where + `callable` is a function that, given an instance of `type`, will return a + tuple `(constructor, tuple_of_objects)` to rebuild an instance out of the + pickled `tuple_of_objects` as would return a `__reduce__` method. + See the standard library documentation about pickling for more details. + + """ + + def __init__(self, processes=None, forward_reducers=None, + backward_reducers=None, **kwargs): + if forward_reducers is None: + forward_reducers = dict() + if backward_reducers is None: + backward_reducers = dict() + self._forward_reducers = forward_reducers + self._backward_reducers = backward_reducers + poolargs = dict(processes=processes) + poolargs.update(kwargs) + super(PicklingPool, self).__init__(**poolargs) + + def _setup_queues(self): + context = getattr(self, '_ctx', mp) + self._inqueue = CustomizablePicklingQueue(context, + self._forward_reducers) + self._outqueue = CustomizablePicklingQueue(context, + self._backward_reducers) + self._quick_put = self._inqueue._send + self._quick_get = self._outqueue._recv + + +class MemmappingPool(PicklingPool): + """Process pool that shares large arrays to avoid memory copy. + + This drop-in replacement for `multiprocessing.pool.Pool` makes + it possible to work efficiently with shared memory in a numpy + context. + + Existing instances of numpy.memmap are preserved: the child + suprocesses will have access to the same shared memory in the + original mode except for the 'w+' mode that is automatically + transformed as 'r+' to avoid zeroing the original data upon + instantiation. + + Furthermore large arrays from the parent process are automatically + dumped to a temporary folder on the filesystem such as child + processes to access their content via memmapping (file system + backed shared memory). + + Note: it is important to call the terminate method to collect + the temporary folder used by the pool. + + Parameters + ---------- + processes: int, optional + Number of worker processes running concurrently in the pool. + initializer: callable, optional + Callable executed on worker process creation. + initargs: tuple, optional + Arguments passed to the initializer callable. + temp_folder: (str, callable) optional + If str: + Folder to be used by the pool for memmapping large arrays + for sharing memory with worker processes. If None, this will try in + order: + - a folder pointed by the JOBLIB_TEMP_FOLDER environment variable, + - /dev/shm if the folder exists and is writable: this is a RAMdisk + filesystem available by default on modern Linux distributions, + - the default system temporary folder that can be overridden + with TMP, TMPDIR or TEMP environment variables, typically /tmp + under Unix operating systems. + if callable: + An callable in charge of dynamically resolving a temporary folder + for memmapping large arrays. + max_nbytes int or None, optional, 1e6 by default + Threshold on the size of arrays passed to the workers that + triggers automated memory mapping in temp_folder. + Use None to disable memmapping of large arrays. + mmap_mode: {'r+', 'r', 'w+', 'c'} + Memmapping mode for numpy arrays passed to workers. + See 'max_nbytes' parameter documentation for more details. + forward_reducers: dictionary, optional + Reducers used to pickle objects passed from master to worker + processes: see below. 
+ backward_reducers: dictionary, optional + Reducers used to pickle return values from workers back to the + master process. + verbose: int, optional + Make it possible to monitor how the communication of numpy arrays + with the subprocess is handled (pickling or memmapping) + prewarm: bool or str, optional, "auto" by default. + If True, force a read on newly memmapped array to make sure that OS + pre-cache it in memory. This can be useful to avoid concurrent disk + access when the same data array is passed to different worker + processes. If "auto" (by default), prewarm is set to True, unless the + Linux shared memory partition /dev/shm is available and used as temp + folder. + + `forward_reducers` and `backward_reducers` are expected to be + dictionaries with key/values being `(type, callable)` pairs where + `callable` is a function that give an instance of `type` will return + a tuple `(constructor, tuple_of_objects)` to rebuild an instance out + of the pickled `tuple_of_objects` as would return a `__reduce__` + method. See the standard library documentation on pickling for more + details. + + """ + + def __init__(self, processes=None, temp_folder=None, max_nbytes=1e6, + mmap_mode='r', forward_reducers=None, backward_reducers=None, + verbose=0, context_id=None, prewarm=False, **kwargs): + + if context_id is not None: + warnings.warn('context_id is deprecated and ignored in joblib' + ' 0.9.4 and will be removed in 0.11', + DeprecationWarning) + + manager = TemporaryResourcesManager(temp_folder) + self._temp_folder_manager = manager + + # The usage of a temp_folder_resolver over a simple temp_folder is + # superfluous for multiprocessing pools, as they don't get reused, see + # get_memmapping_executor for more details. We still use it for code + # simplicity. + forward_reducers, backward_reducers = \ + get_memmapping_reducers( + temp_folder_resolver=manager.resolve_temp_folder_name, + max_nbytes=max_nbytes, mmap_mode=mmap_mode, + forward_reducers=forward_reducers, + backward_reducers=backward_reducers, verbose=verbose, + unlink_on_gc_collect=False, prewarm=prewarm) + + poolargs = dict( + processes=processes, + forward_reducers=forward_reducers, + backward_reducers=backward_reducers) + poolargs.update(kwargs) + super(MemmappingPool, self).__init__(**poolargs) + + def terminate(self): + n_retries = 10 + for i in range(n_retries): + try: + super(MemmappingPool, self).terminate() + break + except OSError as e: + if isinstance(e, WindowsError): + # Workaround occasional "[Error 5] Access is denied" issue + # when trying to terminate a process under windows. + sleep(0.1) + if i + 1 == n_retries: + warnings.warn("Failed to terminate worker processes in" + " multiprocessing pool: %r" % e) + self._temp_folder_manager._unlink_temporary_resources() + + @property + def _temp_folder(self): + # Legacy property in tests. could be removed if we refactored the + # memmapping tests. SHOULD ONLY BE USED IN TESTS! + # We cache this property because it is called late in the tests - at + # this point, all context have been unregistered, and + # resolve_temp_folder_name raises an error. 
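A usage sketch of the MemmappingPool defined above (array size and max_nbytes threshold are arbitrary; arrays above the threshold are dumped to the temporary folder and memmapped in the workers):

>>> import numpy as np  # doctest: +SKIP
>>> data = np.ones(int(1e6))
>>> pool = MemmappingPool(processes=2, max_nbytes=1e5)
>>> try:
...     results = pool.map(np.sum, [data, data])
... finally:
...     pool.terminate()
>>> results
[1000000.0, 1000000.0]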
+ if getattr(self, '_cached_temp_folder', None) is not None: + return self._cached_temp_folder + else: + self._cached_temp_folder = self._temp_folder_manager.resolve_temp_folder_name() # noqa + return self._cached_temp_folder diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__init__.py b/minor_project/lib/python3.6/site-packages/joblib/test/__init__.py new file mode 100644 index 0000000..401de78 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/__init__.py @@ -0,0 +1,2 @@ +from joblib.test import test_memory +from joblib.test import test_hashing diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..d5deb60 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/common.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/common.cpython-36.pyc new file mode 100644 index 0000000..82da1ff Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/common.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_backports.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_backports.cpython-36.pyc new file mode 100644 index 0000000..f7338fe Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_backports.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_dask.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_dask.cpython-36.pyc new file mode 100644 index 0000000..463c118 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_dask.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_deprecated_objects.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_deprecated_objects.cpython-36.pyc new file mode 100644 index 0000000..3ea604e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_deprecated_objects.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_disk.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_disk.cpython-36.pyc new file mode 100644 index 0000000..bab8544 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_disk.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_format_stack.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_format_stack.cpython-36.pyc new file mode 100644 index 0000000..fcf2707 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_format_stack.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_func_inspect.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_func_inspect.cpython-36.pyc new file mode 100644 index 0000000..cf03cf6 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_func_inspect.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_func_inspect_special_encoding.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_func_inspect_special_encoding.cpython-36.pyc new file mode 100644 index 0000000..c150089 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_func_inspect_special_encoding.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_hashing.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_hashing.cpython-36.pyc new file mode 100644 index 0000000..1bfa6e1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_hashing.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_init.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_init.cpython-36.pyc new file mode 100644 index 0000000..ec6acd9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_init.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_logger.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_logger.cpython-36.pyc new file mode 100644 index 0000000..8954d94 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_logger.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_memmapping.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_memmapping.cpython-36.pyc new file mode 100644 index 0000000..8698f74 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_memmapping.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_memory.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_memory.cpython-36.pyc new file mode 100644 index 0000000..82c8441 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_memory.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_module.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_module.cpython-36.pyc new file mode 100644 index 0000000..2cc9e46 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_module.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_my_exceptions.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_my_exceptions.cpython-36.pyc new file mode 100644 index 0000000..b37eb2f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_my_exceptions.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_numpy_pickle.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_numpy_pickle.cpython-36.pyc new file mode 100644 index 0000000..1204c79 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_numpy_pickle.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_numpy_pickle_compat.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_numpy_pickle_compat.cpython-36.pyc new file mode 100644 index 0000000..6793bc9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_numpy_pickle_compat.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_numpy_pickle_utils.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_numpy_pickle_utils.cpython-36.pyc new file mode 100644 index 0000000..2666c5c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_numpy_pickle_utils.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_parallel.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_parallel.cpython-36.pyc new file mode 100644 index 0000000..72eca65 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_parallel.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_store_backends.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_store_backends.cpython-36.pyc new file mode 100644 index 0000000..1cf0f13 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_store_backends.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_testing.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_testing.cpython-36.pyc new file mode 100644 index 0000000..0880b71 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/test_testing.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/testutils.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/testutils.cpython-36.pyc new file mode 100644 index 0000000..bb91d32 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/__pycache__/testutils.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/common.py b/minor_project/lib/python3.6/site-packages/joblib/test/common.py new file mode 100644 index 0000000..6187894 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/common.py @@ -0,0 +1,114 @@ +""" +Small utilities for testing. 
+""" +import threading +import signal +import time +import os +import sys +import gc + +from joblib._multiprocessing_helpers import mp +from joblib.testing import SkipTest, skipif + +try: + import lz4 +except ImportError: + lz4 = None + +# A decorator to run tests only when numpy is available +try: + import numpy as np + + def with_numpy(func): + """A decorator to skip tests requiring numpy.""" + return func + +except ImportError: + def with_numpy(func): + """A decorator to skip tests requiring numpy.""" + def my_func(): + raise SkipTest('Test requires numpy') + return my_func + np = None + +# TODO: Turn this back on after refactoring yield based tests in test_hashing +# with_numpy = skipif(not np, reason='Test requires numpy.') + +# we use memory_profiler library for memory consumption checks +try: + from memory_profiler import memory_usage + + def with_memory_profiler(func): + """A decorator to skip tests requiring memory_profiler.""" + return func + + def memory_used(func, *args, **kwargs): + """Compute memory usage when executing func.""" + gc.collect() + mem_use = memory_usage((func, args, kwargs), interval=.001) + return max(mem_use) - min(mem_use) + +except ImportError: + def with_memory_profiler(func): + """A decorator to skip tests requiring memory_profiler.""" + def dummy_func(): + raise SkipTest('Test requires memory_profiler.') + return dummy_func + + memory_usage = memory_used = None + +# A utility to kill the test runner in case a multiprocessing assumption +# triggers an infinite wait on a pipe by the master process for one of its +# failed workers + +_KILLER_THREADS = dict() + + +def setup_autokill(module_name, timeout=30): + """Timeout based suiciding thread to kill the test runner process + + If some subprocess dies in an unexpected way we don't want the + parent process to block indefinitely. 
+ """ + if "NO_AUTOKILL" in os.environ or "--pdb" in sys.argv: + # Do not install the autokiller + return + + # Renew any previous contract under that name by first cancelling the + # previous version (that should normally not happen in practice) + teardown_autokill(module_name) + + def autokill(): + pid = os.getpid() + print("Timeout exceeded: terminating stalled process: %d" % pid) + os.kill(pid, signal.SIGTERM) + + # If were are still there ask the OS to kill ourself for real + time.sleep(0.5) + print("Timeout exceeded: killing stalled process: %d" % pid) + os.kill(pid, signal.SIGKILL) + + _KILLER_THREADS[module_name] = t = threading.Timer(timeout, autokill) + t.start() + + +def teardown_autokill(module_name): + """Cancel a previously started killer thread""" + killer = _KILLER_THREADS.get(module_name) + if killer is not None: + killer.cancel() + + +with_multiprocessing = skipif( + mp is None, reason='Needs multiprocessing to run.') + + +with_dev_shm = skipif( + not os.path.exists('/dev/shm'), + reason='This test requires a large /dev/shm shared memory fs.') + +with_lz4 = skipif(lz4 is None, reason='Needs lz4 compression to run') + +without_lz4 = skipif( + lz4 is not None, reason='Needs lz4 not being installed to run') diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/data/__init__.py b/minor_project/lib/python3.6/site-packages/joblib/test/data/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/data/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/data/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..4f7a43c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/data/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/data/__pycache__/create_numpy_pickle.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/joblib/test/data/__pycache__/create_numpy_pickle.cpython-36.pyc new file mode 100644 index 0000000..a97f274 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/joblib/test/data/__pycache__/create_numpy_pickle.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/data/create_numpy_pickle.py b/minor_project/lib/python3.6/site-packages/joblib/test/data/create_numpy_pickle.py new file mode 100644 index 0000000..0128f91 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/data/create_numpy_pickle.py @@ -0,0 +1,94 @@ +""" +This script is used to generate test data for joblib/test/test_numpy_pickle.py +""" + +import sys +import re + +# pytest needs to be able to import this module even when numpy is +# not installed +try: + import numpy as np +except ImportError: + np = None + +import joblib + + +def get_joblib_version(joblib_version=joblib.__version__): + """Normalize joblib version by removing suffix. 
+ + >>> get_joblib_version('0.8.4') + '0.8.4' + >>> get_joblib_version('0.8.4b1') + '0.8.4' + >>> get_joblib_version('0.9.dev0') + '0.9' + """ + matches = [re.match(r'(\d+).*', each) + for each in joblib_version.split('.')] + return '.'.join([m.group(1) for m in matches if m is not None]) + + +def write_test_pickle(to_pickle, args): + kwargs = {} + compress = args.compress + method = args.method + joblib_version = get_joblib_version() + py_version = '{0[0]}{0[1]}'.format(sys.version_info) + numpy_version = ''.join(np.__version__.split('.')[:2]) + + # The game here is to generate the right filename according to the options. + body = '_compressed' if (compress and method == 'zlib') else '' + if compress: + if method == 'zlib': + kwargs['compress'] = True + extension = '.gz' + else: + kwargs['compress'] = (method, 3) + extension = '.pkl.{}'.format(method) + if args.cache_size: + kwargs['cache_size'] = 0 + body += '_cache_size' + else: + extension = '.pkl' + + pickle_filename = 'joblib_{}{}_pickle_py{}_np{}{}'.format( + joblib_version, body, py_version, numpy_version, extension) + + try: + joblib.dump(to_pickle, pickle_filename, **kwargs) + except Exception as e: + # With old python version (=< 3.3.), we can arrive there when + # dumping compressed pickle with LzmaFile. + print("Error: cannot generate file '{}' with arguments '{}'. " + "Error was: {}".format(pickle_filename, kwargs, e)) + else: + print("File '{}' generated successfuly.".format(pickle_filename)) + +if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser(description="Joblib pickle data " + "generator.") + parser.add_argument('--cache_size', action="store_true", + help="Force creation of companion numpy " + "files for pickled arrays.") + parser.add_argument('--compress', action="store_true", + help="Generate compress pickles.") + parser.add_argument('--method', type=str, default='zlib', + choices=['zlib', 'gzip', 'bz2', 'xz', 'lzma', 'lz4'], + help="Set compression method.") + # We need to be specific about dtypes in particular endianness + # because the pickles can be generated on one architecture and + # the tests run on another one. See + # https://github.com/joblib/joblib/issues/279. + to_pickle = [np.arange(5, dtype=np.dtype(' 10 + + +def random2(): + return random() + + +def test_dont_assume_function_purity(loop): + with cluster() as (s, [a, b]): + with Client(s['address'], loop=loop) as client: # noqa: F841 + with parallel_backend('dask') as (ba, _): + x, y = Parallel()(delayed(random2)() for i in range(2)) + assert x != y + + +@pytest.mark.parametrize("mixed", [True, False]) +def test_dask_funcname(loop, mixed): + from joblib._dask import Batch + if not mixed: + tasks = [delayed(inc)(i) for i in range(4)] + batch_repr = 'batch_of_inc_4_calls' + else: + tasks = [ + delayed(abs)(i) if i % 2 else delayed(inc)(i) for i in range(4) + ] + batch_repr = 'mixed_batch_of_inc_4_calls' + + assert repr(Batch(tasks)) == batch_repr + + with cluster() as (s, [a, b]): + with Client(s['address'], loop=loop) as client: + with parallel_backend('dask') as (ba, _): + _ = Parallel(batch_size=2, pre_dispatch='all')(tasks) + + def f(dask_scheduler): + return list(dask_scheduler.transition_log) + batch_repr = batch_repr.replace('4', '2') + log = client.run_on_scheduler(f) + assert all('batch_of_inc' in tup[0] for tup in log) + + +def test_no_undesired_distributed_cache_hit(loop): + # Dask has a pickle cache for callables that are called many times. 
Because + # the dask backends used to wrapp both the functions and the arguments + # under instances of the Batch callable class this caching mechanism could + # lead to bugs as described in: https://github.com/joblib/joblib/pull/1055 + # The joblib-dask backend has been refactored to avoid bundling the + # arguments as an attribute of the Batch instance to avoid this problem. + # This test serves as non-regression problem. + + # Use a large number of input arguments to give the AutoBatchingMixin + # enough tasks to kick-in. + lists = [[] for _ in range(100)] + np = pytest.importorskip('numpy') + X = np.arange(int(1e6)) + + def isolated_operation(list_, X=None): + list_.append(uuid4().hex) + return list_ + + cluster = LocalCluster(n_workers=1, threads_per_worker=2) + client = Client(cluster) + try: + with parallel_backend('dask') as (ba, _): + # dispatches joblib.parallel.BatchedCalls + res = Parallel()( + delayed(isolated_operation)(list_) for list_ in lists + ) + + # The original arguments should not have been mutated as the mutation + # happens in the dask worker process. + assert lists == [[] for _ in range(100)] + + # Here we did not pass any large numpy array as argument to + # isolated_operation so no scattering event should happen under the + # hood. + counts = count_events('receive-from-scatter', client) + assert sum(counts.values()) == 0 + assert all([len(r) == 1 for r in res]) + + with parallel_backend('dask') as (ba, _): + # Append a large array which will be scattered by dask, and + # dispatch joblib._dask.Batch + res = Parallel()( + delayed(isolated_operation)(list_, X=X) for list_ in lists + ) + + # This time, auto-scattering should have kicked it. + counts = count_events('receive-from-scatter', client) + assert sum(counts.values()) > 0 + assert all([len(r) == 1 for r in res]) + finally: + client.close() + cluster.close() + + +class CountSerialized(object): + def __init__(self, x): + self.x = x + self.count = 0 + + def __add__(self, other): + return self.x + getattr(other, 'x', other) + + __radd__ = __add__ + + def __reduce__(self): + self.count += 1 + return (CountSerialized, (self.x,)) + + +def add5(a, b, c, d=0, e=0): + return a + b + c + d + e + + +def test_manual_scatter(loop): + x = CountSerialized(1) + y = CountSerialized(2) + z = CountSerialized(3) + + with cluster() as (s, [a, b]): + with Client(s['address'], loop=loop) as client: # noqa: F841 + with parallel_backend('dask', scatter=[x, y]) as (ba, _): + f = delayed(add5) + tasks = [f(x, y, z, d=4, e=5), + f(x, z, y, d=5, e=4), + f(y, x, z, d=x, e=5), + f(z, z, x, d=z, e=y)] + expected = [func(*args, **kwargs) + for func, args, kwargs in tasks] + results = Parallel()(tasks) + + # Scatter must take a list/tuple + with pytest.raises(TypeError): + with parallel_backend('dask', loop=loop, scatter=1): + pass + + assert results == expected + + # Scattered variables only serialized once + assert x.count == 1 + assert y.count == 1 + # Depending on the version of distributed, the unscattered z variable + # is either pickled 4 or 6 times, possibly because of the memoization + # of objects that appear several times in the arguments of a delayed + # task. + assert z.count in (4, 6) + + +# When the same IOLoop is used for multiple clients in a row, use +# loop_in_thread instead of loop to prevent the Client from closing it. 
See +# dask/distributed #4112 +def test_auto_scatter(loop_in_thread): + np = pytest.importorskip('numpy') + data1 = np.ones(int(1e4), dtype=np.uint8) + data2 = np.ones(int(1e4), dtype=np.uint8) + data_to_process = ([data1] * 3) + ([data2] * 3) + + with cluster() as (s, [a, b]): + with Client(s['address'], loop=loop_in_thread) as client: + with parallel_backend('dask') as (ba, _): + # Passing the same data as arg and kwarg triggers a single + # scatter operation whose result is reused. + Parallel()(delayed(noop)(data, data, i, opt=data) + for i, data in enumerate(data_to_process)) + # By default large array are automatically scattered with + # broadcast=1 which means that one worker must directly receive + # the data from the scatter operation once. + counts = count_events('receive-from-scatter', client) + # assert counts[a['address']] + counts[b['address']] == 2 + assert 2 <= counts[a['address']] + counts[b['address']] <= 4 + + with cluster() as (s, [a, b]): + with Client(s['address'], loop=loop_in_thread) as client: + with parallel_backend('dask') as (ba, _): + Parallel()(delayed(noop)(data1[:3], i) for i in range(5)) + # Small arrays are passed within the task definition without going + # through a scatter operation. + counts = count_events('receive-from-scatter', client) + assert counts[a['address']] == 0 + assert counts[b['address']] == 0 + + +@pytest.mark.parametrize("retry_no", list(range(2))) +def test_nested_scatter(loop, retry_no): + + np = pytest.importorskip('numpy') + + NUM_INNER_TASKS = 10 + NUM_OUTER_TASKS = 10 + + def my_sum(x, i, j): + return np.sum(x) + + def outer_function_joblib(array, i): + client = get_client() # noqa + with parallel_backend("dask"): + results = Parallel()( + delayed(my_sum)(array[j:], i, j) for j in range( + NUM_INNER_TASKS) + ) + return sum(results) + + with cluster() as (s, [a, b]): + with Client(s['address'], loop=loop) as _: + with parallel_backend("dask"): + my_array = np.ones(10000) + _ = Parallel()( + delayed(outer_function_joblib)( + my_array[i:], i) for i in range(NUM_OUTER_TASKS) + ) + + +def test_nested_backend_context_manager(loop_in_thread): + def get_nested_pids(): + pids = set(Parallel(n_jobs=2)(delayed(os.getpid)() for _ in range(2))) + pids |= set(Parallel(n_jobs=2)(delayed(os.getpid)() for _ in range(2))) + return pids + + with cluster() as (s, [a, b]): + with Client(s['address'], loop=loop_in_thread) as client: + with parallel_backend('dask') as (ba, _): + pid_groups = Parallel(n_jobs=2)( + delayed(get_nested_pids)() + for _ in range(10) + ) + for pid_group in pid_groups: + assert len(set(pid_group)) <= 2 + + # No deadlocks + with Client(s['address'], loop=loop_in_thread) as client: # noqa: F841 + with parallel_backend('dask') as (ba, _): + pid_groups = Parallel(n_jobs=2)( + delayed(get_nested_pids)() + for _ in range(10) + ) + for pid_group in pid_groups: + assert len(set(pid_group)) <= 2 + + +def test_nested_backend_context_manager_implicit_n_jobs(loop): + # Check that Parallel with no explicit n_jobs value automatically selects + # all the dask workers, including in nested calls. 
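+    # Illustrative sketch only (not part of the upstream joblib test suite):
+    # the pattern these dask tests exercise is that, once a distributed
+    # Client exists, ``parallel_backend('dask')`` routes Parallel calls to
+    # the cluster, and an unspecified n_jobs resolves to -1, i.e. "use every
+    # available dask worker" (as asserted just below).
+    #
+    #     from distributed import Client
+    #     from joblib import Parallel, delayed, parallel_backend
+    #
+    #     client = Client(processes=False)   # local in-process cluster
+    #     with parallel_backend('dask'):
+    #         squares = Parallel()(delayed(pow)(i, 2) for i in range(8))
+    #     assert squares == [i ** 2 for i in range(8)]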
+ + def _backend_type(p): + return p._backend.__class__.__name__ + + def get_nested_implicit_n_jobs(): + with Parallel() as p: + return _backend_type(p), p.n_jobs + + with cluster() as (s, [a, b]): + with Client(s['address'], loop=loop) as client: # noqa: F841 + with parallel_backend('dask') as (ba, _): + with Parallel() as p: + assert _backend_type(p) == "DaskDistributedBackend" + assert p.n_jobs == -1 + all_nested_n_jobs = p( + delayed(get_nested_implicit_n_jobs)() + for _ in range(2) + ) + for backend_type, nested_n_jobs in all_nested_n_jobs: + assert backend_type == "DaskDistributedBackend" + assert nested_n_jobs == -1 + + +def test_errors(loop): + with pytest.raises(ValueError) as info: + with parallel_backend('dask'): + pass + + assert "create a dask client" in str(info.value).lower() + + +def test_correct_nested_backend(loop): + with cluster() as (s, [a, b]): + with Client(s['address'], loop=loop) as client: # noqa: F841 + # No requirement, should be us + with parallel_backend('dask') as (ba, _): + result = Parallel(n_jobs=2)( + delayed(outer)(nested_require=None) for _ in range(1)) + assert isinstance(result[0][0][0], DaskDistributedBackend) + + # Require threads, should be threading + with parallel_backend('dask') as (ba, _): + result = Parallel(n_jobs=2)( + delayed(outer)(nested_require='sharedmem') + for _ in range(1)) + assert isinstance(result[0][0][0], ThreadingBackend) + + +def outer(nested_require): + return Parallel(n_jobs=2, prefer='threads')( + delayed(middle)(nested_require) for _ in range(1) + ) + + +def middle(require): + return Parallel(n_jobs=2, require=require)( + delayed(inner)() for _ in range(1) + ) + + +def inner(): + return Parallel()._backend + + +def test_secede_with_no_processes(loop): + # https://github.com/dask/distributed/issues/1775 + with Client(loop=loop, processes=False, set_as_default=True): + with parallel_backend('dask'): + Parallel(n_jobs=4)(delayed(id)(i) for i in range(2)) + + +def _worker_address(_): + from distributed import get_worker + return get_worker().address + + +def test_dask_backend_keywords(loop): + with cluster() as (s, [a, b]): + with Client(s['address'], loop=loop) as client: # noqa: F841 + with parallel_backend('dask', workers=a['address']) as (ba, _): + seq = Parallel()( + delayed(_worker_address)(i) for i in range(10)) + assert seq == [a['address']] * 10 + + with parallel_backend('dask', workers=b['address']) as (ba, _): + seq = Parallel()( + delayed(_worker_address)(i) for i in range(10)) + assert seq == [b['address']] * 10 + + +def test_cleanup(loop): + with Client(processes=False, loop=loop) as client: + with parallel_backend('dask'): + Parallel()(delayed(inc)(i) for i in range(10)) + + start = time() + while client.cluster.scheduler.tasks: + sleep(0.01) + assert time() < start + 5 + + assert not client.futures + + +@pytest.mark.parametrize("cluster_strategy", ["adaptive", "late_scaling"]) +@pytest.mark.skipif( + distributed.__version__ <= '2.1.1' and distributed.__version__ >= '1.28.0', + reason="distributed bug - https://github.com/dask/distributed/pull/2841") +def test_wait_for_workers(cluster_strategy): + cluster = LocalCluster(n_workers=0, processes=False, threads_per_worker=2) + client = Client(cluster) + if cluster_strategy == "adaptive": + cluster.adapt(minimum=0, maximum=2) + elif cluster_strategy == "late_scaling": + # Tell the cluster to start workers but this is a non-blocking call + # and new workers might take time to connect. 
In this case the Parallel + # call should wait for at least one worker to come up before starting + # to schedule work. + cluster.scale(2) + try: + with parallel_backend('dask'): + # The following should wait a bit for at least one worker to + # become available. + Parallel()(delayed(inc)(i) for i in range(10)) + finally: + client.close() + cluster.close() + + +def test_wait_for_workers_timeout(): + # Start a cluster with 0 worker: + cluster = LocalCluster(n_workers=0, processes=False, threads_per_worker=2) + client = Client(cluster) + try: + with parallel_backend('dask', wait_for_workers_timeout=0.1): + # Short timeout: DaskDistributedBackend + msg = "DaskDistributedBackend has no worker after 0.1 seconds." + with pytest.raises(TimeoutError, match=msg): + Parallel()(delayed(inc)(i) for i in range(10)) + + with parallel_backend('dask', wait_for_workers_timeout=0): + # No timeout: fallback to generic joblib failure: + msg = "DaskDistributedBackend has no active worker" + with pytest.raises(RuntimeError, match=msg): + Parallel()(delayed(inc)(i) for i in range(10)) + finally: + client.close() + cluster.close() diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_deprecated_objects.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_deprecated_objects.py new file mode 100644 index 0000000..d561483 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_deprecated_objects.py @@ -0,0 +1,31 @@ +""" +Tests making sure that deprecated objects properly raise a deprecation warning +when imported/created. +""" +import sys + +import pytest + +from joblib.my_exceptions import _deprecated_names as _deprecated_exceptions +from joblib.format_stack import _deprecated_names as _deprecated_format_utils + + +@pytest.mark.xfail(sys.version_info < (3, 7), reason="no module-level getattr") +def test_deprecated_joblib_exceptions(): + assert 'JoblibException' in _deprecated_exceptions + for name in _deprecated_exceptions: + msg = ('{} is deprecated and will be removed from joblib in ' + '0.16'.format(name)) + with pytest.warns(DeprecationWarning, match=msg): + exec('from joblib.my_exceptions import {}'.format(name)) + + +@pytest.mark.xfail(sys.version_info < (3, 7), reason="no module-level getattr") +def test_deprecated_formatting_utilities(capsys): + assert 'safe_repr' in _deprecated_format_utils + assert 'eq_repr' in _deprecated_format_utils + for name in _deprecated_format_utils: + msg = ('{} is deprecated and will be removed from joblib in ' + '0.16'.format(name)) + with pytest.warns(DeprecationWarning, match=msg): + exec('from joblib.format_stack import {}'.format(name)) diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_disk.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_disk.py new file mode 100644 index 0000000..b825a8b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_disk.py @@ -0,0 +1,71 @@ +""" +Unit tests for the disk utilities. +""" + +# Authors: Gael Varoquaux +# Lars Buitinck +# Copyright (c) 2010 Gael Varoquaux +# License: BSD Style, 3 clauses. + +from __future__ import with_statement +import array +import os + +from joblib.disk import disk_used, memstr_to_bytes, mkdirp, rm_subdirs +from joblib.testing import parametrize, raises + +############################################################################### + + +def test_disk_used(tmpdir): + cachedir = tmpdir.strpath + # Not write a file that is 1M big in this directory, and check the + # size. 
The reason we use such a big file is that it makes us robust + # to errors due to block allocation. + a = array.array('i') + sizeof_i = a.itemsize + target_size = 1024 + n = int(target_size * 1024 / sizeof_i) + a = array.array('i', n * (1,)) + with open(os.path.join(cachedir, 'test'), 'wb') as output: + a.tofile(output) + assert disk_used(cachedir) >= target_size + assert disk_used(cachedir) < target_size + 12 + + +@parametrize('text,value', + [('80G', 80 * 1024 ** 3), + ('1.4M', int(1.4 * 1024 ** 2)), + ('120M', 120 * 1024 ** 2), + ('53K', 53 * 1024)]) +def test_memstr_to_bytes(text, value): + assert memstr_to_bytes(text) == value + + +@parametrize('text,exception,regex', + [('fooG', ValueError, r'Invalid literal for size.*fooG.*'), + ('1.4N', ValueError, r'Invalid literal for size.*1.4N.*')]) +def test_memstr_to_bytes_exception(text, exception, regex): + with raises(exception) as excinfo: + memstr_to_bytes(text) + assert excinfo.match(regex) + + +def test_mkdirp(tmpdir): + mkdirp(os.path.join(tmpdir.strpath, 'ham')) + mkdirp(os.path.join(tmpdir.strpath, 'ham')) + mkdirp(os.path.join(tmpdir.strpath, 'spam', 'spam')) + + # Not all OSErrors are ignored + with raises(OSError): + mkdirp('') + + +def test_rm_subdirs(tmpdir): + sub_path = os.path.join(tmpdir.strpath, "am", "stram") + full_path = os.path.join(sub_path, "gram") + mkdirp(os.path.join(full_path)) + + rm_subdirs(sub_path) + assert os.path.exists(sub_path) + assert not os.path.exists(full_path) diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_format_stack.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_format_stack.py new file mode 100644 index 0000000..09fa26a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_format_stack.py @@ -0,0 +1,129 @@ +""" +Unit tests for the stack formatting utilities +""" + +# Author: Gael Varoquaux +# Copyright (c) 2010 Gael Varoquaux +# License: BSD Style, 3 clauses. 
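+# Illustrative sketch, referring back to the disk-utility tests above (this
+# comment is not part of the upstream joblib sources): the helpers exercised
+# by test_disk.py are small standalone functions; ``memstr_to_bytes`` turns a
+# human-readable size string into a byte count, ``mkdirp`` behaves like
+# ``mkdir -p`` (no error if the directory already exists), and ``disk_used``
+# reports the size of a directory in kilobytes. The path is hypothetical.
+#
+#     from joblib.disk import disk_used, memstr_to_bytes, mkdirp
+#
+#     assert memstr_to_bytes('53K') == 53 * 1024
+#     assert memstr_to_bytes('80G') == 80 * 1024 ** 3
+#     mkdirp('/tmp/joblib_demo/nested/dir')   # idempotent
+#     print(disk_used('/tmp/joblib_demo'))    # approximate size, in kB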
+ +import imp +import os +import re +import sys +import pytest + +from joblib.format_stack import safe_repr, _fixed_getframes, format_records +from joblib.format_stack import format_exc +from joblib.test.common import with_numpy, np + +############################################################################### + + +class Vicious(object): + def __repr__(self): + raise ValueError + + +def test_safe_repr(): + safe_repr(Vicious()) + + +def _change_file_extensions_to_pyc(record): + _1, filename, _2, _3, _4, _5 = record + if filename.endswith('.py'): + filename += 'c' + return _1, filename, _2, _3, _4, _5 + + +def _raise_exception(a, b): + """Function that raises with a non trivial call stack + """ + def helper(a, b): + raise ValueError('Nope, this can not work') + + helper(a, b) + + +def test_format_records(): + try: + _raise_exception('a', 42) + except ValueError: + etb = sys.exc_info()[2] + records = _fixed_getframes(etb) + + # Modify filenames in traceback records from .py to .pyc + pyc_records = [_change_file_extensions_to_pyc(record) + for record in records] + + formatted_records = format_records(pyc_records) + + # Check that the .py file and not the .pyc one is listed in + # the traceback + for fmt_rec in formatted_records: + assert 'test_format_stack.py in' in fmt_rec + + # Check exception stack + arrow_regex = r'^-+>\s+\d+\s+' + assert re.search(arrow_regex + r"_raise_exception\('a', 42\)", + formatted_records[0], + re.MULTILINE) + assert re.search(arrow_regex + r'helper\(a, b\)', + formatted_records[1], + re.MULTILINE) + assert "a = 'a'" in formatted_records[1] + assert 'b = 42' in formatted_records[1] + assert re.search(arrow_regex + + r"raise ValueError\('Nope, this can not work'\)", + formatted_records[2], + re.MULTILINE) + + +def test_format_records_file_with_less_lines_than_context(tmpdir): + # See https://github.com/joblib/joblib/issues/420 + filename = os.path.join(tmpdir.strpath, 'small_file.py') + code_lines = ['def func():', ' 1/0'] + code = '\n'.join(code_lines) + with open(filename, 'w') as f: + f.write(code) + + small_file = imp.load_source('small_file', filename) + if not hasattr(small_file, 'func'): + pytest.skip("PyPy bug?") + try: + small_file.func() + except ZeroDivisionError: + etb = sys.exc_info()[2] + + records = _fixed_getframes(etb, context=10) + # Check that if context is bigger than the number of lines in + # the file you do not get padding + frame, tb_filename, line, func_name, context, _ = records[-1] + assert [l.rstrip() for l in context] == code_lines + + formatted_records = format_records(records) + # 2 lines for header in the traceback: lines of ...... + + # filename with function + len_header = 2 + nb_lines_formatted_records = len(formatted_records[1].splitlines()) + assert (nb_lines_formatted_records == len_header + len(code_lines)) + # Check exception stack + arrow_regex = r'^-+>\s+\d+\s+' + assert re.search(arrow_regex + r'1/0', + formatted_records[1], + re.MULTILINE) + + +@with_numpy +def test_format_exc_with_compiled_code(): + # Trying to tokenize compiled C code raise SyntaxError. + # See https://github.com/joblib/joblib/issues/101 for more details. 
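+    # Usage sketch (illustrative, not upstream code): ``format_exc`` renders
+    # an exception together with the surrounding source context, the kind of
+    # rich traceback joblib attaches to errors raised in worker processes;
+    # ``context`` controls how many source lines are shown around each frame.
+    #
+    #     import sys
+    #     from joblib.format_stack import format_exc
+    #
+    #     try:
+    #         1 / 0
+    #     except Exception:
+    #         etype, evalue, etb = sys.exc_info()
+    #         print(format_exc(etype, evalue, etb, context=5))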
+ try: + np.random.uniform('invalid_value') + except Exception: + exc_type, exc_value, exc_traceback = sys.exc_info() + formatted_exc = format_exc(exc_type, exc_value, + exc_traceback, context=10) + # The name of the extension can be something like + # mtrand.cpython-33m.so + pattern = r'mtrand[a-z0-9._-]*\.(so|pyd)' + assert re.search(pattern, formatted_exc) diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_func_inspect.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_func_inspect.py new file mode 100644 index 0000000..b3b7b7b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_func_inspect.py @@ -0,0 +1,290 @@ +""" +Test the func_inspect module. +""" + +# Author: Gael Varoquaux +# Copyright (c) 2009 Gael Varoquaux +# License: BSD Style, 3 clauses. + +import functools + +from joblib.func_inspect import filter_args, get_func_name, get_func_code +from joblib.func_inspect import _clean_win_chars, format_signature +from joblib.memory import Memory +from joblib.test.common import with_numpy +from joblib.testing import fixture, parametrize, raises + + +############################################################################### +# Module-level functions and fixture, for tests +def f(x, y=0): + pass + + +def g(x): + pass + + +def h(x, y=0, *args, **kwargs): + pass + + +def i(x=1): + pass + + +def j(x, y, **kwargs): + pass + + +def k(*args, **kwargs): + pass + + +def m1(x, *, y): + pass + + +def m2(x, *, y, z=3): + pass + + +@fixture(scope='module') +def cached_func(tmpdir_factory): + # Create a Memory object to test decorated functions. + # We should be careful not to call the decorated functions, so that + # cache directories are not created in the temp dir. + cachedir = tmpdir_factory.mktemp("joblib_test_func_inspect") + mem = Memory(cachedir.strpath) + + @mem.cache + def cached_func_inner(x): + return x + + return cached_func_inner + + +class Klass(object): + + def f(self, x): + return x + + +############################################################################### +# Tests + +@parametrize('func,args,filtered_args', + [(f, [[], (1, )], {'x': 1, 'y': 0}), + (f, [['x'], (1, )], {'y': 0}), + (f, [['y'], (0, )], {'x': 0}), + (f, [['y'], (0, ), {'y': 1}], {'x': 0}), + (f, [['x', 'y'], (0, )], {}), + (f, [[], (0,), {'y': 1}], {'x': 0, 'y': 1}), + (f, [['y'], (), {'x': 2, 'y': 1}], {'x': 2}), + (g, [[], (), {'x': 1}], {'x': 1}), + (i, [[], (2, )], {'x': 2})]) +def test_filter_args(func, args, filtered_args): + assert filter_args(func, *args) == filtered_args + + +def test_filter_args_method(): + obj = Klass() + assert filter_args(obj.f, [], (1, )) == {'x': 1, 'self': obj} + + +@parametrize('func,args,filtered_args', + [(h, [[], (1, )], + {'x': 1, 'y': 0, '*': [], '**': {}}), + (h, [[], (1, 2, 3, 4)], + {'x': 1, 'y': 2, '*': [3, 4], '**': {}}), + (h, [[], (1, 25), {'ee': 2}], + {'x': 1, 'y': 25, '*': [], '**': {'ee': 2}}), + (h, [['*'], (1, 2, 25), {'ee': 2}], + {'x': 1, 'y': 2, '**': {'ee': 2}})]) +def test_filter_varargs(func, args, filtered_args): + assert filter_args(func, *args) == filtered_args + + +test_filter_kwargs_extra_params = [ + (m1, [[], (1,), {'y': 2}], {'x': 1, 'y': 2}), + (m2, [[], (1,), {'y': 2}], {'x': 1, 'y': 2, 'z': 3}) +] + + +@parametrize('func,args,filtered_args', + [(k, [[], (1, 2), {'ee': 2}], + {'*': [1, 2], '**': {'ee': 2}}), + (k, [[], (3, 4)], + {'*': [3, 4], '**': {}})] + + test_filter_kwargs_extra_params) +def test_filter_kwargs(func, args, filtered_args): + assert filter_args(func, *args) == 
filtered_args + + +def test_filter_args_2(): + assert (filter_args(j, [], (1, 2), {'ee': 2}) == + {'x': 1, 'y': 2, '**': {'ee': 2}}) + + ff = functools.partial(f, 1) + # filter_args has to special-case partial + assert filter_args(ff, [], (1, )) == {'*': [1], '**': {}} + assert filter_args(ff, ['y'], (1, )) == {'*': [1], '**': {}} + + +@parametrize('func,funcname', [(f, 'f'), (g, 'g'), + (cached_func, 'cached_func')]) +def test_func_name(func, funcname): + # Check that we are not confused by decoration + # here testcase 'cached_func' is the function itself + assert get_func_name(func)[1] == funcname + + +def test_func_name_on_inner_func(cached_func): + # Check that we are not confused by decoration + # here testcase 'cached_func' is the 'cached_func_inner' function + # returned by 'cached_func' fixture + assert get_func_name(cached_func)[1] == 'cached_func_inner' + + +def test_func_inspect_errors(): + # Check that func_inspect is robust and will work on weird objects + assert get_func_name('a'.lower)[-1] == 'lower' + assert get_func_code('a'.lower)[1:] == (None, -1) + ff = lambda x: x + assert get_func_name(ff, win_characters=False)[-1] == '' + assert get_func_code(ff)[1] == __file__.replace('.pyc', '.py') + # Simulate a function defined in __main__ + ff.__module__ = '__main__' + assert get_func_name(ff, win_characters=False)[-1] == '' + assert get_func_code(ff)[1] == __file__.replace('.pyc', '.py') + + +def func_with_kwonly_args(a, b, *, kw1='kw1', kw2='kw2'): + pass + + +def func_with_signature(a: int, b: int) -> None: + pass + + +def test_filter_args_edge_cases(): + assert ( + filter_args(func_with_kwonly_args, [], (1, 2), + {'kw1': 3, 'kw2': 4}) == + {'a': 1, 'b': 2, 'kw1': 3, 'kw2': 4}) + + # filter_args doesn't care about keyword-only arguments so you + # can pass 'kw1' into *args without any problem + with raises(ValueError) as excinfo: + filter_args(func_with_kwonly_args, [], (1, 2, 3), {'kw2': 2}) + excinfo.match("Keyword-only parameter 'kw1' was passed as positional " + "parameter") + + assert ( + filter_args(func_with_kwonly_args, ['b', 'kw2'], (1, 2), + {'kw1': 3, 'kw2': 4}) == + {'a': 1, 'kw1': 3}) + + assert (filter_args(func_with_signature, ['b'], (1, 2)) == {'a': 1}) + + +def test_bound_methods(): + """ Make sure that calling the same method on two different instances + of the same class does resolv to different signatures. + """ + a = Klass() + b = Klass() + assert filter_args(a.f, [], (1, )) != filter_args(b.f, [], (1, )) + + +@parametrize('exception,regex,func,args', + [(ValueError, 'ignore_lst must be a list of parameters to ignore', + f, ['bar', (None, )]), + (ValueError, r'Ignore list: argument \'(.*)\' is not defined', + g, [['bar'], (None, )]), + (ValueError, 'Wrong number of arguments', + h, [[]])]) +def test_filter_args_error_msg(exception, regex, func, args): + """ Make sure that filter_args returns decent error messages, for the + sake of the user. + """ + with raises(exception) as excinfo: + filter_args(func, *args) + excinfo.match(regex) + + +def test_filter_args_no_kwargs_mutation(): + """None-regression test against 0.12.0 changes. + + https://github.com/joblib/joblib/pull/75 + + Make sure filter args doesn't mutate the kwargs dict that gets passed in. 
+ """ + kwargs = {'x': 0} + filter_args(g, [], [], kwargs) + assert kwargs == {'x': 0} + + +def test_clean_win_chars(): + string = r'C:\foo\bar\main.py' + mangled_string = _clean_win_chars(string) + for char in ('\\', ':', '<', '>', '!'): + assert char not in mangled_string + + +@parametrize('func,args,kwargs,sgn_expected', + [(g, [list(range(5))], {}, 'g([0, 1, 2, 3, 4])'), + (k, [1, 2, (3, 4)], {'y': True}, 'k(1, 2, (3, 4), y=True)')]) +def test_format_signature(func, args, kwargs, sgn_expected): + # Test signature formatting. + path, sgn_result = format_signature(func, *args, **kwargs) + assert sgn_result == sgn_expected + + +def test_format_signature_long_arguments(): + shortening_threshold = 1500 + # shortening gets it down to 700 characters but there is the name + # of the function in the signature and a few additional things + # like dots for the ellipsis + shortening_target = 700 + 10 + + arg = 'a' * shortening_threshold + _, signature = format_signature(h, arg) + assert len(signature) < shortening_target + + nb_args = 5 + args = [arg for _ in range(nb_args)] + _, signature = format_signature(h, *args) + assert len(signature) < shortening_target * nb_args + + kwargs = {str(i): arg for i, arg in enumerate(args)} + _, signature = format_signature(h, **kwargs) + assert len(signature) < shortening_target * nb_args + + _, signature = format_signature(h, *args, **kwargs) + assert len(signature) < shortening_target * 2 * nb_args + + +@with_numpy +def test_format_signature_numpy(): + """ Test the format signature formatting with numpy. + """ + + +def test_special_source_encoding(): + from joblib.test.test_func_inspect_special_encoding import big5_f + func_code, source_file, first_line = get_func_code(big5_f) + assert first_line == 5 + assert "def big5_f():" in func_code + assert "test_func_inspect_special_encoding" in source_file + + +def _get_code(): + from joblib.test.test_func_inspect_special_encoding import big5_f + return get_func_code(big5_f)[0] + + +def test_func_code_consistency(): + from joblib.parallel import Parallel, delayed + codes = Parallel(n_jobs=2)(delayed(_get_code)() for _ in range(5)) + assert len(set(codes)) == 1 diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_func_inspect_special_encoding.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_func_inspect_special_encoding.py new file mode 100644 index 0000000..1b9fbe8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_func_inspect_special_encoding.py @@ -0,0 +1,10 @@ +# -*- coding: big5 -*- + + +# Some Traditional Chinese characters: ¤@¨Ç¤¤¤å¦r²Å +def big5_f(): + """¥Î©ó´ú¸Õªº¨ç¼Æ + """ + # µùÄÀ + return 0 + diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_hashing.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_hashing.py new file mode 100644 index 0000000..37d9480 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_hashing.py @@ -0,0 +1,497 @@ +""" +Test the hashing module. +""" + +# Author: Gael Varoquaux +# Copyright (c) 2009 Gael Varoquaux +# License: BSD Style, 3 clauses. 
+ +import time +import hashlib +import sys +import os +import gc +import io +import collections +import itertools +import pickle +import random +from concurrent.futures import ProcessPoolExecutor +from decimal import Decimal +import pytest + +from joblib.hashing import hash +from joblib.func_inspect import filter_args +from joblib.memory import Memory +from joblib.testing import raises, skipif, fixture, parametrize +from joblib.test.common import np, with_numpy + + +def unicode(s): + return s + + +############################################################################### +# Helper functions for the tests +def time_func(func, *args): + """ Time function func on *args. + """ + times = list() + for _ in range(3): + t1 = time.time() + func(*args) + times.append(time.time() - t1) + return min(times) + + +def relative_time(func1, func2, *args): + """ Return the relative time between func1 and func2 applied on + *args. + """ + time_func1 = time_func(func1, *args) + time_func2 = time_func(func2, *args) + relative_diff = 0.5 * (abs(time_func1 - time_func2) + / (time_func1 + time_func2)) + return relative_diff + + +class Klass(object): + + def f(self, x): + return x + + +class KlassWithCachedMethod(object): + + def __init__(self, cachedir): + mem = Memory(cachedir=cachedir) + self.f = mem.cache(self.f) + + def f(self, x): + return x + + +############################################################################### +# Tests + +input_list = [1, 2, 1., 2., 1 + 1j, 2. + 1j, + 'a', 'b', + (1,), (1, 1,), [1, ], [1, 1, ], + {1: 1}, {1: 2}, {2: 1}, + None, + gc.collect, + [1, ].append, + # Next 2 sets have unorderable elements in python 3. + set(('a', 1)), + set(('a', 1, ('a', 1))), + # Next 2 dicts have unorderable type of keys in python 3. + {'a': 1, 1: 2}, + {'a': 1, 1: 2, 'd': {'a': 1}}] + + +@parametrize('obj1', input_list) +@parametrize('obj2', input_list) +def test_trivial_hash(obj1, obj2): + """Smoke test hash on various types.""" + # Check that 2 objects have the same hash only if they are the same. + are_hashes_equal = hash(obj1) == hash(obj2) + are_objs_identical = obj1 is obj2 + assert are_hashes_equal == are_objs_identical + + +def test_hash_methods(): + # Check that hashing instance methods works + a = io.StringIO(unicode('a')) + assert hash(a.flush) == hash(a.flush) + a1 = collections.deque(range(10)) + a2 = collections.deque(range(9)) + assert hash(a1.extend) != hash(a2.extend) + + +@fixture(scope='function') +@with_numpy +def three_np_arrays(): + rnd = np.random.RandomState(0) + arr1 = rnd.random_sample((10, 10)) + arr2 = arr1.copy() + arr3 = arr2.copy() + arr3[0] += 1 + return arr1, arr2, arr3 + + +def test_hash_numpy_arrays(three_np_arrays): + arr1, arr2, arr3 = three_np_arrays + + for obj1, obj2 in itertools.product(three_np_arrays, repeat=2): + are_hashes_equal = hash(obj1) == hash(obj2) + are_arrays_equal = np.all(obj1 == obj2) + assert are_hashes_equal == are_arrays_equal + + assert hash(arr1) != hash(arr1.T) + + +def test_hash_numpy_dict_of_arrays(three_np_arrays): + arr1, arr2, arr3 = three_np_arrays + + d1 = {1: arr1, 2: arr2} + d2 = {1: arr2, 2: arr1} + d3 = {1: arr2, 2: arr3} + + assert hash(d1) == hash(d2) + assert hash(d1) != hash(d3) + + +@with_numpy +@parametrize('dtype', ['datetime64[s]', 'timedelta64[D]']) +def test_numpy_datetime_array(dtype): + # memoryview is not supported for some dtypes e.g. 
datetime64 + # see https://github.com/joblib/joblib/issues/188 for more details + a_hash = hash(np.arange(10)) + array = np.arange(0, 10, dtype=dtype) + assert hash(array) != a_hash + + +@with_numpy +def test_hash_numpy_noncontiguous(): + a = np.asarray(np.arange(6000).reshape((1000, 2, 3)), + order='F')[:, :1, :] + b = np.ascontiguousarray(a) + assert hash(a) != hash(b) + + c = np.asfortranarray(a) + assert hash(a) != hash(c) + + +@with_numpy +@parametrize('coerce_mmap', [True, False]) +def test_hash_memmap(tmpdir, coerce_mmap): + """Check that memmap and arrays hash identically if coerce_mmap is True.""" + filename = tmpdir.join('memmap_temp').strpath + try: + m = np.memmap(filename, shape=(10, 10), mode='w+') + a = np.asarray(m) + are_hashes_equal = (hash(a, coerce_mmap=coerce_mmap) == + hash(m, coerce_mmap=coerce_mmap)) + assert are_hashes_equal == coerce_mmap + finally: + if 'm' in locals(): + del m + # Force a garbage-collection cycle, to be certain that the + # object is delete, and we don't run in a problem under + # Windows with a file handle still open. + gc.collect() + + +@with_numpy +@skipif(sys.platform == 'win32', reason='This test is not stable under windows' + ' for some reason') +def test_hash_numpy_performance(): + """ Check the performance of hashing numpy arrays: + + In [22]: a = np.random.random(1000000) + + In [23]: %timeit hashlib.md5(a).hexdigest() + 100 loops, best of 3: 20.7 ms per loop + + In [24]: %timeit hashlib.md5(pickle.dumps(a, protocol=2)).hexdigest() + 1 loops, best of 3: 73.1 ms per loop + + In [25]: %timeit hashlib.md5(cPickle.dumps(a, protocol=2)).hexdigest() + 10 loops, best of 3: 53.9 ms per loop + + In [26]: %timeit hash(a) + 100 loops, best of 3: 20.8 ms per loop + """ + rnd = np.random.RandomState(0) + a = rnd.random_sample(1000000) + + def md5_hash(x): + return hashlib.md5(memoryview(x)).hexdigest() + + relative_diff = relative_time(md5_hash, hash, a) + assert relative_diff < 0.3 + + # Check that hashing an tuple of 3 arrays takes approximately + # 3 times as much as hashing one array + time_hashlib = 3 * time_func(md5_hash, a) + time_hash = time_func(hash, (a, a, a)) + relative_diff = 0.5 * (abs(time_hash - time_hashlib) + / (time_hash + time_hashlib)) + assert relative_diff < 0.3 + + +def test_bound_methods_hash(): + """ Make sure that calling the same method on two different instances + of the same class does resolve to the same hashes. + """ + a = Klass() + b = Klass() + assert (hash(filter_args(a.f, [], (1, ))) == + hash(filter_args(b.f, [], (1, )))) + + +def test_bound_cached_methods_hash(tmpdir): + """ Make sure that calling the same _cached_ method on two different + instances of the same class does resolve to the same hashes. 
+ """ + a = KlassWithCachedMethod(tmpdir.strpath) + b = KlassWithCachedMethod(tmpdir.strpath) + assert (hash(filter_args(a.f.func, [], (1, ))) == + hash(filter_args(b.f.func, [], (1, )))) + + +@with_numpy +def test_hash_object_dtype(): + """ Make sure that ndarrays with dtype `object' hash correctly.""" + + a = np.array([np.arange(i) for i in range(6)], dtype=object) + b = np.array([np.arange(i) for i in range(6)], dtype=object) + + assert hash(a) == hash(b) + + +@with_numpy +def test_numpy_scalar(): + # Numpy scalars are built from compiled functions, and lead to + # strange pickling paths explored, that can give hash collisions + a = np.float64(2.0) + b = np.float64(3.0) + assert hash(a) != hash(b) + + +def test_dict_hash(tmpdir): + # Check that dictionaries hash consistently, eventhough the ordering + # of the keys is not garanteed + k = KlassWithCachedMethod(tmpdir.strpath) + + d = {'#s12069__c_maps.nii.gz': [33], + '#s12158__c_maps.nii.gz': [33], + '#s12258__c_maps.nii.gz': [33], + '#s12277__c_maps.nii.gz': [33], + '#s12300__c_maps.nii.gz': [33], + '#s12401__c_maps.nii.gz': [33], + '#s12430__c_maps.nii.gz': [33], + '#s13817__c_maps.nii.gz': [33], + '#s13903__c_maps.nii.gz': [33], + '#s13916__c_maps.nii.gz': [33], + '#s13981__c_maps.nii.gz': [33], + '#s13982__c_maps.nii.gz': [33], + '#s13983__c_maps.nii.gz': [33]} + + a = k.f(d) + b = k.f(a) + + assert hash(a) == hash(b) + + +def test_set_hash(tmpdir): + # Check that sets hash consistently, even though their ordering + # is not guaranteed + k = KlassWithCachedMethod(tmpdir.strpath) + + s = set(['#s12069__c_maps.nii.gz', + '#s12158__c_maps.nii.gz', + '#s12258__c_maps.nii.gz', + '#s12277__c_maps.nii.gz', + '#s12300__c_maps.nii.gz', + '#s12401__c_maps.nii.gz', + '#s12430__c_maps.nii.gz', + '#s13817__c_maps.nii.gz', + '#s13903__c_maps.nii.gz', + '#s13916__c_maps.nii.gz', + '#s13981__c_maps.nii.gz', + '#s13982__c_maps.nii.gz', + '#s13983__c_maps.nii.gz']) + + a = k.f(s) + b = k.f(a) + + assert hash(a) == hash(b) + + +def test_set_decimal_hash(): + # Check that sets containing decimals hash consistently, even though + # ordering is not guaranteed + assert (hash(set([Decimal(0), Decimal('NaN')])) == + hash(set([Decimal('NaN'), Decimal(0)]))) + + +def test_string(): + # Test that we obtain the same hash for object owning several strings, + # whatever the past of these strings (which are immutable in Python) + string = 'foo' + a = {string: 'bar'} + b = {string: 'bar'} + c = pickle.loads(pickle.dumps(b)) + assert hash([a, b]) == hash([a, c]) + + +@with_numpy +def test_numpy_dtype_pickling(): + # numpy dtype hashing is tricky to get right: see #231, #239, #251 #1080, + # #1082, and explanatory comments inside + # ``joblib.hashing.NumpyHasher.save``. + + # In this test, we make sure that the pickling of numpy dtypes is robust to + # object identity and object copy. 
+ + dt1 = np.dtype('f4') + dt2 = np.dtype('f4') + + # simple dtypes objects are interned + assert dt1 is dt2 + assert hash(dt1) == hash(dt2) + + dt1_roundtripped = pickle.loads(pickle.dumps(dt1)) + assert dt1 is not dt1_roundtripped + assert hash(dt1) == hash(dt1_roundtripped) + + assert hash([dt1, dt1]) == hash([dt1_roundtripped, dt1_roundtripped]) + assert hash([dt1, dt1]) == hash([dt1, dt1_roundtripped]) + + complex_dt1 = np.dtype( + [('name', np.str_, 16), ('grades', np.float64, (2,))] + ) + complex_dt2 = np.dtype( + [('name', np.str_, 16), ('grades', np.float64, (2,))] + ) + + # complex dtypes objects are not interned + assert hash(complex_dt1) == hash(complex_dt2) + + complex_dt1_roundtripped = pickle.loads(pickle.dumps(complex_dt1)) + assert complex_dt1_roundtripped is not complex_dt1 + assert hash(complex_dt1) == hash(complex_dt1_roundtripped) + + assert hash([complex_dt1, complex_dt1]) == hash( + [complex_dt1_roundtripped, complex_dt1_roundtripped] + ) + assert hash([complex_dt1, complex_dt1]) == hash( + [complex_dt1_roundtripped, complex_dt1] + ) + + +@parametrize('to_hash,expected', + [('This is a string to hash', + '71b3f47df22cb19431d85d92d0b230b2'), + (u"C'est l\xe9t\xe9", + '2d8d189e9b2b0b2e384d93c868c0e576'), + ((123456, 54321, -98765), + 'e205227dd82250871fa25aa0ec690aa3'), + ([random.Random(42).random() for _ in range(5)], + 'a11ffad81f9682a7d901e6edc3d16c84'), + ({'abcde': 123, 'sadfas': [-9999, 2, 3]}, + 'aeda150553d4bb5c69f0e69d51b0e2ef')]) +def test_hashes_stay_the_same(to_hash, expected): + # We want to make sure that hashes don't change with joblib + # version. For end users, that would mean that they have to + # regenerate their cache from scratch, which potentially means + # lengthy recomputations. + # Expected results have been generated with joblib 0.9.2 + assert hash(to_hash) == expected + + +@with_numpy +def test_hashes_are_different_between_c_and_fortran_contiguous_arrays(): + # We want to be sure that the c-contiguous and f-contiguous versions of the + # same array produce 2 different hashes. + rng = np.random.RandomState(0) + arr_c = rng.random_sample((10, 10)) + arr_f = np.asfortranarray(arr_c) + assert hash(arr_c) != hash(arr_f) + + +@with_numpy +def test_0d_array(): + hash(np.array(0)) + + +@with_numpy +def test_0d_and_1d_array_hashing_is_different(): + assert hash(np.array(0)) != hash(np.array([0])) + + +@with_numpy +def test_hashes_stay_the_same_with_numpy_objects(): + # Note: joblib used to test numpy objects hashing by comparing the produced + # hash of an object with some hard-coded target value to guarantee that + # hashing remains the same across joblib versions. However, since numpy + # 1.20 and joblib 1.0, joblib relies on potentially unstable implementation + # details of numpy to hash np.dtype objects, which makes the stability of + # hash values across different environments hard to guarantee and to test. + # As a result, hashing stability across joblib versions becomes best-effort + # only, and we only test the consistency within a single environment by + # making sure: + # - the hash of two copies of the same objects is the same + # - hashing some object in two different python processes produces the same + # value. This should be viewed as a proxy for testing hash consistency + # through time between Python sessions (provided no change in the + # environment was done between sessions). + + def create_objects_to_hash(): + rng = np.random.RandomState(42) + # Being explicit about dtypes in order to avoid + # architecture-related differences. 
Also using 'f4' rather than + # 'f8' for float arrays because 'f8' arrays generated by + # rng.random.randn don't seem to be bit-identical on 32bit and + # 64bit machines. + to_hash_list = [ + rng.randint(-1000, high=1000, size=50).astype(' +# Copyright (c) 2009 Gael Varoquaux +# License: BSD Style, 3 clauses. + +import re + +from joblib.logger import PrintTime + + +def test_print_time(tmpdir, capsys): + # A simple smoke test for PrintTime. + logfile = tmpdir.join('test.log').strpath + print_time = PrintTime(logfile=logfile) + print_time('Foo') + # Create a second time, to smoke test log rotation. + print_time = PrintTime(logfile=logfile) + print_time('Foo') + # And a third time + print_time = PrintTime(logfile=logfile) + print_time('Foo') + + out_printed_text, err_printed_text = capsys.readouterr() + # Use regexps to be robust to time variations + match = r"Foo: 0\..s, 0\..min\nFoo: 0\..s, 0..min\nFoo: " + \ + r".\..s, 0..min\n" + if not re.match(match, err_printed_text): + raise AssertionError('Excepted %s, got %s' % + (match, err_printed_text)) diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_memmapping.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_memmapping.py new file mode 100644 index 0000000..dc40d23 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_memmapping.py @@ -0,0 +1,1162 @@ +import os +import mmap +import sys +import platform +import gc +import pickle +import itertools +from time import sleep +import subprocess +import threading + +from joblib.test.common import with_numpy, np +from joblib.test.common import setup_autokill +from joblib.test.common import teardown_autokill +from joblib.test.common import with_multiprocessing +from joblib.test.common import with_dev_shm +from joblib.testing import raises, parametrize, skipif, xfail, param +from joblib.backports import make_memmap +from joblib.parallel import Parallel, delayed + +from joblib.pool import MemmappingPool +from joblib.executor import _TestingMemmappingExecutor as TestExecutor +from joblib._memmapping_reducer import has_shareable_memory +from joblib._memmapping_reducer import ArrayMemmapForwardReducer +from joblib._memmapping_reducer import _strided_from_memmap +from joblib._memmapping_reducer import _get_temp_dir +from joblib._memmapping_reducer import _WeakArrayKeyMap +from joblib._memmapping_reducer import _get_backing_memmap +import joblib._memmapping_reducer as jmr + + +def setup_module(): + setup_autokill(__name__, timeout=300) + + +def teardown_module(): + teardown_autokill(__name__) + + +def check_memmap_and_send_back(array): + assert _get_backing_memmap(array) is not None + return array + + +def check_array(args): + """Dummy helper function to be executed in subprocesses + + Check that the provided array has the expected values in the provided + range. + + """ + data, position, expected = args + np.testing.assert_array_equal(data[position], expected) + + +def inplace_double(args): + """Dummy helper function to be executed in subprocesses + + + Check that the input array has the right values in the provided range + and perform an inplace modification to double the values in the range by + two. 
+ + """ + data, position, expected = args + assert data[position] == expected + data[position] *= 2 + np.testing.assert_array_equal(data[position], 2 * expected) + + +@with_numpy +@with_multiprocessing +def test_memmap_based_array_reducing(tmpdir): + """Check that it is possible to reduce a memmap backed array""" + assert_array_equal = np.testing.assert_array_equal + filename = tmpdir.join('test.mmap').strpath + + # Create a file larger than what will be used by a + buffer = np.memmap(filename, dtype=np.float64, shape=500, mode='w+') + + # Fill the original buffer with negative markers to detect over of + # underflow in case of test failures + buffer[:] = - 1.0 * np.arange(buffer.shape[0], dtype=buffer.dtype) + buffer.flush() + + # Memmap a 2D fortran array on a offseted subsection of the previous + # buffer + a = np.memmap(filename, dtype=np.float64, shape=(3, 5, 4), + mode='r+', order='F', offset=4) + a[:] = np.arange(60).reshape(a.shape) + + # Build various views that share the buffer with the original memmap + + # b is an memmap sliced view on an memmap instance + b = a[1:-1, 2:-1, 2:4] + + # c and d are array views + c = np.asarray(b) + d = c.T + + # Array reducer with auto dumping disabled + reducer = ArrayMemmapForwardReducer(None, tmpdir.strpath, 'c', True) + + def reconstruct_array_or_memmap(x): + cons, args = reducer(x) + return cons(*args) + + # Reconstruct original memmap + a_reconstructed = reconstruct_array_or_memmap(a) + assert has_shareable_memory(a_reconstructed) + assert isinstance(a_reconstructed, np.memmap) + assert_array_equal(a_reconstructed, a) + + # Reconstruct strided memmap view + b_reconstructed = reconstruct_array_or_memmap(b) + assert has_shareable_memory(b_reconstructed) + assert_array_equal(b_reconstructed, b) + + # Reconstruct arrays views on memmap base + c_reconstructed = reconstruct_array_or_memmap(c) + assert not isinstance(c_reconstructed, np.memmap) + assert has_shareable_memory(c_reconstructed) + assert_array_equal(c_reconstructed, c) + + d_reconstructed = reconstruct_array_or_memmap(d) + assert not isinstance(d_reconstructed, np.memmap) + assert has_shareable_memory(d_reconstructed) + assert_array_equal(d_reconstructed, d) + + # Test graceful degradation on fake memmap instances with in-memory + # buffers + a3 = a * 3 + assert not has_shareable_memory(a3) + a3_reconstructed = reconstruct_array_or_memmap(a3) + assert not has_shareable_memory(a3_reconstructed) + assert not isinstance(a3_reconstructed, np.memmap) + assert_array_equal(a3_reconstructed, a * 3) + + # Test graceful degradation on arrays derived from fake memmap instances + b3 = np.asarray(a3) + assert not has_shareable_memory(b3) + + b3_reconstructed = reconstruct_array_or_memmap(b3) + assert isinstance(b3_reconstructed, np.ndarray) + assert not has_shareable_memory(b3_reconstructed) + assert_array_equal(b3_reconstructed, b3) + + +@skipif(sys.platform != "win32", + reason="PermissionError only easily triggerable on Windows") +def test_resource_tracker_retries_when_permissionerror(tmpdir): + # Test resource_tracker retry mechanism when unlinking memmaps. See more + # thorough information in the ``unlink_file`` documentation of joblib. 
+ filename = tmpdir.join('test.mmap').strpath + cmd = """if 1: + import os + import numpy as np + import time + from joblib.externals.loky.backend import resource_tracker + resource_tracker.VERBOSE = 1 + + # Start the resource tracker + resource_tracker.ensure_running() + time.sleep(1) + + # Create a file containing numpy data + memmap = np.memmap(r"{filename}", dtype=np.float64, shape=10, mode='w+') + memmap[:] = np.arange(10).astype(np.int8).data + memmap.flush() + assert os.path.exists(r"{filename}") + del memmap + + # Create a np.memmap backed by this file + memmap = np.memmap(r"{filename}", dtype=np.float64, shape=10, mode='w+') + resource_tracker.register(r"{filename}", "file") + + # Ask the resource_tracker to delete the file backing the np.memmap , this + # should raise PermissionError that the resource_tracker will log. + resource_tracker.maybe_unlink(r"{filename}", "file") + + # Wait for the resource_tracker to process the maybe_unlink before cleaning + # up the memmap + time.sleep(2) + """.format(filename=filename) + p = subprocess.Popen([sys.executable, '-c', cmd], stderr=subprocess.PIPE, + stdout=subprocess.PIPE) + p.wait() + out, err = p.communicate() + assert p.returncode == 0 + assert out == b'' + msg = 'tried to unlink {}, got PermissionError'.format(filename) + assert msg in err.decode() + + +@with_numpy +@with_multiprocessing +def test_high_dimension_memmap_array_reducing(tmpdir): + assert_array_equal = np.testing.assert_array_equal + + filename = tmpdir.join('test.mmap').strpath + + # Create a high dimensional memmap + a = np.memmap(filename, dtype=np.float64, shape=(100, 15, 15, 3), + mode='w+') + a[:] = np.arange(100 * 15 * 15 * 3).reshape(a.shape) + + # Create some slices/indices at various dimensions + b = a[0:10] + c = a[:, 5:10] + d = a[:, :, :, 0] + e = a[1:3:4] + + # Array reducer with auto dumping disabled + reducer = ArrayMemmapForwardReducer(None, tmpdir.strpath, 'c', True) + + def reconstruct_array_or_memmap(x): + cons, args = reducer(x) + return cons(*args) + + a_reconstructed = reconstruct_array_or_memmap(a) + assert has_shareable_memory(a_reconstructed) + assert isinstance(a_reconstructed, np.memmap) + assert_array_equal(a_reconstructed, a) + + b_reconstructed = reconstruct_array_or_memmap(b) + assert has_shareable_memory(b_reconstructed) + assert_array_equal(b_reconstructed, b) + + c_reconstructed = reconstruct_array_or_memmap(c) + assert has_shareable_memory(c_reconstructed) + assert_array_equal(c_reconstructed, c) + + d_reconstructed = reconstruct_array_or_memmap(d) + assert has_shareable_memory(d_reconstructed) + assert_array_equal(d_reconstructed, d) + + e_reconstructed = reconstruct_array_or_memmap(e) + assert has_shareable_memory(e_reconstructed) + assert_array_equal(e_reconstructed, e) + + +@with_numpy +def test__strided_from_memmap(tmpdir): + fname = tmpdir.join('test.mmap').strpath + size = 5 * mmap.ALLOCATIONGRANULARITY + offset = mmap.ALLOCATIONGRANULARITY + 1 + # This line creates the mmap file that is reused later + memmap_obj = np.memmap(fname, mode='w+', shape=size + offset) + # filename, dtype, mode, offset, order, shape, strides, total_buffer_len + memmap_obj = _strided_from_memmap(fname, dtype='uint8', mode='r', + offset=offset, order='C', shape=size, + strides=None, total_buffer_len=None, + unlink_on_gc_collect=False) + assert isinstance(memmap_obj, np.memmap) + assert memmap_obj.offset == offset + memmap_backed_obj = _strided_from_memmap( + fname, dtype='uint8', mode='r', offset=offset, order='C', + shape=(size // 2,), strides=(2,), 
total_buffer_len=size, + unlink_on_gc_collect=False + ) + assert _get_backing_memmap(memmap_backed_obj).offset == offset + + +@with_numpy +@with_multiprocessing +@parametrize("factory", [MemmappingPool, TestExecutor.get_memmapping_executor], + ids=["multiprocessing", "loky"]) +def test_pool_with_memmap(factory, tmpdir): + """Check that subprocess can access and update shared memory memmap""" + assert_array_equal = np.testing.assert_array_equal + + # Fork the subprocess before allocating the objects to be passed + pool_temp_folder = tmpdir.mkdir('pool').strpath + p = factory(10, max_nbytes=2, temp_folder=pool_temp_folder) + try: + filename = tmpdir.join('test.mmap').strpath + a = np.memmap(filename, dtype=np.float32, shape=(3, 5), mode='w+') + a.fill(1.0) + + p.map(inplace_double, [(a, (i, j), 1.0) + for i in range(a.shape[0]) + for j in range(a.shape[1])]) + + assert_array_equal(a, 2 * np.ones(a.shape)) + + # Open a copy-on-write view on the previous data + b = np.memmap(filename, dtype=np.float32, shape=(5, 3), mode='c') + + p.map(inplace_double, [(b, (i, j), 2.0) + for i in range(b.shape[0]) + for j in range(b.shape[1])]) + + # Passing memmap instances to the pool should not trigger the creation + # of new files on the FS + assert os.listdir(pool_temp_folder) == [] + + # the original data is untouched + assert_array_equal(a, 2 * np.ones(a.shape)) + assert_array_equal(b, 2 * np.ones(b.shape)) + + # readonly maps can be read but not updated + c = np.memmap(filename, dtype=np.float32, shape=(10,), mode='r', + offset=5 * 4) + + with raises(AssertionError): + p.map(check_array, [(c, i, 3.0) for i in range(c.shape[0])]) + + # depending on the version of numpy one can either get a RuntimeError + # or a ValueError + with raises((RuntimeError, ValueError)): + p.map(inplace_double, [(c, i, 2.0) for i in range(c.shape[0])]) + finally: + # Clean all filehandlers held by the pool + p.terminate() + del p + + +@with_numpy +@with_multiprocessing +@parametrize("factory", [MemmappingPool, TestExecutor.get_memmapping_executor], + ids=["multiprocessing", "loky"]) +def test_pool_with_memmap_array_view(factory, tmpdir): + """Check that subprocess can access and update shared memory array""" + assert_array_equal = np.testing.assert_array_equal + + # Fork the subprocess before allocating the objects to be passed + pool_temp_folder = tmpdir.mkdir('pool').strpath + p = factory(10, max_nbytes=2, temp_folder=pool_temp_folder) + try: + + filename = tmpdir.join('test.mmap').strpath + a = np.memmap(filename, dtype=np.float32, shape=(3, 5), mode='w+') + a.fill(1.0) + + # Create an ndarray view on the memmap instance + a_view = np.asarray(a) + assert not isinstance(a_view, np.memmap) + assert has_shareable_memory(a_view) + + p.map(inplace_double, [(a_view, (i, j), 1.0) + for i in range(a.shape[0]) + for j in range(a.shape[1])]) + + # Both a and the a_view have been updated + assert_array_equal(a, 2 * np.ones(a.shape)) + assert_array_equal(a_view, 2 * np.ones(a.shape)) + + # Passing memmap array view to the pool should not trigger the + # creation of new files on the FS + assert os.listdir(pool_temp_folder) == [] + + finally: + p.terminate() + del p + + +@with_numpy +@parametrize("backend", ["multiprocessing", "loky"]) +def test_permission_error_windows_reference_cycle(backend): + # Non regression test for: + # https://github.com/joblib/joblib/issues/806 + # + # The issue happens when trying to delete a memory mapped file that has + # not yet been closed by one of the worker processes. 
+ cmd = """if 1: + import numpy as np + from joblib import Parallel, delayed + + + data = np.random.rand(int(2e6)).reshape((int(1e6), 2)) + + # Build a complex cyclic reference that is likely to delay garbage + # collection of the memmapped array in the worker processes. + first_list = current_list = [data] + for i in range(10): + current_list = [current_list] + first_list.append(current_list) + + if __name__ == "__main__": + results = Parallel(n_jobs=2, backend="{b}")( + delayed(len)(current_list) for i in range(10)) + assert results == [1] * 10 + """.format(b=backend) + p = subprocess.Popen([sys.executable, '-c', cmd], stderr=subprocess.PIPE, + stdout=subprocess.PIPE) + p.wait() + out, err = p.communicate() + assert p.returncode == 0, out.decode() + "\n\n" + err.decode() + + +@with_numpy +@parametrize("backend", ["multiprocessing", "loky"]) +def test_permission_error_windows_memmap_sent_to_parent(backend): + # Second non-regression test for: + # https://github.com/joblib/joblib/issues/806 + # previously, child process would not convert temporary memmaps to numpy + # arrays when sending the data back to the parent process. This would lead + # to permission errors on windows when deleting joblib's temporary folder, + # as the memmaped files handles would still opened in the parent process. + cmd = '''if 1: + import os + import time + + import numpy as np + + from joblib import Parallel, delayed + from testutils import return_slice_of_data + + data = np.ones(int(2e6)) + + if __name__ == '__main__': + # warm-up call to launch the workers and start the resource_tracker + _ = Parallel(n_jobs=2, verbose=5, backend='{b}')( + delayed(id)(i) for i in range(20)) + + time.sleep(0.5) + + slice_of_data = Parallel(n_jobs=2, verbose=5, backend='{b}')( + delayed(return_slice_of_data)(data, 0, 20) for _ in range(10)) + '''.format(b=backend) + + for _ in range(3): + env = os.environ.copy() + env['PYTHONPATH'] = os.path.dirname(__file__) + p = subprocess.Popen([sys.executable, '-c', cmd], + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, env=env) + p.wait() + out, err = p.communicate() + assert p.returncode == 0, err + assert out == b'' + if sys.version_info[:3] not in [(3, 8, 0), (3, 8, 1)]: + # In early versions of Python 3.8, a reference leak + # https://github.com/cloudpipe/cloudpickle/issues/327, holds + # references to pickled objects, generating race condition during + # cleanup finalizers of joblib and noisy resource_tracker outputs. + assert b'resource_tracker' not in err + + +@with_numpy +@with_multiprocessing +@parametrize("backend", ["multiprocessing", "loky"]) +def test_parallel_isolated_temp_folders(backend): + # Test that consecutive Parallel call use isolated subfolders, even + # for the loky backend that reuses its executor instance across calls. + array = np.arange(int(1e2)) + [filename_1] = Parallel(n_jobs=2, backend=backend, max_nbytes=10)( + delayed(getattr)(array, 'filename') for _ in range(1) + ) + [filename_2] = Parallel(n_jobs=2, backend=backend, max_nbytes=10)( + delayed(getattr)(array, 'filename') for _ in range(1) + ) + assert os.path.dirname(filename_2) != os.path.dirname(filename_1) + + +@with_numpy +@with_multiprocessing +@parametrize("backend", ["multiprocessing", "loky"]) +def test_managed_backend_reuse_temp_folder(backend): + # Test that calls to a managed parallel object reuse the same memmaps. 
+ array = np.arange(int(1e2)) + with Parallel(n_jobs=2, backend=backend, max_nbytes=10) as p: + [filename_1] = p( + delayed(getattr)(array, 'filename') for _ in range(1) + ) + [filename_2] = p( + delayed(getattr)(array, 'filename') for _ in range(1) + ) + assert os.path.dirname(filename_2) == os.path.dirname(filename_1) + + +@with_numpy +@with_multiprocessing +def test_memmapping_temp_folder_thread_safety(): + # Concurrent calls to Parallel with the loky backend will use the same + # executor, and thus the same reducers. Make sure that those reducers use + # different temporary folders depending on which Parallel objects called + # them, which is necessary to limit potential race conditions during the + # garbage collection of temporary memmaps. + array = np.arange(int(1e2)) + + temp_dirs_thread_1 = set() + temp_dirs_thread_2 = set() + + def concurrent_get_filename(array, temp_dirs): + with Parallel(backend='loky', n_jobs=2, max_nbytes=10) as p: + for i in range(10): + [filename] = p( + delayed(getattr)(array, 'filename') for _ in range(1) + ) + temp_dirs.add(os.path.dirname(filename)) + + t1 = threading.Thread( + target=concurrent_get_filename, args=(array, temp_dirs_thread_1) + ) + t2 = threading.Thread( + target=concurrent_get_filename, args=(array, temp_dirs_thread_2) + ) + + t1.start() + t2.start() + + t1.join() + t2.join() + + assert len(temp_dirs_thread_1) == 1 + assert len(temp_dirs_thread_2) == 1 + + assert temp_dirs_thread_1 != temp_dirs_thread_2 + + +@with_numpy +@with_multiprocessing +def test_multithreaded_parallel_termination_resource_tracker_silent(): + # test that concurrent termination attempts of a same executor does not + # emit any spurious error from the resource_tracker. We test various + # situations making 0, 1 or both parallel call sending a task that will + # make the worker (and thus the whole Parallel call) error out. + cmd = '''if 1: + import os + import numpy as np + from joblib import Parallel, delayed + from joblib.externals.loky.backend import resource_tracker + from concurrent.futures import ThreadPoolExecutor, wait + + resource_tracker.VERBOSE = 0 + + array = np.arange(int(1e2)) + + temp_dirs_thread_1 = set() + temp_dirs_thread_2 = set() + + + def raise_error(array): + raise ValueError + + + def parallel_get_filename(array, temp_dirs): + with Parallel(backend="loky", n_jobs=2, max_nbytes=10) as p: + for i in range(10): + [filename] = p( + delayed(getattr)(array, "filename") for _ in range(1) + ) + temp_dirs.add(os.path.dirname(filename)) + + + def parallel_raise(array, temp_dirs): + with Parallel(backend="loky", n_jobs=2, max_nbytes=10) as p: + for i in range(10): + [filename] = p( + delayed(raise_error)(array) for _ in range(1) + ) + temp_dirs.add(os.path.dirname(filename)) + + + executor = ThreadPoolExecutor(max_workers=2) + + # both function calls will use the same loky executor, but with a + # different Parallel object. 
+ future_1 = executor.submit({f1}, array, temp_dirs_thread_1) + future_2 = executor.submit({f2}, array, temp_dirs_thread_2) + + # Wait for both threads to terminate their backend + wait([future_1, future_2]) + + future_1.result() + future_2.result() + ''' + functions_and_returncodes = [ + ("parallel_get_filename", "parallel_get_filename", 0), + ("parallel_get_filename", "parallel_raise", 1), + ("parallel_raise", "parallel_raise", 1) + ] + + for f1, f2, returncode in functions_and_returncodes: + p = subprocess.Popen([sys.executable, '-c', cmd.format(f1=f1, f2=f2)], + stderr=subprocess.PIPE, stdout=subprocess.PIPE) + p.wait() + out, err = p.communicate() + assert p.returncode == returncode, out.decode() + assert b"resource_tracker" not in err, err.decode() + + +@with_numpy +@with_multiprocessing +def test_nested_loop_error_in_grandchild_resource_tracker_silent(): + # Safety smoke test: test that nested parallel calls using the loky backend + # don't yield noisy resource_tracker outputs when the grandchild errors + # out. + cmd = '''if 1: + from joblib import Parallel, delayed + + + def raise_error(i): + raise ValueError + + + def nested_loop(f): + Parallel(backend="loky", n_jobs=2)( + delayed(f)(i) for i in range(10) + ) + + + if __name__ == "__main__": + Parallel(backend="loky", n_jobs=2)( + delayed(nested_loop)(func) for func in [raise_error] + ) + ''' + p = subprocess.Popen([sys.executable, '-c', cmd], + stderr=subprocess.PIPE, stdout=subprocess.PIPE) + p.wait() + out, err = p.communicate() + assert p.returncode == 1, out.decode() + assert b"resource_tracker" not in err, err.decode() + + +@with_numpy +@with_multiprocessing +@parametrize("backend", ["multiprocessing", "loky"]) +def test_many_parallel_calls_on_same_object(backend): + # After #966 got merged, consecutive Parallel objects were sharing temp + # folder, which would lead to race conditions happening during the + # temporary resources management with the resource_tracker. This is a + # non-regression test that makes sure that consecutive Parallel operations + # on the same object do not error out. + cmd = '''if 1: + import os + import time + + import numpy as np + + from joblib import Parallel, delayed + from testutils import return_slice_of_data + + data = np.ones(100) + + if __name__ == '__main__': + for i in range(5): + slice_of_data = Parallel( + n_jobs=2, max_nbytes=1, backend='{b}')( + delayed(return_slice_of_data)(data, 0, 20) + for _ in range(10) + ) + slice_of_data = Parallel( + n_jobs=2, max_nbytes=1, backend='{b}')( + delayed(return_slice_of_data)(data, 0, 20) + for _ in range(10) + ) + '''.format(b=backend) + + for _ in range(3): + env = os.environ.copy() + env['PYTHONPATH'] = os.path.dirname(__file__) + p = subprocess.Popen([sys.executable, '-c', cmd], + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, env=env) + p.wait() + out, err = p.communicate() + assert p.returncode == 0, err + assert out == b'' + if sys.version_info[:3] not in [(3, 8, 0), (3, 8, 1)]: + # In early versions of Python 3.8, a reference leak + # https://github.com/cloudpipe/cloudpickle/issues/327, holds + # references to pickled objects, generating race condition during + # cleanup finalizers of joblib and noisy resource_tracker outputs. + assert b'resource_tracker' not in err + + +@with_numpy +@with_multiprocessing +@parametrize("backend", ["multiprocessing", "loky"]) +def test_memmap_returned_as_regular_array(backend): + data = np.ones(int(1e3)) + # Check that child processes send temporary memmaps back as numpy arrays. 
+ [result] = Parallel(n_jobs=2, backend=backend, max_nbytes=100)( + delayed(check_memmap_and_send_back)(data) for _ in range(1)) + assert _get_backing_memmap(result) is None + + +@with_numpy +@with_multiprocessing +@parametrize("backend", ["multiprocessing", param("loky", marks=xfail)]) +def test_resource_tracker_silent_when_reference_cycles(backend): + # There is a variety of reasons that can make joblib with loky backend + # output noisy warnings when a reference cycle is preventing a memmap from + # being garbage collected. Especially, joblib's main process finalizer + # deletes the temporary folder if it was not done before, which can + # interact badly with the resource_tracker. We don't risk leaking any + # resources, but this will likely make joblib output a lot of low-level + # confusing messages. This test is marked as xfail for now: but a next PR + # should fix this behavior. + # Note that the script in ``cmd`` is the exact same script as in + # test_permission_error_windows_reference_cycle. + cmd = """if 1: + import numpy as np + from joblib import Parallel, delayed + + + data = np.random.rand(int(2e6)).reshape((int(1e6), 2)) + + # Build a complex cyclic reference that is likely to delay garbage + # collection of the memmapped array in the worker processes. + first_list = current_list = [data] + for i in range(10): + current_list = [current_list] + first_list.append(current_list) + + if __name__ == "__main__": + results = Parallel(n_jobs=2, backend="{b}")( + delayed(len)(current_list) for i in range(10)) + assert results == [1] * 10 + """.format(b=backend) + p = subprocess.Popen([sys.executable, '-c', cmd], stderr=subprocess.PIPE, + stdout=subprocess.PIPE) + p.wait() + out, err = p.communicate() + assert p.returncode == 0, out.decode() + assert b"resource_tracker" not in err, err.decode() + + +@with_numpy +@with_multiprocessing +@parametrize("factory", [MemmappingPool, TestExecutor.get_memmapping_executor], + ids=["multiprocessing", "loky"]) +def test_memmapping_pool_for_large_arrays(factory, tmpdir): + """Check that large arrays are not copied in memory""" + + # Check that the tempfolder is empty + assert os.listdir(tmpdir.strpath) == [] + + # Build an array reducers that automaticaly dump large array content + # to filesystem backed memmap instances to avoid memory explosion + p = factory(3, max_nbytes=40, temp_folder=tmpdir.strpath, verbose=2) + try: + # The temporary folder for the pool is not provisioned in advance + assert os.listdir(tmpdir.strpath) == [] + assert not os.path.exists(p._temp_folder) + + small = np.ones(5, dtype=np.float32) + assert small.nbytes == 20 + p.map(check_array, [(small, i, 1.0) for i in range(small.shape[0])]) + + # Memory has been copied, the pool filesystem folder is unused + assert os.listdir(tmpdir.strpath) == [] + + # Try with a file larger than the memmap threshold of 40 bytes + large = np.ones(100, dtype=np.float64) + assert large.nbytes == 800 + p.map(check_array, [(large, i, 1.0) for i in range(large.shape[0])]) + + # The data has been dumped in a temp folder for subprocess to share it + # without per-child memory copies + assert os.path.isdir(p._temp_folder) + dumped_filenames = os.listdir(p._temp_folder) + assert len(dumped_filenames) == 1 + + # Check that memory mapping is not triggered for arrays with + # dtype='object' + objects = np.array(['abc'] * 100, dtype='object') + results = p.map(has_shareable_memory, [objects]) + assert not results[0] + + finally: + # check FS garbage upon pool termination + p.terminate() + for i in 
range(10): + sleep(.1) + if not os.path.exists(p._temp_folder): + break + else: # pragma: no cover + raise AssertionError( + 'temporary folder of {} was not deleted'.format(p) + ) + del p + + +@with_numpy +@with_multiprocessing +@parametrize("backend", ["multiprocessing", "loky"]) +def test_child_raises_parent_exits_cleanly(backend): + # When a task executed by a child process raises an error, the parent + # process's backend is notified, and calls abort_everything. + # In loky, abort_everything itself calls shutdown(kill_workers=True) which + # sends SIGKILL to the worker, preventing it from running the finalizers + # supposed to signal the resource_tracker when the worker is done using + # objects relying on a shared resource (e.g np.memmaps). Because this + # behavior is prone to : + # - cause a resource leak + # - make the resource tracker emit noisy resource warnings + # we explicitly test that, when the said situation occurs: + # - no resources are actually leaked + # - the temporary resources are deleted as soon as possible (typically, at + # the end of the failing Parallel call) + # - the resource_tracker does not emit any warnings. + cmd = """if 1: + import os + + import numpy as np + from joblib import Parallel, delayed + from testutils import print_filename_and_raise + + data = np.random.rand(1000) + + + def get_temp_folder(parallel_obj, backend): + if "{b}" == "loky": + return p._backend._workers._temp_folder + else: + return p._backend._pool._temp_folder + + + if __name__ == "__main__": + try: + with Parallel(n_jobs=2, backend="{b}", max_nbytes=100) as p: + temp_folder = get_temp_folder(p, "{b}") + p(delayed(print_filename_and_raise)(data) + for i in range(1)) + except ValueError: + # the temporary folder should be deleted by the end of this + # call + assert not os.path.exists(temp_folder) + """.format(b=backend) + env = os.environ.copy() + env['PYTHONPATH'] = os.path.dirname(__file__) + p = subprocess.Popen([sys.executable, '-c', cmd], stderr=subprocess.PIPE, + stdout=subprocess.PIPE, env=env) + p.wait() + out, err = p.communicate() + out, err = out.decode(), err.decode() + filename = out.split('\n')[0] + assert p.returncode == 0, out + assert err == '' # no resource_tracker warnings. 
+ assert not os.path.exists(filename) + + +@with_numpy +@with_multiprocessing +@parametrize("factory", [MemmappingPool, TestExecutor.get_memmapping_executor], + ids=["multiprocessing", "loky"]) +def test_memmapping_pool_for_large_arrays_disabled(factory, tmpdir): + """Check that large arrays memmapping can be disabled""" + # Set max_nbytes to None to disable the auto memmapping feature + p = factory(3, max_nbytes=None, temp_folder=tmpdir.strpath) + try: + + # Check that the tempfolder is empty + assert os.listdir(tmpdir.strpath) == [] + + # Try with a file largish than the memmap threshold of 40 bytes + large = np.ones(100, dtype=np.float64) + assert large.nbytes == 800 + p.map(check_array, [(large, i, 1.0) for i in range(large.shape[0])]) + + # Check that the tempfolder is still empty + assert os.listdir(tmpdir.strpath) == [] + + finally: + # Cleanup open file descriptors + p.terminate() + del p + + +@with_numpy +@with_multiprocessing +@with_dev_shm +@parametrize("factory", [MemmappingPool, TestExecutor.get_memmapping_executor], + ids=["multiprocessing", "loky"]) +def test_memmapping_on_large_enough_dev_shm(factory): + """Check that memmapping uses /dev/shm when possible""" + orig_size = jmr.SYSTEM_SHARED_MEM_FS_MIN_SIZE + try: + # Make joblib believe that it can use /dev/shm even when running on a + # CI container where the size of the /dev/shm is not very large (that + # is at least 32 MB instead of 2 GB by default). + jmr.SYSTEM_SHARED_MEM_FS_MIN_SIZE = int(32e6) + p = factory(3, max_nbytes=10) + try: + # Check that the pool has correctly detected the presence of the + # shared memory filesystem. + pool_temp_folder = p._temp_folder + folder_prefix = '/dev/shm/joblib_memmapping_folder_' + assert pool_temp_folder.startswith(folder_prefix) + assert os.path.exists(pool_temp_folder) + + # Try with a file larger than the memmap threshold of 10 bytes + a = np.ones(100, dtype=np.float64) + assert a.nbytes == 800 + p.map(id, [a] * 10) + # a should have been memmapped to the pool temp folder: the joblib + # pickling procedure generate one .pkl file: + assert len(os.listdir(pool_temp_folder)) == 1 + + # create a new array with content that is different from 'a' so + # that it is mapped to a different file in the temporary folder of + # the pool. + b = np.ones(100, dtype=np.float64) * 2 + assert b.nbytes == 800 + p.map(id, [b] * 10) + # A copy of both a and b are now stored in the shared memory folder + assert len(os.listdir(pool_temp_folder)) == 2 + finally: + # Cleanup open file descriptors + p.terminate() + del p + + for i in range(100): + # The temp folder is cleaned up upon pool termination + if not os.path.exists(pool_temp_folder): + break + sleep(.1) + else: # pragma: no cover + raise AssertionError('temporary folder of pool was not deleted') + finally: + jmr.SYSTEM_SHARED_MEM_FS_MIN_SIZE = orig_size + + +@with_numpy +@with_multiprocessing +@with_dev_shm +@parametrize("factory", [MemmappingPool, TestExecutor.get_memmapping_executor], + ids=["multiprocessing", "loky"]) +def test_memmapping_on_too_small_dev_shm(factory): + orig_size = jmr.SYSTEM_SHARED_MEM_FS_MIN_SIZE + try: + # Make joblib believe that it cannot use /dev/shm unless there is + # 42 exabytes of available shared memory in /dev/shm + jmr.SYSTEM_SHARED_MEM_FS_MIN_SIZE = int(42e18) + + p = factory(3, max_nbytes=10) + try: + # Check that the pool has correctly detected the presence of the + # shared memory filesystem. 
+ pool_temp_folder = p._temp_folder + assert not pool_temp_folder.startswith('/dev/shm') + finally: + # Cleanup open file descriptors + p.terminate() + del p + + # The temp folder is cleaned up upon pool termination + assert not os.path.exists(pool_temp_folder) + finally: + jmr.SYSTEM_SHARED_MEM_FS_MIN_SIZE = orig_size + + +@with_numpy +@with_multiprocessing +@parametrize("factory", [MemmappingPool, TestExecutor.get_memmapping_executor], + ids=["multiprocessing", "loky"]) +def test_memmapping_pool_for_large_arrays_in_return(factory, tmpdir): + """Check that large arrays are not copied in memory in return""" + assert_array_equal = np.testing.assert_array_equal + + # Build an array reducers that automaticaly dump large array content + # but check that the returned datastructure are regular arrays to avoid + # passing a memmap array pointing to a pool controlled temp folder that + # might be confusing to the user + + # The MemmappingPool user can always return numpy.memmap object explicitly + # to avoid memory copy + p = factory(3, max_nbytes=10, temp_folder=tmpdir.strpath) + try: + res = p.apply_async(np.ones, args=(1000,)) + large = res.get() + assert not has_shareable_memory(large) + assert_array_equal(large, np.ones(1000)) + finally: + p.terminate() + del p + + +def _worker_multiply(a, n_times): + """Multiplication function to be executed by subprocess""" + assert has_shareable_memory(a) + return a * n_times + + +@with_numpy +@with_multiprocessing +@parametrize("factory", [MemmappingPool, TestExecutor.get_memmapping_executor], + ids=["multiprocessing", "loky"]) +def test_workaround_against_bad_memmap_with_copied_buffers(factory, tmpdir): + """Check that memmaps with a bad buffer are returned as regular arrays + + Unary operations and ufuncs on memmap instances return a new memmap + instance with an in-memory buffer (probably a numpy bug). + """ + assert_array_equal = np.testing.assert_array_equal + + p = factory(3, max_nbytes=10, temp_folder=tmpdir.strpath) + try: + # Send a complex, large-ish view on a array that will be converted to + # a memmap in the worker process + a = np.asarray(np.arange(6000).reshape((1000, 2, 3)), + order='F')[:, :1, :] + + # Call a non-inplace multiply operation on the worker and memmap and + # send it back to the parent. + b = p.apply_async(_worker_multiply, args=(a, 3)).get() + assert not has_shareable_memory(b) + assert_array_equal(b, 3 * a) + finally: + p.terminate() + del p + + +def identity(arg): + return arg + + +@with_numpy +@with_multiprocessing +@parametrize( + "factory,retry_no", + list(itertools.product( + [MemmappingPool, TestExecutor.get_memmapping_executor], range(3))), + ids=['{}, {}'.format(x, y) for x, y in itertools.product( + ["multiprocessing", "loky"], map(str, range(3)))]) +def test_pool_memmap_with_big_offset(factory, retry_no, tmpdir): + # Test that numpy memmap offset is set correctly if greater than + # mmap.ALLOCATIONGRANULARITY, see + # https://github.com/joblib/joblib/issues/451 and + # https://github.com/numpy/numpy/pull/8443 for more details. 
+ fname = tmpdir.join('test.mmap').strpath + size = 5 * mmap.ALLOCATIONGRANULARITY + offset = mmap.ALLOCATIONGRANULARITY + 1 + obj = make_memmap(fname, mode='w+', shape=size, dtype='uint8', + offset=offset) + + p = factory(2, temp_folder=tmpdir.strpath) + result = p.apply_async(identity, args=(obj,)).get() + assert isinstance(result, np.memmap) + assert result.offset == offset + np.testing.assert_array_equal(obj, result) + p.terminate() + + +def test_pool_get_temp_dir(tmpdir): + pool_folder_name = 'test.tmpdir' + pool_folder, shared_mem = _get_temp_dir(pool_folder_name, tmpdir.strpath) + assert shared_mem is False + assert pool_folder == tmpdir.join('test.tmpdir').strpath + + pool_folder, shared_mem = _get_temp_dir(pool_folder_name, temp_folder=None) + if sys.platform.startswith('win'): + assert shared_mem is False + assert pool_folder.endswith(pool_folder_name) + + +@with_numpy +@skipif(sys.platform == 'win32', reason='This test fails with a ' + 'PermissionError on Windows') +@parametrize("mmap_mode", ["r+", "w+"]) +def test_numpy_arrays_use_different_memory(mmap_mode): + def func(arr, value): + arr[:] = value + return arr + + arrays = [np.zeros((10, 10), dtype='float64') for i in range(10)] + + results = Parallel(mmap_mode=mmap_mode, max_nbytes=0, n_jobs=2)( + delayed(func)(arr, i) for i, arr in enumerate(arrays)) + + for i, arr in enumerate(results): + np.testing.assert_array_equal(arr, i) + + +@with_numpy +def test_weak_array_key_map(): + + def assert_empty_after_gc_collect(container, retries=100): + for i in range(retries): + if len(container) == 0: + return + gc.collect() + sleep(.1) + assert len(container) == 0 + + a = np.ones(42) + m = _WeakArrayKeyMap() + m.set(a, 'a') + assert m.get(a) == 'a' + + b = a + assert m.get(b) == 'a' + m.set(b, 'b') + assert m.get(a) == 'b' + + del a + gc.collect() + assert len(m._data) == 1 + assert m.get(b) == 'b' + + del b + assert_empty_after_gc_collect(m._data) + + c = np.ones(42) + m.set(c, 'c') + assert len(m._data) == 1 + assert m.get(c) == 'c' + + with raises(KeyError): + m.get(np.ones(42)) + + del c + assert_empty_after_gc_collect(m._data) + + # Check that creating and dropping numpy arrays with potentially the same + # object id will not cause the map to get confused. + def get_set_get_collect(m, i): + a = np.ones(42) + with raises(KeyError): + m.get(a) + m.set(a, i) + assert m.get(a) == i + return id(a) + + unique_ids = set([get_set_get_collect(m, i) for i in range(1000)]) + if platform.python_implementation() == 'CPython': + # On CPython (at least) the same id is often reused many times for the + # temporary arrays created under the local scope of the + # get_set_get_collect function without causing any spurious lookups / + # insertions in the map. 
+ assert len(unique_ids) < 100 + + +def test_weak_array_key_map_no_pickling(): + m = _WeakArrayKeyMap() + with raises(pickle.PicklingError): + pickle.dumps(m) + + +@with_numpy +@with_multiprocessing +def test_direct_mmap(tmpdir): + testfile = str(tmpdir.join('arr.dat')) + a = np.arange(10, dtype='uint8') + a.tofile(testfile) + + def _read_array(): + with open(testfile) as fd: + mm = mmap.mmap(fd.fileno(), 0, access=mmap.ACCESS_READ, offset=0) + return np.ndarray((10,), dtype=np.uint8, buffer=mm, offset=0) + + def func(x): + return x**2 + + arr = _read_array() + + # this is expected to work and gives the reference + ref = Parallel(n_jobs=2)(delayed(func)(x) for x in [a]) + + # now test that it work with the mmap array + results = Parallel(n_jobs=2)(delayed(func)(x) for x in [arr]) + np.testing.assert_array_equal(results, ref) + + # also test with a mmap array read in the subprocess + def worker(): + return _read_array() + + results = Parallel(n_jobs=2)(delayed(worker)() for _ in range(1)) + np.testing.assert_array_equal(results[0], arr) diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_memory.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_memory.py new file mode 100644 index 0000000..ad0ddf4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_memory.py @@ -0,0 +1,1300 @@ +""" +Test the memory module. +""" + +# Author: Gael Varoquaux +# Copyright (c) 2009 Gael Varoquaux +# License: BSD Style, 3 clauses. + +import gc +import shutil +import os +import os.path +import pathlib +import pickle +import sys +import time +import datetime +import textwrap + +import pytest + +from joblib.memory import Memory +from joblib.memory import MemorizedFunc, NotMemorizedFunc +from joblib.memory import MemorizedResult, NotMemorizedResult +from joblib.memory import _FUNCTION_HASHES +from joblib.memory import register_store_backend, _STORE_BACKENDS +from joblib.memory import _build_func_identifier, _store_backend_factory +from joblib.memory import JobLibCollisionWarning +from joblib.parallel import Parallel, delayed +from joblib._store_backends import StoreBackendBase, FileSystemStoreBackend +from joblib.test.common import with_numpy, np +from joblib.test.common import with_multiprocessing +from joblib.testing import parametrize, raises, warns +from joblib.hashing import hash + + + +############################################################################### +# Module-level variables for the tests +def f(x, y=1): + """ A module-level function for testing purposes. + """ + return x ** 2 + y + + +############################################################################### +# Helper function for the tests +def check_identity_lazy(func, accumulator, location): + """ Given a function and an accumulator (a list that grows every + time the function is called), check that the function can be + decorated by memory to be a lazy identity. + """ + # Call each function with several arguments, and check that it is + # evaluated only once per argument. 
+ memory = Memory(location=location, verbose=0) + func = memory.cache(func) + for i in range(3): + for _ in range(2): + assert func(i) == i + assert len(accumulator) == i + 1 + + +def corrupt_single_cache_item(memory): + single_cache_item, = memory.store_backend.get_items() + output_filename = os.path.join(single_cache_item.path, 'output.pkl') + with open(output_filename, 'w') as f: + f.write('garbage') + + +def monkeypatch_cached_func_warn(func, monkeypatch_fixture): + # Need monkeypatch because pytest does not + # capture stdlib logging output (see + # https://github.com/pytest-dev/pytest/issues/2079) + + recorded = [] + + def append_to_record(item): + recorded.append(item) + monkeypatch_fixture.setattr(func, 'warn', append_to_record) + return recorded + + +############################################################################### +# Tests +def test_memory_integration(tmpdir): + """ Simple test of memory lazy evaluation. + """ + accumulator = list() + # Rmk: this function has the same name than a module-level function, + # thus it serves as a test to see that both are identified + # as different. + + def f(l): + accumulator.append(1) + return l + + check_identity_lazy(f, accumulator, tmpdir.strpath) + + # Now test clearing + for compress in (False, True): + for mmap_mode in ('r', None): + memory = Memory(location=tmpdir.strpath, verbose=10, + mmap_mode=mmap_mode, compress=compress) + # First clear the cache directory, to check that our code can + # handle that + # NOTE: this line would raise an exception, as the database file is + # still open; we ignore the error since we want to test what + # happens if the directory disappears + shutil.rmtree(tmpdir.strpath, ignore_errors=True) + g = memory.cache(f) + g(1) + g.clear(warn=False) + current_accumulator = len(accumulator) + out = g(1) + + assert len(accumulator) == current_accumulator + 1 + # Also, check that Memory.eval works similarly + assert memory.eval(f, 1) == out + assert len(accumulator) == current_accumulator + 1 + + # Now do a smoke test with a function defined in __main__, as the name + # mangling rules are more complex + f.__module__ = '__main__' + memory = Memory(location=tmpdir.strpath, verbose=0) + memory.cache(f)(1) + + +@parametrize("call_before_reducing", [True, False]) +def test_parallel_call_cached_function_defined_in_jupyter( + tmpdir, call_before_reducing +): + # Calling an interactively defined memory.cache()'d function inside a + # Parallel call used to clear the existing cache related to the said + # function (https://github.com/joblib/joblib/issues/1035) + + # This tests checks that this is no longer the case. + + # TODO: test that the cache related to the function cache persists across + # ipython sessions (provided that no code change were made to the + # function's source)? + + # The first part of the test makes the necessary low-level calls to emulate + # the definition of a function in an jupyter notebook cell. Joblib has + # some custom code to treat functions defined specifically in jupyter + # notebooks/ipython session -- we want to test this code, which requires + # the emulation to be rigorous. 
+ for session_no in [0, 1]: + ipython_cell_source = ''' + def f(x): + return x + ''' + + ipython_cell_id = ''.format(session_no) + + exec( + compile( + textwrap.dedent(ipython_cell_source), + filename=ipython_cell_id, + mode='exec' + ) + ) + # f is now accessible in the locals mapping - but for some unknown + # reason, f = locals()['f'] throws a KeyError at runtime, we need to + # bind locals()['f'] to a different name in the local namespace + aliased_f = locals()['f'] + aliased_f.__module__ = "__main__" + + # Preliminary sanity checks, and tests checking that joblib properly + # identified f as an interactive function defined in a jupyter notebook + assert aliased_f(1) == 1 + assert aliased_f.__code__.co_filename == ipython_cell_id + + memory = Memory(location=tmpdir.strpath, verbose=0) + cached_f = memory.cache(aliased_f) + + assert len(os.listdir(tmpdir / 'joblib')) == 1 + f_cache_relative_directory = os.listdir(tmpdir / 'joblib')[0] + assert 'ipython-input' in f_cache_relative_directory + + f_cache_directory = tmpdir / 'joblib' / f_cache_relative_directory + + if session_no == 0: + # The cache should be empty as cached_f has not been called yet. + assert os.listdir(f_cache_directory) == ['f'] + assert os.listdir(f_cache_directory / 'f') == [] + + if call_before_reducing: + cached_f(3) + # Two files were just created, func_code.py, and a folder + # containing the informations (inputs hash/ouptput) of + # cached_f(3) + assert len(os.listdir(f_cache_directory / 'f')) == 2 + + # Now, testing #1035: when calling a cached function, joblib + # used to dynamically inspect the underlying function to + # extract its source code (to verify it matches the source code + # of the function as last inspected by joblib) -- however, + # source code introspection fails for dynamic functions sent to + # child processes - which would eventually make joblib clear + # the cache associated to f + res = Parallel(n_jobs=2)(delayed(cached_f)(i) for i in [1, 2]) + else: + # Submit the function to the joblib child processes, although + # the function has never been called in the parent yet. This + # triggers a specific code branch inside + # MemorizedFunc.__reduce__. + res = Parallel(n_jobs=2)(delayed(cached_f)(i) for i in [1, 2]) + assert len(os.listdir(f_cache_directory / 'f')) == 3 + + cached_f(3) + + # Making sure f's cache does not get cleared after the parallel + # calls, and contains ALL cached functions calls (f(1), f(2), f(3)) + # and 'func_code.py' + assert len(os.listdir(f_cache_directory / 'f')) == 4 + else: + # For the second session, there should be an already existing cache + assert len(os.listdir(f_cache_directory / 'f')) == 4 + + cached_f(3) + + # The previous cache should not be invalidated after calling the + # function in a new session + assert len(os.listdir(f_cache_directory / 'f')) == 4 + + +def test_no_memory(): + """ Test memory with location=None: no memoize """ + accumulator = list() + + def ff(l): + accumulator.append(1) + return l + + memory = Memory(location=None, verbose=0) + gg = memory.cache(ff) + for _ in range(4): + current_accumulator = len(accumulator) + gg(1) + assert len(accumulator) == current_accumulator + 1 + + +def test_memory_kwarg(tmpdir): + " Test memory with a function with keyword arguments." 
+ accumulator = list() + + def g(l=None, m=1): + accumulator.append(1) + return l + + check_identity_lazy(g, accumulator, tmpdir.strpath) + + memory = Memory(location=tmpdir.strpath, verbose=0) + g = memory.cache(g) + # Smoke test with an explicit keyword argument: + assert g(l=30, m=2) == 30 + + +def test_memory_lambda(tmpdir): + " Test memory with a function with a lambda." + accumulator = list() + + def helper(x): + """ A helper function to define l as a lambda. + """ + accumulator.append(1) + return x + + l = lambda x: helper(x) + + check_identity_lazy(l, accumulator, tmpdir.strpath) + + +def test_memory_name_collision(tmpdir): + " Check that name collisions with functions will raise warnings" + memory = Memory(location=tmpdir.strpath, verbose=0) + + @memory.cache + def name_collision(x): + """ A first function called name_collision + """ + return x + + a = name_collision + + @memory.cache + def name_collision(x): + """ A second function called name_collision + """ + return x + + b = name_collision + + with warns(JobLibCollisionWarning) as warninfo: + a(1) + b(1) + + assert len(warninfo) == 1 + assert "collision" in str(warninfo[0].message) + + +def test_memory_warning_lambda_collisions(tmpdir): + # Check that multiple use of lambda will raise collisions + memory = Memory(location=tmpdir.strpath, verbose=0) + a = lambda x: x + a = memory.cache(a) + b = lambda x: x + 1 + b = memory.cache(b) + + with warns(JobLibCollisionWarning) as warninfo: + assert a(0) == 0 + assert b(1) == 2 + assert a(1) == 1 + + # In recent Python versions, we can retrieve the code of lambdas, + # thus nothing is raised + assert len(warninfo) == 4 + + +def test_memory_warning_collision_detection(tmpdir): + # Check that collisions impossible to detect will raise appropriate + # warnings. + memory = Memory(location=tmpdir.strpath, verbose=0) + a1 = eval('lambda x: x') + a1 = memory.cache(a1) + b1 = eval('lambda x: x+1') + b1 = memory.cache(b1) + + with warns(JobLibCollisionWarning) as warninfo: + a1(1) + b1(1) + a1(0) + + assert len(warninfo) == 2 + assert "cannot detect" in str(warninfo[0].message).lower() + + +def test_memory_partial(tmpdir): + " Test memory with functools.partial." + accumulator = list() + + def func(x, y): + """ A helper function to define l as a lambda. + """ + accumulator.append(1) + return y + + import functools + function = functools.partial(func, 1) + + check_identity_lazy(function, accumulator, tmpdir.strpath) + + +def test_memory_eval(tmpdir): + " Smoke test memory with a function with a function defined in an eval." + memory = Memory(location=tmpdir.strpath, verbose=0) + + m = eval('lambda x: x') + mm = memory.cache(m) + + assert mm(1) == 1 + + +def count_and_append(x=[]): + """ A function with a side effect in its arguments. + + Return the lenght of its argument and append one element. + """ + len_x = len(x) + x.append(None) + return len_x + + +def test_argument_change(tmpdir): + """ Check that if a function has a side effect in its arguments, it + should use the hash of changing arguments. + """ + memory = Memory(location=tmpdir.strpath, verbose=0) + func = memory.cache(count_and_append) + # call the function for the first time, is should cache it with + # argument x=[] + assert func() == 0 + # the second time the argument is x=[None], which is not cached + # yet, so the functions should be called a second time + assert func() == 1 + + +@with_numpy +@parametrize('mmap_mode', [None, 'r']) +def test_memory_numpy(tmpdir, mmap_mode): + " Test memory with a function with numpy arrays." 
+ accumulator = list() + + def n(l=None): + accumulator.append(1) + return l + + memory = Memory(location=tmpdir.strpath, mmap_mode=mmap_mode, + verbose=0) + cached_n = memory.cache(n) + + rnd = np.random.RandomState(0) + for i in range(3): + a = rnd.random_sample((10, 10)) + for _ in range(3): + assert np.all(cached_n(a) == a) + assert len(accumulator) == i + 1 + + +@with_numpy +def test_memory_numpy_check_mmap_mode(tmpdir, monkeypatch): + """Check that mmap_mode is respected even at the first call""" + + memory = Memory(location=tmpdir.strpath, mmap_mode='r', verbose=0) + + @memory.cache() + def twice(a): + return a * 2 + + a = np.ones(3) + + b = twice(a) + c = twice(a) + + assert isinstance(c, np.memmap) + assert c.mode == 'r' + + assert isinstance(b, np.memmap) + assert b.mode == 'r' + + # Corrupts the file, Deleting b and c mmaps + # is necessary to be able edit the file + del b + del c + gc.collect() + corrupt_single_cache_item(memory) + + # Make sure that corrupting the file causes recomputation and that + # a warning is issued. + recorded_warnings = monkeypatch_cached_func_warn(twice, monkeypatch) + d = twice(a) + assert len(recorded_warnings) == 1 + exception_msg = 'Exception while loading results' + assert exception_msg in recorded_warnings[0] + # Asserts that the recomputation returns a mmap + assert isinstance(d, np.memmap) + assert d.mode == 'r' + + +def test_memory_exception(tmpdir): + """ Smoketest the exception handling of Memory. + """ + memory = Memory(location=tmpdir.strpath, verbose=0) + + class MyException(Exception): + pass + + @memory.cache + def h(exc=0): + if exc: + raise MyException + + # Call once, to initialise the cache + h() + + for _ in range(3): + # Call 3 times, to be sure that the Exception is always raised + with raises(MyException): + h(1) + + +def test_memory_ignore(tmpdir): + " Test the ignore feature of memory " + memory = Memory(location=tmpdir.strpath, verbose=0) + accumulator = list() + + @memory.cache(ignore=['y']) + def z(x, y=1): + accumulator.append(1) + + assert z.ignore == ['y'] + + z(0, y=1) + assert len(accumulator) == 1 + z(0, y=1) + assert len(accumulator) == 1 + z(0, y=2) + assert len(accumulator) == 1 + + +def test_memory_args_as_kwargs(tmpdir): + """Non-regression test against 0.12.0 changes. + + https://github.com/joblib/joblib/pull/751 + """ + memory = Memory(location=tmpdir.strpath, verbose=0) + + @memory.cache + def plus_one(a): + return a + 1 + + # It's possible to call a positional arg as a kwarg. + assert plus_one(1) == 2 + assert plus_one(a=1) == 2 + + # However, a positional argument that joblib hadn't seen + # before would cause a failure if it was passed as a kwarg. + assert plus_one(a=2) == 3 + + +@parametrize('ignore, verbose, mmap_mode', [(['x'], 100, 'r'), + ([], 10, None)]) +def test_partial_decoration(tmpdir, ignore, verbose, mmap_mode): + "Check cache may be called with kwargs before decorating" + memory = Memory(location=tmpdir.strpath, verbose=0) + + @memory.cache(ignore=ignore, verbose=verbose, mmap_mode=mmap_mode) + def z(x): + pass + + assert z.ignore == ignore + assert z._verbose == verbose + assert z.mmap_mode == mmap_mode + + +def test_func_dir(tmpdir): + # Test the creation of the memory cache directory for the function. 
+ memory = Memory(location=tmpdir.strpath, verbose=0) + path = __name__.split('.') + path.append('f') + path = tmpdir.join('joblib', *path).strpath + + g = memory.cache(f) + # Test that the function directory is created on demand + func_id = _build_func_identifier(f) + location = os.path.join(g.store_backend.location, func_id) + assert location == path + assert os.path.exists(path) + assert memory.location == os.path.dirname(g.store_backend.location) + with warns(DeprecationWarning) as w: + assert memory.cachedir == g.store_backend.location + assert len(w) == 1 + assert "The 'cachedir' attribute has been deprecated" in str(w[-1].message) + + # Test that the code is stored. + # For the following test to be robust to previous execution, we clear + # the in-memory store + _FUNCTION_HASHES.clear() + assert not g._check_previous_func_code() + assert os.path.exists(os.path.join(path, 'func_code.py')) + assert g._check_previous_func_code() + + # Test the robustness to failure of loading previous results. + func_id, args_id = g._get_output_identifiers(1) + output_dir = os.path.join(g.store_backend.location, func_id, args_id) + a = g(1) + assert os.path.exists(output_dir) + os.remove(os.path.join(output_dir, 'output.pkl')) + assert a == g(1) + + +def test_persistence(tmpdir): + # Test the memorized functions can be pickled and restored. + memory = Memory(location=tmpdir.strpath, verbose=0) + g = memory.cache(f) + output = g(1) + + h = pickle.loads(pickle.dumps(g)) + + func_id, args_id = h._get_output_identifiers(1) + output_dir = os.path.join(h.store_backend.location, func_id, args_id) + assert os.path.exists(output_dir) + assert output == h.store_backend.load_item([func_id, args_id]) + memory2 = pickle.loads(pickle.dumps(memory)) + assert memory.store_backend.location == memory2.store_backend.location + + # Smoke test that pickling a memory with location=None works + memory = Memory(location=None, verbose=0) + pickle.loads(pickle.dumps(memory)) + g = memory.cache(f) + gp = pickle.loads(pickle.dumps(g)) + gp(1) + + +def test_call_and_shelve(tmpdir): + # Test MemorizedFunc outputting a reference to cache. + + for func, Result in zip((MemorizedFunc(f, tmpdir.strpath), + NotMemorizedFunc(f), + Memory(location=tmpdir.strpath, + verbose=0).cache(f), + Memory(location=None).cache(f), + ), + (MemorizedResult, NotMemorizedResult, + MemorizedResult, NotMemorizedResult)): + assert func(2) == 5 + result = func.call_and_shelve(2) + assert isinstance(result, Result) + assert result.get() == 5 + + result.clear() + with raises(KeyError): + result.get() + result.clear() # Do nothing if there is no cache. + + +def test_call_and_shelve_argument_hash(tmpdir): + # Verify that a warning is raised when accessing arguments_hash + # attribute from MemorizedResult + func = Memory(location=tmpdir.strpath, verbose=0).cache(f) + result = func.call_and_shelve(2) + assert isinstance(result, MemorizedResult) + with warns(DeprecationWarning) as w: + assert result.argument_hash == result.args_id + assert len(w) == 1 + assert "The 'argument_hash' attribute has been deprecated" \ + in str(w[-1].message) + + +def test_call_and_shelve_lazily_load_stored_result(tmpdir): + """Check call_and_shelve only load stored data if needed.""" + test_access_time_file = tmpdir.join('test_access') + test_access_time_file.write('test_access') + test_access_time = os.stat(test_access_time_file.strpath).st_atime + # check file system access time stats resolution is lower than test wait + # timings. 
+ time.sleep(0.5) + assert test_access_time_file.read() == 'test_access' + + if test_access_time == os.stat(test_access_time_file.strpath).st_atime: + # Skip this test when access time cannot be retrieved with enough + # precision from the file system (e.g. NTFS on windows). + pytest.skip("filesystem does not support fine-grained access time " + "attribute") + + memory = Memory(location=tmpdir.strpath, verbose=0) + func = memory.cache(f) + func_id, argument_hash = func._get_output_identifiers(2) + result_path = os.path.join(memory.store_backend.location, + func_id, argument_hash, 'output.pkl') + assert func(2) == 5 + first_access_time = os.stat(result_path).st_atime + time.sleep(1) + + # Should not access the stored data + result = func.call_and_shelve(2) + assert isinstance(result, MemorizedResult) + assert os.stat(result_path).st_atime == first_access_time + time.sleep(1) + + # Read the stored data => last access time is greater than first_access + assert result.get() == 5 + assert os.stat(result_path).st_atime > first_access_time + + +def test_memorized_pickling(tmpdir): + for func in (MemorizedFunc(f, tmpdir.strpath), NotMemorizedFunc(f)): + filename = tmpdir.join('pickling_test.dat').strpath + result = func.call_and_shelve(2) + with open(filename, 'wb') as fp: + pickle.dump(result, fp) + with open(filename, 'rb') as fp: + result2 = pickle.load(fp) + assert result2.get() == result.get() + os.remove(filename) + + +def test_memorized_repr(tmpdir): + func = MemorizedFunc(f, tmpdir.strpath) + result = func.call_and_shelve(2) + + func2 = MemorizedFunc(f, tmpdir.strpath) + result2 = func2.call_and_shelve(2) + assert result.get() == result2.get() + assert repr(func) == repr(func2) + + # Smoke test with NotMemorizedFunc + func = NotMemorizedFunc(f) + repr(func) + repr(func.call_and_shelve(2)) + + # Smoke test for message output (increase code coverage) + func = MemorizedFunc(f, tmpdir.strpath, verbose=11, timestamp=time.time()) + result = func.call_and_shelve(11) + result.get() + + func = MemorizedFunc(f, tmpdir.strpath, verbose=11) + result = func.call_and_shelve(11) + result.get() + + func = MemorizedFunc(f, tmpdir.strpath, verbose=5, timestamp=time.time()) + result = func.call_and_shelve(11) + result.get() + + func = MemorizedFunc(f, tmpdir.strpath, verbose=5) + result = func.call_and_shelve(11) + result.get() + + +def test_memory_file_modification(capsys, tmpdir, monkeypatch): + # Test that modifying a Python file after loading it does not lead to + # Recomputation + dir_name = tmpdir.mkdir('tmp_import').strpath + filename = os.path.join(dir_name, 'tmp_joblib_.py') + content = 'def f(x):\n print(x)\n return x\n' + with open(filename, 'w') as module_file: + module_file.write(content) + + # Load the module: + monkeypatch.syspath_prepend(dir_name) + import tmp_joblib_ as tmp + + memory = Memory(location=tmpdir.strpath, verbose=0) + f = memory.cache(tmp.f) + # First call f a few times + f(1) + f(2) + f(1) + + # Now modify the module where f is stored without modifying f + with open(filename, 'w') as module_file: + module_file.write('\n\n' + content) + + # And call f a couple more times + f(1) + f(1) + + # Flush the .pyc files + shutil.rmtree(dir_name) + os.mkdir(dir_name) + # Now modify the module where f is stored, modifying f + content = 'def f(x):\n print("x=%s" % x)\n return x\n' + with open(filename, 'w') as module_file: + module_file.write(content) + + # And call f more times prior to reloading: the cache should not be + # invalidated at this point as the active function definition has 
not + # changed in memory yet. + f(1) + f(1) + + # Now reload + sys.stdout.write('Reloading\n') + sys.modules.pop('tmp_joblib_') + import tmp_joblib_ as tmp + f = memory.cache(tmp.f) + + # And call f more times + f(1) + f(1) + + out, err = capsys.readouterr() + assert out == '1\n2\nReloading\nx=1\n' + + +def _function_to_cache(a, b): + # Just a place holder function to be mutated by tests + pass + + +def _sum(a, b): + return a + b + + +def _product(a, b): + return a * b + + +def test_memory_in_memory_function_code_change(tmpdir): + _function_to_cache.__code__ = _sum.__code__ + + memory = Memory(location=tmpdir.strpath, verbose=0) + f = memory.cache(_function_to_cache) + + assert f(1, 2) == 3 + assert f(1, 2) == 3 + + with warns(JobLibCollisionWarning): + # Check that inline function modification triggers a cache invalidation + _function_to_cache.__code__ = _product.__code__ + assert f(1, 2) == 2 + assert f(1, 2) == 2 + + +def test_clear_memory_with_none_location(): + memory = Memory(location=None) + memory.clear() + + +def func_with_kwonly_args(a, b, *, kw1='kw1', kw2='kw2'): + return a, b, kw1, kw2 + + +def func_with_signature(a: int, b: float) -> float: + return a + b + + +def test_memory_func_with_kwonly_args(tmpdir): + memory = Memory(location=tmpdir.strpath, verbose=0) + func_cached = memory.cache(func_with_kwonly_args) + + assert func_cached(1, 2, kw1=3) == (1, 2, 3, 'kw2') + + # Making sure that providing a keyword-only argument by + # position raises an exception + with raises(ValueError) as excinfo: + func_cached(1, 2, 3, kw2=4) + excinfo.match("Keyword-only parameter 'kw1' was passed as positional " + "parameter") + + # Keyword-only parameter passed by position with cached call + # should still raise ValueError + func_cached(1, 2, kw1=3, kw2=4) + + with raises(ValueError) as excinfo: + func_cached(1, 2, 3, kw2=4) + excinfo.match("Keyword-only parameter 'kw1' was passed as positional " + "parameter") + + # Test 'ignore' parameter + func_cached = memory.cache(func_with_kwonly_args, ignore=['kw2']) + assert func_cached(1, 2, kw1=3, kw2=4) == (1, 2, 3, 4) + assert func_cached(1, 2, kw1=3, kw2='ignored') == (1, 2, 3, 4) + + +def test_memory_func_with_signature(tmpdir): + memory = Memory(location=tmpdir.strpath, verbose=0) + func_cached = memory.cache(func_with_signature) + + assert func_cached(1, 2.) == 3. 
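+
+
+# --- Editorial sketch, not part of the upstream joblib test suite ---
+# The tests above exercise ``Memory.cache`` with keyword-only arguments and
+# the ``ignore`` option. A minimal usage sketch, assuming only a writable
+# ``cache_dir``; the function and argument names below are illustrative.
+def _example_memory_ignore_usage(cache_dir):
+    memory = Memory(location=cache_dir, verbose=0)
+
+    @memory.cache(ignore=['verbose'])
+    def add(a, b, *, verbose=False):
+        # ``verbose`` is excluded from the cache key, so calls that differ
+        # only in ``verbose`` reuse the same cached result.
+        return a + b
+
+    assert add(1, 2, verbose=True) == 3
+    assert add(1, 2, verbose=False) == 3  # served from the cache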
+ + +def _setup_toy_cache(tmpdir, num_inputs=10): + memory = Memory(location=tmpdir.strpath, verbose=0) + + @memory.cache() + def get_1000_bytes(arg): + return 'a' * 1000 + + inputs = list(range(num_inputs)) + for arg in inputs: + get_1000_bytes(arg) + + func_id = _build_func_identifier(get_1000_bytes) + hash_dirnames = [get_1000_bytes._get_output_identifiers(arg)[1] + for arg in inputs] + + full_hashdirs = [os.path.join(get_1000_bytes.store_backend.location, + func_id, dirname) + for dirname in hash_dirnames] + return memory, full_hashdirs, get_1000_bytes + + +def test__get_items(tmpdir): + memory, expected_hash_dirs, _ = _setup_toy_cache(tmpdir) + items = memory.store_backend.get_items() + hash_dirs = [ci.path for ci in items] + assert set(hash_dirs) == set(expected_hash_dirs) + + def get_files_size(directory): + full_paths = [os.path.join(directory, fn) + for fn in os.listdir(directory)] + return sum(os.path.getsize(fp) for fp in full_paths) + + expected_hash_cache_sizes = [get_files_size(hash_dir) + for hash_dir in hash_dirs] + hash_cache_sizes = [ci.size for ci in items] + assert hash_cache_sizes == expected_hash_cache_sizes + + output_filenames = [os.path.join(hash_dir, 'output.pkl') + for hash_dir in hash_dirs] + + expected_last_accesses = [ + datetime.datetime.fromtimestamp(os.path.getatime(fn)) + for fn in output_filenames] + last_accesses = [ci.last_access for ci in items] + assert last_accesses == expected_last_accesses + + +def test__get_items_to_delete(tmpdir): + memory, expected_hash_cachedirs, _ = _setup_toy_cache(tmpdir) + items = memory.store_backend.get_items() + # bytes_limit set to keep only one cache item (each hash cache + # folder is about 1000 bytes + metadata) + items_to_delete = memory.store_backend._get_items_to_delete('2K') + nb_hashes = len(expected_hash_cachedirs) + assert set.issubset(set(items_to_delete), set(items)) + assert len(items_to_delete) == nb_hashes - 1 + + # Sanity check bytes_limit=2048 is the same as bytes_limit='2K' + items_to_delete_2048b = memory.store_backend._get_items_to_delete(2048) + assert sorted(items_to_delete) == sorted(items_to_delete_2048b) + + # bytes_limit greater than the size of the cache + items_to_delete_empty = memory.store_backend._get_items_to_delete('1M') + assert items_to_delete_empty == [] + + # All the cache items need to be deleted + bytes_limit_too_small = 500 + items_to_delete_500b = memory.store_backend._get_items_to_delete( + bytes_limit_too_small) + assert set(items_to_delete_500b), set(items) + + # Test LRU property: surviving cache items should all have a more + # recent last_access that the ones that have been deleted + items_to_delete_6000b = memory.store_backend._get_items_to_delete(6000) + surviving_items = set(items).difference(items_to_delete_6000b) + + assert (max(ci.last_access for ci in items_to_delete_6000b) <= + min(ci.last_access for ci in surviving_items)) + + +def test_memory_reduce_size(tmpdir): + memory, _, _ = _setup_toy_cache(tmpdir) + ref_cache_items = memory.store_backend.get_items() + + # By default memory.bytes_limit is None and reduce_size is a noop + memory.reduce_size() + cache_items = memory.store_backend.get_items() + assert sorted(ref_cache_items) == sorted(cache_items) + + # No cache items deleted if bytes_limit greater than the size of + # the cache + memory.bytes_limit = '1M' + memory.reduce_size() + cache_items = memory.store_backend.get_items() + assert sorted(ref_cache_items) == sorted(cache_items) + + # bytes_limit is set so that only two cache items are kept + 
memory.bytes_limit = '3K' + memory.reduce_size() + cache_items = memory.store_backend.get_items() + assert set.issubset(set(cache_items), set(ref_cache_items)) + assert len(cache_items) == 2 + + # bytes_limit set so that no cache item is kept + bytes_limit_too_small = 500 + memory.bytes_limit = bytes_limit_too_small + memory.reduce_size() + cache_items = memory.store_backend.get_items() + assert cache_items == [] + + +def test_memory_clear(tmpdir): + memory, _, _ = _setup_toy_cache(tmpdir) + memory.clear() + + assert os.listdir(memory.store_backend.location) == [] + + +def fast_func_with_complex_output(): + complex_obj = ['a' * 1000] * 1000 + return complex_obj + + +def fast_func_with_conditional_complex_output(complex_output=True): + complex_obj = {str(i): i for i in range(int(1e5))} + return complex_obj if complex_output else 'simple output' + + +@with_multiprocessing +def test_cached_function_race_condition_when_persisting_output(tmpdir, capfd): + # Test race condition where multiple processes are writing into + # the same output.pkl. See + # https://github.com/joblib/joblib/issues/490 for more details. + memory = Memory(location=tmpdir.strpath) + func_cached = memory.cache(fast_func_with_complex_output) + + Parallel(n_jobs=2)(delayed(func_cached)() for i in range(3)) + + stdout, stderr = capfd.readouterr() + + # Checking both stdout and stderr (ongoing PR #434 may change + # logging destination) to make sure there is no exception while + # loading the results + exception_msg = 'Exception while loading results' + assert exception_msg not in stdout + assert exception_msg not in stderr + + +@with_multiprocessing +def test_cached_function_race_condition_when_persisting_output_2(tmpdir, + capfd): + # Test race condition in first attempt at solving + # https://github.com/joblib/joblib/issues/490. The race condition + # was due to the delay between seeing the cache directory created + # (interpreted as the result being cached) and the output.pkl being + # pickled. + memory = Memory(location=tmpdir.strpath) + func_cached = memory.cache(fast_func_with_conditional_complex_output) + + Parallel(n_jobs=2)(delayed(func_cached)(True if i % 2 == 0 else False) + for i in range(3)) + + stdout, stderr = capfd.readouterr() + + # Checking both stdout and stderr (ongoing PR #434 may change + # logging destination) to make sure there is no exception while + # loading the results + exception_msg = 'Exception while loading results' + assert exception_msg not in stdout + assert exception_msg not in stderr + + +def test_memory_recomputes_after_an_error_while_loading_results( + tmpdir, monkeypatch): + memory = Memory(location=tmpdir.strpath) + + def func(arg): + # This makes sure that the timestamp returned by two calls of + # func are different. This is needed on Windows where + # time.time resolution may not be accurate enough + time.sleep(0.01) + return arg, time.time() + + cached_func = memory.cache(func) + input_arg = 'arg' + arg, timestamp = cached_func(input_arg) + + # Make sure the function is correctly cached + assert arg == input_arg + + # Corrupting output.pkl to make sure that an error happens when + # loading the cached result + corrupt_single_cache_item(memory) + + # Make sure that corrupting the file causes recomputation and that + # a warning is issued. 
+ recorded_warnings = monkeypatch_cached_func_warn(cached_func, monkeypatch) + recomputed_arg, recomputed_timestamp = cached_func(arg) + assert len(recorded_warnings) == 1 + exception_msg = 'Exception while loading results' + assert exception_msg in recorded_warnings[0] + assert recomputed_arg == arg + assert recomputed_timestamp > timestamp + + # Corrupting output.pkl to make sure that an error happens when + # loading the cached result + corrupt_single_cache_item(memory) + reference = cached_func.call_and_shelve(arg) + try: + reference.get() + raise AssertionError( + "It normally not possible to load a corrupted" + " MemorizedResult" + ) + except KeyError as e: + message = "is corrupted" + assert message in str(e.args) + + +def test_deprecated_cachedir_behaviour(tmpdir): + # verify the right deprecation warnings are raised when using cachedir + # option instead of new location parameter. + with warns(None) as w: + memory = Memory(cachedir=tmpdir.strpath, verbose=0) + assert memory.store_backend.location.startswith(tmpdir.strpath) + + assert len(w) == 1 + assert "The 'cachedir' parameter has been deprecated" in str(w[-1].message) + + with warns(None) as w: + memory = Memory() + assert memory.cachedir is None + + assert len(w) == 1 + assert "The 'cachedir' attribute has been deprecated" in str(w[-1].message) + + error_regex = """You set both "location='.+ and "cachedir='.+""" + with raises(ValueError, match=error_regex): + memory = Memory(location=tmpdir.strpath, cachedir=tmpdir.strpath, + verbose=0) + + +class IncompleteStoreBackend(StoreBackendBase): + """This backend cannot be instanciated and should raise a TypeError.""" + pass + + +class DummyStoreBackend(StoreBackendBase): + """A dummy store backend that does nothing.""" + + def _open_item(self, *args, **kwargs): + """Open an item on store.""" + "Does nothing" + + def _item_exists(self, location): + """Check if an item location exists.""" + "Does nothing" + + def _move_item(self, src, dst): + """Move an item from src to dst in store.""" + "Does nothing" + + def create_location(self, location): + """Create location on store.""" + "Does nothing" + + def exists(self, obj): + """Check if an object exists in the store""" + return False + + def clear_location(self, obj): + """Clear object on store""" + "Does nothing" + + def get_items(self): + """Returns the whole list of items available in cache.""" + return [] + + def configure(self, location, *args, **kwargs): + """Configure the store""" + "Does nothing" + + +@parametrize("invalid_prefix", [None, dict(), list()]) +def test_register_invalid_store_backends_key(invalid_prefix): + # verify the right exceptions are raised when passing a wrong backend key. + with raises(ValueError) as excinfo: + register_store_backend(invalid_prefix, None) + excinfo.match(r'Store backend name should be a string*') + + +def test_register_invalid_store_backends_object(): + # verify the right exceptions are raised when passing a wrong backend + # object. 
+    with raises(ValueError) as excinfo:
+        register_store_backend("fs", None)
+    excinfo.match(r'Store backend should inherit StoreBackendBase*')
+
+
+def test_memory_default_store_backend():
+    # test that an unknown backend falls back to a FileSystemStoreBackend
+    with raises(TypeError) as excinfo:
+        Memory(location='/tmp/joblib', backend='unknown')
+    excinfo.match(r"Unknown location*")
+
+
+def test_warning_on_unknown_location_type():
+    class NonSupportedLocationClass:
+        pass
+    unsupported_location = NonSupportedLocationClass()
+
+    with warns(UserWarning) as warninfo:
+        _store_backend_factory("local", location=unsupported_location)
+
+    expected_message = ("Instanciating a backend using a "
+                        "NonSupportedLocationClass as a location is not "
+                        "supported by joblib")
+    assert expected_message in str(warninfo[0].message)
+
+
+def test_instanciate_incomplete_store_backend():
+    # Verify that registering an external incomplete store backend raises an
+    # exception when one tries to instantiate it.
+    backend_name = "isb"
+    register_store_backend(backend_name, IncompleteStoreBackend)
+    assert (backend_name, IncompleteStoreBackend) in _STORE_BACKENDS.items()
+    with raises(TypeError) as excinfo:
+        _store_backend_factory(backend_name, "fake_location")
+    excinfo.match(r"Can't instantiate abstract class "
+                  "IncompleteStoreBackend with abstract methods*")
+
+
+def test_dummy_store_backend():
+    # Verify that registering an external store backend works.
+
+    backend_name = "dsb"
+    register_store_backend(backend_name, DummyStoreBackend)
+    assert (backend_name, DummyStoreBackend) in _STORE_BACKENDS.items()
+
+    backend_obj = _store_backend_factory(backend_name, "dummy_location")
+    assert isinstance(backend_obj, DummyStoreBackend)
+
+
+def test_instanciate_store_backend_with_pathlib_path():
+    # Instantiate a FileSystemStoreBackend using a pathlib.Path object
+    path = pathlib.Path("some_folder")
+    backend_obj = _store_backend_factory("local", path)
+    assert backend_obj.location == "some_folder"
+
+
+def test_filesystem_store_backend_repr(tmpdir):
+    # Verify the string representation of a filesystem store backend.
+
+    repr_pattern = 'FileSystemStoreBackend(location="{location}")'
+    backend = FileSystemStoreBackend()
+    assert backend.location is None
+
+    repr(backend)  # Should not raise an exception
+
+    assert str(backend) == repr_pattern.format(location=None)
+
+    # The backend location is passed explicitly via the configure method
+    # (called by the internal _store_backend_factory function)
+    backend.configure(tmpdir.strpath)
+
+    assert str(backend) == repr_pattern.format(location=tmpdir.strpath)
+
+    repr(backend)  # Should not raise an exception
+
+
+def test_memory_objects_repr(tmpdir):
+    # Verify printable reprs of MemorizedResult, MemorizedFunc and Memory.
+ + def my_func(a, b): + return a + b + + memory = Memory(location=tmpdir.strpath, verbose=0) + memorized_func = memory.cache(my_func) + + memorized_func_repr = 'MemorizedFunc(func={func}, location={location})' + + assert str(memorized_func) == memorized_func_repr.format( + func=my_func, + location=memory.store_backend.location) + + memorized_result = memorized_func.call_and_shelve(42, 42) + + memorized_result_repr = ('MemorizedResult(location="{location}", ' + 'func="{func}", args_id="{args_id}")') + + assert str(memorized_result) == memorized_result_repr.format( + location=memory.store_backend.location, + func=memorized_result.func_id, + args_id=memorized_result.args_id) + + assert str(memory) == 'Memory(location={location})'.format( + location=memory.store_backend.location) + + +def test_memorized_result_pickle(tmpdir): + # Verify a MemoryResult object can be pickled/depickled. Non regression + # test introduced following issue + # https://github.com/joblib/joblib/issues/747 + + memory = Memory(location=tmpdir.strpath) + + @memory.cache + def g(x): + return x**2 + + memorized_result = g.call_and_shelve(4) + memorized_result_pickle = pickle.dumps(memorized_result) + memorized_result_loads = pickle.loads(memorized_result_pickle) + + assert memorized_result.store_backend.location == \ + memorized_result_loads.store_backend.location + assert memorized_result.func == memorized_result_loads.func + assert memorized_result.args_id == memorized_result_loads.args_id + assert str(memorized_result) == str(memorized_result_loads) + + +def compare(left, right, ignored_attrs=None): + if ignored_attrs is None: + ignored_attrs = [] + + left_vars = vars(left) + right_vars = vars(right) + assert set(left_vars.keys()) == set(right_vars.keys()) + for attr in left_vars.keys(): + if attr in ignored_attrs: + continue + assert left_vars[attr] == right_vars[attr] + + +@pytest.mark.parametrize('memory_kwargs', + [{'compress': 3, 'verbose': 2}, + {'mmap_mode': 'r', 'verbose': 5, 'bytes_limit': 1e6, + 'backend_options': {'parameter': 'unused'}}]) +def test_memory_pickle_dump_load(tmpdir, memory_kwargs): + memory = Memory(location=tmpdir.strpath, **memory_kwargs) + + memory_reloaded = pickle.loads(pickle.dumps(memory)) + + # Compare Memory instance before and after pickle roundtrip + compare(memory.store_backend, memory_reloaded.store_backend) + compare(memory, memory_reloaded, + ignored_attrs=set(['store_backend', 'timestamp', '_func_code_id'])) + assert hash(memory) == hash(memory_reloaded) + + func_cached = memory.cache(f) + + func_cached_reloaded = pickle.loads(pickle.dumps(func_cached)) + + # Compare MemorizedFunc instance before/after pickle roundtrip + compare(func_cached.store_backend, func_cached_reloaded.store_backend) + compare(func_cached, func_cached_reloaded, + ignored_attrs=set(['store_backend', 'timestamp', '_func_code_id'])) + assert hash(func_cached) == hash(func_cached_reloaded) + + # Compare MemorizedResult instance before/after pickle roundtrip + memorized_result = func_cached.call_and_shelve(1) + memorized_result_reloaded = pickle.loads(pickle.dumps(memorized_result)) + + compare(memorized_result.store_backend, + memorized_result_reloaded.store_backend) + compare(memorized_result, memorized_result_reloaded, + ignored_attrs=set(['store_backend', 'timestamp', '_func_code_id'])) + assert hash(memorized_result) == hash(memorized_result_reloaded) diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_module.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_module.py 
new file mode 100644
index 0000000..9c3b12b
--- /dev/null
+++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_module.py
@@ -0,0 +1,50 @@
+import sys
+import joblib
+import pytest
+from joblib.testing import check_subprocess_call
+
+
+def test_version():
+    assert hasattr(joblib, '__version__'), (
+        "There is no __version__ attribute on the joblib module")
+
+
+def test_no_start_method_side_effect_on_import():
+    # check that importing joblib does not implicitly set the global
+    # start_method for multiprocessing.
+    code = """if True:
+        import joblib
+        import multiprocessing as mp
+        # The following line would raise RuntimeError if the
+        # start_method is already set.
+        mp.set_start_method("loky")
+    """
+    check_subprocess_call([sys.executable, '-c', code])
+
+
+def test_no_semaphore_tracker_on_import():
+    # check that importing joblib does not implicitly spawn a resource tracker
+    # or a semaphore tracker
+    code = """if True:
+        import joblib
+        from multiprocessing import semaphore_tracker
+        # The following assertion would fail if importing joblib had
+        # spawned the semaphore tracker.
+        msg = "multiprocessing.semaphore_tracker has been spawned on import"
+        assert semaphore_tracker._semaphore_tracker._fd is None, msg"""
+    if sys.version_info >= (3, 8):
+        # semaphore_tracker was renamed in Python 3.8:
+        code = code.replace("semaphore_tracker", "resource_tracker")
+    check_subprocess_call([sys.executable, '-c', code])
+
+
+def test_no_resource_tracker_on_import():
+    code = """if True:
+        import joblib
+        from joblib.externals.loky.backend import resource_tracker
+        # The following assertion would fail if importing joblib had
+        # spawned loky's resource tracker.
+        msg = "loky.resource_tracker has been spawned on import"
+        assert resource_tracker._resource_tracker._fd is None, msg
+    """
+    check_subprocess_call([sys.executable, '-c', code])
diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_my_exceptions.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_my_exceptions.py
new file mode 100644
index 0000000..272bc17
--- /dev/null
+++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_my_exceptions.py
@@ -0,0 +1,60 @@
+"""
+Test the automatically generated exceptions in joblib.my_exceptions.
+"""
+from joblib.my_exceptions import (
+    JoblibException, JoblibNameError, _mk_exception)
+
+
+class CustomException(Exception):
+    def __init__(self, a, b, c, d):
+        self.a, self.b, self.c, self.d = a, b, c, d
+
+
+class CustomException2(Exception):
+    """A custom exception with a .args attribute
+
+    Just to check that the JoblibException created from it
+    has its args set correctly
+    """
+    def __init__(self, a, *args):
+        self.a = a
+        self.args = args
+
+
+def test_inheritance():
+    assert isinstance(JoblibNameError(), NameError)
+    assert isinstance(JoblibNameError(), JoblibException)
+    assert (JoblibNameError is _mk_exception(NameError)[0])
+
+
+def test_inheritance_special_cases():
+    # _mk_exception should transform Exception to JoblibException
+    assert (_mk_exception(Exception)[0] is JoblibException)
+
+    # Subclasses of JoblibException should be mapped to
+    # themselves by _mk_exception
+    assert (_mk_exception(JoblibException)[0] is JoblibException)
+
+    # Non-inheritable exception classes should be mapped to
+    # JoblibException by _mk_exception. That can happen with classes
+    # generated with SWIG. See
+    # https://github.com/joblib/joblib/issues/269 for a concrete
+    # example.
+ non_inheritable_classes = [type(lambda: None), bool] + for exception in non_inheritable_classes: + assert (_mk_exception(exception)[0] is JoblibException) + + +def test__mk_exception(): + # Check that _mk_exception works on a bunch of different exceptions + for klass in (Exception, TypeError, SyntaxError, ValueError, + ImportError, CustomException, CustomException2): + message = 'This message should be in the exception repr' + exc = _mk_exception(klass)[0]( + message, 'some', 'other', 'args', 'that are not', 'in the repr') + exc_repr = repr(exc) + + assert isinstance(exc, klass) + assert isinstance(exc, JoblibException) + assert exc.__class__.__name__ in exc_repr + assert message in exc_repr diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_numpy_pickle.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_numpy_pickle.py new file mode 100644 index 0000000..db130b1 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_numpy_pickle.py @@ -0,0 +1,1012 @@ +"""Test the numpy pickler as a replacement of the standard pickler.""" + +import copy +import os +import random +import re +import io +import warnings +import gzip +import zlib +import bz2 +import pickle +import socket +from contextlib import closing +import mmap +try: + import lzma +except ImportError: + lzma = None +import pytest + +from joblib.test.common import np, with_numpy, with_lz4, without_lz4 +from joblib.test.common import with_memory_profiler, memory_used +from joblib.testing import parametrize, raises, SkipTest, warns + +# numpy_pickle is not a drop-in replacement of pickle, as it takes +# filenames instead of open files as arguments. +from joblib import numpy_pickle, register_compressor +from joblib.test import data + +from joblib.numpy_pickle_utils import _IO_BUFFER_SIZE +from joblib.numpy_pickle_utils import _detect_compressor +from joblib.compressor import (_COMPRESSORS, _LZ4_PREFIX, CompressorWrapper, + LZ4_NOT_INSTALLED_ERROR, BinaryZlibFile) + + +############################################################################### +# Define a list of standard types. +# Borrowed from dill, initial author: Micheal McKerns: +# http://dev.danse.us/trac/pathos/browser/dill/dill_test2.py + +typelist = [] + +# testing types +_none = None +typelist.append(_none) +_type = type +typelist.append(_type) +_bool = bool(1) +typelist.append(_bool) +_int = int(1) +typelist.append(_int) +_float = float(1) +typelist.append(_float) +_complex = complex(1) +typelist.append(_complex) +_string = str(1) +typelist.append(_string) +_tuple = () +typelist.append(_tuple) +_list = [] +typelist.append(_list) +_dict = {} +typelist.append(_dict) +_builtin = len +typelist.append(_builtin) + + +def _function(x): + yield x + + +class _class: + def _method(self): + pass + + +class _newclass(object): + def _method(self): + pass + + +typelist.append(_function) +typelist.append(_class) +typelist.append(_newclass) # +_instance = _class() +typelist.append(_instance) +_object = _newclass() +typelist.append(_object) # + + +############################################################################### +# Tests + +@parametrize('compress', [0, 1]) +@parametrize('member', typelist) +def test_standard_types(tmpdir, compress, member): + # Test pickling and saving with standard types. 
+ filename = tmpdir.join('test.pkl').strpath + numpy_pickle.dump(member, filename, compress=compress) + _member = numpy_pickle.load(filename) + # We compare the pickled instance to the reloaded one only if it + # can be compared to a copied one + if member == copy.deepcopy(member): + assert member == _member + + +def test_value_error(): + # Test inverting the input arguments to dump + with raises(ValueError): + numpy_pickle.dump('foo', dict()) + + +@parametrize('wrong_compress', [-1, 10, dict()]) +def test_compress_level_error(wrong_compress): + # Verify that passing an invalid compress argument raises an error. + exception_msg = ('Non valid compress level given: ' + '"{0}"'.format(wrong_compress)) + with raises(ValueError) as excinfo: + numpy_pickle.dump('dummy', 'foo', compress=wrong_compress) + excinfo.match(exception_msg) + + +@with_numpy +@parametrize('compress', [False, True, 0, 3, 'zlib']) +def test_numpy_persistence(tmpdir, compress): + filename = tmpdir.join('test.pkl').strpath + rnd = np.random.RandomState(0) + a = rnd.random_sample((10, 2)) + # We use 'a.T' to have a non C-contiguous array. + for index, obj in enumerate(((a,), (a.T,), (a, a), [a, a, a])): + filenames = numpy_pickle.dump(obj, filename, compress=compress) + + # All is cached in one file + assert len(filenames) == 1 + # Check that only one file was created + assert filenames[0] == filename + # Check that this file does exist + assert os.path.exists(filenames[0]) + + # Unpickle the object + obj_ = numpy_pickle.load(filename) + # Check that the items are indeed arrays + for item in obj_: + assert isinstance(item, np.ndarray) + # And finally, check that all the values are equal. + np.testing.assert_array_equal(np.array(obj), np.array(obj_)) + + # Now test with array subclasses + for obj in (np.matrix(np.zeros(10)), + np.memmap(filename + 'mmap', + mode='w+', shape=4, dtype=np.float)): + filenames = numpy_pickle.dump(obj, filename, compress=compress) + # All is cached in one file + assert len(filenames) == 1 + + obj_ = numpy_pickle.load(filename) + if (type(obj) is not np.memmap and + hasattr(obj, '__array_prepare__')): + # We don't reconstruct memmaps + assert isinstance(obj_, type(obj)) + + np.testing.assert_array_equal(obj_, obj) + + # Test with an object containing multiple numpy arrays + obj = ComplexTestObject() + filenames = numpy_pickle.dump(obj, filename, compress=compress) + # All is cached in one file + assert len(filenames) == 1 + + obj_loaded = numpy_pickle.load(filename) + assert isinstance(obj_loaded, type(obj)) + np.testing.assert_array_equal(obj_loaded.array_float, obj.array_float) + np.testing.assert_array_equal(obj_loaded.array_int, obj.array_int) + np.testing.assert_array_equal(obj_loaded.array_obj, obj.array_obj) + + +@with_numpy +def test_numpy_persistence_bufferred_array_compression(tmpdir): + big_array = np.ones((_IO_BUFFER_SIZE + 100), dtype=np.uint8) + filename = tmpdir.join('test.pkl').strpath + numpy_pickle.dump(big_array, filename, compress=True) + arr_reloaded = numpy_pickle.load(filename) + + np.testing.assert_array_equal(big_array, arr_reloaded) + + +@with_numpy +def test_memmap_persistence(tmpdir): + rnd = np.random.RandomState(0) + a = rnd.random_sample(10) + filename = tmpdir.join('test1.pkl').strpath + numpy_pickle.dump(a, filename) + b = numpy_pickle.load(filename, mmap_mode='r') + + assert isinstance(b, np.memmap) + + # Test with an object containing multiple numpy arrays + filename = tmpdir.join('test2.pkl').strpath + obj = ComplexTestObject() + numpy_pickle.dump(obj, filename) + 
obj_loaded = numpy_pickle.load(filename, mmap_mode='r') + assert isinstance(obj_loaded, type(obj)) + assert isinstance(obj_loaded.array_float, np.memmap) + assert not obj_loaded.array_float.flags.writeable + assert isinstance(obj_loaded.array_int, np.memmap) + assert not obj_loaded.array_int.flags.writeable + # Memory map not allowed for numpy object arrays + assert not isinstance(obj_loaded.array_obj, np.memmap) + np.testing.assert_array_equal(obj_loaded.array_float, + obj.array_float) + np.testing.assert_array_equal(obj_loaded.array_int, + obj.array_int) + np.testing.assert_array_equal(obj_loaded.array_obj, + obj.array_obj) + + # Test we can write in memmapped arrays + obj_loaded = numpy_pickle.load(filename, mmap_mode='r+') + assert obj_loaded.array_float.flags.writeable + obj_loaded.array_float[0:10] = 10.0 + assert obj_loaded.array_int.flags.writeable + obj_loaded.array_int[0:10] = 10 + + obj_reloaded = numpy_pickle.load(filename, mmap_mode='r') + np.testing.assert_array_equal(obj_reloaded.array_float, + obj_loaded.array_float) + np.testing.assert_array_equal(obj_reloaded.array_int, + obj_loaded.array_int) + + # Test w+ mode is caught and the mode has switched to r+ + numpy_pickle.load(filename, mmap_mode='w+') + assert obj_loaded.array_int.flags.writeable + assert obj_loaded.array_int.mode == 'r+' + assert obj_loaded.array_float.flags.writeable + assert obj_loaded.array_float.mode == 'r+' + + +@with_numpy +def test_memmap_persistence_mixed_dtypes(tmpdir): + # loading datastructures that have sub-arrays with dtype=object + # should not prevent memmapping on fixed size dtype sub-arrays. + rnd = np.random.RandomState(0) + a = rnd.random_sample(10) + b = np.array([1, 'b'], dtype=object) + construct = (a, b) + filename = tmpdir.join('test.pkl').strpath + numpy_pickle.dump(construct, filename) + a_clone, b_clone = numpy_pickle.load(filename, mmap_mode='r') + + # the floating point array has been memory mapped + assert isinstance(a_clone, np.memmap) + + # the object-dtype array has been loaded in memory + assert not isinstance(b_clone, np.memmap) + + +@with_numpy +def test_masked_array_persistence(tmpdir): + # The special-case picker fails, because saving masked_array + # not implemented, but it just delegates to the standard pickler. + rnd = np.random.RandomState(0) + a = rnd.random_sample(10) + a = np.ma.masked_greater(a, 0.5) + filename = tmpdir.join('test.pkl').strpath + numpy_pickle.dump(a, filename) + b = numpy_pickle.load(filename, mmap_mode='r') + assert isinstance(b, np.ma.masked_array) + + +@with_numpy +def test_compress_mmap_mode_warning(tmpdir): + # Test the warning in case of compress + mmap_mode + rnd = np.random.RandomState(0) + a = rnd.random_sample(10) + this_filename = tmpdir.join('test.pkl').strpath + numpy_pickle.dump(a, this_filename, compress=1) + with warns(UserWarning) as warninfo: + numpy_pickle.load(this_filename, mmap_mode='r+') + assert len(warninfo) == 1 + assert (str(warninfo[0].message) == + 'mmap_mode "%(mmap_mode)s" is not compatible with compressed ' + 'file %(filename)s. "%(mmap_mode)s" flag will be ignored.' 
% + {'filename': this_filename, 'mmap_mode': 'r+'}) + + +@with_numpy +@parametrize('cache_size', [None, 0, 10]) +def test_cache_size_warning(tmpdir, cache_size): + # Check deprecation warning raised when cache size is not None + filename = tmpdir.join('test.pkl').strpath + rnd = np.random.RandomState(0) + a = rnd.random_sample((10, 2)) + + warnings.simplefilter("always") + with warns(None) as warninfo: + numpy_pickle.dump(a, filename, cache_size=cache_size) + expected_nb_warnings = 1 if cache_size is not None else 0 + assert len(warninfo) == expected_nb_warnings + for w in warninfo: + assert w.category == DeprecationWarning + assert (str(w.message) == + "Please do not set 'cache_size' in joblib.dump, this " + "parameter has no effect and will be removed. You " + "used 'cache_size={0}'".format(cache_size)) + + +@with_numpy +@with_memory_profiler +@parametrize('compress', [True, False]) +def test_memory_usage(tmpdir, compress): + # Verify memory stays within expected bounds. + filename = tmpdir.join('test.pkl').strpath + small_array = np.ones((10, 10)) + big_array = np.ones(shape=100 * int(1e6), dtype=np.uint8) + small_matrix = np.matrix(small_array) + big_matrix = np.matrix(big_array) + + for obj in (small_array, big_array, small_matrix, big_matrix): + size = obj.nbytes / 1e6 + obj_filename = filename + str(np.random.randint(0, 1000)) + mem_used = memory_used(numpy_pickle.dump, + obj, obj_filename, compress=compress) + + # The memory used to dump the object shouldn't exceed the buffer + # size used to write array chunks (16MB). + write_buf_size = _IO_BUFFER_SIZE + 16 * 1024 ** 2 / 1e6 + assert mem_used <= write_buf_size + + mem_used = memory_used(numpy_pickle.load, obj_filename) + # memory used should be less than array size + buffer size used to + # read the array chunk by chunk. + read_buf_size = 32 + _IO_BUFFER_SIZE # MiB + assert mem_used < size + read_buf_size + + +@with_numpy +def test_compressed_pickle_dump_and_load(tmpdir): + expected_list = [np.arange(5, dtype=np.dtype('i8')), + np.arange(5, dtype=np.dtype('f8')), + np.array([1, 'abc', {'a': 1, 'b': 2}], dtype='O'), + np.arange(256, dtype=np.uint8).tobytes(), + # np.matrix is a subclass of np.ndarray, here we want + # to verify this type of object is correctly unpickled + # among versions. + np.matrix([0, 1, 2], dtype=np.dtype('i8')), + u"C'est l'\xe9t\xe9 !"] + + fname = tmpdir.join('temp.pkl.gz').strpath + + dumped_filenames = numpy_pickle.dump(expected_list, fname, compress=1) + assert len(dumped_filenames) == 1 + result_list = numpy_pickle.load(fname) + for result, expected in zip(result_list, expected_list): + if isinstance(expected, np.ndarray): + assert result.dtype == expected.dtype + np.testing.assert_equal(result, expected) + else: + assert result == expected + + +def _check_pickle(filename, expected_list): + """Helper function to test joblib pickle content. + + Note: currently only pickles containing an iterable are supported + by this function. 
+ """ + version_match = re.match(r'.+py(\d)(\d).+', filename) + py_version_used_for_writing = int(version_match.group(1)) + + py_version_to_default_pickle_protocol = {2: 2, 3: 3} + pickle_reading_protocol = py_version_to_default_pickle_protocol.get(3, 4) + pickle_writing_protocol = py_version_to_default_pickle_protocol.get( + py_version_used_for_writing, 4) + if pickle_reading_protocol >= pickle_writing_protocol: + try: + with warns(None) as warninfo: + warnings.simplefilter('always') + warnings.filterwarnings( + 'ignore', module='numpy', + message='The compiler package is deprecated') + result_list = numpy_pickle.load(filename) + filename_base = os.path.basename(filename) + expected_nb_warnings = 1 if ("_0.9" in filename_base or + "_0.8.4" in filename_base) else 0 + assert len(warninfo) == expected_nb_warnings + for w in warninfo: + assert w.category == DeprecationWarning + assert (str(w.message) == + "The file '{0}' has been generated with a joblib " + "version less than 0.10. Please regenerate this " + "pickle file.".format(filename)) + for result, expected in zip(result_list, expected_list): + if isinstance(expected, np.ndarray): + assert result.dtype == expected.dtype + np.testing.assert_equal(result, expected) + else: + assert result == expected + except Exception as exc: + # When trying to read with python 3 a pickle generated + # with python 2 we expect a user-friendly error + if py_version_used_for_writing == 2: + assert isinstance(exc, ValueError) + message = ('You may be trying to read with ' + 'python 3 a joblib pickle generated with python 2.') + assert message in str(exc) + elif filename.endswith('.lz4') and with_lz4.args[0]: + assert isinstance(exc, ValueError) + assert LZ4_NOT_INSTALLED_ERROR in str(exc) + else: + raise + else: + # Pickle protocol used for writing is too high. We expect a + # "unsupported pickle protocol" error message + try: + numpy_pickle.load(filename) + raise AssertionError('Numpy pickle loading should ' + 'have raised a ValueError exception') + except ValueError as e: + message = 'unsupported pickle protocol: {0}'.format( + pickle_writing_protocol) + assert message in str(e.args) + + +@with_numpy +def test_joblib_pickle_across_python_versions(): + # We need to be specific about dtypes in particular endianness + # because the pickles can be generated on one architecture and + # the tests run on another one. See + # https://github.com/joblib/joblib/issues/279. + expected_list = [np.arange(5, dtype=np.dtype(' size + np.testing.assert_array_equal(obj, memmaps) + + +def test_register_compressor(tmpdir): + # Check that registering compressor file works. + compressor_name = 'test-name' + compressor_prefix = 'test-prefix' + + class BinaryCompressorTestFile(io.BufferedIOBase): + pass + + class BinaryCompressorTestWrapper(CompressorWrapper): + + def __init__(self): + CompressorWrapper.__init__(self, obj=BinaryCompressorTestFile, + prefix=compressor_prefix) + + register_compressor(compressor_name, BinaryCompressorTestWrapper()) + + assert (_COMPRESSORS[compressor_name].fileobj_factory == + BinaryCompressorTestFile) + assert _COMPRESSORS[compressor_name].prefix == compressor_prefix + + # Remove this dummy compressor file from extra compressors because other + # tests might fail because of this + _COMPRESSORS.pop(compressor_name) + + +@parametrize('invalid_name', [1, (), {}]) +def test_register_compressor_invalid_name(invalid_name): + # Test that registering an invalid compressor name is not allowed. 
+ with raises(ValueError) as excinfo: + register_compressor(invalid_name, None) + excinfo.match("Compressor name should be a string") + + +def test_register_compressor_invalid_fileobj(): + # Test that registering an invalid file object is not allowed. + + class InvalidFileObject(): + pass + + class InvalidFileObjectWrapper(CompressorWrapper): + def __init__(self): + CompressorWrapper.__init__(self, obj=InvalidFileObject, + prefix=b'prefix') + + with raises(ValueError) as excinfo: + register_compressor('invalid', InvalidFileObjectWrapper()) + + excinfo.match("Compressor 'fileobj_factory' attribute should implement " + "the file object interface") + + +class AnotherZlibCompressorWrapper(CompressorWrapper): + + def __init__(self): + CompressorWrapper.__init__(self, obj=BinaryZlibFile, prefix=b'prefix') + + +class StandardLibGzipCompressorWrapper(CompressorWrapper): + + def __init__(self): + CompressorWrapper.__init__(self, obj=gzip.GzipFile, prefix=b'prefix') + + +def test_register_compressor_already_registered(): + # Test registration of existing compressor files. + compressor_name = 'test-name' + + # register a test compressor + register_compressor(compressor_name, AnotherZlibCompressorWrapper()) + + with raises(ValueError) as excinfo: + register_compressor(compressor_name, + StandardLibGzipCompressorWrapper()) + excinfo.match("Compressor '{}' already registered." + .format(compressor_name)) + + register_compressor(compressor_name, StandardLibGzipCompressorWrapper(), + force=True) + + assert compressor_name in _COMPRESSORS + assert _COMPRESSORS[compressor_name].fileobj_factory == gzip.GzipFile + + # Remove this dummy compressor file from extra compressors because other + # tests might fail because of this + _COMPRESSORS.pop(compressor_name) + + +@with_lz4 +def test_lz4_compression(tmpdir): + # Check that lz4 can be used when dependency is available. + import lz4.frame + compressor = 'lz4' + assert compressor in _COMPRESSORS + assert _COMPRESSORS[compressor].fileobj_factory == lz4.frame.LZ4FrameFile + + fname = tmpdir.join('test.pkl').strpath + data = 'test data' + numpy_pickle.dump(data, fname, compress=compressor) + + with open(fname, 'rb') as f: + assert f.read(len(_LZ4_PREFIX)) == _LZ4_PREFIX + assert numpy_pickle.load(fname) == data + + # Test that LZ4 is applied based on file extension + numpy_pickle.dump(data, fname + '.lz4') + with open(fname, 'rb') as f: + assert f.read(len(_LZ4_PREFIX)) == _LZ4_PREFIX + assert numpy_pickle.load(fname) == data + + +@without_lz4 +def test_lz4_compression_without_lz4(tmpdir): + # Check that lz4 cannot be used when dependency is not available. + fname = tmpdir.join('test.nolz4').strpath + data = 'test data' + msg = LZ4_NOT_INSTALLED_ERROR + with raises(ValueError) as excinfo: + numpy_pickle.dump(data, fname, compress='lz4') + excinfo.match(msg) + + with raises(ValueError) as excinfo: + numpy_pickle.dump(data, fname + '.lz4') + excinfo.match(msg) diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_numpy_pickle_compat.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_numpy_pickle_compat.py new file mode 100644 index 0000000..5e83192 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_numpy_pickle_compat.py @@ -0,0 +1,18 @@ +"""Test the old numpy pickler, compatibility version.""" + +import random + +# numpy_pickle is not a drop-in replacement of pickle, as it takes +# filenames instead of open files as arguments. 
+from joblib import numpy_pickle_compat + + +def test_z_file(tmpdir): + # Test saving and loading data with Zfiles. + filename = tmpdir.join('test.pkl').strpath + data = numpy_pickle_compat.asbytes('Foo, \n Bar, baz, \n\nfoobar') + with open(filename, 'wb') as f: + numpy_pickle_compat.write_zfile(f, data) + with open(filename, 'rb') as f: + data_read = numpy_pickle_compat.read_zfile(f) + assert data == data_read diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_numpy_pickle_utils.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_numpy_pickle_utils.py new file mode 100644 index 0000000..39c2cad --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_numpy_pickle_utils.py @@ -0,0 +1,10 @@ +from joblib import numpy_pickle_utils +from joblib.compressor import BinaryZlibFile +from joblib.testing import parametrize + + +@parametrize('filename', ['test', u'test']) # testing str and unicode names +def test_binary_zlib_file(tmpdir, filename): + """Testing creation of files depending on the type of the filenames.""" + binary_file = BinaryZlibFile(tmpdir.join(filename).strpath, mode='wb') + binary_file.close() diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_parallel.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_parallel.py new file mode 100644 index 0000000..2458ed9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_parallel.py @@ -0,0 +1,1690 @@ +""" +Test the parallel module. +""" + +# Author: Gael Varoquaux +# Copyright (c) 2010-2011 Gael Varoquaux +# License: BSD Style, 3 clauses. + +import os +import sys +import time +import mmap +import threading +from traceback import format_exception +from math import sqrt +from time import sleep +from pickle import PicklingError +from multiprocessing import TimeoutError +import pickle +import pytest + +from importlib import reload + +import joblib +from joblib import parallel +from joblib import dump, load +from joblib.externals.loky import get_reusable_executor + +from joblib.test.common import np, with_numpy +from joblib.test.common import with_multiprocessing +from joblib.testing import (parametrize, raises, check_subprocess_call, + skipif, SkipTest, warns) + +from joblib.externals.loky.process_executor import TerminatedWorkerError + +from queue import Queue + +try: + import posix +except ImportError: + posix = None + +try: + from ._openmp_test_helper.parallel_sum import parallel_sum +except ImportError: + parallel_sum = None + +try: + import distributed +except ImportError: + distributed = None + +from joblib._parallel_backends import SequentialBackend +from joblib._parallel_backends import ThreadingBackend +from joblib._parallel_backends import MultiprocessingBackend +from joblib._parallel_backends import ParallelBackendBase +from joblib._parallel_backends import LokyBackend +from joblib._parallel_backends import SafeFunction + +from joblib.parallel import Parallel, delayed +from joblib.parallel import register_parallel_backend, parallel_backend +from joblib.parallel import effective_n_jobs, cpu_count + +from joblib.parallel import mp, BACKENDS, DEFAULT_BACKEND, EXTERNAL_BACKENDS +from joblib.my_exceptions import JoblibException +from joblib.my_exceptions import WorkerInterrupt + + +ALL_VALID_BACKENDS = [None] + sorted(BACKENDS.keys()) +# Add instances of backend classes deriving from ParallelBackendBase +ALL_VALID_BACKENDS += [BACKENDS[backend_str]() for backend_str in BACKENDS] +PROCESS_BACKENDS = ['multiprocessing', 'loky'] 
+PARALLEL_BACKENDS = PROCESS_BACKENDS + ['threading'] + +if hasattr(mp, 'get_context'): + # Custom multiprocessing context in Python 3.4+ + ALL_VALID_BACKENDS.append(mp.get_context('spawn')) + +DefaultBackend = BACKENDS[DEFAULT_BACKEND] + + +def get_workers(backend): + return getattr(backend, '_pool', getattr(backend, '_workers', None)) + + +def division(x, y): + return x / y + + +def square(x): + return x ** 2 + + +class MyExceptionWithFinickyInit(Exception): + """An exception class with non trivial __init__ + """ + def __init__(self, a, b, c, d): + pass + + +def exception_raiser(x, custom_exception=False): + if x == 7: + raise (MyExceptionWithFinickyInit('a', 'b', 'c', 'd') + if custom_exception else ValueError) + return x + + +def interrupt_raiser(x): + time.sleep(.05) + raise KeyboardInterrupt + + +def f(x, y=0, z=0): + """ A module-level function so that it can be spawn with + multiprocessing. + """ + return x ** 2 + y + z + + +def _active_backend_type(): + return type(parallel.get_active_backend()[0]) + + +def parallel_func(inner_n_jobs, backend): + return Parallel(n_jobs=inner_n_jobs, backend=backend)( + delayed(square)(i) for i in range(3)) + + +############################################################################### +def test_cpu_count(): + assert cpu_count() > 0 + + +def test_effective_n_jobs(): + assert effective_n_jobs() > 0 + + +@pytest.mark.parametrize( + "backend_n_jobs, expected_n_jobs", + [(3, 3), (-1, effective_n_jobs(n_jobs=-1)), (None, 1)], + ids=["positive-int", "negative-int", "None"] +) +@with_multiprocessing +def test_effective_n_jobs_None(backend_n_jobs, expected_n_jobs): + # check the number of effective jobs when `n_jobs=None` + # non-regression test for https://github.com/joblib/joblib/issues/984 + with parallel_backend("threading", n_jobs=backend_n_jobs): + # when using a backend, the default of number jobs will be the one set + # in the backend + assert effective_n_jobs(n_jobs=None) == expected_n_jobs + # without any backend, None will default to a single job + assert effective_n_jobs(n_jobs=None) == 1 + + +############################################################################### +# Test parallel + +@parametrize('backend', ALL_VALID_BACKENDS) +@parametrize('n_jobs', [1, 2, -1, -2]) +@parametrize('verbose', [2, 11, 100]) +def test_simple_parallel(backend, n_jobs, verbose): + assert ([square(x) for x in range(5)] == + Parallel(n_jobs=n_jobs, backend=backend, + verbose=verbose)( + delayed(square)(x) for x in range(5))) + + +@parametrize('backend', ALL_VALID_BACKENDS) +def test_main_thread_renamed_no_warning(backend, monkeypatch): + # Check that no default backend relies on the name of the main thread: + # https://github.com/joblib/joblib/issues/180#issuecomment-253266247 + # Some programs use a different name for the main thread. This is the case + # for uWSGI apps for instance. + monkeypatch.setattr(target=threading.current_thread(), name='name', + value='some_new_name_for_the_main_thread') + + with warns(None) as warninfo: + results = Parallel(n_jobs=2, backend=backend)( + delayed(square)(x) for x in range(3)) + assert results == [0, 1, 4] + + # Due to the default parameters of LokyBackend, there is a chance that + # warninfo catches Warnings from worker timeouts. We remove it if it exists + warninfo = [w for w in warninfo if "worker timeout" not in str(w.message)] + + # The multiprocessing backend will raise a warning when detecting that is + # started from the non-main thread. 
Let's check that there is no false + # positive because of the name change. + assert len(warninfo) == 0 + + +def _assert_warning_nested(backend, inner_n_jobs, expected): + with warns(None) as records: + parallel_func(backend=backend, inner_n_jobs=inner_n_jobs) + + if expected: + # with threading, we might see more that one records + if len(records) > 0: + return 'backed parallel loops cannot' in records[0].message.args[0] + return False + else: + assert len(records) == 0 + return True + + +@with_multiprocessing +@parametrize('parent_backend,child_backend,expected', [ + ('loky', 'multiprocessing', True), ('loky', 'loky', False), + ('multiprocessing', 'multiprocessing', True), + ('multiprocessing', 'loky', True), + ('threading', 'multiprocessing', True), + ('threading', 'loky', True), +]) +def test_nested_parallel_warnings(parent_backend, child_backend, expected): + + # no warnings if inner_n_jobs=1 + Parallel(n_jobs=2, backend=parent_backend)( + delayed(_assert_warning_nested)( + backend=child_backend, inner_n_jobs=1, + expected=False) + for _ in range(5)) + + # warnings if inner_n_jobs != 1 and expected + res = Parallel(n_jobs=2, backend=parent_backend)( + delayed(_assert_warning_nested)( + backend=child_backend, inner_n_jobs=2, + expected=expected) + for _ in range(5)) + + # warning handling is not thread safe. One thread might see multiple + # warning or no warning at all. + if parent_backend == "threading": + assert any(res) + else: + assert all(res) + + +@with_multiprocessing +@parametrize('backend', ['loky', 'multiprocessing', 'threading']) +def test_background_thread_parallelism(backend): + is_run_parallel = [False] + + def background_thread(is_run_parallel): + with warns(None) as records: + Parallel(n_jobs=2)( + delayed(sleep)(.1) for _ in range(4)) + print(len(records)) + is_run_parallel[0] = len(records) == 0 + + t = threading.Thread(target=background_thread, args=(is_run_parallel,)) + t.start() + t.join() + assert is_run_parallel[0] + + +def nested_loop(backend): + Parallel(n_jobs=2, backend=backend)( + delayed(square)(.01) for _ in range(2)) + + +@parametrize('child_backend', BACKENDS) +@parametrize('parent_backend', BACKENDS) +def test_nested_loop(parent_backend, child_backend): + Parallel(n_jobs=2, backend=parent_backend)( + delayed(nested_loop)(child_backend) for _ in range(2)) + + +def raise_exception(backend): + raise ValueError + + +def test_nested_loop_with_exception_with_loky(): + with raises(ValueError): + with Parallel(n_jobs=2, backend="loky") as parallel: + parallel([delayed(nested_loop)("loky"), + delayed(raise_exception)("loky")]) + + +def test_mutate_input_with_threads(): + """Input is mutable when using the threading backend""" + q = Queue(maxsize=5) + Parallel(n_jobs=2, backend="threading")( + delayed(q.put)(1) for _ in range(5)) + assert q.full() + + +@parametrize('n_jobs', [1, 2, 3]) +def test_parallel_kwargs(n_jobs): + """Check the keyword argument processing of pmap.""" + lst = range(10) + assert ([f(x, y=1) for x in lst] == + Parallel(n_jobs=n_jobs)(delayed(f)(x, y=1) for x in lst)) + + +@parametrize('backend', PARALLEL_BACKENDS) +def test_parallel_as_context_manager(backend): + lst = range(10) + expected = [f(x, y=1) for x in lst] + + with Parallel(n_jobs=4, backend=backend) as p: + # Internally a pool instance has been eagerly created and is managed + # via the context manager protocol + managed_backend = p._backend + + # We make call with the managed parallel object several times inside + # the managed block: + assert expected == p(delayed(f)(x, y=1) for 
x in lst) + assert expected == p(delayed(f)(x, y=1) for x in lst) + + # Those calls have all used the same pool instance: + if mp is not None: + assert get_workers(managed_backend) is get_workers(p._backend) + + # As soon as we exit the context manager block, the pool is terminated and + # no longer referenced from the parallel object: + if mp is not None: + assert get_workers(p._backend) is None + + # It's still possible to use the parallel instance in non-managed mode: + assert expected == p(delayed(f)(x, y=1) for x in lst) + if mp is not None: + assert get_workers(p._backend) is None + + +@with_multiprocessing +def test_parallel_pickling(): + """ Check that pmap captures the errors when it is passed an object + that cannot be pickled. + """ + class UnpicklableObject(object): + def __reduce__(self): + raise RuntimeError('123') + + with raises(PicklingError, match=r"the task to send"): + Parallel(n_jobs=2)(delayed(id)(UnpicklableObject()) for _ in range(10)) + + +@parametrize('backend', PARALLEL_BACKENDS) +def test_parallel_timeout_success(backend): + # Check that timeout isn't thrown when function is fast enough + assert len(Parallel(n_jobs=2, backend=backend, timeout=10)( + delayed(sleep)(0.001) for x in range(10))) == 10 + + +@with_multiprocessing +@parametrize('backend', PARALLEL_BACKENDS) +def test_parallel_timeout_fail(backend): + # Check that timeout properly fails when function is too slow + with raises(TimeoutError): + Parallel(n_jobs=2, backend=backend, timeout=0.01)( + delayed(sleep)(10) for x in range(10)) + + +@with_multiprocessing +@parametrize('backend', PROCESS_BACKENDS) +def test_error_capture(backend): + # Check that error are captured, and that correct exceptions + # are raised. + if mp is not None: + with raises(ZeroDivisionError): + Parallel(n_jobs=2, backend=backend)( + [delayed(division)(x, y) + for x, y in zip((0, 1), (1, 0))]) + with raises(WorkerInterrupt): + Parallel(n_jobs=2, backend=backend)( + [delayed(interrupt_raiser)(x) for x in (1, 0)]) + + # Try again with the context manager API + with Parallel(n_jobs=2, backend=backend) as parallel: + assert get_workers(parallel._backend) is not None + original_workers = get_workers(parallel._backend) + + with raises(ZeroDivisionError): + parallel([delayed(division)(x, y) + for x, y in zip((0, 1), (1, 0))]) + + # The managed pool should still be available and be in a working + # state despite the previously raised (and caught) exception + assert get_workers(parallel._backend) is not None + + # The pool should have been interrupted and restarted: + assert get_workers(parallel._backend) is not original_workers + + assert ([f(x, y=1) for x in range(10)] == + parallel(delayed(f)(x, y=1) for x in range(10))) + + original_workers = get_workers(parallel._backend) + with raises(WorkerInterrupt): + parallel([delayed(interrupt_raiser)(x) for x in (1, 0)]) + + # The pool should still be available despite the exception + assert get_workers(parallel._backend) is not None + + # The pool should have been interrupted and restarted: + assert get_workers(parallel._backend) is not original_workers + + assert ([f(x, y=1) for x in range(10)] == + parallel(delayed(f)(x, y=1) for x in range(10))) + + # Check that the inner pool has been terminated when exiting the + # context manager + assert get_workers(parallel._backend) is None + else: + with raises(KeyboardInterrupt): + Parallel(n_jobs=2)( + [delayed(interrupt_raiser)(x) for x in (1, 0)]) + + # wrapped exceptions should inherit from the class of the original + # exception to make it easy 
to catch them + with raises(ZeroDivisionError): + Parallel(n_jobs=2)( + [delayed(division)(x, y) for x, y in zip((0, 1), (1, 0))]) + + with raises(MyExceptionWithFinickyInit): + Parallel(n_jobs=2, verbose=0)( + (delayed(exception_raiser)(i, custom_exception=True) + for i in range(30))) + + try: + # JoblibException wrapping is disabled in sequential mode: + Parallel(n_jobs=1)( + delayed(division)(x, y) for x, y in zip((0, 1), (1, 0))) + except Exception as ex: + assert not isinstance(ex, JoblibException) + else: + raise ValueError("The excepted error has not been raised.") + + +def consumer(queue, item): + queue.append('Consumed %s' % item) + + +@parametrize('backend', BACKENDS) +@parametrize('batch_size, expected_queue', + [(1, ['Produced 0', 'Consumed 0', + 'Produced 1', 'Consumed 1', + 'Produced 2', 'Consumed 2', + 'Produced 3', 'Consumed 3', + 'Produced 4', 'Consumed 4', + 'Produced 5', 'Consumed 5']), + (4, [ # First Batch + 'Produced 0', 'Produced 1', 'Produced 2', 'Produced 3', + 'Consumed 0', 'Consumed 1', 'Consumed 2', 'Consumed 3', + # Second batch + 'Produced 4', 'Produced 5', 'Consumed 4', 'Consumed 5'])]) +def test_dispatch_one_job(backend, batch_size, expected_queue): + """ Test that with only one job, Parallel does act as a iterator. + """ + queue = list() + + def producer(): + for i in range(6): + queue.append('Produced %i' % i) + yield i + + Parallel(n_jobs=1, batch_size=batch_size, backend=backend)( + delayed(consumer)(queue, x) for x in producer()) + assert queue == expected_queue + assert len(queue) == 12 + + +@with_multiprocessing +@parametrize('backend', PARALLEL_BACKENDS) +def test_dispatch_multiprocessing(backend): + """ Check that using pre_dispatch Parallel does indeed dispatch items + lazily. + """ + manager = mp.Manager() + queue = manager.list() + + def producer(): + for i in range(6): + queue.append('Produced %i' % i) + yield i + + Parallel(n_jobs=2, batch_size=1, pre_dispatch=3, backend=backend)( + delayed(consumer)(queue, 'any') for _ in producer()) + + queue_contents = list(queue) + assert queue_contents[0] == 'Produced 0' + + # Only 3 tasks are pre-dispatched out of 6. The 4th task is dispatched only + # after any of the first 3 jobs have completed. + first_consumption_index = queue_contents[:4].index('Consumed any') + assert first_consumption_index > -1 + + produced_3_index = queue_contents.index('Produced 3') # 4th task produced + assert produced_3_index > first_consumption_index + + assert len(queue) == 12 + + +def test_batching_auto_threading(): + # batching='auto' with the threading backend leaves the effective batch + # size to 1 (no batching) as it has been found to never be beneficial with + # this low-overhead backend. + + with Parallel(n_jobs=2, batch_size='auto', backend='threading') as p: + p(delayed(id)(i) for i in range(5000)) # many very fast tasks + assert p._backend.compute_batch_size() == 1 + + +@with_multiprocessing +@parametrize('backend', PROCESS_BACKENDS) +def test_batching_auto_subprocesses(backend): + with Parallel(n_jobs=2, batch_size='auto', backend=backend) as p: + p(delayed(id)(i) for i in range(5000)) # many very fast tasks + + # It should be strictly larger than 1 but as we don't want heisen + # failures on clogged CI worker environment be safe and only check that + # it's a strictly positive number. 
+ assert p._backend.compute_batch_size() > 0 + + +def test_exception_dispatch(): + """Make sure that exception raised during dispatch are indeed captured""" + with raises(ValueError): + Parallel(n_jobs=2, pre_dispatch=16, verbose=0)( + delayed(exception_raiser)(i) for i in range(30)) + + +def nested_function_inner(i): + Parallel(n_jobs=2)( + delayed(exception_raiser)(j) for j in range(30)) + + +def nested_function_outer(i): + Parallel(n_jobs=2)( + delayed(nested_function_inner)(j) for j in range(30)) + + +@with_multiprocessing +@parametrize('backend', PARALLEL_BACKENDS) +@pytest.mark.xfail(reason="https://github.com/joblib/loky/pull/255") +def test_nested_exception_dispatch(backend): + """Ensure errors for nested joblib cases gets propagated + + We rely on the Python 3 built-in __cause__ system that already + report this kind of information to the user. + """ + with raises(ValueError) as excinfo: + Parallel(n_jobs=2, backend=backend)( + delayed(nested_function_outer)(i) for i in range(30)) + + # Check that important information such as function names are visible + # in the final error message reported to the user + report_lines = format_exception(excinfo.type, excinfo.value, excinfo.tb) + report = "".join(report_lines) + assert 'nested_function_outer' in report + assert 'nested_function_inner' in report + assert 'exception_raiser' in report + + assert type(excinfo.value) is ValueError + + +class FakeParallelBackend(SequentialBackend): + """Pretends to run concurrently while running sequentially.""" + + def configure(self, n_jobs=1, parallel=None, **backend_args): + self.n_jobs = self.effective_n_jobs(n_jobs) + self.parallel = parallel + return n_jobs + + def effective_n_jobs(self, n_jobs=1): + if n_jobs < 0: + n_jobs = max(mp.cpu_count() + 1 + n_jobs, 1) + return n_jobs + + +def test_invalid_backend(): + with raises(ValueError): + Parallel(backend='unit-testing') + + +@parametrize('backend', ALL_VALID_BACKENDS) +def test_invalid_njobs(backend): + with raises(ValueError) as excinfo: + Parallel(n_jobs=0, backend=backend)._initialize_backend() + assert "n_jobs == 0 in Parallel has no meaning" in str(excinfo.value) + + +def test_register_parallel_backend(): + try: + register_parallel_backend("test_backend", FakeParallelBackend) + assert "test_backend" in BACKENDS + assert BACKENDS["test_backend"] == FakeParallelBackend + finally: + del BACKENDS["test_backend"] + + +def test_overwrite_default_backend(): + assert _active_backend_type() == DefaultBackend + try: + register_parallel_backend("threading", BACKENDS["threading"], + make_default=True) + assert _active_backend_type() == ThreadingBackend + finally: + # Restore the global default manually + parallel.DEFAULT_BACKEND = DEFAULT_BACKEND + assert _active_backend_type() == DefaultBackend + + +def check_backend_context_manager(backend_name): + with parallel_backend(backend_name, n_jobs=3): + active_backend, active_n_jobs = parallel.get_active_backend() + assert active_n_jobs == 3 + assert effective_n_jobs(3) == 3 + p = Parallel() + assert p.n_jobs == 3 + if backend_name == 'multiprocessing': + assert type(active_backend) == MultiprocessingBackend + assert type(p._backend) == MultiprocessingBackend + elif backend_name == 'loky': + assert type(active_backend) == LokyBackend + assert type(p._backend) == LokyBackend + elif backend_name == 'threading': + assert type(active_backend) == ThreadingBackend + assert type(p._backend) == ThreadingBackend + elif backend_name.startswith('test_'): + assert type(active_backend) == FakeParallelBackend + assert 
type(p._backend) == FakeParallelBackend + + +all_backends_for_context_manager = PARALLEL_BACKENDS[:] +all_backends_for_context_manager.extend( + ['test_backend_%d' % i for i in range(3)] +) + + +@with_multiprocessing +@parametrize('backend', all_backends_for_context_manager) +def test_backend_context_manager(monkeypatch, backend): + if backend not in BACKENDS: + monkeypatch.setitem(BACKENDS, backend, FakeParallelBackend) + + assert _active_backend_type() == DefaultBackend + # check that this possible to switch parallel backends sequentially + check_backend_context_manager(backend) + + # The default backend is restored + assert _active_backend_type() == DefaultBackend + + # Check that context manager switching is thread safe: + Parallel(n_jobs=2, backend='threading')( + delayed(check_backend_context_manager)(b) + for b in all_backends_for_context_manager if not b) + + # The default backend is again restored + assert _active_backend_type() == DefaultBackend + + +class ParameterizedParallelBackend(SequentialBackend): + """Pretends to run conncurrently while running sequentially.""" + + def __init__(self, param=None): + if param is None: + raise ValueError('param should not be None') + self.param = param + + +def test_parameterized_backend_context_manager(monkeypatch): + monkeypatch.setitem(BACKENDS, 'param_backend', + ParameterizedParallelBackend) + assert _active_backend_type() == DefaultBackend + + with parallel_backend('param_backend', param=42, n_jobs=3): + active_backend, active_n_jobs = parallel.get_active_backend() + assert type(active_backend) == ParameterizedParallelBackend + assert active_backend.param == 42 + assert active_n_jobs == 3 + p = Parallel() + assert p.n_jobs == 3 + assert p._backend is active_backend + results = p(delayed(sqrt)(i) for i in range(5)) + assert results == [sqrt(i) for i in range(5)] + + # The default backend is again restored + assert _active_backend_type() == DefaultBackend + + +def test_directly_parameterized_backend_context_manager(): + assert _active_backend_type() == DefaultBackend + + # Check that it's possible to pass a backend instance directly, + # without registration + with parallel_backend(ParameterizedParallelBackend(param=43), n_jobs=5): + active_backend, active_n_jobs = parallel.get_active_backend() + assert type(active_backend) == ParameterizedParallelBackend + assert active_backend.param == 43 + assert active_n_jobs == 5 + p = Parallel() + assert p.n_jobs == 5 + assert p._backend is active_backend + results = p(delayed(sqrt)(i) for i in range(5)) + assert results == [sqrt(i) for i in range(5)] + + # The default backend is again restored + assert _active_backend_type() == DefaultBackend + + +def sleep_and_return_pid(): + sleep(.1) + return os.getpid() + + +def get_nested_pids(): + assert _active_backend_type() == ThreadingBackend + # Assert that the nested backend does not change the default number of + # jobs used in Parallel + assert Parallel()._effective_n_jobs() == 1 + + # Assert that the tasks are running only on one process + return Parallel(n_jobs=2)(delayed(sleep_and_return_pid)() + for _ in range(2)) + + +class MyBackend(joblib._parallel_backends.LokyBackend): + """Backend to test backward compatibility with older backends""" + def get_nested_backend(self, ): + # Older backends only return a backend, without n_jobs indications. 
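###############################################################################
# A minimal illustrative sketch (not taken from the test file above) of the
# parallel_backend context manager exercised by the surrounding tests: it
# switches the active backend and the default n_jobs for every Parallel()
# call made inside the block.
from joblib import Parallel, delayed, parallel_backend

with parallel_backend('threading', n_jobs=4):
    # backend and n_jobs are inherited from the surrounding context manager.
    squares = Parallel()(delayed(pow)(i, 2) for i in range(8))

assert squares == [i ** 2 for i in range(8)]
###############################################################################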
+ return super(MyBackend, self).get_nested_backend()[0] + + +register_parallel_backend('back_compat_backend', MyBackend) + + +@with_multiprocessing +@parametrize('backend', ['threading', 'loky', 'multiprocessing', + 'back_compat_backend']) +def test_nested_backend_context_manager(backend): + # Check that by default, nested parallel calls will always use the + # ThreadingBackend + + with parallel_backend(backend): + pid_groups = Parallel(n_jobs=2)( + delayed(get_nested_pids)() + for _ in range(10) + ) + for pid_group in pid_groups: + assert len(set(pid_group)) == 1 + + +@with_multiprocessing +@parametrize('n_jobs', [2, -1, None]) +@parametrize('backend', PARALLEL_BACKENDS) +def test_nested_backend_in_sequential(backend, n_jobs): + # Check that by default, nested parallel calls will always use the + # ThreadingBackend + + def check_nested_backend(expected_backend_type, expected_n_job): + # Assert that the sequential backend at top level, does not change the + # backend for nested calls. + assert _active_backend_type() == BACKENDS[expected_backend_type] + + # Assert that the nested backend in SequentialBackend does not change + # the default number of jobs used in Parallel + expected_n_job = effective_n_jobs(expected_n_job) + assert Parallel()._effective_n_jobs() == expected_n_job + + Parallel(n_jobs=1)( + delayed(check_nested_backend)('loky', 1) + for _ in range(10) + ) + + with parallel_backend(backend, n_jobs=n_jobs): + Parallel(n_jobs=1)( + delayed(check_nested_backend)(backend, n_jobs) + for _ in range(10) + ) + + +def check_nesting_level(inner_backend, expected_level): + with parallel_backend(inner_backend) as (backend, n_jobs): + assert backend.nesting_level == expected_level + + +@with_multiprocessing +@parametrize('outer_backend', PARALLEL_BACKENDS) +@parametrize('inner_backend', PARALLEL_BACKENDS) +def test_backend_nesting_level(outer_backend, inner_backend): + # Check that the nesting level for the backend is correctly set + check_nesting_level(outer_backend, 0) + + Parallel(n_jobs=2, backend=outer_backend)( + delayed(check_nesting_level)(inner_backend, 1) + for _ in range(10) + ) + + with parallel_backend(inner_backend, n_jobs=2): + Parallel()(delayed(check_nesting_level)(inner_backend, 1) + for _ in range(10)) + + +@with_multiprocessing +def test_retrieval_context(): + import contextlib + + class MyBackend(ThreadingBackend): + i = 0 + + @contextlib.contextmanager + def retrieval_context(self): + self.i += 1 + yield + + register_parallel_backend("retrieval", MyBackend) + + def nested_call(n): + return Parallel(n_jobs=2)(delayed(id)(i) for i in range(n)) + + with parallel_backend("retrieval") as (ba, _): + Parallel(n_jobs=2)( + delayed(nested_call)(i) + for i in range(5) + ) + assert ba.i == 1 + + +############################################################################### +# Test helpers +def test_joblib_exception(): + # Smoke-test the custom exception + e = JoblibException('foobar') + # Test the repr + repr(e) + # Test the pickle + pickle.dumps(e) + + +def test_safe_function(): + safe_division = SafeFunction(division) + with raises(ZeroDivisionError): + safe_division(1, 0) + + safe_interrupt = SafeFunction(interrupt_raiser) + with raises(WorkerInterrupt): + safe_interrupt('x') + + +@parametrize('batch_size', [0, -1, 1.42]) +def test_invalid_batch_size(batch_size): + with raises(ValueError): + Parallel(batch_size=batch_size) + + +@parametrize('n_tasks, n_jobs, pre_dispatch, batch_size', + [(2, 2, 'all', 'auto'), + (2, 2, 'n_jobs', 'auto'), + (10, 2, 'n_jobs', 'auto'), + (517, 
2, 'n_jobs', 'auto'), + (10, 2, 'n_jobs', 'auto'), + (10, 4, 'n_jobs', 'auto'), + (200, 12, 'n_jobs', 'auto'), + (25, 12, '2 * n_jobs', 1), + (250, 12, 'all', 1), + (250, 12, '2 * n_jobs', 7), + (200, 12, '2 * n_jobs', 'auto')]) +def test_dispatch_race_condition(n_tasks, n_jobs, pre_dispatch, batch_size): + # Check that using (async-)dispatch does not yield a race condition on the + # iterable generator that is not thread-safe natively. + # This is a non-regression test for the "Pool seems closed" class of error + params = {'n_jobs': n_jobs, 'pre_dispatch': pre_dispatch, + 'batch_size': batch_size} + expected = [square(i) for i in range(n_tasks)] + results = Parallel(**params)(delayed(square)(i) for i in range(n_tasks)) + assert results == expected + + +@with_multiprocessing +def test_default_mp_context(): + mp_start_method = mp.get_start_method() + p = Parallel(n_jobs=2, backend='multiprocessing') + context = p._backend_args.get('context') + start_method = context.get_start_method() + assert start_method == mp_start_method + + +@with_numpy +@with_multiprocessing +@parametrize('backend', PROCESS_BACKENDS) +def test_no_blas_crash_or_freeze_with_subprocesses(backend): + if backend == 'multiprocessing': + # Use the spawn backend that is both robust and available on all + # platforms + backend = mp.get_context('spawn') + + # Check that on recent Python version, the 'spawn' start method can make + # it possible to use multiprocessing in conjunction of any BLAS + # implementation that happens to be used by numpy with causing a freeze or + # a crash + rng = np.random.RandomState(42) + + # call BLAS DGEMM to force the initialization of the internal thread-pool + # in the main process + a = rng.randn(1000, 1000) + np.dot(a, a.T) + + # check that the internal BLAS thread-pool is not in an inconsistent state + # in the worker processes managed by multiprocessing + Parallel(n_jobs=2, backend=backend)( + delayed(np.dot)(a, a.T) for i in range(2)) + + +UNPICKLABLE_CALLABLE_SCRIPT_TEMPLATE_NO_MAIN = """\ +from joblib import Parallel, delayed + +def square(x): + return x ** 2 + +backend = "{}" +if backend == "spawn": + from multiprocessing import get_context + backend = get_context(backend) + +print(Parallel(n_jobs=2, backend=backend)( + delayed(square)(i) for i in range(5))) +""" + + +@with_multiprocessing +@parametrize('backend', PROCESS_BACKENDS) +def test_parallel_with_interactively_defined_functions(backend): + # When using the "-c" flag, interactive functions defined in __main__ + # should work with any backend. + if backend == "multiprocessing" and mp.get_start_method() != "fork": + pytest.skip("Require fork start method to use interactively defined " + "functions with multiprocessing.") + code = UNPICKLABLE_CALLABLE_SCRIPT_TEMPLATE_NO_MAIN.format(backend) + check_subprocess_call( + [sys.executable, '-c', code], timeout=10, + stdout_regex=r'\[0, 1, 4, 9, 16\]') + + +UNPICKLABLE_CALLABLE_SCRIPT_TEMPLATE_MAIN = """\ +import sys +# Make sure that joblib is importable in the subprocess launching this +# script. 
This is needed in case we run the tests from the joblib root +# folder without having installed joblib +sys.path.insert(0, {joblib_root_folder!r}) + +from joblib import Parallel, delayed + +def run(f, x): + return f(x) + +{define_func} + +if __name__ == "__main__": + backend = "{backend}" + if backend == "spawn": + from multiprocessing import get_context + backend = get_context(backend) + + callable_position = "{callable_position}" + if callable_position == "delayed": + print(Parallel(n_jobs=2, backend=backend)( + delayed(square)(i) for i in range(5))) + elif callable_position == "args": + print(Parallel(n_jobs=2, backend=backend)( + delayed(run)(square, i) for i in range(5))) + else: + print(Parallel(n_jobs=2, backend=backend)( + delayed(run)(f=square, x=i) for i in range(5))) +""" + +SQUARE_MAIN = """\ +def square(x): + return x ** 2 +""" +SQUARE_LOCAL = """\ +def gen_square(): + def square(x): + return x ** 2 + return square +square = gen_square() +""" +SQUARE_LAMBDA = """\ +square = lambda x: x ** 2 +""" + + +@with_multiprocessing +@parametrize('backend', PROCESS_BACKENDS + ([] if mp is None else ['spawn'])) +@parametrize('define_func', [SQUARE_MAIN, SQUARE_LOCAL, SQUARE_LAMBDA]) +@parametrize('callable_position', ['delayed', 'args', 'kwargs']) +def test_parallel_with_unpicklable_functions_in_args( + backend, define_func, callable_position, tmpdir): + if backend in ['multiprocessing', 'spawn'] and ( + define_func != SQUARE_MAIN or sys.platform == "win32"): + pytest.skip("Not picklable with pickle") + code = UNPICKLABLE_CALLABLE_SCRIPT_TEMPLATE_MAIN.format( + define_func=define_func, backend=backend, + callable_position=callable_position, + joblib_root_folder=os.path.dirname(os.path.dirname(joblib.__file__))) + code_file = tmpdir.join("unpicklable_func_script.py") + code_file.write(code) + check_subprocess_call( + [sys.executable, code_file.strpath], timeout=10, + stdout_regex=r'\[0, 1, 4, 9, 16\]') + + +INTERACTIVE_DEFINED_FUNCTION_AND_CLASS_SCRIPT_CONTENT = """\ +import sys +# Make sure that joblib is importable in the subprocess launching this +# script. This is needed in case we run the tests from the joblib root +# folder without having installed joblib +sys.path.insert(0, {joblib_root_folder!r}) + +from joblib import Parallel, delayed +from functools import partial + +class MyClass: + '''Class defined in the __main__ namespace''' + def __init__(self, value): + self.value = value + + +def square(x, ignored=None, ignored2=None): + '''Function defined in the __main__ namespace''' + return x.value ** 2 + + +square2 = partial(square, ignored2='something') + +# Here, we do not need the `if __name__ == "__main__":` safeguard when +# using the default `loky` backend (even on Windows). + +# The following baroque function call is meant to check that joblib +# introspection rightfully uses cloudpickle instead of the (faster) pickle +# module of the standard library when necessary. In particular cloudpickle is +# necessary for functions and instances of classes interactively defined in the +# __main__ module. 
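###############################################################################
# A minimal illustrative sketch (not part of the script template above) of the
# behaviour described in the comment: with the default loky backend, callables
# are serialized with cloudpickle, so lambdas and functions defined
# interactively in __main__ can be dispatched to worker processes without an
# `if __name__ == "__main__":` guard.
from joblib import Parallel, delayed

doubled = Parallel(n_jobs=2)(delayed(lambda x: 2 * x)(i) for i in range(4))
assert doubled == [0, 2, 4, 6]
###############################################################################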
+ +print(Parallel(n_jobs=2)( + delayed(square2)(MyClass(i), ignored=[dict(a=MyClass(1))]) + for i in range(5) +)) +""".format(joblib_root_folder=os.path.dirname( + os.path.dirname(joblib.__file__))) + + +@with_multiprocessing +def test_parallel_with_interactively_defined_functions_default_backend(tmpdir): + # The default backend (loky) accepts interactive functions defined in + # __main__ and does not require if __name__ == '__main__' even when + # the __main__ module is defined by the result of the execution of a + # filesystem script. + script = tmpdir.join('joblib_interactively_defined_function.py') + script.write(INTERACTIVE_DEFINED_FUNCTION_AND_CLASS_SCRIPT_CONTENT) + check_subprocess_call([sys.executable, script.strpath], + stdout_regex=r'\[0, 1, 4, 9, 16\]', + timeout=5) + + +INTERACTIVELY_DEFINED_SUBCLASS_WITH_METHOD_SCRIPT_CONTENT = """\ +import sys +# Make sure that joblib is importable in the subprocess launching this +# script. This is needed in case we run the tests from the joblib root +# folder without having installed joblib +sys.path.insert(0, {joblib_root_folder!r}) + +from joblib import Parallel, delayed, hash +import multiprocessing as mp +mp.util.log_to_stderr(5) + +class MyList(list): + '''MyList is interactively defined by MyList.append is a built-in''' + def __hash__(self): + # XXX: workaround limitation in cloudpickle + return hash(self).__hash__() + +l = MyList() + +print(Parallel(n_jobs=2)( + delayed(l.append)(i) for i in range(3) +)) +""".format(joblib_root_folder=os.path.dirname( + os.path.dirname(joblib.__file__))) + + +@with_multiprocessing +def test_parallel_with_interactively_defined_bound_method(tmpdir): + script = tmpdir.join('joblib_interactive_bound_method_script.py') + script.write(INTERACTIVELY_DEFINED_SUBCLASS_WITH_METHOD_SCRIPT_CONTENT) + check_subprocess_call([sys.executable, script.strpath], + stdout_regex=r'\[None, None, None\]', + stderr_regex=r'LokyProcess', + timeout=15) + + +def test_parallel_with_exhausted_iterator(): + exhausted_iterator = iter([]) + assert Parallel(n_jobs=2)(exhausted_iterator) == [] + + +def check_memmap(a): + if not isinstance(a, np.memmap): + raise TypeError('Expected np.memmap instance, got %r', + type(a)) + return a.copy() # return a regular array instead of a memmap + + +@with_numpy +@with_multiprocessing +@parametrize('backend', PROCESS_BACKENDS) +def test_auto_memmap_on_arrays_from_generator(backend): + # Non-regression test for a problem with a bad interaction between the + # GC collecting arrays recently created during iteration inside the + # parallel dispatch loop and the auto-memmap feature of Parallel. + # See: https://github.com/joblib/joblib/pull/294 + def generate_arrays(n): + for i in range(n): + yield np.ones(10, dtype=np.float32) * i + # Use max_nbytes=1 to force the use of memory-mapping even for small + # arrays + results = Parallel(n_jobs=2, max_nbytes=1, backend=backend)( + delayed(check_memmap)(a) for a in generate_arrays(100)) + for result, expected in zip(results, generate_arrays(len(results))): + np.testing.assert_array_equal(expected, result) + + # Second call to force loky to adapt the executor by growing the number + # of worker processes. This is a non-regression test for: + # https://github.com/joblib/joblib/issues/629. 
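###############################################################################
# A minimal illustrative sketch (not taken from the test file above) of the
# auto-memmapping feature being exercised: with a process backend, any numpy
# argument larger than max_nbytes is dumped to a temporary file and handed to
# the workers as a read-only np.memmap instead of being copied over a pipe.
# The helper `head` is made up for this sketch.
import numpy as np
from joblib import Parallel, delayed

def head(a):
    # Inside the workers `a` typically arrives as an np.memmap.
    return float(a[0])

big = np.ones(int(1e6))  # ~8 MB, above the default 1 MB memmapping threshold
Parallel(n_jobs=2, max_nbytes='1M')(delayed(head)(big) for _ in range(4))
###############################################################################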
+ results = Parallel(n_jobs=4, max_nbytes=1, backend=backend)( + delayed(check_memmap)(a) for a in generate_arrays(100)) + for result, expected in zip(results, generate_arrays(len(results))): + np.testing.assert_array_equal(expected, result) + + +def identity(arg): + return arg + + +@with_numpy +@with_multiprocessing +def test_memmap_with_big_offset(tmpdir): + fname = tmpdir.join('test.mmap').strpath + size = mmap.ALLOCATIONGRANULARITY + obj = [np.zeros(size, dtype='uint8'), np.ones(size, dtype='uint8')] + dump(obj, fname) + memmap = load(fname, mmap_mode='r') + result, = Parallel(n_jobs=2)(delayed(identity)(memmap) for _ in [0]) + assert isinstance(memmap[1], np.memmap) + assert memmap[1].offset > size + np.testing.assert_array_equal(obj, result) + + +def test_warning_about_timeout_not_supported_by_backend(): + with warns(None) as warninfo: + Parallel(timeout=1)(delayed(square)(i) for i in range(50)) + assert len(warninfo) == 1 + w = warninfo[0] + assert isinstance(w.message, UserWarning) + assert str(w.message) == ( + "The backend class 'SequentialBackend' does not support timeout. " + "You have set 'timeout=1' in Parallel but the 'timeout' parameter " + "will not be used.") + + +@parametrize('backend', ALL_VALID_BACKENDS) +@parametrize('n_jobs', [1, 2, -2, -1]) +def test_abort_backend(n_jobs, backend): + delays = ["a"] + [10] * 100 + with raises(TypeError): + t_start = time.time() + Parallel(n_jobs=n_jobs, backend=backend)( + delayed(time.sleep)(i) for i in delays) + dt = time.time() - t_start + assert dt < 20 + + +@with_numpy +@with_multiprocessing +@parametrize('backend', PROCESS_BACKENDS) +def test_memmapping_leaks(backend, tmpdir): + # Non-regression test for memmapping backends. Ensure that the data + # does not stay too long in memory + tmpdir = tmpdir.strpath + + # Use max_nbytes=1 to force the use of memory-mapping even for small + # arrays + with Parallel(n_jobs=2, max_nbytes=1, backend=backend, + temp_folder=tmpdir) as p: + p(delayed(check_memmap)(a) for a in [np.random.random(10)] * 2) + + # The memmap folder should not be clean in the context scope + assert len(os.listdir(tmpdir)) > 0 + + # Make sure that the shared memory is cleaned at the end when we exit + # the context + for _ in range(100): + if not os.listdir(tmpdir): + break + sleep(.1) + else: + raise AssertionError('temporary directory of Parallel was not removed') + + # Make sure that the shared memory is cleaned at the end of a call + p = Parallel(n_jobs=2, max_nbytes=1, backend=backend) + p(delayed(check_memmap)(a) for a in [np.random.random(10)] * 2) + + for _ in range(100): + if not os.listdir(tmpdir): + break + sleep(.1) + else: + raise AssertionError('temporary directory of Parallel was not removed') + + +@parametrize('backend', [None, 'loky', 'threading']) +def test_lambda_expression(backend): + # cloudpickle is used to pickle delayed callables + results = Parallel(n_jobs=2, backend=backend)( + delayed(lambda x: x ** 2)(i) for i in range(10)) + assert results == [i ** 2 for i in range(10)] + + +@with_multiprocessing +@parametrize('backend', PROCESS_BACKENDS) +def test_backend_batch_statistics_reset(backend): + """Test that a parallel backend correctly resets its batch statistics.""" + n_jobs = 2 + n_inputs = 500 + task_time = 2. 
/ n_inputs + + p = Parallel(verbose=10, n_jobs=n_jobs, backend=backend) + p(delayed(time.sleep)(task_time) for i in range(n_inputs)) + assert (p._backend._effective_batch_size == + p._backend._DEFAULT_EFFECTIVE_BATCH_SIZE) + assert (p._backend._smoothed_batch_duration == + p._backend._DEFAULT_SMOOTHED_BATCH_DURATION) + + p(delayed(time.sleep)(task_time) for i in range(n_inputs)) + assert (p._backend._effective_batch_size == + p._backend._DEFAULT_EFFECTIVE_BATCH_SIZE) + assert (p._backend._smoothed_batch_duration == + p._backend._DEFAULT_SMOOTHED_BATCH_DURATION) + + +def test_backend_hinting_and_constraints(): + for n_jobs in [1, 2, -1]: + assert type(Parallel(n_jobs=n_jobs)._backend) == LokyBackend + + p = Parallel(n_jobs=n_jobs, prefer='threads') + assert type(p._backend) == ThreadingBackend + + p = Parallel(n_jobs=n_jobs, prefer='processes') + assert type(p._backend) == LokyBackend + + p = Parallel(n_jobs=n_jobs, require='sharedmem') + assert type(p._backend) == ThreadingBackend + + # Explicit backend selection can override backend hinting although it + # is useless to pass a hint when selecting a backend. + p = Parallel(n_jobs=2, backend='loky', prefer='threads') + assert type(p._backend) == LokyBackend + + with parallel_backend('loky', n_jobs=2): + # Explicit backend selection by the user with the context manager + # should be respected when combined with backend hints only. + p = Parallel(prefer='threads') + assert type(p._backend) == LokyBackend + assert p.n_jobs == 2 + + with parallel_backend('loky', n_jobs=2): + # Locally hard-coded n_jobs value is respected. + p = Parallel(n_jobs=3, prefer='threads') + assert type(p._backend) == LokyBackend + assert p.n_jobs == 3 + + with parallel_backend('loky', n_jobs=2): + # Explicit backend selection by the user with the context manager + # should be ignored when the Parallel call has hard constraints. + # In this case, the default backend that supports shared mem is + # used an the default number of processes is used. 
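###############################################################################
# A minimal illustrative sketch (not taken from the test file above) of hints
# versus constraints: prefer='threads' is a soft hint that an explicit backend
# choice can override, while require='sharedmem' is a hard constraint that
# forces a shared-memory (threading) backend. The helper `record` is made up
# for this sketch.
from joblib import Parallel, delayed

seen = []

def record(i):
    seen.append(i)  # mutating shared state only works with shared memory

Parallel(n_jobs=2, require='sharedmem')(delayed(record)(i) for i in range(5))
assert sorted(seen) == [0, 1, 2, 3, 4]

Parallel(n_jobs=2, prefer='threads')(delayed(abs)(-i) for i in range(5))
###############################################################################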
+ p = Parallel(require='sharedmem') + assert type(p._backend) == ThreadingBackend + assert p.n_jobs == 1 + + with parallel_backend('loky', n_jobs=2): + p = Parallel(n_jobs=3, require='sharedmem') + assert type(p._backend) == ThreadingBackend + assert p.n_jobs == 3 + + +def test_backend_hinting_and_constraints_with_custom_backends(capsys): + # Custom backends can declare that they use threads and have shared memory + # semantics: + class MyCustomThreadingBackend(ParallelBackendBase): + supports_sharedmem = True + use_threads = True + + def apply_async(self): + pass + + def effective_n_jobs(self, n_jobs): + return n_jobs + + with parallel_backend(MyCustomThreadingBackend()): + p = Parallel(n_jobs=2, prefer='processes') # ignored + assert type(p._backend) == MyCustomThreadingBackend + + p = Parallel(n_jobs=2, require='sharedmem') + assert type(p._backend) == MyCustomThreadingBackend + + class MyCustomProcessingBackend(ParallelBackendBase): + supports_sharedmem = False + use_threads = False + + def apply_async(self): + pass + + def effective_n_jobs(self, n_jobs): + return n_jobs + + with parallel_backend(MyCustomProcessingBackend()): + p = Parallel(n_jobs=2, prefer='processes') + assert type(p._backend) == MyCustomProcessingBackend + + out, err = capsys.readouterr() + assert out == "" + assert err == "" + + p = Parallel(n_jobs=2, require='sharedmem', verbose=10) + assert type(p._backend) == ThreadingBackend + + out, err = capsys.readouterr() + expected = ("Using ThreadingBackend as joblib.Parallel backend " + "instead of MyCustomProcessingBackend as the latter " + "does not provide shared memory semantics.") + assert out.strip() == expected + assert err == "" + + with raises(ValueError): + Parallel(backend=MyCustomProcessingBackend(), require='sharedmem') + + +def test_invalid_backend_hinting_and_constraints(): + with raises(ValueError): + Parallel(prefer='invalid') + + with raises(ValueError): + Parallel(require='invalid') + + with raises(ValueError): + # It is inconsistent to prefer process-based parallelism while + # requiring shared memory semantics. + Parallel(prefer='processes', require='sharedmem') + + # It is inconsistent to ask explictly for a process-based parallelism + # while requiring shared memory semantics. 
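###############################################################################
# A minimal illustrative sketch (not taken from the test file above) of
# plugging in a third-party backend: once registered with
# register_parallel_backend, a custom class can be selected by name just like
# the built-in ones. `MyThreadBackend` and 'my_threads' are made-up names.
from joblib import Parallel, delayed, parallel_backend
from joblib import register_parallel_backend
from joblib._parallel_backends import ThreadingBackend

class MyThreadBackend(ThreadingBackend):
    """Identical to the threading backend, registered under a custom name."""

register_parallel_backend('my_threads', MyThreadBackend)

with parallel_backend('my_threads', n_jobs=2):
    lengths = Parallel()(delayed(len)([0] * i) for i in range(5))
assert lengths == [0, 1, 2, 3, 4]
###############################################################################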
+ with raises(ValueError): + Parallel(backend='loky', require='sharedmem') + with raises(ValueError): + Parallel(backend='multiprocessing', require='sharedmem') + + +def test_global_parallel_backend(): + default = Parallel()._backend + + pb = parallel_backend('threading') + assert isinstance(Parallel()._backend, ThreadingBackend) + + pb.unregister() + assert type(Parallel()._backend) is type(default) + + +def test_external_backends(): + def register_foo(): + BACKENDS['foo'] = ThreadingBackend + + EXTERNAL_BACKENDS['foo'] = register_foo + + with parallel_backend('foo'): + assert isinstance(Parallel()._backend, ThreadingBackend) + + +def _recursive_backend_info(limit=3, **kwargs): + """Perform nested parallel calls and introspect the backend on the way""" + + with Parallel(n_jobs=2) as p: + this_level = [(type(p._backend).__name__, p._backend.nesting_level)] + if limit == 0: + return this_level + results = p(delayed(_recursive_backend_info)(limit=limit - 1, **kwargs) + for i in range(1)) + return this_level + results[0] + + +@with_multiprocessing +@parametrize('backend', ['loky', 'threading']) +def test_nested_parallelism_limit(backend): + with parallel_backend(backend, n_jobs=2): + backend_types_and_levels = _recursive_backend_info() + + if cpu_count() == 1: + second_level_backend_type = 'SequentialBackend' + max_level = 1 + else: + second_level_backend_type = 'ThreadingBackend' + max_level = 2 + + top_level_backend_type = backend.title() + 'Backend' + expected_types_and_levels = [ + (top_level_backend_type, 0), + (second_level_backend_type, 1), + ('SequentialBackend', max_level), + ('SequentialBackend', max_level) + ] + assert backend_types_and_levels == expected_types_and_levels + + +@with_numpy +@skipif(distributed is None, reason='This test requires dask') +def test_nested_parallelism_with_dask(): + client = distributed.Client(n_workers=2, threads_per_worker=2) # noqa + + # 10 MB of data as argument to trigger implicit scattering + data = np.ones(int(1e7), dtype=np.uint8) + for i in range(2): + with parallel_backend('dask'): + backend_types_and_levels = _recursive_backend_info(data=data) + assert len(backend_types_and_levels) == 4 + assert all(name == 'DaskDistributedBackend' + for name, _ in backend_types_and_levels) + + # No argument + with parallel_backend('dask'): + backend_types_and_levels = _recursive_backend_info() + assert len(backend_types_and_levels) == 4 + assert all(name == 'DaskDistributedBackend' + for name, _ in backend_types_and_levels) + + +def _recursive_parallel(nesting_limit=None): + """A horrible function that does recursive parallel calls""" + return Parallel()(delayed(_recursive_parallel)() for i in range(2)) + + +@parametrize('backend', ['loky', 'threading']) +def test_thread_bomb_mitigation(backend): + # Test that recursive parallelism raises a recursion rather than + # saturating the operating system resources by creating a unbounded number + # of threads. + with parallel_backend(backend, n_jobs=2): + with raises(BaseException) as excinfo: + _recursive_parallel() + exc = excinfo.value + if backend == "loky" and isinstance(exc, TerminatedWorkerError): + # The recursion exception can itself cause an error when pickling it to + # be send back to the parent process. In this case the worker crashes + # but the original traceback is still printed on stderr. This could be + # improved but does not seem simple to do and this is is not critical + # for users (as long as there is no process or thread bomb happening). 
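###############################################################################
# A minimal illustrative sketch (not taken from the test file above) of the
# nesting protection being tested: a Parallel call made inside a worker runs
# with threads, and one level deeper joblib falls back to the sequential
# backend instead of spawning an unbounded number of workers. The helper
# `inner` is made up for this sketch.
from joblib import Parallel, delayed

def inner(i):
    # Nesting level 1: served by the threading (or sequential) backend.
    return sum(Parallel(n_jobs=2)(delayed(abs)(-j) for j in range(3)))

assert Parallel(n_jobs=2)(delayed(inner)(i) for i in range(4)) == [3, 3, 3, 3]
###############################################################################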
+ pytest.xfail("Loky worker crash when serializing RecursionError") + else: + assert isinstance(exc, RecursionError) + + +def _run_parallel_sum(): + env_vars = {} + for var in ['OMP_NUM_THREADS', 'OPENBLAS_NUM_THREADS', 'MKL_NUM_THREADS', + 'VECLIB_MAXIMUM_THREADS', 'NUMEXPR_NUM_THREADS', + 'NUMBA_NUM_THREADS', 'ENABLE_IPC']: + env_vars[var] = os.environ.get(var) + return env_vars, parallel_sum(100) + + +@parametrize("backend", [None, 'loky']) +@skipif(parallel_sum is None, reason="Need OpenMP helper compiled") +def test_parallel_thread_limit(backend): + results = Parallel(n_jobs=2, backend=backend)( + delayed(_run_parallel_sum)() for _ in range(2) + ) + expected_num_threads = max(cpu_count() // 2, 1) + for worker_env_vars, omp_num_threads in results: + assert omp_num_threads == expected_num_threads + for name, value in worker_env_vars.items(): + if name.endswith("_THREADS"): + assert value == str(expected_num_threads) + else: + assert name == "ENABLE_IPC" + assert value == "1" + + +@skipif(distributed is not None, + reason='This test requires dask NOT installed') +def test_dask_backend_when_dask_not_installed(): + with raises(ValueError, match='Please install dask'): + parallel_backend('dask') + + +def test_zero_worker_backend(): + # joblib.Parallel should reject with an explicit error message parallel + # backends that have no worker. + class ZeroWorkerBackend(ThreadingBackend): + def configure(self, *args, **kwargs): + return 0 + + def apply_async(self, func, callback=None): # pragma: no cover + raise TimeoutError("No worker available") + + def effective_n_jobs(self, n_jobs): # pragma: no cover + return 0 + + expected_msg = "ZeroWorkerBackend has no active worker" + with parallel_backend(ZeroWorkerBackend()): + with pytest.raises(RuntimeError, match=expected_msg): + Parallel(n_jobs=2)(delayed(id)(i) for i in range(2)) + + +def test_globals_update_at_each_parallel_call(): + # This is a non-regression test related to joblib issues #836 and #833. + # Cloudpickle versions between 0.5.4 and 0.7 introduced a bug where global + # variables changes in a parent process between two calls to + # joblib.Parallel would not be propagated into the workers. + global MY_GLOBAL_VARIABLE + MY_GLOBAL_VARIABLE = "original value" + + def check_globals(): + global MY_GLOBAL_VARIABLE + return MY_GLOBAL_VARIABLE + + assert check_globals() == "original value" + + workers_global_variable = Parallel(n_jobs=2)( + delayed(check_globals)() for i in range(2)) + assert set(workers_global_variable) == {"original value"} + + # Change the value of MY_GLOBAL_VARIABLE, and make sure this change gets + # propagated into the workers environment + MY_GLOBAL_VARIABLE = "changed value" + assert check_globals() == "changed value" + + workers_global_variable = Parallel(n_jobs=2)( + delayed(check_globals)() for i in range(2)) + assert set(workers_global_variable) == {"changed value"} + + +############################################################################## +# Test environment variable in child env, in particular for limiting +# the maximal number of threads in C-library threadpools. +# + +def _check_numpy_threadpool_limits(): + import numpy as np + # Let's call BLAS on a Matrix Matrix multiplication with dimensions large + # enough to ensure that the threadpool managed by the underlying BLAS + # implementation is actually used so as to force its initialization. 
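###############################################################################
# A minimal illustrative sketch (not taken from the test file above) of the
# oversubscription protection checked below: the loky backend limits the
# BLAS/OpenMP thread pools of its workers, and the cap can be set explicitly
# through inner_max_num_threads. The helper `gemm` is made up for this sketch.
import numpy as np
from joblib import Parallel, delayed, parallel_backend

def gemm(_):
    a = np.random.randn(200, 200)
    return float(np.dot(a, a.T).trace())

with parallel_backend('loky', n_jobs=2, inner_max_num_threads=1):
    # Each worker's C-level thread pools are limited to a single thread.
    Parallel()(delayed(gemm)(i) for i in range(4))
###############################################################################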
+ a = np.random.randn(100, 100) + np.dot(a, a) + from threadpoolctl import threadpool_info + return threadpool_info() + + +def _parent_max_num_threads_for(child_module, parent_info): + for parent_module in parent_info: + if parent_module['filepath'] == child_module['filepath']: + return parent_module['num_threads'] + raise ValueError("An unexpected module was loaded in child:\n{}" + .format(child_module)) + + +def check_child_num_threads(workers_info, parent_info, num_threads): + # Check that the number of threads reported in workers_info is consistent + # with the expectation. We need to be carefull to handle the cases where + # the requested number of threads is below max_num_thread for the library. + for child_threadpool_info in workers_info: + for child_module in child_threadpool_info: + parent_max_num_threads = _parent_max_num_threads_for( + child_module, parent_info) + expected = {min(num_threads, parent_max_num_threads), num_threads} + assert child_module['num_threads'] in expected + + +@with_numpy +@with_multiprocessing +@parametrize('n_jobs', [2, 4, -2, -1]) +def test_threadpool_limitation_in_child(n_jobs): + # Check that the protection against oversubscription in workers is working + # using threadpoolctl functionalities. + + # Skip this test if numpy is not linked to a BLAS library + parent_info = _check_numpy_threadpool_limits() + if len(parent_info) == 0: + pytest.skip(msg="Need a version of numpy linked to BLAS") + + workers_threadpool_infos = Parallel(n_jobs=n_jobs)( + delayed(_check_numpy_threadpool_limits)() for i in range(2)) + + n_jobs = effective_n_jobs(n_jobs) + expected_child_num_threads = max(cpu_count() // n_jobs, 1) + + check_child_num_threads(workers_threadpool_infos, parent_info, + expected_child_num_threads) + + +@with_numpy +@with_multiprocessing +@parametrize('inner_max_num_threads', [1, 2, 4, None]) +@parametrize('n_jobs', [2, -1]) +def test_threadpool_limitation_in_child_context(n_jobs, inner_max_num_threads): + # Check that the protection against oversubscription in workers is working + # using threadpoolctl functionalities. + + # Skip this test if numpy is not linked to a BLAS library + parent_info = _check_numpy_threadpool_limits() + if len(parent_info) == 0: + pytest.skip(msg="Need a version of numpy linked to BLAS") + + with parallel_backend('loky', inner_max_num_threads=inner_max_num_threads): + workers_threadpool_infos = Parallel(n_jobs=n_jobs)( + delayed(_check_numpy_threadpool_limits)() for i in range(2)) + + n_jobs = effective_n_jobs(n_jobs) + if inner_max_num_threads is None: + expected_child_num_threads = max(cpu_count() // n_jobs, 1) + else: + expected_child_num_threads = inner_max_num_threads + + check_child_num_threads(workers_threadpool_infos, parent_info, + expected_child_num_threads) + + +@with_multiprocessing +@parametrize('n_jobs', [2, -1]) +@parametrize('var_name', ["OPENBLAS_NUM_THREADS", + "MKL_NUM_THREADS", + "OMP_NUM_THREADS"]) +def test_threadpool_limitation_in_child_override(n_jobs, var_name): + # Check that environment variables set by the user on the main process + # always have the priority. + + # Clean up the existing executor because we change the environment of the + # parent at runtime and it is not detected in loky intentionally. 
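###############################################################################
# A minimal illustrative sketch (not taken from the test file above) of the
# precedence rule tested below: thread-limiting environment variables set by
# the user in the parent process are forwarded unchanged to freshly started
# loky workers, overriding joblib's automatic defaults. This holds in a fresh
# interpreter; as noted in the test, an already-running executor does not pick
# up later changes to the parent environment. `read_omp` is a made-up helper.
import os
from joblib import Parallel, delayed

os.environ['OMP_NUM_THREADS'] = '4'

def read_omp(_):
    return os.environ.get('OMP_NUM_THREADS')

print(Parallel(n_jobs=2)(delayed(read_omp)(i) for i in range(2)))  # ['4', '4']
###############################################################################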
+ get_reusable_executor(reuse=True).shutdown() + + def _get_env(var_name): + return os.environ.get(var_name) + + original_var_value = os.environ.get(var_name) + try: + os.environ[var_name] = "4" + # Skip this test if numpy is not linked to a BLAS library + results = Parallel(n_jobs=n_jobs)( + delayed(_get_env)(var_name) for i in range(2)) + assert results == ["4", "4"] + + with parallel_backend('loky', inner_max_num_threads=1): + results = Parallel(n_jobs=n_jobs)( + delayed(_get_env)(var_name) for i in range(2)) + assert results == ["1", "1"] + + finally: + if original_var_value is None: + del os.environ[var_name] + else: + os.environ[var_name] = original_var_value + + +@with_numpy +@with_multiprocessing +@parametrize('backend', ['multiprocessing', 'threading', + MultiprocessingBackend(), ThreadingBackend()]) +def test_threadpool_limitation_in_child_context_error(backend): + + with raises(AssertionError, match=r"does not acc.*inner_max_num_threads"): + parallel_backend(backend, inner_max_num_threads=1) + + +@with_multiprocessing +@parametrize('n_jobs', [2, 4, -1]) +def test_loky_reuse_workers(n_jobs): + # Non-regression test for issue #967 where the workers are not reused when + # calling multiple Parallel loops. + + def parallel_call(n_jobs): + x = range(10) + Parallel(n_jobs=n_jobs)(delayed(sum)(x) for i in range(10)) + + # Run a parallel loop and get the workers used for computations + parallel_call(n_jobs) + first_executor = get_reusable_executor(reuse=True) + + # Ensure that the workers are reused for the next calls, as the executor is + # not restarted. + for _ in range(10): + parallel_call(n_jobs) + executor = get_reusable_executor(reuse=True) + assert executor == first_executor diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_store_backends.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_store_backends.py new file mode 100644 index 0000000..2c6198f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_store_backends.py @@ -0,0 +1,56 @@ +try: + # Python 2.7: use the C pickle to speed up + # test_concurrency_safe_write which pickles big python objects + import cPickle as cpickle +except ImportError: + import pickle as cpickle +import functools +import time + +from joblib.testing import parametrize, timeout +from joblib.test.common import with_multiprocessing +from joblib.backports import concurrency_safe_rename +from joblib import Parallel, delayed +from joblib._store_backends import concurrency_safe_write + + +def write_func(output, filename): + with open(filename, 'wb') as f: + cpickle.dump(output, f) + + +def load_func(expected, filename): + for i in range(10): + try: + with open(filename, 'rb') as f: + reloaded = cpickle.load(f) + break + except (OSError, IOError): + # On Windows you can have WindowsError ([Error 5] Access + # is denied or [Error 13] Permission denied) when reading the file, + # probably because a writer process has a lock on the file + time.sleep(0.1) + else: + raise + assert expected == reloaded + + +def concurrency_safe_write_rename(to_write, filename, write_func): + temporary_filename = concurrency_safe_write(to_write, + filename, write_func) + concurrency_safe_rename(temporary_filename, filename) + + +@timeout(0) # No timeout as this test can be long +@with_multiprocessing +@parametrize('backend', ['multiprocessing', 'loky', 'threading']) +def test_concurrency_safe_write(tmpdir, backend): + # Add one item to cache + filename = tmpdir.join('test.pkl').strpath + + obj = {str(i): i for i in 
range(int(1e5))} + funcs = [functools.partial(concurrency_safe_write_rename, + write_func=write_func) + if i % 3 != 2 else load_func for i in range(12)] + Parallel(n_jobs=2, backend=backend)( + delayed(func)(obj, filename) for func in funcs) diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/test_testing.py b/minor_project/lib/python3.6/site-packages/joblib/test/test_testing.py new file mode 100644 index 0000000..39ac880 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/test_testing.py @@ -0,0 +1,73 @@ +import sys +import re + +from joblib.testing import raises, check_subprocess_call + + +def test_check_subprocess_call(): + code = '\n'.join(['result = 1 + 2 * 3', + 'print(result)', + 'my_list = [1, 2, 3]', + 'print(my_list)']) + + check_subprocess_call([sys.executable, '-c', code]) + + # Now checking stdout with a regex + check_subprocess_call([sys.executable, '-c', code], + # Regex needed for platform-specific line endings + stdout_regex=r'7\s{1,2}\[1, 2, 3\]') + + +def test_check_subprocess_call_non_matching_regex(): + code = '42' + non_matching_pattern = '_no_way_this_matches_anything_' + + with raises(ValueError) as excinfo: + check_subprocess_call([sys.executable, '-c', code], + stdout_regex=non_matching_pattern) + excinfo.match('Unexpected stdout.+{}'.format(non_matching_pattern)) + + +def test_check_subprocess_call_wrong_command(): + wrong_command = '_a_command_that_does_not_exist_' + with raises(OSError): + check_subprocess_call([wrong_command]) + + +def test_check_subprocess_call_non_zero_return_code(): + code_with_non_zero_exit = '\n'.join([ + 'import sys', + 'print("writing on stdout")', + 'sys.stderr.write("writing on stderr")', + 'sys.exit(123)']) + + pattern = re.compile('Non-zero return code: 123.+' + 'Stdout:\nwriting on stdout.+' + 'Stderr:\nwriting on stderr', re.DOTALL) + + with raises(ValueError) as excinfo: + check_subprocess_call([sys.executable, '-c', code_with_non_zero_exit]) + excinfo.match(pattern) + + +def test_check_subprocess_call_timeout(): + code_timing_out = '\n'.join([ + 'import time', + 'import sys', + 'print("before sleep on stdout")', + 'sys.stdout.flush()', + 'sys.stderr.write("before sleep on stderr")', + 'sys.stderr.flush()', + 'time.sleep(1.1)', + 'print("process should have be killed before")', + 'sys.stdout.flush()']) + + pattern = re.compile('Non-zero return code:.+' + 'Stdout:\nbefore sleep on stdout\\s+' + 'Stderr:\nbefore sleep on stderr', + re.DOTALL) + + with raises(ValueError) as excinfo: + check_subprocess_call([sys.executable, '-c', code_timing_out], + timeout=1) + excinfo.match(pattern) diff --git a/minor_project/lib/python3.6/site-packages/joblib/test/testutils.py b/minor_project/lib/python3.6/site-packages/joblib/test/testutils.py new file mode 100644 index 0000000..20ec8c1 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/test/testutils.py @@ -0,0 +1,8 @@ +def return_slice_of_data(arr, start_idx, end_idx): + return arr[start_idx:end_idx] + + +def print_filename_and_raise(arr): + from joblib._memmapping_reducer import _get_backing_memmap + print(_get_backing_memmap(arr).filename) + raise ValueError diff --git a/minor_project/lib/python3.6/site-packages/joblib/testing.py b/minor_project/lib/python3.6/site-packages/joblib/testing.py new file mode 100644 index 0000000..28f7931 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/joblib/testing.py @@ -0,0 +1,77 @@ +""" +Helper for testing. 
+""" + +import sys +import warnings +import os.path +import re +import subprocess +import threading + +import pytest +import _pytest + + +raises = pytest.raises +warns = pytest.warns +SkipTest = _pytest.runner.Skipped +skipif = pytest.mark.skipif +fixture = pytest.fixture +parametrize = pytest.mark.parametrize +timeout = pytest.mark.timeout +xfail = pytest.mark.xfail +param = pytest.param + + +def warnings_to_stdout(): + """ Redirect all warnings to stdout. + """ + showwarning_orig = warnings.showwarning + + def showwarning(msg, cat, fname, lno, file=None, line=0): + showwarning_orig(msg, cat, os.path.basename(fname), line, sys.stdout) + + warnings.showwarning = showwarning + # warnings.simplefilter('always') + + +def check_subprocess_call(cmd, timeout=5, stdout_regex=None, + stderr_regex=None): + """Runs a command in a subprocess with timeout in seconds. + + Also checks returncode is zero, stdout if stdout_regex is set, and + stderr if stderr_regex is set. + """ + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + def kill_process(): + warnings.warn("Timeout running {}".format(cmd)) + proc.kill() + + timer = threading.Timer(timeout, kill_process) + try: + timer.start() + stdout, stderr = proc.communicate() + stdout, stderr = stdout.decode(), stderr.decode() + if proc.returncode != 0: + message = ( + 'Non-zero return code: {}.\nStdout:\n{}\n' + 'Stderr:\n{}').format( + proc.returncode, stdout, stderr) + raise ValueError(message) + + if (stdout_regex is not None and + not re.search(stdout_regex, stdout)): + raise ValueError( + "Unexpected stdout: {!r} does not match:\n{!r}".format( + stdout_regex, stdout)) + if (stderr_regex is not None and + not re.search(stderr_regex, stderr)): + raise ValueError( + "Unexpected stderr: {!r} does not match:\n{!r}".format( + stderr_regex, stderr)) + + finally: + timer.cancel() diff --git a/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/INSTALLER b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/LICENSE b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/LICENSE new file mode 100644 index 0000000..c34aff7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/LICENSE @@ -0,0 +1,71 @@ +========================= + The Kiwi licensing terms +========================= +Kiwi is licensed under the terms of the Modified BSD License (also known as +New or Revised BSD), as follows: + +Copyright (c) 2013, Nucleic Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of the Nucleic Development Team nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +About Kiwi +---------- +Chris Colbert began the Kiwi project in December 2013 in an effort to +create a blisteringly fast UI constraint solver. Chris is still the +project lead. + +The Nucleic Development Team is the set of all contributors to the Nucleic +project and its subprojects. + +The core team that coordinates development on GitHub can be found here: +http://github.com/nucleic. The current team consists of: + +* Chris Colbert + +Our Copyright Policy +-------------------- +Nucleic uses a shared copyright model. Each contributor maintains copyright +over their contributions to Nucleic. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the Nucleic +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire Nucleic +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the Nucleic repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + +#------------------------------------------------------------------------------ +# Copyright (c) 2013, Nucleic Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file LICENSE, distributed with this software. +#------------------------------------------------------------------------------ diff --git a/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/METADATA b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/METADATA new file mode 100644 index 0000000..8a554f5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/METADATA @@ -0,0 +1,45 @@ +Metadata-Version: 2.1 +Name: kiwisolver +Version: 1.3.1 +Summary: A fast implementation of the Cassowary constraint solver +Home-page: https://github.com/nucleic/kiwi +Author: The Nucleic Development Team +Author-email: sccolbert@gmail.com +License: BSD +Platform: UNKNOWN +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython +Requires-Python: >=3.6 + +Welcome to Kiwi +=============== + +.. 
image:: https://travis-ci.org/nucleic/kiwi.svg?branch=master + :target: https://travis-ci.org/nucleic/kiwi +.. image:: https://github.com/nucleic/kiwi/workflows/Continuous%20Integration/badge.svg + :target: https://github.com/nucleic/kiwi/actions +.. image:: https://github.com/nucleic/kiwi/workflows/Documentation%20building/badge.svg + :target: https://github.com/nucleic/kiwi/actions +.. image:: https://codecov.io/gh/nucleic/kiwi/branch/master/graph/badge.svg + :target: https://codecov.io/gh/nucleic/kiwi +.. image:: https://readthedocs.org/projects/kiwisolver/badge/?version=latest + :target: https://kiwisolver.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +Kiwi is an efficient C++ implementation of the Cassowary constraint solving +algorithm. Kiwi is an implementation of the algorithm based on the seminal +Cassowary paper. It is *not* a refactoring of the original C++ solver. Kiwi +has been designed from the ground up to be lightweight and fast. Kiwi ranges +from 10x to 500x faster than the original Cassowary solver with typical use +cases gaining a 40x improvement. Memory savings are consistently > 5x. + +In addition to the C++ solver, Kiwi ships with hand-rolled Python bindings for +Python 3.6+. + + diff --git a/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/RECORD b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/RECORD new file mode 100644 index 0000000..1f0263a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/RECORD @@ -0,0 +1,7 @@ +kiwisolver-1.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +kiwisolver-1.3.1.dist-info/LICENSE,sha256=JtqNSXRHlvJgH7X-nl2LnoECBkymCo3Bglthd8sFRQw,3279 +kiwisolver-1.3.1.dist-info/METADATA,sha256=oT4PghUIpLXL3xTjn_-d8mARicDSwM4uzaUC-EMYNLc,1978 +kiwisolver-1.3.1.dist-info/RECORD,, +kiwisolver-1.3.1.dist-info/WHEEL,sha256=ZCgRoodM6cLv8Z9Z-kuTK9QreeQu8GHOdQFY_br7fEU,109 +kiwisolver-1.3.1.dist-info/top_level.txt,sha256=xqwWj7oSHlpIjcw2QMJb8puTFPdjDBO78AZp9gjTh9c,11 +kiwisolver.cpython-36m-x86_64-linux-gnu.so,sha256=iHZm8poJJrE1f-YcnTXsAaSlG-Un1iFAgS8gDS3a618,4162183 diff --git a/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/WHEEL b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/WHEEL new file mode 100644 index 0000000..a5338f5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: false +Tag: cp36-cp36m-manylinux1_x86_64 + diff --git a/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/top_level.txt b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/top_level.txt new file mode 100644 index 0000000..9b85884 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/kiwisolver-1.3.1.dist-info/top_level.txt @@ -0,0 +1 @@ +kiwisolver diff --git a/minor_project/lib/python3.6/site-packages/kiwisolver.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/kiwisolver.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..c892a73 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/kiwisolver.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/__init__.py b/minor_project/lib/python3.6/site-packages/libfuturize/__init__.py new file mode 100644 index 0000000..4cb1cbc --- /dev/null +++ 
b/minor_project/lib/python3.6/site-packages/libfuturize/__init__.py @@ -0,0 +1 @@ +# empty to make this a package diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..ce31654 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/__pycache__/fixer_util.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/__pycache__/fixer_util.cpython-36.pyc new file mode 100644 index 0000000..db44ec8 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/__pycache__/fixer_util.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/__pycache__/main.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/__pycache__/main.cpython-36.pyc new file mode 100644 index 0000000..3ef207b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/__pycache__/main.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixer_util.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixer_util.py new file mode 100644 index 0000000..48e4689 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixer_util.py @@ -0,0 +1,520 @@ +""" +Utility functions from 2to3, 3to2 and python-modernize (and some home-grown +ones). + +Licences: +2to3: PSF License v2 +3to2: Apache Software License (from 3to2/setup.py) +python-modernize licence: BSD (from python-modernize/LICENSE) +""" + +from lib2to3.fixer_util import (FromImport, Newline, is_import, + find_root, does_tree_import, Comma) +from lib2to3.pytree import Leaf, Node +from lib2to3.pygram import python_symbols as syms, python_grammar +from lib2to3.pygram import token +from lib2to3.fixer_util import (Node, Call, Name, syms, Comma, Number) +import re + + +def canonical_fix_name(fix, avail_fixes): + """ + Examples: + >>> canonical_fix_name('fix_wrap_text_literals') + 'libfuturize.fixes.fix_wrap_text_literals' + >>> canonical_fix_name('wrap_text_literals') + 'libfuturize.fixes.fix_wrap_text_literals' + >>> canonical_fix_name('wrap_te') + ValueError("unknown fixer name") + >>> canonical_fix_name('wrap') + ValueError("ambiguous fixer name") + """ + if ".fix_" in fix: + return fix + else: + if fix.startswith('fix_'): + fix = fix[4:] + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + raise ValueError("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found)) + elif len(found) == 0: + raise ValueError("Unknown fixer. 
Use --list-fixes or -l for a list.") + return found[0] + + + +## These functions are from 3to2 by Joe Amenta: + +def Star(prefix=None): + return Leaf(token.STAR, u'*', prefix=prefix) + +def DoubleStar(prefix=None): + return Leaf(token.DOUBLESTAR, u'**', prefix=prefix) + +def Minus(prefix=None): + return Leaf(token.MINUS, u'-', prefix=prefix) + +def commatize(leafs): + """ + Accepts/turns: (Name, Name, ..., Name, Name) + Returns/into: (Name, Comma, Name, Comma, ..., Name, Comma, Name) + """ + new_leafs = [] + for leaf in leafs: + new_leafs.append(leaf) + new_leafs.append(Comma()) + del new_leafs[-1] + return new_leafs + +def indentation(node): + """ + Returns the indentation for this node + Iff a node is in a suite, then it has indentation. + """ + while node.parent is not None and node.parent.type != syms.suite: + node = node.parent + if node.parent is None: + return u"" + # The first three children of a suite are NEWLINE, INDENT, (some other node) + # INDENT.value contains the indentation for this suite + # anything after (some other node) has the indentation as its prefix. + if node.type == token.INDENT: + return node.value + elif node.prev_sibling is not None and node.prev_sibling.type == token.INDENT: + return node.prev_sibling.value + elif node.prev_sibling is None: + return u"" + else: + return node.prefix + +def indentation_step(node): + """ + Dirty little trick to get the difference between each indentation level + Implemented by finding the shortest indentation string + (technically, the "least" of all of the indentation strings, but + tabs and spaces mixed won't get this far, so those are synonymous.) + """ + r = find_root(node) + # Collect all indentations into one set. + all_indents = set(i.value for i in r.pre_order() if i.type == token.INDENT) + if not all_indents: + # nothing is indented anywhere, so we get to pick what we want + return u" " # four spaces is a popular convention + else: + return min(all_indents) + +def suitify(parent): + """ + Turn the stuff after the first colon in parent's children + into a suite, if it wasn't already + """ + for node in parent.children: + if node.type == syms.suite: + # already in the prefered format, do nothing + return + + # One-liners have no suite node, we have to fake one up + for i, node in enumerate(parent.children): + if node.type == token.COLON: + break + else: + raise ValueError(u"No class suite and no ':'!") + # Move everything into a suite node + suite = Node(syms.suite, [Newline(), Leaf(token.INDENT, indentation(node) + indentation_step(node))]) + one_node = parent.children[i+1] + one_node.remove() + one_node.prefix = u'' + suite.append_child(one_node) + parent.append_child(suite) + +def NameImport(package, as_name=None, prefix=None): + """ + Accepts a package (Name node), name to import it as (string), and + optional prefix and returns a node: + import [as ] + """ + if prefix is None: + prefix = u"" + children = [Name(u"import", prefix=prefix), package] + if as_name is not None: + children.extend([Name(u"as", prefix=u" "), + Name(as_name, prefix=u" ")]) + return Node(syms.import_name, children) + +_compound_stmts = (syms.if_stmt, syms.while_stmt, syms.for_stmt, syms.try_stmt, syms.with_stmt) +_import_stmts = (syms.import_name, syms.import_from) + +def import_binding_scope(node): + """ + Generator yields all nodes for which a node (an import_stmt) has scope + The purpose of this is for a call to _find() on each of them + """ + # import_name / import_from are small_stmts + assert node.type in _import_stmts + test = 
node.next_sibling + # A small_stmt can only be followed by a SEMI or a NEWLINE. + while test.type == token.SEMI: + nxt = test.next_sibling + # A SEMI can only be followed by a small_stmt or a NEWLINE + if nxt.type == token.NEWLINE: + break + else: + yield nxt + # A small_stmt can only be followed by either a SEMI or a NEWLINE + test = nxt.next_sibling + # Covered all subsequent small_stmts after the import_stmt + # Now to cover all subsequent stmts after the parent simple_stmt + parent = node.parent + assert parent.type == syms.simple_stmt + test = parent.next_sibling + while test is not None: + # Yes, this will yield NEWLINE and DEDENT. Deal with it. + yield test + test = test.next_sibling + + context = parent.parent + # Recursively yield nodes following imports inside of a if/while/for/try/with statement + if context.type in _compound_stmts: + # import is in a one-liner + c = context + while c.next_sibling is not None: + yield c.next_sibling + c = c.next_sibling + context = context.parent + + # Can't chain one-liners on one line, so that takes care of that. + + p = context.parent + if p is None: + return + + # in a multi-line suite + + while p.type in _compound_stmts: + + if context.type == syms.suite: + yield context + + context = context.next_sibling + + if context is None: + context = p.parent + p = context.parent + if p is None: + break + +def ImportAsName(name, as_name, prefix=None): + new_name = Name(name) + new_as = Name(u"as", prefix=u" ") + new_as_name = Name(as_name, prefix=u" ") + new_node = Node(syms.import_as_name, [new_name, new_as, new_as_name]) + if prefix is not None: + new_node.prefix = prefix + return new_node + + +def is_docstring(node): + """ + Returns True if the node appears to be a docstring + """ + return (node.type == syms.simple_stmt and + len(node.children) > 0 and node.children[0].type == token.STRING) + + +def future_import(feature, node): + """ + This seems to work + """ + root = find_root(node) + + if does_tree_import(u"__future__", feature, node): + return + + # Look for a shebang or encoding line + shebang_encoding_idx = None + + for idx, node in enumerate(root.children): + # Is it a shebang or encoding line? + if is_shebang_comment(node) or is_encoding_comment(node): + shebang_encoding_idx = idx + if is_docstring(node): + # skip over docstring + continue + names = check_future_import(node) + if not names: + # not a future statement; need to insert before this + break + if feature in names: + # already imported + return + + import_ = FromImport(u'__future__', [Leaf(token.NAME, feature, prefix=" ")]) + if shebang_encoding_idx == 0 and idx == 0: + # If this __future__ import would go on the first line, + # detach the shebang / encoding prefix from the current first line. + # and attach it to our new __future__ import node. + import_.prefix = root.children[0].prefix + root.children[0].prefix = u'' + # End the __future__ import line with a newline and add a blank line + # afterwards: + children = [import_ , Newline()] + root.insert_child(idx, Node(syms.simple_stmt, children)) + + +def future_import2(feature, node): + """ + An alternative to future_import() which might not work ... 
+ """ + root = find_root(node) + + if does_tree_import(u"__future__", feature, node): + return + + insert_pos = 0 + for idx, node in enumerate(root.children): + if node.type == syms.simple_stmt and node.children and \ + node.children[0].type == token.STRING: + insert_pos = idx + 1 + break + + for thing_after in root.children[insert_pos:]: + if thing_after.type == token.NEWLINE: + insert_pos += 1 + continue + + prefix = thing_after.prefix + thing_after.prefix = u"" + break + else: + prefix = u"" + + import_ = FromImport(u"__future__", [Leaf(token.NAME, feature, prefix=u" ")]) + + children = [import_, Newline()] + root.insert_child(insert_pos, Node(syms.simple_stmt, children, prefix=prefix)) + +def parse_args(arglist, scheme): + u""" + Parse a list of arguments into a dict + """ + arglist = [i for i in arglist if i.type != token.COMMA] + + ret_mapping = dict([(k, None) for k in scheme]) + + for i, arg in enumerate(arglist): + if arg.type == syms.argument and arg.children[1].type == token.EQUAL: + # argument < NAME '=' any > + slot = arg.children[0].value + ret_mapping[slot] = arg.children[2] + else: + slot = scheme[i] + ret_mapping[slot] = arg + + return ret_mapping + + +# def is_import_from(node): +# """Returns true if the node is a statement "from ... import ..." +# """ +# return node.type == syms.import_from + + +def is_import_stmt(node): + return (node.type == syms.simple_stmt and node.children and + is_import(node.children[0])) + + +def touch_import_top(package, name_to_import, node): + """Works like `does_tree_import` but adds an import statement at the + top if it was not imported (but below any __future__ imports) and below any + comments such as shebang lines). + + Based on lib2to3.fixer_util.touch_import() + + Calling this multiple times adds the imports in reverse order. + + Also adds "standard_library.install_aliases()" after "from future import + standard_library". This should probably be factored into another function. + """ + + root = find_root(node) + + if does_tree_import(package, name_to_import, root): + return + + # Ideally, we would look for whether futurize --all-imports has been run, + # as indicated by the presence of ``from builtins import (ascii, ..., + # zip)`` -- and, if it has, we wouldn't import the name again. + + # Look for __future__ imports and insert below them + found = False + for name in ['absolute_import', 'division', 'print_function', + 'unicode_literals']: + if does_tree_import('__future__', name, root): + found = True + break + if found: + # At least one __future__ import. We want to loop until we've seen them + # all. + start, end = None, None + for idx, node in enumerate(root.children): + if check_future_import(node): + start = idx + # Start looping + idx2 = start + while node: + node = node.next_sibling + idx2 += 1 + if not check_future_import(node): + end = idx2 + break + break + assert start is not None + assert end is not None + insert_pos = end + else: + # No __future__ imports. + # We look for a docstring and insert the new node below that. If no docstring + # exists, just insert the node at the top. + for idx, node in enumerate(root.children): + if node.type != syms.simple_stmt: + break + if not is_docstring(node): + # This is the usual case. 
+ break + insert_pos = idx + + if package is None: + import_ = Node(syms.import_name, [ + Leaf(token.NAME, u"import"), + Leaf(token.NAME, name_to_import, prefix=u" ") + ]) + else: + import_ = FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")]) + if name_to_import == u'standard_library': + # Add: + # standard_library.install_aliases() + # after: + # from future import standard_library + install_hooks = Node(syms.simple_stmt, + [Node(syms.power, + [Leaf(token.NAME, u'standard_library'), + Node(syms.trailer, [Leaf(token.DOT, u'.'), + Leaf(token.NAME, u'install_aliases')]), + Node(syms.trailer, [Leaf(token.LPAR, u'('), + Leaf(token.RPAR, u')')]) + ]) + ] + ) + children_hooks = [install_hooks, Newline()] + else: + children_hooks = [] + + # FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")]) + + children_import = [import_, Newline()] + old_prefix = root.children[insert_pos].prefix + root.children[insert_pos].prefix = u'' + root.insert_child(insert_pos, Node(syms.simple_stmt, children_import, prefix=old_prefix)) + if len(children_hooks) > 0: + root.insert_child(insert_pos + 1, Node(syms.simple_stmt, children_hooks)) + + +## The following functions are from python-modernize by Armin Ronacher: +# (a little edited). + +def check_future_import(node): + """If this is a future import, return set of symbols that are imported, + else return None.""" + # node should be the import statement here + savenode = node + if not (node.type == syms.simple_stmt and node.children): + return set() + node = node.children[0] + # now node is the import_from node + if not (node.type == syms.import_from and + # node.type == token.NAME and # seems to break it + hasattr(node.children[1], 'value') and + node.children[1].value == u'__future__'): + return set() + if node.children[3].type == token.LPAR: + node = node.children[4] + else: + node = node.children[3] + # now node is the import_as_name[s] + # print(python_grammar.number2symbol[node.type]) # breaks sometimes + if node.type == syms.import_as_names: + result = set() + for n in node.children: + if n.type == token.NAME: + result.add(n.value) + elif n.type == syms.import_as_name: + n = n.children[0] + assert n.type == token.NAME + result.add(n.value) + return result + elif node.type == syms.import_as_name: + node = node.children[0] + assert node.type == token.NAME + return set([node.value]) + elif node.type == token.NAME: + return set([node.value]) + else: + # TODO: handle brackets like this: + # from __future__ import (absolute_import, division) + assert False, "strange import: %s" % savenode + + +SHEBANG_REGEX = r'^#!.*python' +ENCODING_REGEX = r"^#.*coding[:=]\s*([-\w.]+)" + + +def is_shebang_comment(node): + """ + Comments are prefixes for Leaf nodes. Returns whether the given node has a + prefix that looks like a shebang line or an encoding line: + + #!/usr/bin/env python + #!/usr/bin/python3 + """ + return bool(re.match(SHEBANG_REGEX, node.prefix)) + + +def is_encoding_comment(node): + """ + Comments are prefixes for Leaf nodes. 
Returns whether the given node has a + prefix that looks like an encoding line: + + # coding: utf-8 + # encoding: utf-8 + # -*- coding: -*- + # vim: set fileencoding= : + """ + return bool(re.match(ENCODING_REGEX, node.prefix)) + + +def wrap_in_fn_call(fn_name, args, prefix=None): + """ + Example: + >>> wrap_in_fn_call("oldstr", (arg,)) + oldstr(arg) + + >>> wrap_in_fn_call("olddiv", (arg1, arg2)) + olddiv(arg1, arg2) + + >>> wrap_in_fn_call("olddiv", [arg1, comma, arg2, comma, arg3]) + olddiv(arg1, arg2, arg3) + """ + assert len(args) > 0 + if len(args) == 2: + expr1, expr2 = args + newargs = [expr1, Comma(), expr2] + else: + newargs = args + return Call(Name(fn_name), newargs, prefix=prefix) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__init__.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__init__.py new file mode 100644 index 0000000..0b56250 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__init__.py @@ -0,0 +1,97 @@ +import sys +from lib2to3 import refactor + +# The following fixers are "safe": they convert Python 2 code to more +# modern Python 2 code. They should be uncontroversial to apply to most +# projects that are happy to drop support for Py2.5 and below. Applying +# them first will reduce the size of the patch set for the real porting. +lib2to3_fix_names_stage1 = set([ + 'lib2to3.fixes.fix_apply', + 'lib2to3.fixes.fix_except', + 'lib2to3.fixes.fix_exec', + 'lib2to3.fixes.fix_exitfunc', + 'lib2to3.fixes.fix_funcattrs', + 'lib2to3.fixes.fix_has_key', + 'lib2to3.fixes.fix_idioms', + # 'lib2to3.fixes.fix_import', # makes any implicit relative imports explicit. (Use with ``from __future__ import absolute_import) + 'lib2to3.fixes.fix_intern', + 'lib2to3.fixes.fix_isinstance', + 'lib2to3.fixes.fix_methodattrs', + 'lib2to3.fixes.fix_ne', + # 'lib2to3.fixes.fix_next', # would replace ``next`` method names + # with ``__next__``. + 'lib2to3.fixes.fix_numliterals', # turns 1L into 1, 0755 into 0o755 + 'lib2to3.fixes.fix_paren', + # 'lib2to3.fixes.fix_print', # see the libfuturize fixer that also + # adds ``from __future__ import print_function`` + # 'lib2to3.fixes.fix_raise', # uses incompatible with_traceback() method on exceptions + 'lib2to3.fixes.fix_reduce', # reduce is available in functools on Py2.6/Py2.7 + 'lib2to3.fixes.fix_renames', # sys.maxint -> sys.maxsize + # 'lib2to3.fixes.fix_set_literal', # this is unnecessary and breaks Py2.6 support + 'lib2to3.fixes.fix_repr', + 'lib2to3.fixes.fix_standarderror', + 'lib2to3.fixes.fix_sys_exc', + 'lib2to3.fixes.fix_throw', + 'lib2to3.fixes.fix_tuple_params', + 'lib2to3.fixes.fix_types', + 'lib2to3.fixes.fix_ws_comma', # can perhaps decrease readability: see issue #58 + 'lib2to3.fixes.fix_xreadlines', +]) + +# The following fixers add a dependency on the ``future`` package on order to +# support Python 2: +lib2to3_fix_names_stage2 = set([ + # 'lib2to3.fixes.fix_buffer', # perhaps not safe. Test this. + # 'lib2to3.fixes.fix_callable', # not needed in Py3.2+ + 'lib2to3.fixes.fix_dict', # TODO: add support for utils.viewitems() etc. and move to stage2 + # 'lib2to3.fixes.fix_execfile', # some problems: see issue #37. 
+ # We use a custom fixer instead (see below) + # 'lib2to3.fixes.fix_future', # we don't want to remove __future__ imports + 'lib2to3.fixes.fix_getcwdu', + # 'lib2to3.fixes.fix_imports', # called by libfuturize.fixes.fix_future_standard_library + # 'lib2to3.fixes.fix_imports2', # we don't handle this yet (dbm) + # 'lib2to3.fixes.fix_input', # Called conditionally by libfuturize.fixes.fix_input + 'lib2to3.fixes.fix_itertools', + 'lib2to3.fixes.fix_itertools_imports', + 'lib2to3.fixes.fix_filter', + 'lib2to3.fixes.fix_long', + 'lib2to3.fixes.fix_map', + # 'lib2to3.fixes.fix_metaclass', # causes SyntaxError in Py2! Use the one from ``six`` instead + 'lib2to3.fixes.fix_next', + 'lib2to3.fixes.fix_nonzero', # TODO: cause this to import ``object`` and/or add a decorator for mapping __bool__ to __nonzero__ + 'lib2to3.fixes.fix_operator', # we will need support for this by e.g. extending the Py2 operator module to provide those functions in Py3 + 'lib2to3.fixes.fix_raw_input', + # 'lib2to3.fixes.fix_unicode', # strips off the u'' prefix, which removes a potentially helpful source of information for disambiguating unicode/byte strings + # 'lib2to3.fixes.fix_urllib', # included in libfuturize.fix_future_standard_library_urllib + # 'lib2to3.fixes.fix_xrange', # custom one because of a bug with Py3.3's lib2to3 + 'lib2to3.fixes.fix_zip', +]) + +libfuturize_fix_names_stage1 = set([ + 'libfuturize.fixes.fix_absolute_import', + 'libfuturize.fixes.fix_next_call', # obj.next() -> next(obj). Unlike + # lib2to3.fixes.fix_next, doesn't change + # the ``next`` method to ``__next__``. + 'libfuturize.fixes.fix_print_with_import', + 'libfuturize.fixes.fix_raise', + # 'libfuturize.fixes.fix_order___future__imports', # TODO: consolidate to a single line to simplify testing +]) + +libfuturize_fix_names_stage2 = set([ + 'libfuturize.fixes.fix_basestring', + # 'libfuturize.fixes.fix_add__future__imports_except_unicode_literals', # just in case + 'libfuturize.fixes.fix_cmp', + 'libfuturize.fixes.fix_division_safe', + 'libfuturize.fixes.fix_execfile', + 'libfuturize.fixes.fix_future_builtins', + 'libfuturize.fixes.fix_future_standard_library', + 'libfuturize.fixes.fix_future_standard_library_urllib', + 'libfuturize.fixes.fix_input', + 'libfuturize.fixes.fix_metaclass', + 'libpasteurize.fixes.fix_newstyle', + 'libfuturize.fixes.fix_object', + # 'libfuturize.fixes.fix_order___future__imports', # TODO: consolidate to a single line to simplify testing + 'libfuturize.fixes.fix_unicode_keep_u', + # 'libfuturize.fixes.fix_unicode_literals_import', + 'libfuturize.fixes.fix_xrange_with_import', # custom one because of a bug with Py3.3's lib2to3 +]) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..4e4cf72 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_UserDict.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_UserDict.cpython-36.pyc new file mode 100644 index 0000000..1612c74 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_UserDict.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_absolute_import.cpython-36.pyc 
b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_absolute_import.cpython-36.pyc new file mode 100644 index 0000000..c53e25f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_absolute_import.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_add__future__imports_except_unicode_literals.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_add__future__imports_except_unicode_literals.cpython-36.pyc new file mode 100644 index 0000000..674efb9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_add__future__imports_except_unicode_literals.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_basestring.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_basestring.cpython-36.pyc new file mode 100644 index 0000000..526dd03 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_basestring.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_bytes.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_bytes.cpython-36.pyc new file mode 100644 index 0000000..f8706e8 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_bytes.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_cmp.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_cmp.cpython-36.pyc new file mode 100644 index 0000000..0e00698 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_cmp.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_division.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_division.cpython-36.pyc new file mode 100644 index 0000000..eacc593 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_division.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_division_safe.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_division_safe.cpython-36.pyc new file mode 100644 index 0000000..1cff1a6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_division_safe.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_execfile.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_execfile.cpython-36.pyc new file mode 100644 index 0000000..3801bdd Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_execfile.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_future_builtins.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_future_builtins.cpython-36.pyc new file mode 100644 index 0000000..0e00379 Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_future_builtins.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library.cpython-36.pyc new file mode 100644 index 0000000..ad960e2 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library_urllib.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library_urllib.cpython-36.pyc new file mode 100644 index 0000000..42303ca Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_future_standard_library_urllib.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_input.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_input.cpython-36.pyc new file mode 100644 index 0000000..b2c8329 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_input.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_metaclass.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_metaclass.cpython-36.pyc new file mode 100644 index 0000000..ad02ceb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_metaclass.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_next_call.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_next_call.cpython-36.pyc new file mode 100644 index 0000000..6f82736 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_next_call.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_object.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_object.cpython-36.pyc new file mode 100644 index 0000000..8764444 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_object.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_oldstr_wrap.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_oldstr_wrap.cpython-36.pyc new file mode 100644 index 0000000..5c4671c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_oldstr_wrap.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_order___future__imports.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_order___future__imports.cpython-36.pyc new file mode 100644 index 0000000..429328f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_order___future__imports.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_print.cpython-36.pyc 
b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_print.cpython-36.pyc new file mode 100644 index 0000000..376b4fd Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_print.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_print_with_import.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_print_with_import.cpython-36.pyc new file mode 100644 index 0000000..b6965ab Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_print_with_import.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_raise.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_raise.cpython-36.pyc new file mode 100644 index 0000000..b883635 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_raise.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_remove_old__future__imports.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_remove_old__future__imports.cpython-36.pyc new file mode 100644 index 0000000..0535a2e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_remove_old__future__imports.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_unicode_keep_u.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_unicode_keep_u.cpython-36.pyc new file mode 100644 index 0000000..f4143ac Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_unicode_keep_u.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_unicode_literals_import.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_unicode_literals_import.cpython-36.pyc new file mode 100644 index 0000000..6f2a5af Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_unicode_literals_import.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_xrange_with_import.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_xrange_with_import.cpython-36.pyc new file mode 100644 index 0000000..331b2a9 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/__pycache__/fix_xrange_with_import.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_UserDict.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_UserDict.py new file mode 100644 index 0000000..cb0cfac --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_UserDict.py @@ -0,0 +1,102 @@ +"""Fix UserDict. + +Incomplete! + +TODO: base this on fix_urllib perhaps? 
+""" + + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, attr_chain +from lib2to3.fixes.fix_imports import alternates, build_pattern, FixImports + +MAPPING = {'UserDict': 'collections', +} + +# def alternates(members): +# return "(" + "|".join(map(repr, members)) + ")" +# +# +# def build_pattern(mapping=MAPPING): +# mod_list = ' | '.join(["module_name='%s'" % key for key in mapping]) +# bare_names = alternates(mapping.keys()) +# +# yield """name_import=import_name< 'import' ((%s) | +# multiple_imports=dotted_as_names< any* (%s) any* >) > +# """ % (mod_list, mod_list) +# yield """import_from< 'from' (%s) 'import' ['('] +# ( any | import_as_name< any 'as' any > | +# import_as_names< any* >) [')'] > +# """ % mod_list +# yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > | +# multiple_imports=dotted_as_names< +# any* dotted_as_name< (%s) 'as' any > any* >) > +# """ % (mod_list, mod_list) +# +# # Find usages of module members in code e.g. thread.foo(bar) +# yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names + + +# class FixUserDict(fixer_base.BaseFix): +class FixUserdict(FixImports): + + BM_compatible = True + keep_line_order = True + # This is overridden in fix_imports2. + mapping = MAPPING + + # We want to run this fixer late, so fix_import doesn't try to make stdlib + # renames into relative imports. + run_order = 6 + + def build_pattern(self): + return "|".join(build_pattern(self.mapping)) + + def compile_pattern(self): + # We override this, so MAPPING can be pragmatically altered and the + # changes will be reflected in PATTERN. + self.PATTERN = self.build_pattern() + super(FixImports, self).compile_pattern() + + # Don't match the node if it's within another match. + def match(self, node): + match = super(FixImports, self).match + results = match(node) + if results: + # Module usage could be in the trailer of an attribute lookup, so we + # might have nested matches when "bare_with_attr" is present. + if "bare_with_attr" not in results and \ + any(match(obj) for obj in attr_chain(node, "parent")): + return False + return results + return False + + def start_tree(self, tree, filename): + super(FixImports, self).start_tree(tree, filename) + self.replace = {} + + def transform(self, node, results): + import_mod = results.get("module_name") + if import_mod: + mod_name = import_mod.value + new_name = unicode(self.mapping[mod_name]) + import_mod.replace(Name(new_name, prefix=import_mod.prefix)) + if "name_import" in results: + # If it's not a "from x import x, y" or "import x as y" import, + # marked its usage to be replaced. + self.replace[mod_name] = new_name + if "multiple_imports" in results: + # This is a nasty hack to fix multiple imports on a line (e.g., + # "import StringIO, urlparse"). The problem is that I can't + # figure out an easy way to make a pattern recognize the keys of + # MAPPING randomly sprinkled in an import statement. + results = self.match(node) + if results: + self.transform(node, results) + else: + # Replace usage of the module. 
+ bare_name = results["bare_with_attr"][0] + new_name = self.replace.get(bare_name.value) + if new_name: + bare_name.replace(Name(new_name, prefix=bare_name.prefix)) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_absolute_import.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_absolute_import.py new file mode 100644 index 0000000..eab9c52 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_absolute_import.py @@ -0,0 +1,91 @@ +""" +Fixer for import statements, with a __future__ import line. + +Based on lib2to3/fixes/fix_import.py, but extended slightly so it also +supports Cython modules. + +If spam is being imported from the local directory, this import: + from spam import eggs +becomes: + from __future__ import absolute_import + from .spam import eggs + +and this import: + import spam +becomes: + from __future__ import absolute_import + from . import spam +""" + +from os.path import dirname, join, exists, sep +from lib2to3.fixes.fix_import import FixImport +from lib2to3.fixer_util import FromImport, syms +from lib2to3.fixes.fix_import import traverse_imports + +from libfuturize.fixer_util import future_import + + +class FixAbsoluteImport(FixImport): + run_order = 9 + + def transform(self, node, results): + """ + Copied from FixImport.transform(), but with this line added in + any modules that had implicit relative imports changed: + + from __future__ import absolute_import" + """ + if self.skip: + return + imp = results['imp'] + + if node.type == syms.import_from: + # Some imps are top-level (eg: 'import ham') + # some are first level (eg: 'import ham.eggs') + # some are third level (eg: 'import ham.eggs as spam') + # Hence, the loop + while not hasattr(imp, 'value'): + imp = imp.children[0] + if self.probably_a_local_import(imp.value): + imp.value = u"." + imp.value + imp.changed() + future_import(u"absolute_import", node) + else: + have_local = False + have_absolute = False + for mod_name in traverse_imports(imp): + if self.probably_a_local_import(mod_name): + have_local = True + else: + have_absolute = True + if have_absolute: + if have_local: + # We won't handle both sibling and absolute imports in the + # same statement at the moment. + self.warning(node, "absolute and local imports together") + return + + new = FromImport(u".", [imp]) + new.prefix = node.prefix + future_import(u"absolute_import", node) + return new + + def probably_a_local_import(self, imp_name): + """ + Like the corresponding method in the base class, but this also + supports Cython modules. + """ + if imp_name.startswith(u"."): + # Relative imports are certainly not local imports. + return False + imp_name = imp_name.split(u".", 1)[0] + base_path = dirname(self.filename) + base_path = join(base_path, imp_name) + # If there is no __init__.py next to the file its not in a package + # so can't be a relative import. 
+ if not exists(join(dirname(base_path), "__init__.py")): + return False + for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]: + if exists(base_path + ext): + return True + return False diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py new file mode 100644 index 0000000..37d7fee --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py @@ -0,0 +1,26 @@ +""" +Fixer for adding: + + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + +This is "stage 1": hopefully uncontroversial changes. + +Stage 2 adds ``unicode_literals``. +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixAddFutureImportsExceptUnicodeLiterals(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + + run_order = 9 + + def transform(self, node, results): + # Reverse order: + future_import(u"absolute_import", node) + future_import(u"division", node) + future_import(u"print_function", node) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_basestring.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_basestring.py new file mode 100644 index 0000000..5676d08 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_basestring.py @@ -0,0 +1,17 @@ +""" +Fixer that adds ``from past.builtins import basestring`` if there is a +reference to ``basestring`` +""" + +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +class FixBasestring(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = "'basestring'" + + def transform(self, node, results): + touch_import_top(u'past.builtins', 'basestring', node) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_bytes.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_bytes.py new file mode 100644 index 0000000..4202122 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_bytes.py @@ -0,0 +1,24 @@ +"""Optional fixer that changes all unprefixed string literals "..." to b"...". + +br'abcd' is a SyntaxError on Python 2 but valid on Python 3. +ur'abcd' is a SyntaxError on Python 3 but valid on Python 2. + +""" +from __future__ import unicode_literals + +import re +from lib2to3.pgen2 import token +from lib2to3 import fixer_base + +_literal_re = re.compile(r"[^bBuUrR]?[\'\"]") + +class FixBytes(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "STRING" + + def transform(self, node, results): + if node.type == token.STRING: + if _literal_re.match(node.value): + new = node.clone() + new.value = u'b' + new.value + return new diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_cmp.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_cmp.py new file mode 100644 index 0000000..762eb4b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_cmp.py @@ -0,0 +1,33 @@ +# coding: utf-8 +""" +Fixer for the cmp() function on Py2, which was removed in Py3. + +Adds this import line:: + + from past.builtins import cmp + +if cmp() is called in the code. 
+""" + +from __future__ import unicode_literals +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +expression = "name='cmp'" + + +class FixCmp(fixer_base.BaseFix): + BM_compatible = True + run_order = 9 + + PATTERN = """ + power< + ({0}) trailer< '(' args=[any] ')' > + rest=any* > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'past.builtins', name.value, node) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_division.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_division.py new file mode 100644 index 0000000..6975a52 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_division.py @@ -0,0 +1,12 @@ +""" +UNFINISHED +For the ``future`` package. + +Adds this import line: + + from __future__ import division + +at the top so the code runs identically on Py3 and Py2.6/2.7 +""" + +from libpasteurize.fixes.fix_division import FixDivision diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_division_safe.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_division_safe.py new file mode 100644 index 0000000..3d5909c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_division_safe.py @@ -0,0 +1,104 @@ +""" +For the ``future`` package. + +Adds this import line: + + from __future__ import division + +at the top and changes any old-style divisions to be calls to +past.utils.old_div so the code runs as before on Py2.6/2.7 and has the same +behaviour on Py3. + +If "from __future__ import division" is already in effect, this fixer does +nothing. +""" + +import re +from lib2to3.fixer_util import Leaf, Node, Comma +from lib2to3 import fixer_base +from libfuturize.fixer_util import (token, future_import, touch_import_top, + wrap_in_fn_call) + + +def match_division(node): + u""" + __future__.division redefines the meaning of a single slash for division, + so we match that and only that. + """ + slash = token.SLASH + return node.type == slash and not node.next_sibling.type == slash and \ + not node.prev_sibling.type == slash + +const_re = re.compile('^[0-9]*[.][0-9]*$') + +def is_floaty(node): + return _is_floaty(node.prev_sibling) or _is_floaty(node.next_sibling) + + +def _is_floaty(expr): + if isinstance(expr, list): + expr = expr[0] + + if isinstance(expr, Leaf): + # If it's a leaf, let's see if it's a numeric constant containing a '.' + return const_re.match(expr.value) + elif isinstance(expr, Node): + # If the expression is a node, let's see if it's a direct cast to float + if isinstance(expr.children[0], Leaf): + return expr.children[0].value == u'float' + return False + + +class FixDivisionSafe(fixer_base.BaseFix): + # BM_compatible = True + run_order = 4 # this seems to be ignored? + + _accept_type = token.SLASH + + PATTERN = """ + term<(not('/') any)+ '/' ((not('/') any))> + """ + + def start_tree(self, tree, name): + """ + Skip this fixer if "__future__.division" is already imported. + """ + super(FixDivisionSafe, self).start_tree(tree, name) + self.skip = "division" in tree.future_features + + def match(self, node): + u""" + Since the tree needs to be fixed once and only once if and only if it + matches, we can start discarding matches after the first. 
+ """ + if node.type == self.syms.term: + matched = False + skip = False + children = [] + for child in node.children: + if skip: + skip = False + continue + if match_division(child) and not is_floaty(child): + matched = True + + # Strip any leading space for the first number: + children[0].prefix = u'' + + children = [wrap_in_fn_call("old_div", + children + [Comma(), child.next_sibling.clone()], + prefix=node.prefix)] + skip = True + else: + children.append(child.clone()) + if matched: + return Node(node.type, children, fixers_applied=node.fixers_applied) + + return False + + def transform(self, node, results): + if self.skip: + return + future_import(u"division", node) + touch_import_top(u'past.utils', u'old_div', node) + return results diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_execfile.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_execfile.py new file mode 100644 index 0000000..cfe9d8d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_execfile.py @@ -0,0 +1,37 @@ +# coding: utf-8 +""" +Fixer for the execfile() function on Py2, which was removed in Py3. + +The Lib/lib2to3/fixes/fix_execfile.py module has some problems: see +python-future issue #37. This fixer merely imports execfile() from +past.builtins and leaves the code alone. + +Adds this import line:: + + from past.builtins import execfile + +for the function execfile() that was removed from Py3. +""" + +from __future__ import unicode_literals +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +expression = "name='execfile'" + + +class FixExecfile(fixer_base.BaseFix): + BM_compatible = True + run_order = 9 + + PATTERN = """ + power< + ({0}) trailer< '(' args=[any] ')' > + rest=any* > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'past.builtins', name.value, node) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_future_builtins.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_future_builtins.py new file mode 100644 index 0000000..eea6c6a --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_future_builtins.py @@ -0,0 +1,59 @@ +""" +For the ``future`` package. + +Adds this import line:: + + from builtins import XYZ + +for each of the functions XYZ that is used in the module. + +Adds these imports after any other imports (in an initial block of them). +""" + +from __future__ import unicode_literals + +from lib2to3 import fixer_base +from lib2to3.pygram import python_symbols as syms +from lib2to3.fixer_util import Name, Call, in_special_context + +from libfuturize.fixer_util import touch_import_top + +# All builtins are: +# from future.builtins.iterators import (filter, map, zip) +# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super) +# from future.types import (bytes, dict, int, range, str) +# We don't need isinstance any more. + +replaced_builtin_fns = '''filter map zip + ascii chr hex input next oct + bytes range str raw_input'''.split() + # This includes raw_input as a workaround for the + # lib2to3 fixer for raw_input on Py3 (only), allowing + # the correct import to be included. (Py3 seems to run + # the fixers the wrong way around, perhaps ignoring the + # run_order class attribute below ...) 
+ +expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtin_fns]) + + +class FixFutureBuiltins(fixer_base.BaseFix): + BM_compatible = True + run_order = 7 + + # Currently we only match uses as a function. This doesn't match e.g.: + # if isinstance(s, str): + # ... + PATTERN = """ + power< + ({0}) trailer< '(' [arglist=any] ')' > + rest=any* > + | + power< + 'map' trailer< '(' [arglist=any] ')' > + > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'builtins', name.value, node) + # name.replace(Name(u"input", prefix=name.prefix)) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_future_standard_library.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_future_standard_library.py new file mode 100644 index 0000000..a1c3f3d --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_future_standard_library.py @@ -0,0 +1,24 @@ +""" +For the ``future`` package. + +Changes any imports needed to reflect the standard library reorganization. Also +Also adds these import lines: + + from future import standard_library + standard_library.install_aliases() + +after any __future__ imports but before any other imports. +""" + +from lib2to3.fixes.fix_imports import FixImports +from libfuturize.fixer_util import touch_import_top + + +class FixFutureStandardLibrary(FixImports): + run_order = 8 + + def transform(self, node, results): + result = super(FixFutureStandardLibrary, self).transform(node, results) + # TODO: add a blank line between any __future__ imports and this? + touch_import_top(u'future', u'standard_library', node) + return result diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_future_standard_library_urllib.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_future_standard_library_urllib.py new file mode 100644 index 0000000..cf67388 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_future_standard_library_urllib.py @@ -0,0 +1,28 @@ +""" +For the ``future`` package. + +A special fixer that ensures that these lines have been added:: + + from future import standard_library + standard_library.install_hooks() + +even if the only module imported was ``urllib``, in which case the regular fixer +wouldn't have added these lines. + +""" + +from lib2to3.fixes.fix_urllib import FixUrllib +from libfuturize.fixer_util import touch_import_top, find_root + + +class FixFutureStandardLibraryUrllib(FixUrllib): # not a subclass of FixImports + run_order = 8 + + def transform(self, node, results): + # transform_member() in lib2to3/fixes/fix_urllib.py breaks node so find_root(node) + # no longer works after the super() call below. So we find the root first: + root = find_root(node) + result = super(FixFutureStandardLibraryUrllib, self).transform(node, results) + # TODO: add a blank line between any __future__ imports and this? + touch_import_top(u'future', u'standard_library', root) + return result diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_input.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_input.py new file mode 100644 index 0000000..8a43882 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_input.py @@ -0,0 +1,32 @@ +""" +Fixer for input. + +Does a check for `from builtins import input` before running the lib2to3 fixer. +The fixer will not run when the input is already present. 
+ + +this: + a = input() +becomes: + from builtins import input + a = eval(input()) + +and this: + from builtins import input + a = input() +becomes (no change): + from builtins import input + a = input() +""" + +import lib2to3.fixes.fix_input +from lib2to3.fixer_util import does_tree_import + + +class FixInput(lib2to3.fixes.fix_input.FixInput): + def transform(self, node, results): + + if does_tree_import('builtins', 'input', node): + return + + return super(FixInput, self).transform(node, results) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_metaclass.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_metaclass.py new file mode 100644 index 0000000..2ac41c9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_metaclass.py @@ -0,0 +1,262 @@ +# coding: utf-8 +"""Fixer for __metaclass__ = X -> (future.utils.with_metaclass(X)) methods. + + The various forms of classef (inherits nothing, inherits once, inherints + many) don't parse the same in the CST so we look at ALL classes for + a __metaclass__ and if we find one normalize the inherits to all be + an arglist. + + For one-liner classes ('class X: pass') there is no indent/dedent so + we normalize those into having a suite. + + Moving the __metaclass__ into the classdef can also cause the class + body to be empty so there is some special casing for that as well. + + This fixer also tries very hard to keep original indenting and spacing + in all those corner cases. +""" +# This is a derived work of Lib/lib2to3/fixes/fix_metaclass.py under the +# copyright of the Python Software Foundation, licensed under the Python +# Software Foundation License 2. +# +# Copyright notice: +# +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +# 2011, 2012, 2013 Python Software Foundation. All rights reserved. +# +# Full license text: http://docs.python.org/3.4/license.html + +# Author: Jack Diederich, Daniel Neuhäuser + +# Local imports +from lib2to3 import fixer_base +from lib2to3.pygram import token +from lib2to3.fixer_util import Name, syms, Node, Leaf, touch_import, Call, \ + String, Comma, parenthesize + + +def has_metaclass(parent): + """ we have to check the cls_node without changing it. + There are two possiblities: + 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') + 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') + """ + for node in parent.children: + if node.type == syms.suite: + return has_metaclass(node) + elif node.type == syms.simple_stmt and node.children: + expr_node = node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + left_side = expr_node.children[0] + if isinstance(left_side, Leaf) and \ + left_side.value == '__metaclass__': + return True + return False + + +def fixup_parse_tree(cls_node): + """ one-line classes don't get a suite in the parse tree so we add + one to normalize the tree + """ + for node in cls_node.children: + if node.type == syms.suite: + # already in the preferred format, do nothing + return + + # !%@#! 
oneliners have no suite node, we have to fake one up + for i, node in enumerate(cls_node.children): + if node.type == token.COLON: + break + else: + raise ValueError("No class suite and no ':'!") + + # move everything into a suite node + suite = Node(syms.suite, []) + while cls_node.children[i+1:]: + move_node = cls_node.children[i+1] + suite.append_child(move_node.clone()) + move_node.remove() + cls_node.append_child(suite) + node = suite + + +def fixup_simple_stmt(parent, i, stmt_node): + """ if there is a semi-colon all the parts count as part of the same + simple_stmt. We just want the __metaclass__ part so we move + everything efter the semi-colon into its own simple_stmt node + """ + for semi_ind, node in enumerate(stmt_node.children): + if node.type == token.SEMI: # *sigh* + break + else: + return + + node.remove() # kill the semicolon + new_expr = Node(syms.expr_stmt, []) + new_stmt = Node(syms.simple_stmt, [new_expr]) + while stmt_node.children[semi_ind:]: + move_node = stmt_node.children[semi_ind] + new_expr.append_child(move_node.clone()) + move_node.remove() + parent.insert_child(i, new_stmt) + new_leaf1 = new_stmt.children[0].children[0] + old_leaf1 = stmt_node.children[0].children[0] + new_leaf1.prefix = old_leaf1.prefix + + +def remove_trailing_newline(node): + if node.children and node.children[-1].type == token.NEWLINE: + node.children[-1].remove() + + +def find_metas(cls_node): + # find the suite node (Mmm, sweet nodes) + for node in cls_node.children: + if node.type == syms.suite: + break + else: + raise ValueError("No class suite!") + + # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ] + for i, simple_node in list(enumerate(node.children)): + if simple_node.type == syms.simple_stmt and simple_node.children: + expr_node = simple_node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + # Check if the expr_node is a simple assignment. + left_node = expr_node.children[0] + if isinstance(left_node, Leaf) and \ + left_node.value == u'__metaclass__': + # We found a assignment to __metaclass__. 
+ fixup_simple_stmt(node, i, simple_node) + remove_trailing_newline(simple_node) + yield (node, i, simple_node) + + +def fixup_indent(suite): + """ If an INDENT is followed by a thing with a prefix then nuke the prefix + Otherwise we get in trouble when removing __metaclass__ at suite start + """ + kids = suite.children[::-1] + # find the first indent + while kids: + node = kids.pop() + if node.type == token.INDENT: + break + + # find the first Leaf + while kids: + node = kids.pop() + if isinstance(node, Leaf) and node.type != token.DEDENT: + if node.prefix: + node.prefix = u'' + return + else: + kids.extend(node.children[::-1]) + + +class FixMetaclass(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + classdef + """ + + def transform(self, node, results): + if not has_metaclass(node): + return + + fixup_parse_tree(node) + + # find metaclasses, keep the last one + last_metaclass = None + for suite, i, stmt in find_metas(node): + last_metaclass = stmt + stmt.remove() + + text_type = node.children[0].type # always Leaf(nnn, 'class') + + # figure out what kind of classdef we have + if len(node.children) == 7: + # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite]) + # 0 1 2 3 4 5 6 + if node.children[3].type == syms.arglist: + arglist = node.children[3] + # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite]) + else: + parent = node.children[3].clone() + arglist = Node(syms.arglist, [parent]) + node.set_child(3, arglist) + elif len(node.children) == 6: + # Node(classdef, ['class', 'name', '(', ')', ':', suite]) + # 0 1 2 3 4 5 + arglist = Node(syms.arglist, []) + node.insert_child(3, arglist) + elif len(node.children) == 4: + # Node(classdef, ['class', 'name', ':', suite]) + # 0 1 2 3 + arglist = Node(syms.arglist, []) + node.insert_child(2, Leaf(token.RPAR, u')')) + node.insert_child(2, arglist) + node.insert_child(2, Leaf(token.LPAR, u'(')) + else: + raise ValueError("Unexpected class definition") + + # now stick the metaclass in the arglist + meta_txt = last_metaclass.children[0].children[0] + meta_txt.value = 'metaclass' + orig_meta_prefix = meta_txt.prefix + + # Was: touch_import(None, u'future.utils', node) + touch_import(u'future.utils', u'with_metaclass', node) + + metaclass = last_metaclass.children[0].children[2].clone() + metaclass.prefix = u'' + + arguments = [metaclass] + + if arglist.children: + if len(arglist.children) == 1: + base = arglist.children[0].clone() + base.prefix = u' ' + else: + # Unfortunately six.with_metaclass() only allows one base + # class, so we have to dynamically generate a base class if + # there is more than one. 
+ bases = parenthesize(arglist.clone()) + bases.prefix = u' ' + base = Call(Name('type'), [ + String("'NewBase'"), + Comma(), + bases, + Comma(), + Node( + syms.atom, + [Leaf(token.LBRACE, u'{'), Leaf(token.RBRACE, u'}')], + prefix=u' ' + ) + ], prefix=u' ') + arguments.extend([Comma(), base]) + + arglist.replace(Call( + Name(u'with_metaclass', prefix=arglist.prefix), + arguments + )) + + fixup_indent(suite) + + # check for empty suite + if not suite.children: + # one-liner that was just __metaclass_ + suite.remove() + pass_leaf = Leaf(text_type, u'pass') + pass_leaf.prefix = orig_meta_prefix + node.append_child(pass_leaf) + node.append_child(Leaf(token.NEWLINE, u'\n')) + + elif len(suite.children) > 1 and \ + (suite.children[-2].type == token.INDENT and + suite.children[-1].type == token.DEDENT): + # there was only one line in the class body and it was __metaclass__ + pass_leaf = Leaf(text_type, u'pass') + suite.insert_child(-1, pass_leaf) + suite.insert_child(-1, Leaf(token.NEWLINE, u'\n')) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_next_call.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_next_call.py new file mode 100644 index 0000000..282f185 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_next_call.py @@ -0,0 +1,104 @@ +""" +Based on fix_next.py by Collin Winter. + +Replaces it.next() -> next(it), per PEP 3114. + +Unlike fix_next.py, this fixer doesn't replace the name of a next method with __next__, +which would break Python 2 compatibility without further help from fixers in +stage 2. +""" + +# Local imports +from lib2to3.pgen2 import token +from lib2to3.pygram import python_symbols as syms +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, Call, find_binding + +bind_warning = "Calls to builtin next() possibly shadowed by global binding" + + +class FixNextCall(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > + | + power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > + | + global=global_stmt< 'global' any* 'next' any* > + """ + + order = "pre" # Pre-order tree traversal + + def start_tree(self, tree, filename): + super(FixNextCall, self).start_tree(tree, filename) + + n = find_binding('next', tree) + if n: + self.warning(n, bind_warning) + self.shadowed_next = True + else: + self.shadowed_next = False + + def transform(self, node, results): + assert results + + base = results.get("base") + attr = results.get("attr") + name = results.get("name") + + if base: + if self.shadowed_next: + # Omit this: + # attr.replace(Name("__next__", prefix=attr.prefix)) + pass + else: + base = [n.clone() for n in base] + base[0].prefix = "" + node.replace(Call(Name("next", prefix=node.prefix), base)) + elif name: + # Omit this: + # n = Name("__next__", prefix=name.prefix) + # name.replace(n) + pass + elif attr: + # We don't do this transformation if we're assigning to "x.next". + # Unfortunately, it doesn't seem possible to do this in PATTERN, + # so it's being done here. + if is_assign_target(node): + head = results["head"] + if "".join([str(n) for n in head]).strip() == '__builtin__': + self.warning(node, bind_warning) + return + # Omit this: + # attr.replace(Name("__next__")) + elif "global" in results: + self.warning(node, bind_warning) + self.shadowed_next = True + + +### The following functions help test if node is part of an assignment +### target. 
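+### (Editorial example:) in a statement like
+###     foo.next = compute()
+### the "foo.next" node sits to the left of the '=', so is_assign_target()
+### returns True and the fixer above deliberately skips it.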
+ +def is_assign_target(node): + assign = find_assign(node) + if assign is None: + return False + + for child in assign.children: + if child.type == token.EQUAL: + return False + elif is_subtree(child, node): + return True + return False + +def find_assign(node): + if node.type == syms.expr_stmt: + return node + if node.type == syms.simple_stmt or node.parent is None: + return None + return find_assign(node.parent) + +def is_subtree(root, node): + if root == node: + return True + return any(is_subtree(c, node) for c in root.children) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_object.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_object.py new file mode 100644 index 0000000..accf2c5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_object.py @@ -0,0 +1,17 @@ +""" +Fixer that adds ``from builtins import object`` if there is a line +like this: + class Foo(object): +""" + +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +class FixObject(fixer_base.BaseFix): + + PATTERN = u"classdef< 'class' NAME '(' name='object' ')' colon=':' any >" + + def transform(self, node, results): + touch_import_top(u'builtins', 'object', node) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_oldstr_wrap.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_oldstr_wrap.py new file mode 100644 index 0000000..ad58771 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_oldstr_wrap.py @@ -0,0 +1,39 @@ +""" +For the ``future`` package. + +Adds this import line: + + from past.builtins import str as oldstr + +at the top and wraps any unadorned string literals 'abc' or explicit byte-string +literals b'abc' in oldstr() calls so the code has the same behaviour on Py3 as +on Py2.6/2.7. +""" + +from __future__ import unicode_literals +import re +from lib2to3 import fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import syms +from libfuturize.fixer_util import (future_import, touch_import_top, + wrap_in_fn_call) + + +_literal_re = re.compile(r"[^uUrR]?[\'\"]") + + +class FixOldstrWrap(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "STRING" + + def transform(self, node, results): + if node.type == token.STRING: + touch_import_top(u'past.types', u'oldstr', node) + if _literal_re.match(node.value): + new = node.clone() + # Strip any leading space or comments: + # TODO: check: do we really want to do this? + new.prefix = u'' + new.value = u'b' + new.value + wrapped = wrap_in_fn_call("oldstr", [new], prefix=node.prefix) + return wrapped diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_order___future__imports.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_order___future__imports.py new file mode 100644 index 0000000..00d7ef6 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_order___future__imports.py @@ -0,0 +1,36 @@ +""" +UNFINISHED + +Fixer for turning multiple lines like these: + + from __future__ import division + from __future__ import absolute_import + from __future__ import print_function + +into a single line like this: + + from __future__ import (absolute_import, division, print_function) + +This helps with testing of ``futurize``. 
+""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixOrderFutureImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + + run_order = 10 + + # def match(self, node): + # """ + # Match only once per file + # """ + # if hasattr(node, 'type') and node.type == syms.file_input: + # return True + # return False + + def transform(self, node, results): + # TODO # write me + pass diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_print.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_print.py new file mode 100644 index 0000000..247b91b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_print.py @@ -0,0 +1,94 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for print. + +Change: + "print" into "print()" + "print ..." into "print(...)" + "print(...)" not changed + "print ... ," into "print(..., end=' ')" + "print >>x, ..." into "print(..., file=x)" + +No changes are applied if print_function is imported from __future__ + +""" + +# Local imports +from lib2to3 import patcomp, pytree, fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import Name, Call, Comma, String +# from libmodernize import add_future + +parend_expr = patcomp.compile_pattern( + """atom< '(' [arith_expr|atom|power|term|STRING|NAME] ')' >""" + ) + + +class FixPrint(fixer_base.BaseFix): + + BM_compatible = True + + PATTERN = """ + simple_stmt< any* bare='print' any* > | print_stmt + """ + + def transform(self, node, results): + assert results + + bare_print = results.get("bare") + + if bare_print: + # Special-case print all by itself. + bare_print.replace(Call(Name(u"print"), [], + prefix=bare_print.prefix)) + # The "from __future__ import print_function"" declaration is added + # by the fix_print_with_import fixer, so we skip it here. + # add_future(node, u'print_function') + return + assert node.children[0] == Name(u"print") + args = node.children[1:] + if len(args) == 1 and parend_expr.match(args[0]): + # We don't want to keep sticking parens around an + # already-parenthesised expression. + return + + sep = end = file = None + if args and args[-1] == Comma(): + args = args[:-1] + end = " " + if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, u">>"): + assert len(args) >= 2 + file = args[1].clone() + args = args[3:] # Strip a possible comma after the file expression + # Now synthesize a print(args, sep=..., end=..., file=...) node. + l_args = [arg.clone() for arg in args] + if l_args: + l_args[0].prefix = u"" + if sep is not None or end is not None or file is not None: + if sep is not None: + self.add_kwarg(l_args, u"sep", String(repr(sep))) + if end is not None: + self.add_kwarg(l_args, u"end", String(repr(end))) + if file is not None: + self.add_kwarg(l_args, u"file", file) + n_stmt = Call(Name(u"print"), l_args) + n_stmt.prefix = node.prefix + + # Note that there are corner cases where adding this future-import is + # incorrect, for example when the file also has a 'print ()' statement + # that was intended to print "()". 
+ # add_future(node, u'print_function') + return n_stmt + + def add_kwarg(self, l_nodes, s_kwd, n_expr): + # XXX All this prefix-setting may lose comments (though rarely) + n_expr.prefix = u"" + n_argument = pytree.Node(self.syms.argument, + (Name(s_kwd), + pytree.Leaf(token.EQUAL, u"="), + n_expr)) + if l_nodes: + l_nodes.append(Comma()) + n_argument.prefix = u" " + l_nodes.append(n_argument) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_print_with_import.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_print_with_import.py new file mode 100644 index 0000000..3449046 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_print_with_import.py @@ -0,0 +1,22 @@ +""" +For the ``future`` package. + +Turns any print statements into functions and adds this import line: + + from __future__ import print_function + +at the top to retain compatibility with Python 2.6+. +""" + +from libfuturize.fixes.fix_print import FixPrint +from libfuturize.fixer_util import future_import + +class FixPrintWithImport(FixPrint): + run_order = 7 + def transform(self, node, results): + # Add the __future__ import first. (Otherwise any shebang or encoding + # comment line attached as a prefix to the print statement will be + # copied twice and appear twice.) + future_import(u'print_function', node) + n_stmt = super(FixPrintWithImport, self).transform(node, results) + return n_stmt diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_raise.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_raise.py new file mode 100644 index 0000000..f751841 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_raise.py @@ -0,0 +1,107 @@ +"""Fixer for 'raise E, V' + +From Armin Ronacher's ``python-modernize``. + +raise -> raise +raise E -> raise E +raise E, 5 -> raise E(5) +raise E, 5, T -> raise E(5).with_traceback(T) +raise E, None, T -> raise E.with_traceback(T) + +raise (((E, E'), E''), E'''), 5 -> raise E(5) +raise "foo", V, T -> warns about string exceptions + +raise E, (V1, V2) -> raise E(V1, V2) +raise E, (V1, V2), T -> raise E(V1, V2).with_traceback(T) + + +CAVEATS: +1) "raise E, V, T" cannot be translated safely in general. If V + is not a tuple or a (number, string, None) literal, then: + + raise E, V, T -> from future.utils import raise_ + raise_(E, V, T) +""" +# Author: Collin Winter, Armin Ronacher, Mark Huang + +# Local imports +from lib2to3 import pytree, fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import Name, Call, is_tuple, Comma, Attr, ArgList + +from libfuturize.fixer_util import touch_import_top + + +class FixRaise(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] > + """ + + def transform(self, node, results): + syms = self.syms + + exc = results["exc"].clone() + if exc.type == token.STRING: + msg = "Python 3 does not support string exceptions" + self.cannot_convert(node, msg) + return + + # Python 2 supports + # raise ((((E1, E2), E3), E4), E5), V + # as a synonym for + # raise E1, V + # Since Python 3 will not support this, we recurse down any tuple + # literals, always taking the first element. 
+ if is_tuple(exc): + while is_tuple(exc): + # exc.children[1:-1] is the unparenthesized tuple + # exc.children[1].children[0] is the first element of the tuple + exc = exc.children[1].children[0].clone() + exc.prefix = u" " + + if "tb" in results: + tb = results["tb"].clone() + else: + tb = None + + if "val" in results: + val = results["val"].clone() + if is_tuple(val): + # Assume that exc is a subclass of Exception and call exc(*val). + args = [c.clone() for c in val.children[1:-1]] + exc = Call(exc, args) + elif val.type in (token.NUMBER, token.STRING): + # Handle numeric and string literals specially, e.g. + # "raise Exception, 5" -> "raise Exception(5)". + val.prefix = u"" + exc = Call(exc, [val]) + elif val.type == token.NAME and val.value == u"None": + # Handle None specially, e.g. + # "raise Exception, None" -> "raise Exception". + pass + else: + # val is some other expression. If val evaluates to an instance + # of exc, it should just be raised. If val evaluates to None, + # a default instance of exc should be raised (as above). If val + # evaluates to a tuple, exc(*val) should be called (as + # above). Otherwise, exc(val) should be called. We can only + # tell what to do at runtime, so defer to future.utils.raise_(), + # which handles all of these cases. + touch_import_top(u"future.utils", u"raise_", node) + exc.prefix = u"" + args = [exc, Comma(), val] + if tb is not None: + args += [Comma(), tb] + return Call(Name(u"raise_"), args) + + if tb is not None: + tb.prefix = "" + exc_list = Attr(exc, Name('with_traceback')) + [ArgList([tb])] + else: + exc_list = [exc] + + return pytree.Node(syms.raise_stmt, + [Name(u"raise")] + exc_list, + prefix=node.prefix) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_remove_old__future__imports.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_remove_old__future__imports.py new file mode 100644 index 0000000..9336f75 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_remove_old__future__imports.py @@ -0,0 +1,26 @@ +""" +Fixer for removing any of these lines: + + from __future__ import with_statement + from __future__ import nested_scopes + from __future__ import generators + +The reason is that __future__ imports like these are required to be the first +line of code (after docstrings) on Python 2.6+, which can get in the way. + +These imports are always enabled in Python 2.6+, which is the minimum sane +version to target for Py2/3 compatibility. +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import remove_future_import + +class FixRemoveOldFutureImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 1 + + def transform(self, node, results): + remove_future_import(u"with_statement", node) + remove_future_import(u"nested_scopes", node) + remove_future_import(u"generators", node) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_unicode_keep_u.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_unicode_keep_u.py new file mode 100644 index 0000000..2e9a4e4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_unicode_keep_u.py @@ -0,0 +1,24 @@ +"""Fixer that changes unicode to str and unichr to chr, but -- unlike the +lib2to3 fix_unicode.py fixer, does not change u"..." into "...". + +The reason is that Py3.3+ supports the u"..." 
string prefix, and, if +present, the prefix may provide useful information for disambiguating +between byte strings and unicode strings, which is often the hardest part +of the porting task. + +""" + +from lib2to3.pgen2 import token +from lib2to3 import fixer_base + +_mapping = {u"unichr" : u"chr", u"unicode" : u"str"} + +class FixUnicodeKeepU(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "'unicode' | 'unichr'" + + def transform(self, node, results): + if node.type == token.NAME: + new = node.clone() + new.value = _mapping[node.value] + return new diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_unicode_literals_import.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_unicode_literals_import.py new file mode 100644 index 0000000..51c5062 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_unicode_literals_import.py @@ -0,0 +1,18 @@ +""" +Adds this import: + + from __future__ import unicode_literals + +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixUnicodeLiteralsImport(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + + run_order = 9 + + def transform(self, node, results): + future_import(u"unicode_literals", node) diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_xrange_with_import.py b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_xrange_with_import.py new file mode 100644 index 0000000..c910f81 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/fixes/fix_xrange_with_import.py @@ -0,0 +1,20 @@ +""" +For the ``future`` package. + +Turns any xrange calls into range calls and adds this import line: + + from builtins import range + +at the top. +""" + +from lib2to3.fixes.fix_xrange import FixXrange + +from libfuturize.fixer_util import touch_import_top + + +class FixXrangeWithImport(FixXrange): + def transform(self, node, results): + result = super(FixXrangeWithImport, self).transform(node, results) + touch_import_top('builtins', 'range', node) + return result diff --git a/minor_project/lib/python3.6/site-packages/libfuturize/main.py b/minor_project/lib/python3.6/site-packages/libfuturize/main.py new file mode 100644 index 0000000..634c2f2 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libfuturize/main.py @@ -0,0 +1,322 @@ +""" +futurize: automatic conversion to clean 2/3 code using ``python-future`` +====================================================================== + +Like Armin Ronacher's modernize.py, ``futurize`` attempts to produce clean +standard Python 3 code that runs on both Py2 and Py3. + +One pass +-------- + +Use it like this on Python 2 code: + + $ futurize --verbose mypython2script.py + +This will attempt to port the code to standard Py3 code that also +provides Py2 compatibility with the help of the right imports from +``future``. + +To write changes to the files, use the -w flag. + +Two stages +---------- + +The ``futurize`` script can also be called in two separate stages. First: + + $ futurize --stage1 mypython2script.py + +This produces more modern Python 2 code that is not yet compatible with Python +3. The tests should still run and the diff should be uncontroversial to apply to +most Python projects that are willing to drop support for Python 2.5 and lower. 
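+
+As a rough illustration (an editorial example, not an exhaustive list), stage 1
+rewrites a Python 2 print statement such as ``print "x"`` into the function
+call ``print("x")`` and adds ``from __future__ import print_function``, without
+introducing any runtime dependency on the ``future`` package.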
+ +After this, the recommended approach is to explicitly mark all strings that must +be byte-strings with a b'' prefix and all text (unicode) strings with a u'' +prefix, and then invoke the second stage of Python 2 to 2/3 conversion with:: + + $ futurize --stage2 mypython2script.py + +Stage 2 adds a dependency on ``future``. It converts most remaining Python +2-specific code to Python 3 code and adds appropriate imports from ``future`` +to restore Py2 support. + +The command above leaves all unadorned string literals as native strings +(byte-strings on Py2, unicode strings on Py3). If instead you would like all +unadorned string literals to be promoted to unicode, you can also pass this +flag: + + $ futurize --stage2 --unicode-literals mypython2script.py + +This adds the declaration ``from __future__ import unicode_literals`` to the +top of each file, which implicitly declares all unadorned string literals to be +unicode strings (``unicode`` on Py2). + +All imports +----------- + +The --all-imports option forces adding all ``__future__`` imports, +``builtins`` imports, and standard library aliases, even if they don't +seem necessary for the current state of each module. (This can simplify +testing, and can reduce the need to think about Py2 compatibility when editing +the code further.) + +""" + +from __future__ import (absolute_import, print_function, unicode_literals) +import future.utils +from future import __version__ + +import sys +import logging +import optparse +import os + +from lib2to3.main import warn, StdoutRefactoringTool +from lib2to3 import refactor + +from libfuturize.fixes import (lib2to3_fix_names_stage1, + lib2to3_fix_names_stage2, + libfuturize_fix_names_stage1, + libfuturize_fix_names_stage2) + +fixer_pkg = 'libfuturize.fixes' + + +def main(args=None): + """Main program. + + Args: + fixer_pkg: the name of a package where the fixers are located. + args: optional; a list of command line arguments. If omitted, + sys.argv[1:] is used. + + Returns a suggested exit status (0, 1, 2). + """ + + # Set up option parser + parser = optparse.OptionParser(usage="futurize [options] file|dir ...") + parser.add_option("-V", "--version", action="store_true", + help="Report the version number of futurize") + parser.add_option("-a", "--all-imports", action="store_true", + help="Add all __future__ and future imports to each module") + parser.add_option("-1", "--stage1", action="store_true", + help="Modernize Python 2 code only; no compatibility with Python 3 (or dependency on ``future``)") + parser.add_option("-2", "--stage2", action="store_true", + help="Take modernized (stage1) code and add a dependency on ``future`` to provide Py3 compatibility.") + parser.add_option("-0", "--both-stages", action="store_true", + help="Apply both stages 1 and 2") + parser.add_option("-u", "--unicode-literals", action="store_true", + help="Add ``from __future__ import unicode_literals`` to implicitly convert all unadorned string literals '' into unicode strings") + parser.add_option("-f", "--fix", action="append", default=[], + help="Each FIX specifies a transformation; default: all.\nEither use '-f division -f metaclass' etc. 
or use the fully-qualified module name: '-f lib2to3.fixes.fix_types -f libfuturize.fixes.fix_unicode_keep_u'") + parser.add_option("-j", "--processes", action="store", default=1, + type="int", help="Run 2to3 concurrently") + parser.add_option("-x", "--nofix", action="append", default=[], + help="Prevent a fixer from being run.") + parser.add_option("-l", "--list-fixes", action="store_true", + help="List available transformations") + parser.add_option("-p", "--print-function", action="store_true", + help="Modify the grammar so that print() is a function") + parser.add_option("-v", "--verbose", action="store_true", + help="More verbose logging") + parser.add_option("--no-diffs", action="store_true", + help="Don't show diffs of the refactoring") + parser.add_option("-w", "--write", action="store_true", + help="Write back modified files") + parser.add_option("-n", "--nobackups", action="store_true", default=False, + help="Don't write backups for modified files.") + parser.add_option("-o", "--output-dir", action="store", type="str", + default="", help="Put output files in this directory " + "instead of overwriting the input files. Requires -n. " + "For Python >= 2.7 only.") + parser.add_option("-W", "--write-unchanged-files", action="store_true", + help="Also write files even if no changes were required" + " (useful with --output-dir); implies -w.") + parser.add_option("--add-suffix", action="store", type="str", default="", + help="Append this string to all output filenames." + " Requires -n if non-empty. For Python >= 2.7 only." + "ex: --add-suffix='3' will generate .py3 files.") + + # Parse command line arguments + flags = {} + refactor_stdin = False + options, args = parser.parse_args(args) + + if options.write_unchanged_files: + flags["write_unchanged_files"] = True + if not options.write: + warn("--write-unchanged-files/-W implies -w.") + options.write = True + # If we allowed these, the original files would be renamed to backup names + # but not replaced. + if options.output_dir and not options.nobackups: + parser.error("Can't use --output-dir/-o without -n.") + if options.add_suffix and not options.nobackups: + parser.error("Can't use --add-suffix without -n.") + + if not options.write and options.no_diffs: + warn("not writing files and not printing diffs; that's not very useful") + if not options.write and options.nobackups: + parser.error("Can't use -n without -w") + if "-" in args: + refactor_stdin = True + if options.write: + print("Can't write to stdin.", file=sys.stderr) + return 2 + # Is this ever necessary? 
+ if options.print_function: + flags["print_function"] = True + + # Set up logging handler + level = logging.DEBUG if options.verbose else logging.INFO + logging.basicConfig(format='%(name)s: %(message)s', level=level) + logger = logging.getLogger('libfuturize.main') + + if options.stage1 or options.stage2: + assert options.both_stages is None + options.both_stages = False + else: + options.both_stages = True + + avail_fixes = set() + + if options.stage1 or options.both_stages: + avail_fixes.update(lib2to3_fix_names_stage1) + avail_fixes.update(libfuturize_fix_names_stage1) + if options.stage2 or options.both_stages: + avail_fixes.update(lib2to3_fix_names_stage2) + avail_fixes.update(libfuturize_fix_names_stage2) + + if options.unicode_literals: + avail_fixes.add('libfuturize.fixes.fix_unicode_literals_import') + + if options.version: + print(__version__) + return 0 + if options.list_fixes: + print("Available transformations for the -f/--fix option:") + # for fixname in sorted(refactor.get_all_fix_names(fixer_pkg)): + for fixname in sorted(avail_fixes): + print(fixname) + if not args: + return 0 + if not args: + print("At least one file or directory argument required.", + file=sys.stderr) + print("Use --help to show usage.", file=sys.stderr) + return 2 + + unwanted_fixes = set() + for fix in options.nofix: + if ".fix_" in fix: + unwanted_fixes.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + unwanted_fixes.add(found[0]) + + extra_fixes = set() + if options.all_imports: + if options.stage1: + prefix = 'libfuturize.fixes.' + extra_fixes.add(prefix + + 'fix_add__future__imports_except_unicode_literals') + else: + # In case the user hasn't run stage1 for some reason: + prefix = 'libpasteurize.fixes.' + extra_fixes.add(prefix + 'fix_add_all__future__imports') + extra_fixes.add(prefix + 'fix_add_future_standard_library_import') + extra_fixes.add(prefix + 'fix_add_all_future_builtins') + explicit = set() + if options.fix: + all_present = False + for fix in options.fix: + if fix == 'all': + all_present = True + else: + if ".fix_" in fix: + explicit.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. 
Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + explicit.add(found[0]) + if len(explicit & unwanted_fixes) > 0: + print("Conflicting usage: the following fixers have been " + "simultaneously requested and disallowed:\n" + + "\n".join(" " + myf for myf in (explicit & unwanted_fixes)), + file=sys.stderr) + return 2 + requested = avail_fixes.union(explicit) if all_present else explicit + else: + requested = avail_fixes.union(explicit) + fixer_names = (requested | extra_fixes) - unwanted_fixes + + input_base_dir = os.path.commonprefix(args) + if (input_base_dir and not input_base_dir.endswith(os.sep) + and not os.path.isdir(input_base_dir)): + # One or more similar names were passed, their directory is the base. + # os.path.commonprefix() is ignorant of path elements, this corrects + # for that weird API. + input_base_dir = os.path.dirname(input_base_dir) + if options.output_dir: + input_base_dir = input_base_dir.rstrip(os.sep) + logger.info('Output in %r will mirror the input directory %r layout.', + options.output_dir, input_base_dir) + + # Initialize the refactoring tool + if future.utils.PY26: + extra_kwargs = {} + else: + extra_kwargs = { + 'append_suffix': options.add_suffix, + 'output_dir': options.output_dir, + 'input_base_dir': input_base_dir, + } + + rt = StdoutRefactoringTool( + sorted(fixer_names), flags, sorted(explicit), + options.nobackups, not options.no_diffs, + **extra_kwargs) + + # Refactor all files and directories passed as arguments + if not rt.errors: + if refactor_stdin: + rt.refactor_stdin() + else: + try: + rt.refactor(args, options.write, None, + options.processes) + except refactor.MultiprocessingUnsupported: + assert options.processes > 1 + print("Sorry, -j isn't " \ + "supported on this platform.", file=sys.stderr) + return 1 + rt.summarize() + + # Return error status (0 if rt.errors is zero) + return int(bool(rt.errors)) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/__init__.py b/minor_project/lib/python3.6/site-packages/libpasteurize/__init__.py new file mode 100644 index 0000000..4cb1cbc --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/__init__.py @@ -0,0 +1 @@ +# empty to make this a package diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..a6ee58c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/__pycache__/main.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/__pycache__/main.cpython-36.pyc new file mode 100644 index 0000000..cdb2a11 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/__pycache__/main.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__init__.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__init__.py new file mode 100644 index 0000000..905aec4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__init__.py @@ -0,0 +1,54 @@ +import sys +from lib2to3 import refactor + +# The original set of these fixes comes from lib3to2 (https://bitbucket.org/amentajo/lib3to2): +fix_names = set([ + 'libpasteurize.fixes.fix_add_all__future__imports', # from __future__ import absolute_import etc. 
on separate lines + 'libpasteurize.fixes.fix_add_future_standard_library_import', # we force adding this import for now, even if it doesn't seem necessary to the fix_future_standard_library fixer, for ease of testing + # 'libfuturize.fixes.fix_order___future__imports', # consolidates to a single line to simplify testing -- UNFINISHED + 'libpasteurize.fixes.fix_future_builtins', # adds "from future.builtins import *" + 'libfuturize.fixes.fix_future_standard_library', # adds "from future import standard_library" + + 'libpasteurize.fixes.fix_annotations', + # 'libpasteurize.fixes.fix_bitlength', # ints have this in Py2.7 + # 'libpasteurize.fixes.fix_bool', # need a decorator or Mixin + # 'libpasteurize.fixes.fix_bytes', # leave bytes as bytes + # 'libpasteurize.fixes.fix_classdecorator', # available in + # Py2.6+ + # 'libpasteurize.fixes.fix_collections', hmmm ... + # 'libpasteurize.fixes.fix_dctsetcomp', # avail in Py27 + 'libpasteurize.fixes.fix_division', # yes + # 'libpasteurize.fixes.fix_except', # avail in Py2.6+ + # 'libpasteurize.fixes.fix_features', # ? + 'libpasteurize.fixes.fix_fullargspec', + # 'libpasteurize.fixes.fix_funcattrs', + 'libpasteurize.fixes.fix_getcwd', + 'libpasteurize.fixes.fix_imports', # adds "from future import standard_library" + 'libpasteurize.fixes.fix_imports2', + # 'libpasteurize.fixes.fix_input', + # 'libpasteurize.fixes.fix_int', + # 'libpasteurize.fixes.fix_intern', + # 'libpasteurize.fixes.fix_itertools', + 'libpasteurize.fixes.fix_kwargs', # yes, we want this + # 'libpasteurize.fixes.fix_memoryview', + # 'libpasteurize.fixes.fix_metaclass', # write a custom handler for + # this + # 'libpasteurize.fixes.fix_methodattrs', # __func__ and __self__ seem to be defined on Py2.7 already + 'libpasteurize.fixes.fix_newstyle', # yes, we want this: explicit inheritance from object. Without new-style classes in Py2, super() will break etc. + # 'libpasteurize.fixes.fix_next', # use a decorator for this + # 'libpasteurize.fixes.fix_numliterals', # prob not + # 'libpasteurize.fixes.fix_open', # huh? + # 'libpasteurize.fixes.fix_print', # no way + 'libpasteurize.fixes.fix_printfunction', # adds __future__ import print_function + # 'libpasteurize.fixes.fix_raise_', # TODO: get this working! 
+ + # 'libpasteurize.fixes.fix_range', # nope + # 'libpasteurize.fixes.fix_reduce', + # 'libpasteurize.fixes.fix_setliteral', + # 'libpasteurize.fixes.fix_str', + # 'libpasteurize.fixes.fix_super', # maybe, if our magic super() isn't robust enough + 'libpasteurize.fixes.fix_throw', # yes, if Py3 supports it + # 'libpasteurize.fixes.fix_unittest', + 'libpasteurize.fixes.fix_unpacking', # yes, this is useful + # 'libpasteurize.fixes.fix_with' # way out of date + ]) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..3e54183 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/feature_base.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/feature_base.cpython-36.pyc new file mode 100644 index 0000000..ced827c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/feature_base.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_add_all__future__imports.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_add_all__future__imports.cpython-36.pyc new file mode 100644 index 0000000..9840153 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_add_all__future__imports.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_add_all_future_builtins.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_add_all_future_builtins.cpython-36.pyc new file mode 100644 index 0000000..5a6ecfa Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_add_all_future_builtins.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_add_future_standard_library_import.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_add_future_standard_library_import.cpython-36.pyc new file mode 100644 index 0000000..85b12ac Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_add_future_standard_library_import.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_annotations.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_annotations.cpython-36.pyc new file mode 100644 index 0000000..92b244f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_annotations.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_division.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_division.cpython-36.pyc new file mode 100644 index 0000000..9a17cab Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_division.cpython-36.pyc differ diff --git 
a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_features.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_features.cpython-36.pyc new file mode 100644 index 0000000..22c1c9a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_features.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_fullargspec.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_fullargspec.cpython-36.pyc new file mode 100644 index 0000000..d6e6615 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_fullargspec.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_future_builtins.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_future_builtins.cpython-36.pyc new file mode 100644 index 0000000..3d53c5c Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_future_builtins.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_getcwd.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_getcwd.cpython-36.pyc new file mode 100644 index 0000000..da9956a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_getcwd.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_imports.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_imports.cpython-36.pyc new file mode 100644 index 0000000..e7cb96e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_imports.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_imports2.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_imports2.cpython-36.pyc new file mode 100644 index 0000000..65c2bdb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_imports2.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_kwargs.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_kwargs.cpython-36.pyc new file mode 100644 index 0000000..9ca6acf Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_kwargs.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_memoryview.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_memoryview.cpython-36.pyc new file mode 100644 index 0000000..00be495 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_memoryview.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_metaclass.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_metaclass.cpython-36.pyc new file mode 100644 index 0000000..8622a6f Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_metaclass.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_newstyle.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_newstyle.cpython-36.pyc new file mode 100644 index 0000000..9748049 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_newstyle.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_next.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_next.cpython-36.pyc new file mode 100644 index 0000000..ef37f5a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_next.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_printfunction.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_printfunction.cpython-36.pyc new file mode 100644 index 0000000..a61ada7 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_printfunction.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_raise.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_raise.cpython-36.pyc new file mode 100644 index 0000000..9b02c19 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_raise.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_raise_.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_raise_.cpython-36.pyc new file mode 100644 index 0000000..62ede47 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_raise_.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_throw.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_throw.cpython-36.pyc new file mode 100644 index 0000000..5844d8a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_throw.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_unpacking.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_unpacking.cpython-36.pyc new file mode 100644 index 0000000..8e60ffc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/__pycache__/fix_unpacking.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/feature_base.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/feature_base.py new file mode 100644 index 0000000..c36d9a9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/feature_base.py @@ -0,0 +1,57 @@ +u""" +Base classes for features that are backwards-incompatible. 
+ +Usage: +features = Features() +features.add(Feature("py3k_feature", "power< 'py3k' any* >", "2.7")) +PATTERN = features.PATTERN +""" + +pattern_unformatted = u"%s=%s" # name=pattern, for dict lookups +message_unformatted = u""" +%s is only supported in Python %s and above.""" + +class Feature(object): + u""" + A feature has a name, a pattern, and a minimum version of Python 2.x + required to use the feature (or 3.x if there is no backwards-compatible + version of 2.x) + """ + def __init__(self, name, PATTERN, version): + self.name = name + self._pattern = PATTERN + self.version = version + + def message_text(self): + u""" + Format the above text with the name and minimum version required. + """ + return message_unformatted % (self.name, self.version) + +class Features(set): + u""" + A set of features that generates a pattern for the features it contains. + This set will act like a mapping in that we map names to patterns. + """ + mapping = {} + + def update_mapping(self): + u""" + Called every time we care about the mapping of names to features. + """ + self.mapping = dict([(f.name, f) for f in iter(self)]) + + @property + def PATTERN(self): + u""" + Uses the mapping of names to features to return a PATTERN suitable + for using the lib2to3 patcomp. + """ + self.update_mapping() + return u" |\n".join([pattern_unformatted % (f.name, f._pattern) for f in iter(self)]) + + def __getitem__(self, key): + u""" + Implement a simple mapping to get patterns from names. + """ + return self.mapping[key] diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_add_all__future__imports.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_add_all__future__imports.py new file mode 100644 index 0000000..a151f9f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_add_all__future__imports.py @@ -0,0 +1,24 @@ +""" +Fixer for adding: + + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + from __future__ import unicode_literals + +This is done when converting from Py3 to both Py3/Py2. +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixAddAllFutureImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 1 + + def transform(self, node, results): + future_import(u"absolute_import", node) + future_import(u"division", node) + future_import(u"print_function", node) + future_import(u"unicode_literals", node) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_add_all_future_builtins.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_add_all_future_builtins.py new file mode 100644 index 0000000..22911ba --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_add_all_future_builtins.py @@ -0,0 +1,37 @@ +""" +For the ``future`` package. + +Adds this import line:: + + from builtins import (ascii, bytes, chr, dict, filter, hex, input, + int, list, map, next, object, oct, open, pow, + range, round, str, super, zip) + +to a module, irrespective of whether each definition is used. + +Adds these imports after any other imports (in an initial block of them). 
+""" + +from __future__ import unicode_literals + +from lib2to3 import fixer_base + +from libfuturize.fixer_util import touch_import_top + + +class FixAddAllFutureBuiltins(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 1 + + def transform(self, node, results): + # import_str = """(ascii, bytes, chr, dict, filter, hex, input, + # int, list, map, next, object, oct, open, pow, + # range, round, str, super, zip)""" + touch_import_top(u'builtins', '*', node) + + # builtins = """ascii bytes chr dict filter hex input + # int list map next object oct open pow + # range round str super zip""" + # for builtin in sorted(builtins.split(), reverse=True): + # touch_import_top(u'builtins', builtin, node) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_add_future_standard_library_import.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_add_future_standard_library_import.py new file mode 100644 index 0000000..0778406 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_add_future_standard_library_import.py @@ -0,0 +1,23 @@ +""" +For the ``future`` package. + +Adds this import line: + + from future import standard_library + +after any __future__ imports but before any other imports. Doesn't actually +change the imports to Py3 style. +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import touch_import_top + +class FixAddFutureStandardLibraryImport(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "file_input" + run_order = 8 + + def transform(self, node, results): + # TODO: add a blank line between any __future__ imports and this? + touch_import_top(u'future', u'standard_library', node) + # TODO: also add standard_library.install_hooks() diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_annotations.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_annotations.py new file mode 100644 index 0000000..884b674 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_annotations.py @@ -0,0 +1,48 @@ +u""" +Fixer to remove function annotations +""" + +from lib2to3 import fixer_base +from lib2to3.pgen2 import token +from lib2to3.fixer_util import syms + +warning_text = u"Removing function annotations completely." + +def param_without_annotations(node): + return node.children[0] + +class FixAnnotations(fixer_base.BaseFix): + + warned = False + + def warn_once(self, node, reason): + if not self.warned: + self.warned = True + self.warning(node, reason=reason) + + PATTERN = u""" + funcdef< 'def' any parameters< '(' [params=any] ')' > ['->' ret=any] ':' any* > + """ + + def transform(self, node, results): + u""" + This just strips annotations from the funcdef completely. 
+ """ + params = results.get(u"params") + ret = results.get(u"ret") + if ret is not None: + assert ret.prev_sibling.type == token.RARROW, u"Invalid return annotation" + self.warn_once(node, reason=warning_text) + ret.prev_sibling.remove() + ret.remove() + if params is None: return + if params.type == syms.typedargslist: + # more than one param in a typedargslist + for param in params.children: + if param.type == syms.tname: + self.warn_once(node, reason=warning_text) + param.replace(param_without_annotations(param)) + elif params.type == syms.tname: + # one param + self.warn_once(node, reason=warning_text) + params.replace(param_without_annotations(params)) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_division.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_division.py new file mode 100644 index 0000000..6a04871 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_division.py @@ -0,0 +1,28 @@ +u""" +Fixer for division: from __future__ import division if needed +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import token, future_import + +def match_division(node): + u""" + __future__.division redefines the meaning of a single slash for division, + so we match that and only that. + """ + slash = token.SLASH + return node.type == slash and not node.next_sibling.type == slash and \ + not node.prev_sibling.type == slash + +class FixDivision(fixer_base.BaseFix): + run_order = 4 # this seems to be ignored? + + def match(self, node): + u""" + Since the tree needs to be fixed once and only once if and only if it + matches, then we can start discarding matches after we make the first. + """ + return match_division(node) + + def transform(self, node, results): + future_import(u"division", node) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_features.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_features.py new file mode 100644 index 0000000..52630f9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_features.py @@ -0,0 +1,86 @@ +u""" +Warn about features that are not present in Python 2.5, giving a message that +points to the earliest version of Python 2.x (or 3.x, if none) that supports it +""" + +from .feature_base import Feature, Features +from lib2to3 import fixer_base + +FEATURES = [ + #(FeatureName, + # FeaturePattern, + # FeatureMinVersion, + #), + (u"memoryview", + u"power < 'memoryview' trailer < '(' any* ')' > any* >", + u"2.7", + ), + (u"numbers", + u"""import_from< 'from' 'numbers' 'import' any* > | + import_name< 'import' ('numbers' dotted_as_names< any* 'numbers' any* >) >""", + u"2.6", + ), + (u"abc", + u"""import_name< 'import' ('abc' dotted_as_names< any* 'abc' any* >) > | + import_from< 'from' 'abc' 'import' any* >""", + u"2.6", + ), + (u"io", + u"""import_name< 'import' ('io' dotted_as_names< any* 'io' any* >) > | + import_from< 'from' 'io' 'import' any* >""", + u"2.6", + ), + (u"bin", + u"power< 'bin' trailer< '(' any* ')' > any* >", + u"2.6", + ), + (u"formatting", + u"power< any trailer< '.' 'format' > trailer< '(' any* ')' > >", + u"2.6", + ), + (u"nonlocal", + u"global_stmt< 'nonlocal' any* >", + u"3.0", + ), + (u"with_traceback", + u"trailer< '.' 'with_traceback' >", + u"3.0", + ), +] + +class FixFeatures(fixer_base.BaseFix): + + run_order = 9 # Wait until all other fixers have run to check for these + + # To avoid spamming, we only want to warn for each feature once. 
+ features_warned = set() + + # Build features from the list above + features = Features([Feature(name, pattern, version) for \ + name, pattern, version in FEATURES]) + + PATTERN = features.PATTERN + + def match(self, node): + to_ret = super(FixFeatures, self).match(node) + # We want the mapping only to tell us the node's specific information. + try: + del to_ret[u'node'] + except Exception: + # We want it to delete the 'node' from the results + # if it's there, so we don't care if it fails for normal reasons. + pass + return to_ret + + def transform(self, node, results): + for feature_name in results: + if feature_name in self.features_warned: + continue + else: + curr_feature = self.features[feature_name] + if curr_feature.version >= u"3": + fail = self.cannot_convert + else: + fail = self.warning + fail(node, reason=curr_feature.message_text()) + self.features_warned.add(feature_name) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_fullargspec.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_fullargspec.py new file mode 100644 index 0000000..4bd37e1 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_fullargspec.py @@ -0,0 +1,16 @@ +u""" +Fixer for getfullargspec -> getargspec +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name + +warn_msg = u"some of the values returned by getfullargspec are not valid in Python 2 and have no equivalent." + +class FixFullargspec(fixer_base.BaseFix): + + PATTERN = u"'getfullargspec'" + + def transform(self, node, results): + self.warning(node, warn_msg) + return Name(u"getargspec", prefix=node.prefix) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_future_builtins.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_future_builtins.py new file mode 100644 index 0000000..6849679 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_future_builtins.py @@ -0,0 +1,46 @@ +""" +Adds this import line: + + from builtins import XYZ + +for each of the functions XYZ that is used in the module. +""" + +from __future__ import unicode_literals + +from lib2to3 import fixer_base +from lib2to3.pygram import python_symbols as syms +from lib2to3.fixer_util import Name, Call, in_special_context + +from libfuturize.fixer_util import touch_import_top + +# All builtins are: +# from future.builtins.iterators import (filter, map, zip) +# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super) +# from future.types import (bytes, dict, int, range, str) +# We don't need isinstance any more. + +replaced_builtins = '''filter map zip + ascii chr hex input next oct open round super + bytes dict int range str'''.split() + +expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtins]) + + +class FixFutureBuiltins(fixer_base.BaseFix): + BM_compatible = True + run_order = 9 + + # Currently we only match uses as a function. This doesn't match e.g.: + # if isinstance(s, str): + # ... 
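+    # (Editorial note:) it does match plain calls such as range(10) or
+    # filter(f, seq); for those, transform() below adds a line like
+    #     from builtins import range
+    # at the top of the module via touch_import_top().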
+ PATTERN = """ + power< + ({0}) trailer< '(' args=[any] ')' > + rest=any* > + """.format(expression) + + def transform(self, node, results): + name = results["name"] + touch_import_top(u'builtins', name.value, node) + # name.replace(Name(u"input", prefix=name.prefix)) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_getcwd.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_getcwd.py new file mode 100644 index 0000000..9b7f002 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_getcwd.py @@ -0,0 +1,26 @@ +u""" +Fixer for os.getcwd() -> os.getcwdu(). +Also warns about "from os import getcwd", suggesting the above form. +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name + +class FixGetcwd(fixer_base.BaseFix): + + PATTERN = u""" + power< 'os' trailer< dot='.' name='getcwd' > any* > + | + import_from< 'from' 'os' 'import' bad='getcwd' > + """ + + def transform(self, node, results): + if u"name" in results: + name = results[u"name"] + name.replace(Name(u"getcwdu", prefix=name.prefix)) + elif u"bad" in results: + # Can't convert to getcwdu and then expect to catch every use. + self.cannot_convert(node, u"import os, use os.getcwd() instead.") + return + else: + raise ValueError(u"For some reason, the pattern matcher failed.") diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_imports.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_imports.py new file mode 100644 index 0000000..2d6718f --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_imports.py @@ -0,0 +1,112 @@ +u""" +Fixer for standard library imports renamed in Python 3 +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, is_probably_builtin, Newline, does_tree_import +from lib2to3.pygram import python_symbols as syms +from lib2to3.pgen2 import token +from lib2to3.pytree import Node, Leaf + +from libfuturize.fixer_util import touch_import_top +# from ..fixer_util import NameImport + +# used in simple_mapping_to_pattern() +MAPPING = {u"reprlib": u"repr", + u"winreg": u"_winreg", + u"configparser": u"ConfigParser", + u"copyreg": u"copy_reg", + u"queue": u"Queue", + u"socketserver": u"SocketServer", + u"_markupbase": u"markupbase", + u"test.support": u"test.test_support", + u"dbm.bsd": u"dbhash", + u"dbm.ndbm": u"dbm", + u"dbm.dumb": u"dumbdbm", + u"dbm.gnu": u"gdbm", + u"html.parser": u"HTMLParser", + u"html.entities": u"htmlentitydefs", + u"http.client": u"httplib", + u"http.cookies": u"Cookie", + u"http.cookiejar": u"cookielib", +# "tkinter": "Tkinter", + u"tkinter.dialog": u"Dialog", + u"tkinter._fix": u"FixTk", + u"tkinter.scrolledtext": u"ScrolledText", + u"tkinter.tix": u"Tix", + u"tkinter.constants": u"Tkconstants", + u"tkinter.dnd": u"Tkdnd", + u"tkinter.__init__": u"Tkinter", + u"tkinter.colorchooser": u"tkColorChooser", + u"tkinter.commondialog": u"tkCommonDialog", + u"tkinter.font": u"tkFont", + u"tkinter.ttk": u"ttk", + u"tkinter.messagebox": u"tkMessageBox", + u"tkinter.turtle": u"turtle", + u"urllib.robotparser": u"robotparser", + u"xmlrpc.client": u"xmlrpclib", + u"builtins": u"__builtin__", +} + +# generic strings to help build patterns +# these variables mean (with http.client.HTTPConnection as an example): +# name = http +# attr = client +# used = HTTPConnection +# fmt_name is a formatted subpattern (simple_name_match or dotted_name_match) + +# helps match 'queue', as in 'from queue import ...' 
+simple_name_match = u"name='%s'" +# helps match 'client', to be used if client has been imported from http +subname_match = u"attr='%s'" +# helps match 'http.client', as in 'import urllib.request' +dotted_name_match = u"dotted_name=dotted_name< %s '.' %s >" +# helps match 'queue', as in 'queue.Queue(...)' +power_onename_match = u"%s" +# helps match 'http.client', as in 'http.client.HTTPConnection(...)' +power_twoname_match = u"power< %s trailer< '.' %s > any* >" +# helps match 'client.HTTPConnection', if 'client' has been imported from http +power_subname_match = u"power< %s any* >" +# helps match 'from http.client import HTTPConnection' +from_import_match = u"from_import=import_from< 'from' %s 'import' imported=any >" +# helps match 'from http import client' +from_import_submod_match = u"from_import_submod=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* > ) >" +# helps match 'import urllib.request' +name_import_match = u"name_import=import_name< 'import' %s > | name_import=import_name< 'import' dotted_as_name< %s 'as' renamed=any > >" +# helps match 'import http.client, winreg' +multiple_name_import_match = u"name_import=import_name< 'import' dotted_as_names< names=any* > >" + +def all_patterns(name): + u""" + Accepts a string and returns a pattern of possible patterns involving that name + Called by simple_mapping_to_pattern for each name in the mapping it receives. + """ + + # i_ denotes an import-like node + # u_ denotes a node that appears to be a usage of the name + if u'.' in name: + name, attr = name.split(u'.', 1) + simple_name = simple_name_match % (name) + simple_attr = subname_match % (attr) + dotted_name = dotted_name_match % (simple_name, simple_attr) + i_from = from_import_match % (dotted_name) + i_from_submod = from_import_submod_match % (simple_name, simple_attr, simple_attr, simple_attr, simple_attr) + i_name = name_import_match % (dotted_name, dotted_name) + u_name = power_twoname_match % (simple_name, simple_attr) + u_subname = power_subname_match % (simple_attr) + return u' | \n'.join((i_name, i_from, i_from_submod, u_name, u_subname)) + else: + simple_name = simple_name_match % (name) + i_name = name_import_match % (simple_name, simple_name) + i_from = from_import_match % (simple_name) + u_name = power_onename_match % (simple_name) + return u' | \n'.join((i_name, i_from, u_name)) + + +class FixImports(fixer_base.BaseFix): + + PATTERN = u' | \n'.join([all_patterns(name) for name in MAPPING]) + PATTERN = u' | \n'.join((PATTERN, multiple_name_import_match)) + + def transform(self, node, results): + touch_import_top(u'future', u'standard_library', node) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_imports2.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_imports2.py new file mode 100644 index 0000000..70444e9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_imports2.py @@ -0,0 +1,174 @@ +u""" +Fixer for complicated imports +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, String, FromImport, Newline, Comma +from libfuturize.fixer_util import touch_import_top + + +TK_BASE_NAMES = (u'ACTIVE', u'ALL', u'ANCHOR', u'ARC',u'BASELINE', u'BEVEL', u'BOTH', + u'BOTTOM', u'BROWSE', u'BUTT', u'CASCADE', u'CENTER', u'CHAR', + u'CHECKBUTTON', u'CHORD', u'COMMAND', u'CURRENT', u'DISABLED', + u'DOTBOX', u'E', u'END', u'EW', u'EXCEPTION', u'EXTENDED', u'FALSE', + 
u'FIRST', u'FLAT', u'GROOVE', u'HIDDEN', u'HORIZONTAL', u'INSERT', + u'INSIDE', u'LAST', u'LEFT', u'MITER', u'MOVETO', u'MULTIPLE', u'N', + u'NE', u'NO', u'NONE', u'NORMAL', u'NS', u'NSEW', u'NUMERIC', u'NW', + u'OFF', u'ON', u'OUTSIDE', u'PAGES', u'PIESLICE', u'PROJECTING', + u'RADIOBUTTON', u'RAISED', u'READABLE', u'RIDGE', u'RIGHT', + u'ROUND', u'S', u'SCROLL', u'SE', u'SEL', u'SEL_FIRST', u'SEL_LAST', + u'SEPARATOR', u'SINGLE', u'SOLID', u'SUNKEN', u'SW', u'StringTypes', + u'TOP', u'TRUE', u'TclVersion', u'TkVersion', u'UNDERLINE', + u'UNITS', u'VERTICAL', u'W', u'WORD', u'WRITABLE', u'X', u'Y', u'YES', + u'wantobjects') + +PY2MODULES = { + u'urllib2' : ( + u'AbstractBasicAuthHandler', u'AbstractDigestAuthHandler', + u'AbstractHTTPHandler', u'BaseHandler', u'CacheFTPHandler', + u'FTPHandler', u'FileHandler', u'HTTPBasicAuthHandler', + u'HTTPCookieProcessor', u'HTTPDefaultErrorHandler', + u'HTTPDigestAuthHandler', u'HTTPError', u'HTTPErrorProcessor', + u'HTTPHandler', u'HTTPPasswordMgr', + u'HTTPPasswordMgrWithDefaultRealm', u'HTTPRedirectHandler', + u'HTTPSHandler', u'OpenerDirector', u'ProxyBasicAuthHandler', + u'ProxyDigestAuthHandler', u'ProxyHandler', u'Request', + u'StringIO', u'URLError', u'UnknownHandler', u'addinfourl', + u'build_opener', u'install_opener', u'parse_http_list', + u'parse_keqv_list', u'randombytes', u'request_host', u'urlopen'), + u'urllib' : ( + u'ContentTooShortError', u'FancyURLopener',u'URLopener', + u'basejoin', u'ftperrors', u'getproxies', + u'getproxies_environment', u'localhost', u'pathname2url', + u'quote', u'quote_plus', u'splitattr', u'splithost', + u'splitnport', u'splitpasswd', u'splitport', u'splitquery', + u'splittag', u'splittype', u'splituser', u'splitvalue', + u'thishost', u'unquote', u'unquote_plus', u'unwrap', + u'url2pathname', u'urlcleanup', u'urlencode', u'urlopen', + u'urlretrieve',), + u'urlparse' : ( + u'parse_qs', u'parse_qsl', u'urldefrag', u'urljoin', + u'urlparse', u'urlsplit', u'urlunparse', u'urlunsplit'), + u'dbm' : ( + u'ndbm', u'gnu', u'dumb'), + u'anydbm' : ( + u'error', u'open'), + u'whichdb' : ( + u'whichdb',), + u'BaseHTTPServer' : ( + u'BaseHTTPRequestHandler', u'HTTPServer'), + u'CGIHTTPServer' : ( + u'CGIHTTPRequestHandler',), + u'SimpleHTTPServer' : ( + u'SimpleHTTPRequestHandler',), + u'FileDialog' : TK_BASE_NAMES + ( + u'FileDialog', u'LoadFileDialog', u'SaveFileDialog', + u'dialogstates', u'test'), + u'tkFileDialog' : ( + u'Directory', u'Open', u'SaveAs', u'_Dialog', u'askdirectory', + u'askopenfile', u'askopenfilename', u'askopenfilenames', + u'askopenfiles', u'asksaveasfile', u'asksaveasfilename'), + u'SimpleDialog' : TK_BASE_NAMES + ( + u'SimpleDialog',), + u'tkSimpleDialog' : TK_BASE_NAMES + ( + u'askfloat', u'askinteger', u'askstring', u'Dialog'), + u'SimpleXMLRPCServer' : ( + u'CGIXMLRPCRequestHandler', u'SimpleXMLRPCDispatcher', + u'SimpleXMLRPCRequestHandler', u'SimpleXMLRPCServer', + u'list_public_methods', u'remove_duplicates', + u'resolve_dotted_attribute'), + u'DocXMLRPCServer' : ( + u'DocCGIXMLRPCRequestHandler', u'DocXMLRPCRequestHandler', + u'DocXMLRPCServer', u'ServerHTMLDoc',u'XMLRPCDocGenerator'), + } + +MAPPING = { u'urllib.request' : + (u'urllib2', u'urllib'), + u'urllib.error' : + (u'urllib2', u'urllib'), + u'urllib.parse' : + (u'urllib2', u'urllib', u'urlparse'), + u'dbm.__init__' : + (u'anydbm', u'whichdb'), + u'http.server' : + (u'CGIHTTPServer', u'SimpleHTTPServer', u'BaseHTTPServer'), + u'tkinter.filedialog' : + (u'tkFileDialog', u'FileDialog'), + u'tkinter.simpledialog' : + 
(u'tkSimpleDialog', u'SimpleDialog'), + u'xmlrpc.server' : + (u'DocXMLRPCServer', u'SimpleXMLRPCServer'), + } + +# helps match 'http', as in 'from http.server import ...' +simple_name = u"name='%s'" +# helps match 'server', as in 'from http.server import ...' +simple_attr = u"attr='%s'" +# helps match 'HTTPServer', as in 'from http.server import HTTPServer' +simple_using = u"using='%s'" +# helps match 'urllib.request', as in 'import urllib.request' +dotted_name = u"dotted_name=dotted_name< %s '.' %s >" +# helps match 'http.server', as in 'http.server.HTTPServer(...)' +power_twoname = u"pow=power< %s trailer< '.' %s > trailer< '.' using=any > any* >" +# helps match 'dbm.whichdb', as in 'dbm.whichdb(...)' +power_onename = u"pow=power< %s trailer< '.' using=any > any* >" +# helps match 'from http.server import HTTPServer' +# also helps match 'from http.server import HTTPServer, SimpleHTTPRequestHandler' +# also helps match 'from http.server import *' +from_import = u"from_import=import_from< 'from' %s 'import' (import_as_name< using=any 'as' renamed=any> | in_list=import_as_names< using=any* > | using='*' | using=NAME) >" +# helps match 'import urllib.request' +name_import = u"name_import=import_name< 'import' (%s | in_list=dotted_as_names< imp_list=any* >) >" + +############# +# WON'T FIX # +############# + +# helps match 'import urllib.request as name' +name_import_rename = u"name_import_rename=dotted_as_name< %s 'as' renamed=any >" +# helps match 'from http import server' +from_import_rename = u"from_import_rename=import_from< 'from' %s 'import' (%s | import_as_name< %s 'as' renamed=any > | in_list=import_as_names< any* (%s | import_as_name< %s 'as' renamed=any >) any* >) >" + + +def all_modules_subpattern(): + u""" + Builds a pattern for all toplevel names + (urllib, http, etc) + """ + names_dot_attrs = [mod.split(u".") for mod in MAPPING] + ret = u"( " + u" | ".join([dotted_name % (simple_name % (mod[0]), + simple_attr % (mod[1])) for mod in names_dot_attrs]) + ret += u" | " + ret += u" | ".join([simple_name % (mod[0]) for mod in names_dot_attrs if mod[1] == u"__init__"]) + u" )" + return ret + + +def build_import_pattern(mapping1, mapping2): + u""" + mapping1: A dict mapping py3k modules to all possible py2k replacements + mapping2: A dict mapping py2k modules to the things they do + This builds a HUGE pattern to match all ways that things can be imported + """ + # py3k: urllib.request, py2k: ('urllib2', 'urllib') + yield from_import % (all_modules_subpattern()) + for py3k, py2k in mapping1.items(): + name, attr = py3k.split(u'.') + s_name = simple_name % (name) + s_attr = simple_attr % (attr) + d_name = dotted_name % (s_name, s_attr) + yield name_import % (d_name) + yield power_twoname % (s_name, s_attr) + if attr == u'__init__': + yield name_import % (s_name) + yield power_onename % (s_name) + yield name_import_rename % (d_name) + yield from_import_rename % (s_name, s_attr, s_attr, s_attr, s_attr) + + +class FixImports2(fixer_base.BaseFix): + + run_order = 4 + + PATTERN = u" | \n".join(build_import_pattern(MAPPING, PY2MODULES)) + + def transform(self, node, results): + touch_import_top(u'future', u'standard_library', node) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_kwargs.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_kwargs.py new file mode 100644 index 0000000..290f991 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_kwargs.py @@ -0,0 +1,147 @@ +u""" +Fixer for Python 3 function 
parameter syntax +This fixer is rather sensitive to incorrect py3k syntax. +""" + +# Note: "relevant" parameters are parameters following the first STAR in the list. + +from lib2to3 import fixer_base +from lib2to3.fixer_util import token, String, Newline, Comma, Name +from libfuturize.fixer_util import indentation, suitify, DoubleStar + +_assign_template = u"%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']" +_if_template = u"if '%(name)s' in %(kwargs)s: %(assign)s" +_else_template = u"else: %(name)s = %(default)s" +_kwargs_default_name = u"_3to2kwargs" + +def gen_params(raw_params): + u""" + Generator that yields tuples of (name, default_value) for each parameter in the list + If no default is given, then it is default_value is None (not Leaf(token.NAME, 'None')) + """ + assert raw_params[0].type == token.STAR and len(raw_params) > 2 + curr_idx = 2 # the first place a keyword-only parameter name can be is index 2 + max_idx = len(raw_params) + while curr_idx < max_idx: + curr_item = raw_params[curr_idx] + prev_item = curr_item.prev_sibling + if curr_item.type != token.NAME: + curr_idx += 1 + continue + if prev_item is not None and prev_item.type == token.DOUBLESTAR: + break + name = curr_item.value + nxt = curr_item.next_sibling + if nxt is not None and nxt.type == token.EQUAL: + default_value = nxt.next_sibling + curr_idx += 2 + else: + default_value = None + yield (name, default_value) + curr_idx += 1 + +def remove_params(raw_params, kwargs_default=_kwargs_default_name): + u""" + Removes all keyword-only args from the params list and a bare star, if any. + Does not add the kwargs dict if needed. + Returns True if more action is needed, False if not + (more action is needed if no kwargs dict exists) + """ + assert raw_params[0].type == token.STAR + if raw_params[1].type == token.COMMA: + raw_params[0].remove() + raw_params[1].remove() + kw_params = raw_params[2:] + else: + kw_params = raw_params[3:] + for param in kw_params: + if param.type != token.DOUBLESTAR: + param.remove() + else: + return False + else: + return True + +def needs_fixing(raw_params, kwargs_default=_kwargs_default_name): + u""" + Returns string with the name of the kwargs dict if the params after the first star need fixing + Otherwise returns empty string + """ + found_kwargs = False + needs_fix = False + + for t in raw_params[2:]: + if t.type == token.COMMA: + # Commas are irrelevant at this stage. + continue + elif t.type == token.NAME and not found_kwargs: + # Keyword-only argument: definitely need to fix. + needs_fix = True + elif t.type == token.NAME and found_kwargs: + # Return 'foobar' of **foobar, if needed. + return t.value if needs_fix else u'' + elif t.type == token.DOUBLESTAR: + # Found either '*' from **foobar. + found_kwargs = True + else: + # Never found **foobar. Return a synthetic name, if needed. + return kwargs_default if needs_fix else u'' + +class FixKwargs(fixer_base.BaseFix): + + run_order = 7 # Run after function annotations are removed + + PATTERN = u"funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >" + + def transform(self, node, results): + params_rawlist = results[u"params"] + for i, item in enumerate(params_rawlist): + if item.type == token.STAR: + params_rawlist = params_rawlist[i:] + break + else: + return + # params is guaranteed to be a list starting with *. + # if fixing is needed, there will be at least 3 items in this list: + # [STAR, COMMA, NAME] is the minimum that we need to worry about. 
+ new_kwargs = needs_fixing(params_rawlist) + # new_kwargs is the name of the kwargs dictionary. + if not new_kwargs: + return + suitify(node) + + # At this point, params_rawlist is guaranteed to be a list + # beginning with a star that includes at least one keyword-only param + # e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or + # [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME] + + # Anatomy of a funcdef: ['def', 'name', parameters, ':', suite] + # Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts] + # We need to insert our new stuff before the first_stmt and change the + # first_stmt's prefix. + + suite = node.children[4] + first_stmt = suite.children[2] + ident = indentation(first_stmt) + + for name, default_value in gen_params(params_rawlist): + if default_value is None: + suite.insert_child(2, Newline()) + suite.insert_child(2, String(_assign_template %{u'name':name, u'kwargs':new_kwargs}, prefix=ident)) + else: + suite.insert_child(2, Newline()) + suite.insert_child(2, String(_else_template %{u'name':name, u'default':default_value}, prefix=ident)) + suite.insert_child(2, Newline()) + suite.insert_child(2, String(_if_template %{u'assign':_assign_template %{u'name':name, u'kwargs':new_kwargs}, u'name':name, u'kwargs':new_kwargs}, prefix=ident)) + first_stmt.prefix = ident + suite.children[2].prefix = u"" + + # Now, we need to fix up the list of params. + + must_add_kwargs = remove_params(params_rawlist) + if must_add_kwargs: + arglist = results[u'arglist'] + if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA: + arglist.append_child(Comma()) + arglist.append_child(DoubleStar(prefix=u" ")) + arglist.append_child(Name(new_kwargs)) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_memoryview.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_memoryview.py new file mode 100644 index 0000000..a20f6f3 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_memoryview.py @@ -0,0 +1,21 @@ +u""" +Fixer for memoryview(s) -> buffer(s). +Explicit because some memoryview methods are invalid on buffer objects. +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name + + +class FixMemoryview(fixer_base.BaseFix): + + explicit = True # User must specify that they want this. 
+ + PATTERN = u""" + power< name='memoryview' trailer< '(' [any] ')' > + rest=any* > + """ + + def transform(self, node, results): + name = results[u"name"] + name.replace(Name(u"buffer", prefix=name.prefix)) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_metaclass.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_metaclass.py new file mode 100644 index 0000000..52dd1d1 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_metaclass.py @@ -0,0 +1,78 @@ +u""" +Fixer for (metaclass=X) -> __metaclass__ = X +Some semantics (see PEP 3115) may be altered in the translation.""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, syms, Node, Leaf, Newline, find_root +from lib2to3.pygram import token +from libfuturize.fixer_util import indentation, suitify +# from ..fixer_util import Name, syms, Node, Leaf, Newline, find_root, indentation, suitify + +def has_metaclass(parent): + results = None + for node in parent.children: + kids = node.children + if node.type == syms.argument: + if kids[0] == Leaf(token.NAME, u"metaclass") and \ + kids[1] == Leaf(token.EQUAL, u"=") and \ + kids[2]: + #Hack to avoid "class X(=):" with this case. + results = [node] + kids + break + elif node.type == syms.arglist: + # Argument list... loop through it looking for: + # Node(*, [*, Leaf(token.NAME, u"metaclass"), Leaf(token.EQUAL, u"="), Leaf(*, *)] + for child in node.children: + if results: break + if child.type == token.COMMA: + #Store the last comma, which precedes the metaclass + comma = child + elif type(child) == Node: + meta = equal = name = None + for arg in child.children: + if arg == Leaf(token.NAME, u"metaclass"): + #We have the (metaclass) part + meta = arg + elif meta and arg == Leaf(token.EQUAL, u"="): + #We have the (metaclass=) part + equal = arg + elif meta and equal: + #Here we go, we have (metaclass=X) + name = arg + results = (comma, meta, equal, name) + break + return results + + +class FixMetaclass(fixer_base.BaseFix): + + PATTERN = u""" + classdef + """ + + def transform(self, node, results): + meta_results = has_metaclass(node) + if not meta_results: return + for meta in meta_results: + meta.remove() + target = Leaf(token.NAME, u"__metaclass__") + equal = Leaf(token.EQUAL, u"=", prefix=u" ") + # meta is the last item in what was returned by has_metaclass(): name + name = meta + name.prefix = u" " + stmt_node = Node(syms.atom, [target, equal, name]) + + suitify(node) + for item in node.children: + if item.type == syms.suite: + for stmt in item.children: + if stmt.type == token.INDENT: + # Insert, in reverse order, the statement, a newline, + # and an indent right after the first indented line + loc = item.children.index(stmt) + 1 + # Keep consistent indentation form + ident = Leaf(token.INDENT, stmt.value) + item.insert_child(loc, ident) + item.insert_child(loc, Newline()) + item.insert_child(loc, stmt_node) + break diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_newstyle.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_newstyle.py new file mode 100644 index 0000000..cc6b3ad --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_newstyle.py @@ -0,0 +1,33 @@ +u""" +Fixer for "class Foo: ..." -> "class Foo(object): ..." 
+""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import LParen, RParen, Name + +from libfuturize.fixer_util import touch_import_top + + +def insert_object(node, idx): + node.insert_child(idx, RParen()) + node.insert_child(idx, Name(u"object")) + node.insert_child(idx, LParen()) + +class FixNewstyle(fixer_base.BaseFix): + + # Match: + # class Blah: + # and: + # class Blah(): + + PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >" + + def transform(self, node, results): + colon = results[u"colon"] + idx = node.children.index(colon) + if (node.children[idx-2].value == '(' and + node.children[idx-1].value == ')'): + del node.children[idx-2:idx] + idx -= 2 + insert_object(node, idx) + touch_import_top(u'builtins', 'object', node) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_next.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_next.py new file mode 100644 index 0000000..9ecb6c0 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_next.py @@ -0,0 +1,43 @@ +u""" +Fixer for: +it.__next__() -> it.next(). +next(it) -> it.next(). +""" + +from lib2to3.pgen2 import token +from lib2to3.pygram import python_symbols as syms +from lib2to3 import fixer_base +from lib2to3.fixer_util import Name, Call, find_binding, Attr + +bind_warning = u"Calls to builtin next() possibly shadowed by global binding" + + +class FixNext(fixer_base.BaseFix): + + PATTERN = u""" + power< base=any+ trailer< '.' attr='__next__' > any* > + | + power< head='next' trailer< '(' arg=any ')' > any* > + | + classdef< 'class' base=any+ ':' + suite< any* + funcdef< 'def' + attr='__next__' + parameters< '(' NAME ')' > any+ > + any* > > + """ + + def transform(self, node, results): + assert results + + base = results.get(u"base") + attr = results.get(u"attr") + head = results.get(u"head") + arg_ = results.get(u"arg") + if arg_: + arg = arg_.clone() + head.replace(Attr(Name(unicode(arg),prefix=head.prefix), + Name(u"next"))) + arg_.remove() + elif base: + attr.replace(Name(u"next", prefix=attr.prefix)) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_printfunction.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_printfunction.py new file mode 100644 index 0000000..a2a6e08 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_printfunction.py @@ -0,0 +1,17 @@ +u""" +Fixer for print: from __future__ import print_function. +""" + +from lib2to3 import fixer_base +from libfuturize.fixer_util import future_import + +class FixPrintfunction(fixer_base.BaseFix): + + # explicit = True + + PATTERN = u""" + power< 'print' trailer < '(' any* ')' > any* > + """ + + def transform(self, node, results): + future_import(u"print_function", node) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_raise.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_raise.py new file mode 100644 index 0000000..9c9c192 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_raise.py @@ -0,0 +1,25 @@ +u"""Fixer for 'raise E(V).with_traceback(T)' -> 'raise E, V, T'""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Comma, Node, Leaf, token, syms + +class FixRaise(fixer_base.BaseFix): + + PATTERN = u""" + raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >] + [trailer< '.' 
'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >""" + + def transform(self, node, results): + name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc")) + chain = results.get(u"chain") + if chain is not None: + self.warning(node, u"explicit exception chaining is not supported in Python 2") + chain.prev_sibling.remove() + chain.remove() + if trc is not None: + val = val[0] if val else Leaf(token.NAME, u"None") + val.prefix = trc.prefix = u" " + kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(), + val.clone(), Comma(), trc.clone()] + raise_stmt = Node(syms.raise_stmt, kids) + node.replace(raise_stmt) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_raise_.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_raise_.py new file mode 100644 index 0000000..0f020c4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_raise_.py @@ -0,0 +1,35 @@ +u"""Fixer for + raise E(V).with_traceback(T) + to: + from future.utils import raise_ + ... + raise_(E, V, T) + +TODO: FIXME!! + +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import Comma, Node, Leaf, token, syms + +class FixRaise(fixer_base.BaseFix): + + PATTERN = u""" + raise_stmt< 'raise' (power< name=any [trailer< '(' val=any* ')' >] + [trailer< '.' 'with_traceback' > trailer< '(' trc=any ')' >] > | any) ['from' chain=any] >""" + + def transform(self, node, results): + FIXME + name, val, trc = (results.get(u"name"), results.get(u"val"), results.get(u"trc")) + chain = results.get(u"chain") + if chain is not None: + self.warning(node, u"explicit exception chaining is not supported in Python 2") + chain.prev_sibling.remove() + chain.remove() + if trc is not None: + val = val[0] if val else Leaf(token.NAME, u"None") + val.prefix = trc.prefix = u" " + kids = [Leaf(token.NAME, u"raise"), name.clone(), Comma(), + val.clone(), Comma(), trc.clone()] + raise_stmt = Node(syms.raise_stmt, kids) + node.replace(raise_stmt) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_throw.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_throw.py new file mode 100644 index 0000000..c0feed1 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_throw.py @@ -0,0 +1,23 @@ +u"""Fixer for 'g.throw(E(V).with_traceback(T))' -> 'g.throw(E, V, T)'""" + +from lib2to3 import fixer_base +from lib2to3.pytree import Node, Leaf +from lib2to3.pgen2 import token +from lib2to3.fixer_util import Comma + +class FixThrow(fixer_base.BaseFix): + + PATTERN = u""" + power< any trailer< '.' 'throw' > + trailer< '(' args=power< exc=any trailer< '(' val=any* ')' > + trailer< '.' 'with_traceback' > trailer< '(' trc=any ')' > > ')' > > + """ + + def transform(self, node, results): + syms = self.syms + exc, val, trc = (results[u"exc"], results[u"val"], results[u"trc"]) + val = val[0] if val else Leaf(token.NAME, u"None") + val.prefix = trc.prefix = u" " + kids = [exc.clone(), Comma(), val.clone(), Comma(), trc.clone()] + args = results[u"args"] + args.children = kids diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_unpacking.py b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_unpacking.py new file mode 100644 index 0000000..c2d3207 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/fixes/fix_unpacking.py @@ -0,0 +1,120 @@ +u""" +Fixer for: +(a,)* *b (,c)* [,] = s +for (a,)* *b (,c)* [,] in d: ... 
+""" + +from lib2to3 import fixer_base +from itertools import count +from lib2to3.fixer_util import (Assign, Comma, Call, Newline, Name, + Number, token, syms, Node, Leaf) +from libfuturize.fixer_util import indentation, suitify, commatize +# from libfuturize.fixer_util import Assign, Comma, Call, Newline, Name, Number, indentation, suitify, commatize, token, syms, Node, Leaf + +def assignment_source(num_pre, num_post, LISTNAME, ITERNAME): + u""" + Accepts num_pre and num_post, which are counts of values + before and after the starg (not including the starg) + Returns a source fit for Assign() from fixer_util + """ + children = [] + pre = unicode(num_pre) + post = unicode(num_post) + # This code builds the assignment source from lib2to3 tree primitives. + # It's not very readable, but it seems like the most correct way to do it. + if num_pre > 0: + pre_part = Node(syms.power, [Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Leaf(token.COLON, u":"), Number(pre)]), Leaf(token.RSQB, u"]")])]) + children.append(pre_part) + children.append(Leaf(token.PLUS, u"+", prefix=u" ")) + main_part = Node(syms.power, [Leaf(token.LSQB, u"[", prefix=u" "), Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Number(pre) if num_pre > 0 else Leaf(1, u""), Leaf(token.COLON, u":"), Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]) if num_post > 0 else Leaf(1, u"")]), Leaf(token.RSQB, u"]"), Leaf(token.RSQB, u"]")])]) + children.append(main_part) + if num_post > 0: + children.append(Leaf(token.PLUS, u"+", prefix=u" ")) + post_part = Node(syms.power, [Name(LISTNAME, prefix=u" "), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]), Leaf(token.COLON, u":")]), Leaf(token.RSQB, u"]")])]) + children.append(post_part) + source = Node(syms.arith_expr, children) + return source + +class FixUnpacking(fixer_base.BaseFix): + + PATTERN = u""" + expl=expr_stmt< testlist_star_expr< + pre=(any ',')* + star_expr< '*' name=NAME > + post=(',' any)* [','] > '=' source=any > | + impl=for_stmt< 'for' lst=exprlist< + pre=(any ',')* + star_expr< '*' name=NAME > + post=(',' any)* [','] > 'in' it=any ':' suite=any>""" + + def fix_explicit_context(self, node, results): + pre, name, post, source = (results.get(n) for n in (u"pre", u"name", u"post", u"source")) + pre = [n.clone() for n in pre if n.type == token.NAME] + name.prefix = u" " + post = [n.clone() for n in post if n.type == token.NAME] + target = [n.clone() for n in commatize(pre + [name.clone()] + post)] + # to make the special-case fix for "*z, = ..." correct with the least + # amount of modification, make the left-side into a guaranteed tuple + target.append(Comma()) + source.prefix = u"" + setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [source.clone()])) + power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME)) + return setup_line, power_line + + def fix_implicit_context(self, node, results): + u""" + Only example of the implicit context is + a for loop, so only fix that. + """ + pre, name, post, it = (results.get(n) for n in (u"pre", u"name", u"post", u"it")) + pre = [n.clone() for n in pre if n.type == token.NAME] + name.prefix = u" " + post = [n.clone() for n in post if n.type == token.NAME] + target = [n.clone() for n in commatize(pre + [name.clone()] + post)] + # to make the special-case fix for "*z, = ..." 
correct with the least + # amount of modification, make the left-side into a guaranteed tuple + target.append(Comma()) + source = it.clone() + source.prefix = u"" + setup_line = Assign(Name(self.LISTNAME), Call(Name(u"list"), [Name(self.ITERNAME)])) + power_line = Assign(target, assignment_source(len(pre), len(post), self.LISTNAME, self.ITERNAME)) + return setup_line, power_line + + def transform(self, node, results): + u""" + a,b,c,d,e,f,*g,h,i = range(100) changes to + _3to2list = list(range(100)) + a,b,c,d,e,f,g,h,i, = _3to2list[:6] + [_3to2list[6:-2]] + _3to2list[-2:] + + and + + for a,b,*c,d,e in iter_of_iters: do_stuff changes to + for _3to2iter in iter_of_iters: + _3to2list = list(_3to2iter) + a,b,c,d,e, = _3to2list[:2] + [_3to2list[2:-2]] + _3to2list[-2:] + do_stuff + """ + self.LISTNAME = self.new_name(u"_3to2list") + self.ITERNAME = self.new_name(u"_3to2iter") + expl, impl = results.get(u"expl"), results.get(u"impl") + if expl is not None: + setup_line, power_line = self.fix_explicit_context(node, results) + setup_line.prefix = expl.prefix + power_line.prefix = indentation(expl.parent) + setup_line.append_child(Newline()) + parent = node.parent + i = node.remove() + parent.insert_child(i, power_line) + parent.insert_child(i, setup_line) + elif impl is not None: + setup_line, power_line = self.fix_implicit_context(node, results) + suitify(node) + suite = [k for k in node.children if k.type == syms.suite][0] + setup_line.prefix = u"" + power_line.prefix = suite.children[1].value + suite.children[2].prefix = indentation(suite.children[2]) + suite.insert_child(2, Newline()) + suite.insert_child(2, power_line) + suite.insert_child(2, Newline()) + suite.insert_child(2, setup_line) + results.get(u"lst").replace(Name(self.ITERNAME, prefix=u" ")) diff --git a/minor_project/lib/python3.6/site-packages/libpasteurize/main.py b/minor_project/lib/python3.6/site-packages/libpasteurize/main.py new file mode 100644 index 0000000..4179174 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/libpasteurize/main.py @@ -0,0 +1,204 @@ +""" +pasteurize: automatic conversion of Python 3 code to clean 2/3 code +=================================================================== + +``pasteurize`` attempts to convert existing Python 3 code into source-compatible +Python 2 and 3 code. + +Use it like this on Python 3 code: + + $ pasteurize --verbose mypython3script.py + +This removes any Py3-only syntax (e.g. new metaclasses) and adds these +import lines: + + from __future__ import absolute_import + from __future__ import division + from __future__ import print_function + from __future__ import unicode_literals + from future import standard_library + standard_library.install_hooks() + from builtins import * + +To write changes to the files, use the -w flag. + +It also adds any other wrappers needed for Py2/3 compatibility. + +Note that separate stages are not available (or needed) when converting from +Python 3 with ``pasteurize`` as they are when converting from Python 2 with +``futurize``. + +The --all-imports option forces adding all ``__future__`` imports, +``builtins`` imports, and standard library aliases, even if they don't +seem necessary for the current state of each module. (This can simplify +testing, and can reduce the need to think about Py2 compatibility when editing +the code further.) 
+ +""" + +from __future__ import (absolute_import, print_function, unicode_literals) + +import sys +import logging +import optparse +from lib2to3.main import main, warn, StdoutRefactoringTool +from lib2to3 import refactor + +from future import __version__ +from libpasteurize.fixes import fix_names + + +def main(args=None): + """Main program. + + Returns a suggested exit status (0, 1, 2). + """ + # Set up option parser + parser = optparse.OptionParser(usage="pasteurize [options] file|dir ...") + parser.add_option("-V", "--version", action="store_true", + help="Report the version number of pasteurize") + parser.add_option("-a", "--all-imports", action="store_true", + help="Adds all __future__ and future imports to each module") + parser.add_option("-f", "--fix", action="append", default=[], + help="Each FIX specifies a transformation; default: all") + parser.add_option("-j", "--processes", action="store", default=1, + type="int", help="Run 2to3 concurrently") + parser.add_option("-x", "--nofix", action="append", default=[], + help="Prevent a fixer from being run.") + parser.add_option("-l", "--list-fixes", action="store_true", + help="List available transformations") + # parser.add_option("-p", "--print-function", action="store_true", + # help="Modify the grammar so that print() is a function") + parser.add_option("-v", "--verbose", action="store_true", + help="More verbose logging") + parser.add_option("--no-diffs", action="store_true", + help="Don't show diffs of the refactoring") + parser.add_option("-w", "--write", action="store_true", + help="Write back modified files") + parser.add_option("-n", "--nobackups", action="store_true", default=False, + help="Don't write backups for modified files.") + + # Parse command line arguments + refactor_stdin = False + flags = {} + options, args = parser.parse_args(args) + fixer_pkg = 'libpasteurize.fixes' + avail_fixes = fix_names + flags["print_function"] = True + + if not options.write and options.no_diffs: + warn("not writing files and not printing diffs; that's not very useful") + if not options.write and options.nobackups: + parser.error("Can't use -n without -w") + if options.version: + print(__version__) + return 0 + if options.list_fixes: + print("Available transformations for the -f/--fix option:") + for fixname in sorted(avail_fixes): + print(fixname) + if not args: + return 0 + if not args: + print("At least one file or directory argument required.", + file=sys.stderr) + print("Use --help to show usage.", file=sys.stderr) + return 2 + if "-" in args: + refactor_stdin = True + if options.write: + print("Can't write to stdin.", file=sys.stderr) + return 2 + + # Set up logging handler + level = logging.DEBUG if options.verbose else logging.INFO + logging.basicConfig(format='%(name)s: %(message)s', level=level) + + unwanted_fixes = set() + for fix in options.nofix: + if ".fix_" in fix: + unwanted_fixes.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. 
Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + unwanted_fixes.add(found[0]) + + extra_fixes = set() + if options.all_imports: + prefix = 'libpasteurize.fixes.' + extra_fixes.add(prefix + 'fix_add_all__future__imports') + extra_fixes.add(prefix + 'fix_add_future_standard_library_import') + extra_fixes.add(prefix + 'fix_add_all_future_builtins') + + explicit = set() + if options.fix: + all_present = False + for fix in options.fix: + if fix == 'all': + all_present = True + else: + if ".fix_" in fix: + explicit.add(fix) + else: + # Infer the full module name for the fixer. + # First ensure that no names clash (e.g. + # lib2to3.fixes.fix_blah and libpasteurize.fixes.fix_blah): + found = [f for f in avail_fixes + if f.endswith('fix_{0}'.format(fix))] + if len(found) > 1: + print("Ambiguous fixer name. Choose a fully qualified " + "module name instead from these:\n" + + "\n".join(" " + myf for myf in found), + file=sys.stderr) + return 2 + elif len(found) == 0: + print("Unknown fixer. Use --list-fixes or -l for a list.", + file=sys.stderr) + return 2 + explicit.add(found[0]) + if len(explicit & unwanted_fixes) > 0: + print("Conflicting usage: the following fixers have been " + "simultaneously requested and disallowed:\n" + + "\n".join(" " + myf for myf in (explicit & unwanted_fixes)), + file=sys.stderr) + return 2 + requested = avail_fixes.union(explicit) if all_present else explicit + else: + requested = avail_fixes.union(explicit) + + fixer_names = requested | extra_fixes - unwanted_fixes + + # Initialize the refactoring tool + rt = StdoutRefactoringTool(sorted(fixer_names), flags, set(), + options.nobackups, not options.no_diffs) + + # Refactor all files and directories passed as arguments + if not rt.errors: + if refactor_stdin: + rt.refactor_stdin() + else: + try: + rt.refactor(args, options.write, None, + options.processes) + except refactor.MultiprocessingUnsupported: + assert options.processes > 1 + print("Sorry, -j isn't " \ + "supported on this platform.", file=sys.stderr) + return 1 + rt.summarize() + + # Return error status (0 if rt.errors is zero) + return int(bool(rt.errors)) diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3-py3.6-nspkg.pth b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3-py3.6-nspkg.pth new file mode 100644 index 0000000..2137841 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3-py3.6-nspkg.pth @@ -0,0 +1 @@ +import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('mpl_toolkits',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('mpl_toolkits', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('mpl_toolkits', [os.path.dirname(p)])));m = m or sys.modules.setdefault('mpl_toolkits', types.ModuleType('mpl_toolkits'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/INSTALLER b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE new file mode 100644 
index 0000000..ec51537 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE @@ -0,0 +1,99 @@ +License agreement for matplotlib versions 1.3.0 and later +========================================================= + +1. This LICENSE AGREEMENT is between the Matplotlib Development Team +("MDT"), and the Individual or Organization ("Licensee") accessing and +otherwise using matplotlib software in source or binary form and its +associated documentation. + +2. Subject to the terms and conditions of this License Agreement, MDT +hereby grants Licensee a nonexclusive, royalty-free, world-wide license +to reproduce, analyze, test, perform and/or display publicly, prepare +derivative works, distribute, and otherwise use matplotlib +alone or in any derivative version, provided, however, that MDT's +License Agreement and MDT's notice of copyright, i.e., "Copyright (c) +2012- Matplotlib Development Team; All Rights Reserved" are retained in +matplotlib alone or in any derivative version prepared by +Licensee. + +3. In the event Licensee prepares a derivative work that is based on or +incorporates matplotlib or any part thereof, and wants to +make the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to matplotlib . + +4. MDT is making matplotlib available to Licensee on an "AS +IS" basis. MDT MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, MDT MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB +WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + +5. MDT SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR +LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING +MATPLOTLIB , OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF +THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between MDT and +Licensee. This License Agreement does not grant permission to use MDT +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using matplotlib , +Licensee agrees to be bound by the terms and conditions of this License +Agreement. + +License agreement for matplotlib versions prior to 1.3.0 +======================================================== + +1. This LICENSE AGREEMENT is between John D. Hunter ("JDH"), and the +Individual or Organization ("Licensee") accessing and otherwise using +matplotlib software in source or binary form and its associated +documentation. + +2. Subject to the terms and conditions of this License Agreement, JDH +hereby grants Licensee a nonexclusive, royalty-free, world-wide license +to reproduce, analyze, test, perform and/or display publicly, prepare +derivative works, distribute, and otherwise use matplotlib +alone or in any derivative version, provided, however, that JDH's +License Agreement and JDH's notice of copyright, i.e., "Copyright (c) +2002-2011 John D. Hunter; All Rights Reserved" are retained in +matplotlib alone or in any derivative version prepared by +Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on or +incorporates matplotlib or any part thereof, and wants to +make the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to matplotlib. + +4. JDH is making matplotlib available to Licensee on an "AS +IS" basis. JDH MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, JDH MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB +WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + +5. JDH SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR +LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING +MATPLOTLIB , OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF +THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between JDH and +Licensee. This License Agreement does not grant permission to use JDH +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using matplotlib, +Licensee agrees to be bound by the terms and conditions of this License +Agreement. \ No newline at end of file diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_AMSFONTS b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_AMSFONTS new file mode 100644 index 0000000..3627bb9 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_AMSFONTS @@ -0,0 +1,240 @@ +The cmr10.pfb file is a Type-1 version of one of Knuth's Computer Modern fonts. +It is included here as test data only, but the following license applies. + +Copyright (c) 1997, 2009, American Mathematical Society (http://www.ams.org). +All Rights Reserved. + +"cmb10" is a Reserved Font Name for this Font Software. +"cmbsy10" is a Reserved Font Name for this Font Software. +"cmbsy5" is a Reserved Font Name for this Font Software. +"cmbsy6" is a Reserved Font Name for this Font Software. +"cmbsy7" is a Reserved Font Name for this Font Software. +"cmbsy8" is a Reserved Font Name for this Font Software. +"cmbsy9" is a Reserved Font Name for this Font Software. +"cmbx10" is a Reserved Font Name for this Font Software. +"cmbx12" is a Reserved Font Name for this Font Software. +"cmbx5" is a Reserved Font Name for this Font Software. +"cmbx6" is a Reserved Font Name for this Font Software. +"cmbx7" is a Reserved Font Name for this Font Software. +"cmbx8" is a Reserved Font Name for this Font Software. +"cmbx9" is a Reserved Font Name for this Font Software. +"cmbxsl10" is a Reserved Font Name for this Font Software. +"cmbxti10" is a Reserved Font Name for this Font Software. +"cmcsc10" is a Reserved Font Name for this Font Software. +"cmcsc8" is a Reserved Font Name for this Font Software. +"cmcsc9" is a Reserved Font Name for this Font Software. +"cmdunh10" is a Reserved Font Name for this Font Software. +"cmex10" is a Reserved Font Name for this Font Software. +"cmex7" is a Reserved Font Name for this Font Software. +"cmex8" is a Reserved Font Name for this Font Software. 
+"cmex9" is a Reserved Font Name for this Font Software. +"cmff10" is a Reserved Font Name for this Font Software. +"cmfi10" is a Reserved Font Name for this Font Software. +"cmfib8" is a Reserved Font Name for this Font Software. +"cminch" is a Reserved Font Name for this Font Software. +"cmitt10" is a Reserved Font Name for this Font Software. +"cmmi10" is a Reserved Font Name for this Font Software. +"cmmi12" is a Reserved Font Name for this Font Software. +"cmmi5" is a Reserved Font Name for this Font Software. +"cmmi6" is a Reserved Font Name for this Font Software. +"cmmi7" is a Reserved Font Name for this Font Software. +"cmmi8" is a Reserved Font Name for this Font Software. +"cmmi9" is a Reserved Font Name for this Font Software. +"cmmib10" is a Reserved Font Name for this Font Software. +"cmmib5" is a Reserved Font Name for this Font Software. +"cmmib6" is a Reserved Font Name for this Font Software. +"cmmib7" is a Reserved Font Name for this Font Software. +"cmmib8" is a Reserved Font Name for this Font Software. +"cmmib9" is a Reserved Font Name for this Font Software. +"cmr10" is a Reserved Font Name for this Font Software. +"cmr12" is a Reserved Font Name for this Font Software. +"cmr17" is a Reserved Font Name for this Font Software. +"cmr5" is a Reserved Font Name for this Font Software. +"cmr6" is a Reserved Font Name for this Font Software. +"cmr7" is a Reserved Font Name for this Font Software. +"cmr8" is a Reserved Font Name for this Font Software. +"cmr9" is a Reserved Font Name for this Font Software. +"cmsl10" is a Reserved Font Name for this Font Software. +"cmsl12" is a Reserved Font Name for this Font Software. +"cmsl8" is a Reserved Font Name for this Font Software. +"cmsl9" is a Reserved Font Name for this Font Software. +"cmsltt10" is a Reserved Font Name for this Font Software. +"cmss10" is a Reserved Font Name for this Font Software. +"cmss12" is a Reserved Font Name for this Font Software. +"cmss17" is a Reserved Font Name for this Font Software. +"cmss8" is a Reserved Font Name for this Font Software. +"cmss9" is a Reserved Font Name for this Font Software. +"cmssbx10" is a Reserved Font Name for this Font Software. +"cmssdc10" is a Reserved Font Name for this Font Software. +"cmssi10" is a Reserved Font Name for this Font Software. +"cmssi12" is a Reserved Font Name for this Font Software. +"cmssi17" is a Reserved Font Name for this Font Software. +"cmssi8" is a Reserved Font Name for this Font Software. +"cmssi9" is a Reserved Font Name for this Font Software. +"cmssq8" is a Reserved Font Name for this Font Software. +"cmssqi8" is a Reserved Font Name for this Font Software. +"cmsy10" is a Reserved Font Name for this Font Software. +"cmsy5" is a Reserved Font Name for this Font Software. +"cmsy6" is a Reserved Font Name for this Font Software. +"cmsy7" is a Reserved Font Name for this Font Software. +"cmsy8" is a Reserved Font Name for this Font Software. +"cmsy9" is a Reserved Font Name for this Font Software. +"cmtcsc10" is a Reserved Font Name for this Font Software. +"cmtex10" is a Reserved Font Name for this Font Software. +"cmtex8" is a Reserved Font Name for this Font Software. +"cmtex9" is a Reserved Font Name for this Font Software. +"cmti10" is a Reserved Font Name for this Font Software. +"cmti12" is a Reserved Font Name for this Font Software. +"cmti7" is a Reserved Font Name for this Font Software. +"cmti8" is a Reserved Font Name for this Font Software. +"cmti9" is a Reserved Font Name for this Font Software. 
+"cmtt10" is a Reserved Font Name for this Font Software. +"cmtt12" is a Reserved Font Name for this Font Software. +"cmtt8" is a Reserved Font Name for this Font Software. +"cmtt9" is a Reserved Font Name for this Font Software. +"cmu10" is a Reserved Font Name for this Font Software. +"cmvtt10" is a Reserved Font Name for this Font Software. +"euex10" is a Reserved Font Name for this Font Software. +"euex7" is a Reserved Font Name for this Font Software. +"euex8" is a Reserved Font Name for this Font Software. +"euex9" is a Reserved Font Name for this Font Software. +"eufb10" is a Reserved Font Name for this Font Software. +"eufb5" is a Reserved Font Name for this Font Software. +"eufb7" is a Reserved Font Name for this Font Software. +"eufm10" is a Reserved Font Name for this Font Software. +"eufm5" is a Reserved Font Name for this Font Software. +"eufm7" is a Reserved Font Name for this Font Software. +"eurb10" is a Reserved Font Name for this Font Software. +"eurb5" is a Reserved Font Name for this Font Software. +"eurb7" is a Reserved Font Name for this Font Software. +"eurm10" is a Reserved Font Name for this Font Software. +"eurm5" is a Reserved Font Name for this Font Software. +"eurm7" is a Reserved Font Name for this Font Software. +"eusb10" is a Reserved Font Name for this Font Software. +"eusb5" is a Reserved Font Name for this Font Software. +"eusb7" is a Reserved Font Name for this Font Software. +"eusm10" is a Reserved Font Name for this Font Software. +"eusm5" is a Reserved Font Name for this Font Software. +"eusm7" is a Reserved Font Name for this Font Software. +"lasy10" is a Reserved Font Name for this Font Software. +"lasy5" is a Reserved Font Name for this Font Software. +"lasy6" is a Reserved Font Name for this Font Software. +"lasy7" is a Reserved Font Name for this Font Software. +"lasy8" is a Reserved Font Name for this Font Software. +"lasy9" is a Reserved Font Name for this Font Software. +"lasyb10" is a Reserved Font Name for this Font Software. +"lcircle1" is a Reserved Font Name for this Font Software. +"lcirclew" is a Reserved Font Name for this Font Software. +"lcmss8" is a Reserved Font Name for this Font Software. +"lcmssb8" is a Reserved Font Name for this Font Software. +"lcmssi8" is a Reserved Font Name for this Font Software. +"line10" is a Reserved Font Name for this Font Software. +"linew10" is a Reserved Font Name for this Font Software. +"msam10" is a Reserved Font Name for this Font Software. +"msam5" is a Reserved Font Name for this Font Software. +"msam6" is a Reserved Font Name for this Font Software. +"msam7" is a Reserved Font Name for this Font Software. +"msam8" is a Reserved Font Name for this Font Software. +"msam9" is a Reserved Font Name for this Font Software. +"msbm10" is a Reserved Font Name for this Font Software. +"msbm5" is a Reserved Font Name for this Font Software. +"msbm6" is a Reserved Font Name for this Font Software. +"msbm7" is a Reserved Font Name for this Font Software. +"msbm8" is a Reserved Font Name for this Font Software. +"msbm9" is a Reserved Font Name for this Font Software. +"wncyb10" is a Reserved Font Name for this Font Software. +"wncyi10" is a Reserved Font Name for this Font Software. +"wncyr10" is a Reserved Font Name for this Font Software. +"wncysc10" is a Reserved Font Name for this Font Software. +"wncyss10" is a Reserved Font Name for this Font Software. + +This Font Software is licensed under the SIL Open Font License, Version 1.1. 
+This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. 
The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_BAKOMA b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_BAKOMA new file mode 100644 index 0000000..801e20c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_BAKOMA @@ -0,0 +1,40 @@ + + BaKoMa Fonts Licence + -------------------- + + This licence covers two font packs (known as BaKoMa Fonts Colelction, + which is available at `CTAN:fonts/cm/ps-type1/bakoma/'): + + 1) BaKoMa-CM (1.1/12-Nov-94) + Computer Modern Fonts in PostScript Type 1 and TrueType font formats. + + 2) BaKoMa-AMS (1.2/19-Jan-95) + AMS TeX fonts in PostScript Type 1 and TrueType font formats. + + Copyright (C) 1994, 1995, Basil K. Malyshev. All Rights Reserved. + + Permission to copy and distribute these fonts for any purpose is + hereby granted without fee, provided that the above copyright notice, + author statement and this permission notice appear in all copies of + these fonts and related documentation. + + Permission to modify and distribute modified fonts for any purpose is + hereby granted without fee, provided that the copyright notice, + author statement, this permission notice and location of original + fonts (http://www.ctan.org/tex-archive/fonts/cm/ps-type1/bakoma) + appear in all copies of modified fonts and related documentation. + + Permission to use these fonts (embedding into PostScript, PDF, SVG + and printing by using any software) is hereby granted without fee. + It is not required to provide any notices about using these fonts. + + Basil K. Malyshev + INSTITUTE FOR HIGH ENERGY PHYSICS + IHEP, OMVT + Moscow Region + 142281 PROTVINO + RUSSIA + + E-Mail: bakoma@mail.ru + or malyshev@mail.ihep.ru + diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_CARLOGO b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_CARLOGO new file mode 100644 index 0000000..8c99c65 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_CARLOGO @@ -0,0 +1,45 @@ +----> we renamed carlito -> carlogo to comply with the terms <---- + +Copyright (c) 2010-2013 by tyPoland Lukasz Dziedzic with Reserved Font Name "Carlito". + +This Font Software is licensed under the SIL Open Font License, Version 1.1. 
+This license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide development of collaborative font projects, to support the font creation efforts of academic and linguistic communities, and to provide a free and open framework in which fonts may be shared and improved in partnership with others. + +The OFL allows the licensed fonts to be used, studied, modified and redistributed freely as long as they are not sold by themselves. The fonts, including any derivative works, can be bundled, embedded, redistributed and/or sold with any software provided that any reserved names are not used by derivative works. The fonts and derivatives, however, cannot be released under any other type of license. The requirement for fonts to remain under this license does not apply to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright Holder(s) under this license and clearly marked as such. This may include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the copyright statement(s). + +"Original Version" refers to the collection of Font Software components as distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, or substituting -- in part or in whole -- any of the components of the Original Version, by changing formats or by porting the Font Software to a new environment. + +"Author" refers to any designer, engineer, programmer, technical writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining a copy of the Font Software, to use, study, copy, merge, embed, modify, redistribute, and sell modified and unmodified copies of the Font Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, redistributed and/or sold with any software, provided that each copy contains the above copyright notice and this license. These can be included either as stand-alone text files, human-readable headers or in the appropriate machine-readable metadata fields within text or binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font Name(s) unless explicit written permission is granted by the corresponding Copyright Holder. This restriction only applies to the primary font name as presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font Software shall not be used to promote, endorse or advertise any Modified Version, except to acknowledge the contribution(s) of the Copyright Holder(s) and the Author(s) or with their explicit written permission. + +5) The Font Software, modified or unmodified, in part or in whole, must be distributed entirely under this license, and must not be distributed under any other license. 
The requirement for fonts to remain under this license does not apply to any document created using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE. \ No newline at end of file diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_COLORBREWER b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_COLORBREWER new file mode 100644 index 0000000..568afe8 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_COLORBREWER @@ -0,0 +1,38 @@ +Apache-Style Software License for ColorBrewer Color Schemes + +Version 1.1 + +Copyright (c) 2002 Cynthia Brewer, Mark Harrower, and The Pennsylvania +State University. All rights reserved. Redistribution and use in source +and binary forms, with or without modification, are permitted provided +that the following conditions are met: + +1. Redistributions as source code must retain the above copyright notice, +this list of conditions and the following disclaimer. + +2. The end-user documentation included with the redistribution, if any, +must include the following acknowledgment: "This product includes color +specifications and designs developed by Cynthia Brewer +(http://colorbrewer.org/)." Alternately, this acknowledgment may appear in +the software itself, if and wherever such third-party acknowledgments +normally appear. + +3. The name "ColorBrewer" must not be used to endorse or promote products +derived from this software without prior written permission. For written +permission, please contact Cynthia Brewer at cbrewer@psu.edu. + +4. Products derived from this software may not be called "ColorBrewer", +nor may "ColorBrewer" appear in their name, without prior written +permission of Cynthia Brewer. + +THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES, +INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +CYNTHIA BREWER, MARK HARROWER, OR THE PENNSYLVANIA STATE UNIVERSITY BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_JSXTOOLS_RESIZE_OBSERVER b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_JSXTOOLS_RESIZE_OBSERVER new file mode 100644 index 0000000..0bc1fa7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_JSXTOOLS_RESIZE_OBSERVER @@ -0,0 +1,108 @@ +# CC0 1.0 Universal + +## Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator and +subsequent owner(s) (each and all, an “owner”) of an original work of +authorship and/or a database (each, a “Work”). + +Certain owners wish to permanently relinquish those rights to a Work for the +purpose of contributing to a commons of creative, cultural and scientific works +(“Commons”) that the public can reliably and without fear of later claims of +infringement build upon, modify, incorporate in other works, reuse and +redistribute as freely as possible in any form whatsoever and for any purposes, +including without limitation commercial purposes. These owners may contribute +to the Commons to promote the ideal of a free culture and the further +production of creative, cultural and scientific works, or to gain reputation or +greater distribution for their Work in part through the use and efforts of +others. + +For these and/or other purposes and motivations, and without any expectation of +additional consideration or compensation, the person associating CC0 with a +Work (the “Affirmer”), to the extent that he or she is an owner of Copyright +and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and +publicly distribute the Work under its terms, with knowledge of his or her +Copyright and Related Rights in the Work and the meaning and intended legal +effect of CC0 on those rights. + +1. Copyright and Related Rights. A Work made available under CC0 may be + protected by copyright and related or neighboring rights (“Copyright and + Related Rights”). Copyright and Related Rights include, but are not limited + to, the following: + 1. the right to reproduce, adapt, distribute, perform, display, communicate, + and translate a Work; + 2. moral rights retained by the original author(s) and/or performer(s); + 3. publicity and privacy rights pertaining to a person’s image or likeness + depicted in a Work; + 4. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(i), below; + 5. rights protecting the extraction, dissemination, use and reuse of data in + a Work; + 6. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation thereof, + including any amended or successor version of such directive); and + 7. other similar, equivalent or corresponding rights throughout the world + based on applicable law or treaty, and any national implementations + thereof. + +2. Waiver.
To the greatest extent permitted by, but not in contravention of, + applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and + unconditionally waives, abandons, and surrenders all of Affirmer’s Copyright + and Related Rights and associated claims and causes of action, whether now + known or unknown (including existing as well as future claims and causes of + action), in the Work (i) in all territories worldwide, (ii) for the maximum + duration provided by applicable law or treaty (including future time + extensions), (iii) in any current or future medium and for any number of + copies, and (iv) for any purpose whatsoever, including without limitation + commercial, advertising or promotional purposes (the “Waiver”). Affirmer + makes the Waiver for the benefit of each member of the public at large and + to the detriment of Affirmer’s heirs and successors, fully intending that + such Waiver shall not be subject to revocation, rescission, cancellation, + termination, or any other legal or equitable action to disrupt the quiet + enjoyment of the Work by the public as contemplated by Affirmer’s express + Statement of Purpose. + +3. Public License Fallback. Should any part of the Waiver for any reason be + judged legally invalid or ineffective under applicable law, then the Waiver + shall be preserved to the maximum extent permitted taking into account + Affirmer’s express Statement of Purpose. In addition, to the extent the + Waiver is so judged Affirmer hereby grants to each affected person a + royalty-free, non transferable, non sublicensable, non exclusive, + irrevocable and unconditional license to exercise Affirmer’s Copyright and + Related Rights in the Work (i) in all territories worldwide, (ii) for the + maximum duration provided by applicable law or treaty (including future time + extensions), (iii) in any current or future medium and for any number of + copies, and (iv) for any purpose whatsoever, including without limitation + commercial, advertising or promotional purposes (the “License”). The License + shall be deemed effective as of the date CC0 was applied by Affirmer to the + Work. Should any part of the License for any reason be judged legally + invalid or ineffective under applicable law, such partial invalidity or + ineffectiveness shall not invalidate the remainder of the License, and in + such case Affirmer hereby affirms that he or she will not (i) exercise any + of his or her remaining Copyright and Related Rights in the Work or (ii) + assert any associated claims and causes of action with respect to the Work, + in either case contrary to Affirmer’s express Statement of Purpose. + +4. Limitations and Disclaimers. + 1. No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + 2. Affirmer offers the Work as-is and makes no representations or warranties + of any kind concerning the Work, express, implied, statutory or + otherwise, including without limitation warranties of title, + merchantability, fitness for a particular purpose, non infringement, or + the absence of latent or other defects, accuracy, or the present or + absence of errors, whether or not discoverable, all to the greatest + extent permissible under applicable law. + 3. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without + limitation any person’s Copyright and Related Rights in the Work.
+ Further, Affirmer disclaims responsibility for obtaining any necessary + consents, permissions or other rights required for any use of the Work. + 4. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to this + CC0 or use of the Work. + +For more information, please see +http://creativecommons.org/publicdomain/zero/1.0/. diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_QT4_EDITOR b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_QT4_EDITOR new file mode 100644 index 0000000..1c9d941 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_QT4_EDITOR @@ -0,0 +1,30 @@ + +Module creating PyQt4 form dialogs/layouts to edit various type of parameters + + +formlayout License Agreement (MIT License) +------------------------------------------ + +Copyright (c) 2009 Pierre Raybaut + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. +""" diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_SOLARIZED b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_SOLARIZED new file mode 100644 index 0000000..6e5a047 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_SOLARIZED @@ -0,0 +1,20 @@ +https://github.com/altercation/solarized/blob/master/LICENSE +Copyright (c) 2011 Ethan Schoonover + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_STIX b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_STIX new file mode 100644 index 0000000..2f7aeea --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_STIX @@ -0,0 +1,71 @@ +TERMS AND CONDITIONS + + 1. Permission is hereby granted, free of charge, to any person +obtaining a copy of the STIX Fonts-TM set accompanying this license +(collectively, the "Fonts") and the associated documentation files +(collectively with the Fonts, the "Font Software"), to reproduce and +distribute the Font Software, including the rights to use, copy, merge +and publish copies of the Font Software, and to permit persons to whom +the Font Software is furnished to do so same, subject to the following +terms and conditions (the "License"). + + 2. The following copyright and trademark notice and these Terms and +Conditions shall be included in all copies of one or more of the Font +typefaces and any derivative work created as permitted under this +License: + + Copyright (c) 2001-2005 by the STI Pub Companies, consisting of +the American Institute of Physics, the American Chemical Society, the +American Mathematical Society, the American Physical Society, Elsevier, +Inc., and The Institute of Electrical and Electronic Engineers, Inc. +Portions copyright (c) 1998-2003 by MicroPress, Inc. Portions copyright +(c) 1990 by Elsevier, Inc. All rights reserved. STIX Fonts-TM is a +trademark of The Institute of Electrical and Electronics Engineers, Inc. + + 3. You may (a) convert the Fonts from one format to another (e.g., +from TrueType to PostScript), in which case the normal and reasonable +distortion that occurs during such conversion shall be permitted and (b) +embed or include a subset of the Fonts in a document for the purposes of +allowing users to read text in the document that utilizes the Fonts. In +each case, you may use the STIX Fonts-TM mark to designate the resulting +Fonts or subset of the Fonts. + + 4. You may also (a) add glyphs or characters to the Fonts, or modify +the shape of existing glyphs, so long as the base set of glyphs is not +removed and (b) delete glyphs or characters from the Fonts, provided +that the resulting font set is distributed with the following +disclaimer: "This [name] font does not include all the Unicode points +covered in the STIX Fonts-TM set but may include others." In each case, +the name used to denote the resulting font set shall not include the +term "STIX" or any similar term. + + 5. You may charge a fee in connection with the distribution of the +Font Software, provided that no copy of one or more of the individual +Font typefaces that form the STIX Fonts-TM set may be sold by itself. + + 6. THE FONT SOFTWARE IS PROVIDED "AS IS," WITHOUT WARRANTY OF ANY +KIND, EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, ANY WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK OR OTHER RIGHT. IN NO EVENT SHALL +MICROPRESS OR ANY OF THE STI PUB COMPANIES BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, INCLUDING, BUT NOT LIMITED TO, ANY GENERAL, +SPECIAL, INDIRECT, INCIDENTAL OR CONSEQUENTIAL DAMAGES, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM OR OUT OF THE USE OR +INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT +SOFTWARE. + + 7. 
Except as contained in the notice set forth in Section 2, the +names MicroPress Inc. and STI Pub Companies, as well as the names of the +companies/organizations that compose the STI Pub Companies, shall not be +used in advertising or otherwise to promote the sale, use or other +dealings in the Font Software without the prior written consent of the +respective company or organization. + + 8. This License shall become null and void in the event of any +material breach of the Terms and Conditions herein by licensee. + + 9. A substantial portion of the STIX Fonts set was developed by +MicroPress Inc. for the STI Pub Companies. To obtain additional +mathematical fonts, please contact MicroPress, Inc., 68-30 Harrow +Street, Forest Hills, NY 11375, USA - Phone: (718) 575-1816. + diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_YORICK b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_YORICK new file mode 100644 index 0000000..8c90850 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/LICENSE_YORICK @@ -0,0 +1,49 @@ +BSD-style license for gist/yorick colormaps. + +Copyright: + + Copyright (c) 1996. The Regents of the University of California. + All rights reserved. + +Permission to use, copy, modify, and distribute this software for any +purpose without fee is hereby granted, provided that this entire +notice is included in all copies of any software which is or includes +a copy or modification of this software and in all copies of the +supporting documentation for such software. + +This work was produced at the University of California, Lawrence +Livermore National Laboratory under contract no. W-7405-ENG-48 between +the U.S. Department of Energy and The Regents of the University of +California for the operation of UC LLNL. + + + DISCLAIMER + +This software was prepared as an account of work sponsored by an +agency of the United States Government. Neither the United States +Government nor the University of California nor any of their +employees, makes any warranty, express or implied, or assumes any +liability or responsibility for the accuracy, completeness, or +usefulness of any information, apparatus, product, or process +disclosed, or represents that its use would not infringe +privately-owned rights. Reference herein to any specific commercial +products, process, or service by trade name, trademark, manufacturer, +or otherwise, does not necessarily constitute or imply its +endorsement, recommendation, or favoring by the United States +Government or the University of California. The views and opinions of +authors expressed herein do not necessarily state or reflect those of +the United States Government or the University of California, and +shall not be used for advertising or product endorsement purposes. + + + AUTHOR + +David H. Munro wrote Yorick and Gist. Berkeley Yacc (byacc) generated +the Yorick parser. The routines in Math are from LAPACK and FFTPACK; +MathC contains C translations by David H. Munro. The algorithms for +Yorick's random number generator and several special functions in +Yorick/include were taken from Numerical Recipes by Press, et. al., +although the Yorick implementations are unrelated to those in +Numerical Recipes. A small amount of code in Gist was adapted from +the X11R4 release, copyright M.I.T. -- the complete copyright notice +may be found in the (unused) file Gist/host.c. 
diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/METADATA b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/METADATA new file mode 100644 index 0000000..ed80f82 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/METADATA @@ -0,0 +1,139 @@ +Metadata-Version: 2.1 +Name: matplotlib +Version: 3.3.3 +Summary: Python plotting package +Home-page: https://matplotlib.org +Author: John D. Hunter, Michael Droettboom +Author-email: matplotlib-users@python.org +License: PSF +Download-URL: https://matplotlib.org/users/installing.html +Project-URL: Documentation, https://matplotlib.org +Project-URL: Source Code, https://github.com/matplotlib/matplotlib +Project-URL: Bug Tracker, https://github.com/matplotlib/matplotlib/issues +Project-URL: Forum, https://discourse.matplotlib.org/ +Project-URL: Donate, https://numfocus.org/donate-to-matplotlib +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: Matplotlib +Classifier: Intended Audience :: Science/Research +Classifier: Intended Audience :: Education +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Topic :: Scientific/Engineering :: Visualization +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +Requires-Dist: cycler (>=0.10) +Requires-Dist: kiwisolver (>=1.0.1) +Requires-Dist: numpy (>=1.15) +Requires-Dist: pillow (>=6.2.0) +Requires-Dist: pyparsing (!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.3) +Requires-Dist: python-dateutil (>=2.1) + +|PyPi|_ |Downloads|_ |NUMFocus|_ + +|DiscourseBadge|_ |Gitter|_ |GitHubIssues|_ |GitTutorial|_ + +|Travis|_ |AzurePipelines|_ |AppVeyor|_ |Codecov|_ |LGTM|_ + +.. |Travis| image:: https://travis-ci.com/matplotlib/matplotlib.svg?branch=master +.. _Travis: https://travis-ci.com/matplotlib/matplotlib + +.. |AzurePipelines| image:: https://dev.azure.com/matplotlib/matplotlib/_apis/build/status/matplotlib.matplotlib?branchName=master +.. _AzurePipelines: https://dev.azure.com/matplotlib/matplotlib/_build/latest?definitionId=1&branchName=master + +.. |AppVeyor| image:: https://ci.appveyor.com/api/projects/status/github/matplotlib/matplotlib?branch=master&svg=true +.. _AppVeyor: https://ci.appveyor.com/project/matplotlib/matplotlib + +.. |Codecov| image:: https://codecov.io/github/matplotlib/matplotlib/badge.svg?branch=master&service=github +.. _Codecov: https://codecov.io/github/matplotlib/matplotlib?branch=master + +.. |LGTM| image:: https://img.shields.io/lgtm/grade/python/g/matplotlib/matplotlib.svg?logo=lgtm&logoWidth=18 +.. _LGTM: https://lgtm.com/projects/g/matplotlib/matplotlib + +.. |DiscourseBadge| image:: https://img.shields.io/badge/help_forum-discourse-blue.svg +.. _DiscourseBadge: https://discourse.matplotlib.org + +.. |Gitter| image:: https://badges.gitter.im/matplotlib/matplotlib.svg +.. _Gitter: https://gitter.im/matplotlib/matplotlib + +.. |GitHubIssues| image:: https://img.shields.io/badge/issue_tracking-github-blue.svg +.. _GitHubIssues: https://github.com/matplotlib/matplotlib/issues + +.. |GitTutorial| image:: https://img.shields.io/badge/PR-Welcome-%23FF8300.svg? +.. _GitTutorial: https://git-scm.com/book/en/v2/GitHub-Contributing-to-a-Project + +.. 
|PyPi| image:: https://badge.fury.io/py/matplotlib.svg +.. _PyPi: https://badge.fury.io/py/matplotlib + +.. |Downloads| image:: https://pepy.tech/badge/matplotlib/month +.. _Downloads: https://pepy.tech/project/matplotlib/month + +.. |NUMFocus| image:: https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A +.. _NUMFocus: https://numfocus.org + +.. image:: https://matplotlib.org/_static/logo2.svg + +Matplotlib is a comprehensive library for creating static, animated, and interactive visualizations in Python. + +Check out our `home page `_ for more information. + +.. image:: https://matplotlib.org/_static/readme_preview.png + +Matplotlib produces publication-quality figures in a variety of hardcopy formats +and interactive environments across platforms. Matplotlib can be used in Python scripts, +the Python and IPython shell, web application servers, and various +graphical user interface toolkits. + + +Install +======= + +For installation instructions and requirements, see `INSTALL.rst `_ or the +`install `_ documentation. + +Test +==== + +After installation, launch the test suite:: + + python -m pytest + +Read the `testing guide `_ for more information and alternatives. + +Contribute +========== +You've discovered a bug or something else you want to change - excellent! + +You've worked out a way to fix it – even better! + +You want to tell us about it – best of all! + +Start at the `contributing guide `_! + +Contact +======= + +`Discourse `_ is the discussion forum for general questions and discussions and our recommended starting point. + +Our active mailing lists (which are mirrored on Discourse) are: + +* `Users `_ mailing list: matplotlib-users@python.org +* `Announcement `_ mailing list: matplotlib-announce@python.org +* `Development `_ mailing list: matplotlib-devel@python.org + +Gitter_ is for coordinating development and asking questions directly related +to contributing to matplotlib. + + +Citing Matplotlib +================= +If Matplotlib contributes to a project that leads to publication, please +acknowledge this by citing Matplotlib. + +`A ready-made citation entry `_ is available. 
+ + diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/RECORD b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/RECORD new file mode 100644 index 0000000..80b9d01 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/RECORD @@ -0,0 +1,824 @@ +__pycache__/pylab.cpython-36.pyc,, +matplotlib-3.3.3-py3.6-nspkg.pth,sha256=FgO_3ug071EXEKT8mgOPBUhyrswPtPCYjOpUCyau7UU,569 +matplotlib-3.3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +matplotlib-3.3.3.dist-info/LICENSE,sha256=WhqB6jAXKMi7opM9qDLAzWIina8giToCSrPVMkRGjbw,4830 +matplotlib-3.3.3.dist-info/LICENSE_AMSFONTS,sha256=FVFB1Zh38zj24cCAXem3mWTc5x_l0qVsROOLLA9-Ne4,12675 +matplotlib-3.3.3.dist-info/LICENSE_BAKOMA,sha256=WIfu5aAEHJn_BrjwP0Tc1zA8C_-NxwhOie4y32RY50s,1440 +matplotlib-3.3.3.dist-info/LICENSE_CARLOGO,sha256=YZAtXu803SSHC3KHqWJg0zKCM7lvcgK_cK1uKg2i3j8,4455 +matplotlib-3.3.3.dist-info/LICENSE_COLORBREWER,sha256=7FIbyIlwg2PD2R0pDZCClCN3gRfqJZABk-mOKfUiJAg,1968 +matplotlib-3.3.3.dist-info/LICENSE_JSXTOOLS_RESIZE_OBSERVER,sha256=WXdWrctR8kPvT7OGkgN39h0BKs4JBDZOGo7pquxq_IQ,6799 +matplotlib-3.3.3.dist-info/LICENSE_QT4_EDITOR,sha256=srUMqLYXKsojCVrfFduJ03J-nvLW7wF45CcjQBG-080,1230 +matplotlib-3.3.3.dist-info/LICENSE_SOLARIZED,sha256=EtUyf7xN-EWoaIPeme1f30GYRF1W26zfX62PDv3JdRM,1121 +matplotlib-3.3.3.dist-info/LICENSE_STIX,sha256=TMPvujo6YE62-TchHkbaHiFIgwBWpuCbzBnfQXDSUqQ,3914 +matplotlib-3.3.3.dist-info/LICENSE_YORICK,sha256=yrdT04wJNlHo3rWrtoTj7WgCDg5BgDT5TXnokNx66E0,2313 +matplotlib-3.3.3.dist-info/METADATA,sha256=1-kgHVQW6VnKGo9wqRwRJ-1WOwPQcxXxr7mMRrJJXPM,5662 +matplotlib-3.3.3.dist-info/RECORD,, +matplotlib-3.3.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib-3.3.3.dist-info/WHEEL,sha256=ZCgRoodM6cLv8Z9Z-kuTK9QreeQu8GHOdQFY_br7fEU,109 +matplotlib-3.3.3.dist-info/namespace_packages.txt,sha256=A2PHFg9NKYOU4pEQ1h97U0Qd-rB-65W34XqC-56ZN9g,13 +matplotlib-3.3.3.dist-info/top_level.txt,sha256=9tEw2ni8DdgX8CceoYHqSH1s50vrJ9SDfgtLIG8e3Y4,30 +matplotlib/__init__.py,sha256=sWC0DXgllHFP4RuZUOMPUtEtZ-YBYiQyRFOOJjsNNDE,50742 +matplotlib/__pycache__/__init__.cpython-36.pyc,, +matplotlib/__pycache__/_animation_data.cpython-36.pyc,, +matplotlib/__pycache__/_cm.cpython-36.pyc,, +matplotlib/__pycache__/_cm_listed.cpython-36.pyc,, +matplotlib/__pycache__/_color_data.cpython-36.pyc,, +matplotlib/__pycache__/_constrained_layout.cpython-36.pyc,, +matplotlib/__pycache__/_internal_utils.cpython-36.pyc,, +matplotlib/__pycache__/_layoutbox.cpython-36.pyc,, +matplotlib/__pycache__/_mathtext_data.cpython-36.pyc,, +matplotlib/__pycache__/_pylab_helpers.cpython-36.pyc,, +matplotlib/__pycache__/_text_layout.cpython-36.pyc,, +matplotlib/__pycache__/_version.cpython-36.pyc,, +matplotlib/__pycache__/afm.cpython-36.pyc,, +matplotlib/__pycache__/animation.cpython-36.pyc,, +matplotlib/__pycache__/artist.cpython-36.pyc,, +matplotlib/__pycache__/axis.cpython-36.pyc,, +matplotlib/__pycache__/backend_bases.cpython-36.pyc,, +matplotlib/__pycache__/backend_managers.cpython-36.pyc,, +matplotlib/__pycache__/backend_tools.cpython-36.pyc,, +matplotlib/__pycache__/bezier.cpython-36.pyc,, +matplotlib/__pycache__/blocking_input.cpython-36.pyc,, +matplotlib/__pycache__/category.cpython-36.pyc,, +matplotlib/__pycache__/cm.cpython-36.pyc,, +matplotlib/__pycache__/collections.cpython-36.pyc,, +matplotlib/__pycache__/colorbar.cpython-36.pyc,, +matplotlib/__pycache__/colors.cpython-36.pyc,, +matplotlib/__pycache__/container.cpython-36.pyc,, 
+matplotlib/__pycache__/contour.cpython-36.pyc,, +matplotlib/__pycache__/dates.cpython-36.pyc,, +matplotlib/__pycache__/docstring.cpython-36.pyc,, +matplotlib/__pycache__/dviread.cpython-36.pyc,, +matplotlib/__pycache__/figure.cpython-36.pyc,, +matplotlib/__pycache__/font_manager.cpython-36.pyc,, +matplotlib/__pycache__/fontconfig_pattern.cpython-36.pyc,, +matplotlib/__pycache__/gridspec.cpython-36.pyc,, +matplotlib/__pycache__/hatch.cpython-36.pyc,, +matplotlib/__pycache__/image.cpython-36.pyc,, +matplotlib/__pycache__/legend.cpython-36.pyc,, +matplotlib/__pycache__/legend_handler.cpython-36.pyc,, +matplotlib/__pycache__/lines.cpython-36.pyc,, +matplotlib/__pycache__/markers.cpython-36.pyc,, +matplotlib/__pycache__/mathtext.cpython-36.pyc,, +matplotlib/__pycache__/mlab.cpython-36.pyc,, +matplotlib/__pycache__/offsetbox.cpython-36.pyc,, +matplotlib/__pycache__/patches.cpython-36.pyc,, +matplotlib/__pycache__/path.cpython-36.pyc,, +matplotlib/__pycache__/patheffects.cpython-36.pyc,, +matplotlib/__pycache__/pylab.cpython-36.pyc,, +matplotlib/__pycache__/pyplot.cpython-36.pyc,, +matplotlib/__pycache__/quiver.cpython-36.pyc,, +matplotlib/__pycache__/rcsetup.cpython-36.pyc,, +matplotlib/__pycache__/sankey.cpython-36.pyc,, +matplotlib/__pycache__/scale.cpython-36.pyc,, +matplotlib/__pycache__/spines.cpython-36.pyc,, +matplotlib/__pycache__/stackplot.cpython-36.pyc,, +matplotlib/__pycache__/streamplot.cpython-36.pyc,, +matplotlib/__pycache__/table.cpython-36.pyc,, +matplotlib/__pycache__/texmanager.cpython-36.pyc,, +matplotlib/__pycache__/text.cpython-36.pyc,, +matplotlib/__pycache__/textpath.cpython-36.pyc,, +matplotlib/__pycache__/ticker.cpython-36.pyc,, +matplotlib/__pycache__/tight_bbox.cpython-36.pyc,, +matplotlib/__pycache__/tight_layout.cpython-36.pyc,, +matplotlib/__pycache__/transforms.cpython-36.pyc,, +matplotlib/__pycache__/ttconv.cpython-36.pyc,, +matplotlib/__pycache__/type1font.cpython-36.pyc,, +matplotlib/__pycache__/units.cpython-36.pyc,, +matplotlib/__pycache__/widgets.cpython-36.pyc,, +matplotlib/_animation_data.py,sha256=-u1mENRfiYh4Mk4sQsgBOF09V20sovW2AKDu5spraJ0,7729 +matplotlib/_cm.py,sha256=JNugoAjPt58HTVgKbTocqYmun2NAs3vZ1wAvsY4YlR0,66566 +matplotlib/_cm_listed.py,sha256=hpgMx7bjxJx5nl1PbQvaCDUBHQf8njaRrM2iMaBeZOM,109462 +matplotlib/_color_data.py,sha256=K2HSKblmuh-X_1ZZ9TcXcP7iKHaGC4mC_ScWqX_tdXE,34947 +matplotlib/_constrained_layout.py,sha256=-S_stIE0XVpnvS6pcJLfvUq0XrRZzpfi4vDo_irGX_o,27418 +matplotlib/_contour.cpython-36m-x86_64-linux-gnu.so,sha256=WdaPFcxP7zGbh9qW3h0gPRRJ4KXDhlcrL-dCDGmOr14,707352 +matplotlib/_image.cpython-36m-x86_64-linux-gnu.so,sha256=gMYqQcPchZPL4_quFToy__FMidSRQPuuF-iCY4HwcjI,2530890 +matplotlib/_internal_utils.py,sha256=nhK6LLWYW93fBcsFiO09JmqFj2rgHEsGYFOeaC7HRKw,2140 +matplotlib/_layoutbox.py,sha256=qwyq7ju6ok_K1Z_MKMV-TwoiEMc-FYGxHObpw99raK4,23634 +matplotlib/_mathtext_data.py,sha256=_EwmYih22UppDlyQ7zhltVu8hpnMS9x6iiMTXVTR3BE,56423 +matplotlib/_path.cpython-36m-x86_64-linux-gnu.so,sha256=rH0aDfS0avBRKr6BU8dcHYM33gazwGLNXaQ1sGoSnX0,1669683 +matplotlib/_pylab_helpers.py,sha256=mWAoMqEHRaZr44B0wM1sKjg_oCGrUPHWYIpBmgvXGRA,4500 +matplotlib/_qhull.cpython-36m-x86_64-linux-gnu.so,sha256=4Wq8W_954rCcbOdE8jvnF1mBa4ByILV8tTEN6yfqbls,1505844 +matplotlib/_text_layout.py,sha256=88DxzfAOPzpRjpu0OwLaRl6eOVJ5Var8ZxrDyhAQ7C8,1036 +matplotlib/_tri.cpython-36m-x86_64-linux-gnu.so,sha256=IUINSHklXblQErdWMf5esns1OXTO2RYXIVD5Dg4bDyk,1280288 +matplotlib/_ttconv.cpython-36m-x86_64-linux-gnu.so,sha256=ZJ_hZKmDWBn-VDruZLaevTo0a0z-kaSkBJB3NdAaLLI,448738 
+matplotlib/_version.py,sha256=9NKay7G04q3wACl_cEm8uOlJDnBj2EgmnAvLVslAdRw,471 +matplotlib/afm.py,sha256=y5a3lMwOlb_GUe5BEYGoBBlCRcQt07Cg2SmqsZI6swc,16577 +matplotlib/animation.py,sha256=uDhXF9WKT6bugZFbn8564QJPLOv9U7rcfZxIxE6Qg6A,67247 +matplotlib/artist.py,sha256=eeB7KTb3od6huB9uF3xk495GlJU9lsVQd0Kyniu5HH0,54000 +matplotlib/axes/__init__.py,sha256=npQuBvs_xEBEGUP2-BBZzCrelsAQYgB1U96kSZTSWIs,46 +matplotlib/axes/__pycache__/__init__.cpython-36.pyc,, +matplotlib/axes/__pycache__/_axes.cpython-36.pyc,, +matplotlib/axes/__pycache__/_base.cpython-36.pyc,, +matplotlib/axes/__pycache__/_secondary_axes.cpython-36.pyc,, +matplotlib/axes/__pycache__/_subplots.cpython-36.pyc,, +matplotlib/axes/_axes.py,sha256=wNxl4G4phJb5FtBuURkfLkKdlfrrr4E7rEjf_JzpTHg,313063 +matplotlib/axes/_base.py,sha256=QrB04bGPZ8ObLKSyqZLobxUZ-X09lnU7xY_08Zh_e-k,159042 +matplotlib/axes/_secondary_axes.py,sha256=-7f4ovuJ9Mj6tA_01wPxsMD13Z_0HPIT-T7TLKkeNog,13360 +matplotlib/axes/_subplots.py,sha256=aBxf2fNRl5NNFeo8iURzMLTZwmYc0xY1Uhs4XJWWBtw,9658 +matplotlib/axis.py,sha256=fjX055IvBnHFRaAXLfqL-Nw8N2SKRJe0omaV6UfqcEU,92664 +matplotlib/backend_bases.py,sha256=nO4ZCPnQ5wMNAv9iVa2Y_W_ZiiAbN5_kjQSLjA8OiFg,127296 +matplotlib/backend_managers.py,sha256=UPyhQ9Q8a_H_FPwFWkctjghrVkCp-ZE4xxaav4RlW74,13859 +matplotlib/backend_tools.py,sha256=kYQD9ouoD6sHV2tQzPwVhUKDW1OrM_zRoMwydHms8AA,34935 +matplotlib/backends/__init__.py,sha256=X-F5YPMYcPpXRDsjWBLeffSWcVYsdcW4-qJCfTZlUNQ,107 +matplotlib/backends/__pycache__/__init__.cpython-36.pyc,, +matplotlib/backends/__pycache__/_backend_pdf_ps.cpython-36.pyc,, +matplotlib/backends/__pycache__/_backend_tk.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_agg.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_cairo.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_gtk3.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_gtk3agg.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_gtk3cairo.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_macosx.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_mixed.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_nbagg.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_pdf.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_pgf.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_ps.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_qt4.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_qt4agg.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_qt4cairo.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_qt5.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_qt5agg.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_qt5cairo.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_svg.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_template.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_tkagg.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_tkcairo.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_webagg.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_webagg_core.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_wx.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_wxagg.cpython-36.pyc,, +matplotlib/backends/__pycache__/backend_wxcairo.cpython-36.pyc,, +matplotlib/backends/__pycache__/qt_compat.cpython-36.pyc,, +matplotlib/backends/_backend_agg.cpython-36m-x86_64-linux-gnu.so,sha256=hHYky8yiNMJ_pSDsCmKq8-7wrVZqBq3AaXHXpkD6btI,3823641 
+matplotlib/backends/_backend_pdf_ps.py,sha256=hVNIb4OiNqUcu01MwPzfTmyAwi3fyt-HNKyO1L8e1T8,3774 +matplotlib/backends/_backend_tk.py,sha256=NDRsSEfimDxGiOUYZh2Erga2g630lIhGo80OcmItPqU,32859 +matplotlib/backends/_tkagg.cpython-36m-x86_64-linux-gnu.so,sha256=E4CLN39qYFlLHUH0E9gSh8lG_GseqaLm6VUJBm5pRiA,220134 +matplotlib/backends/backend_agg.py,sha256=8DHxKReLBNd0HyCOn9NnEUhCJPKP5uqkoYPVzyVJAjE,23006 +matplotlib/backends/backend_cairo.py,sha256=Ot7ArUPUXEa29tEYhCqft5hV7L7CYe0POx6j7Onyet8,18680 +matplotlib/backends/backend_gtk3.py,sha256=L1OTDahZGGyGmO3moMg1ome9fMPqX0DEzkJY9VGD1m8,33094 +matplotlib/backends/backend_gtk3agg.py,sha256=h6IVIBHR2zDy0SNm-N9PxTcddZnJSJ7mmayKemLDgvc,2804 +matplotlib/backends/backend_gtk3cairo.py,sha256=7q60kRBN8LPAWljbRxIdw8W146jF1jv8BtTmX8TPYpQ,1351 +matplotlib/backends/backend_macosx.py,sha256=eSuOMg8XZ_2KjuyeIB3oRMqRBGZmHx2-uoIbRJeIMAs,5678 +matplotlib/backends/backend_mixed.py,sha256=tkALWz3fuHPiFM89GkkCXdGWQs0PFR-lujwHDyy236c,5190 +matplotlib/backends/backend_nbagg.py,sha256=rh5SDP7D9sR_PWU2MHvg9W5zLvsGVb8snmwPZLOl_5A,8551 +matplotlib/backends/backend_pdf.py,sha256=izNzt-IiOPAkOMgjmxPC9Bwi6V7f98mUzV5KCkfUYs8,97014 +matplotlib/backends/backend_pgf.py,sha256=a-eHjIAElJWH6l7fhOQYXVK4f7OxDC4ZQysFK77pUus,43445 +matplotlib/backends/backend_ps.py,sha256=L6K5pskWcjxb5KSqsEmYYDOy6SIVtXYvlihuHvFPNns,46731 +matplotlib/backends/backend_qt4.py,sha256=5pWOrGgQXYBE_ohTWaju7ZwYFCvDzGatgjHTIYvvd84,513 +matplotlib/backends/backend_qt4agg.py,sha256=6vsXxzUBN4tM3zuuY6RRtioruicJwp_HsSd4yEOFu6o,379 +matplotlib/backends/backend_qt4cairo.py,sha256=Md2XqfttMCsx1dZfCX7zEFiGEXN107wEt_fxKNtya44,316 +matplotlib/backends/backend_qt5.py,sha256=RuoVCpuLo0dMkfpT1w033fQCjmr3kRIsQfaHe7s7cNA,39226 +matplotlib/backends/backend_qt5agg.py,sha256=T0Lgc8J2tCkb3RU_blQpSOhX_qBB5zkdeEQ81NbtPe8,3160 +matplotlib/backends/backend_qt5cairo.py,sha256=H3BSQNQD4GxzA6zeQ05M2h0Semd5zJIxzhvK6fzVVPg,1819 +matplotlib/backends/backend_svg.py,sha256=qTW5u5uAjMHTk3ufYVGQtrs2qlgeaptUdHU-DVIaLg8,49161 +matplotlib/backends/backend_template.py,sha256=7-lDt_AALk33KQ4pbiuzmKol2scb0Py43sDDK6s3Dak,8442 +matplotlib/backends/backend_tkagg.py,sha256=WMslLWYmtxlmAaBH4tx4HjmRDWMKiSV91KHF9yeMRng,676 +matplotlib/backends/backend_tkcairo.py,sha256=dVCh7ZD_2OR0DBQ0N3icD8cDV1SeEzCsRja446wWhPw,1069 +matplotlib/backends/backend_webagg.py,sha256=Y1jvjX7-ZTi-rr_B4FicNAQa6xhb5ebAc9AcBveXaNU,11055 +matplotlib/backends/backend_webagg_core.py,sha256=n6E3Gjs2yXkPuDf8IFS_sdYXG2GDj6lHUlno-OinMZ8,18033 +matplotlib/backends/backend_wx.py,sha256=umnN5RCP9FdMrvOLP375xWmaCEasj2cxWMHdSHH73UM,60574 +matplotlib/backends/backend_wxagg.py,sha256=fsRREmFmyOuTCrSQMEYsrCnJ4me00_c8JNbHk2gTrRU,2932 +matplotlib/backends/backend_wxcairo.py,sha256=-K5wJxpggKlWn7vnJW5pCo5t2dIiapbMyNMX_U6jd_k,1833 +matplotlib/backends/qt_compat.py,sha256=1b2unO_FDG2EU_WaUBIzo4Ve55Dxdiu-bkOfvo3WKD0,8069 +matplotlib/backends/qt_editor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib/backends/qt_editor/__pycache__/__init__.cpython-36.pyc,, +matplotlib/backends/qt_editor/__pycache__/_formlayout.cpython-36.pyc,, +matplotlib/backends/qt_editor/__pycache__/_formsubplottool.cpython-36.pyc,, +matplotlib/backends/qt_editor/__pycache__/figureoptions.cpython-36.pyc,, +matplotlib/backends/qt_editor/__pycache__/formsubplottool.cpython-36.pyc,, +matplotlib/backends/qt_editor/_formlayout.py,sha256=Ptk7uWeDnMAK_RI6VsaE9jGuPhjp507I4mSm-DEgZFo,20581 +matplotlib/backends/qt_editor/_formsubplottool.py,sha256=pqnL7mEdfvYoh-QLpww3HBwKGmRtYtc2xVpB0elL-Pg,1507 
+matplotlib/backends/qt_editor/figureoptions.py,sha256=XRjakdBeSxnUJwQDHYc1xGyRzfuOAW8rVJCC5ro3y_o,9471 +matplotlib/backends/qt_editor/formsubplottool.py,sha256=S_v3MgumItnQxb06TSItN5HsGPLhHHeNhbAajrgYRVY,236 +matplotlib/backends/web_backend/.eslintrc.js,sha256=pZxDrJU80urQlDMoO6A3ylTeZ7DgFoexDhi93Yfm6DU,666 +matplotlib/backends/web_backend/.prettierignore,sha256=L47QXaDPUyI-rMmNAmn-OQH-5-Gi04-ZGl7QXdjP7h8,97 +matplotlib/backends/web_backend/.prettierrc,sha256=OjC7XB1lRdhntVFThQG-J-wRiqwY1fStHF2i0XTOrbk,145 +matplotlib/backends/web_backend/all_figures.html,sha256=K0MyVxwnpQuJl-PPvsrCALlCuyjbgKo_dXum72_YmoA,1620 +matplotlib/backends/web_backend/css/boilerplate.css,sha256=qui16QXRnQFNJDbcMasfH6KtN9hLjv8883U9cJmsVCE,2310 +matplotlib/backends/web_backend/css/fbm.css,sha256=Us0osu_rK8EUAdp_GXrh89tN_hUNCN-r7N1T1NvmmwI,1473 +matplotlib/backends/web_backend/css/mpl.css,sha256=ruca_aA5kNnP-MZmLkriu8teVP1nIgwcFEpoB16j8Z4,1611 +matplotlib/backends/web_backend/css/page.css,sha256=JJqTPCwFeUy3AgHJTv_wbX00AQd-4qVUYCSrcTztluo,1624 +matplotlib/backends/web_backend/ipython_inline_figure.html,sha256=yjJa-Vjwk58BKNVlsP2bugLhednhdnCBB88kvP1nqmM,1311 +matplotlib/backends/web_backend/js/mpl.js,sha256=eNTFSdQNQq945_oONz3kU10GFG5VBjXzPenos9j5bOY,23410 +matplotlib/backends/web_backend/js/mpl_tornado.js,sha256=Zs2Uzs7YUilG765nYvanCo-IK8HkHDtIum1KAq6bQ_w,302 +matplotlib/backends/web_backend/js/nbagg_mpl.js,sha256=8xVM74nHAWaP82qGi_kk7xnpanENkXl-Nr9d97YKtZM,8961 +matplotlib/backends/web_backend/nbagg_uat.ipynb,sha256=y1N8hQzBJ05rJ2hZla2_Mw6tOUfNP1UHKo636W1e098,15933 +matplotlib/backends/web_backend/package.json,sha256=vl3nGaoaVKYGnxlhZP7kVyAqoNmbQbKmEBnQwUWDmwE,545 +matplotlib/backends/web_backend/single_figure.html,sha256=COFDb5xbaFoVrY5o0SmDWv5bWb9HkZR2pGRZx4hzCow,1239 +matplotlib/bezier.py,sha256=sgcrKRi7wgA6N5C-PwKFiIrSswityHq_TschvXB5X04,19476 +matplotlib/blocking_input.py,sha256=qXeHCx72m5isp08xNIzUtpGhVtq9J5sAtKfONonk0ZI,11301 +matplotlib/category.py,sha256=W8xq1Kh4yVzMHQWh3la78DXL3Ya1yy0c0dRr9aFHGn4,7164 +matplotlib/cbook/__init__.py,sha256=9ZrFn8daVVVLiqRrOANqr_g2VY6FWmnLkPLhInLC5U0,77217 +matplotlib/cbook/__pycache__/__init__.cpython-36.pyc,, +matplotlib/cbook/__pycache__/deprecation.cpython-36.pyc,, +matplotlib/cbook/deprecation.py,sha256=n7HAp2ynRCVGyVgP4g6faZAbrRl1irMlAmKy9ul5XHE,18912 +matplotlib/cm.py,sha256=W0ZNghptxpzv1MQ7cfsA0J4cpWxFC0EJcUkOaK-XnN4,16743 +matplotlib/collections.py,sha256=LqHHAsMShjGuqdUPX5d-8YlMd0O7woaneE-_Mdu3zWc,77012 +matplotlib/colorbar.py,sha256=pgUltq3zeqiskVjb5kyDw_X4EMolnxlljuKiEYuSv7Y,65766 +matplotlib/colors.py,sha256=zUiIcEI1beoLMpurfARiOld7yrSYy_eDzYjK8b8V54g,78296 +matplotlib/compat/__init__.py,sha256=oto0cT2n56P0X0nWDRkGfYj-lsrKlwoFiQNG6ZWi9e0,94 +matplotlib/compat/__pycache__/__init__.cpython-36.pyc,, +matplotlib/container.py,sha256=l_-yWxnuI0bk2xczyyzunKsna8T7BCxOObjonqLXS0g,4305 +matplotlib/contour.py,sha256=Oo42M57NoZPtyQXY8OHeNa0sScSTBjuLNelRwikZA1Y,69101 +matplotlib/dates.py,sha256=wX8dTCv6TeadQ0REb_9115AY1UjbQuGpvdY5zR4-btU,66901 +matplotlib/docstring.py,sha256=yLCe0cXdYA1Fy6JPSXVHtSpabPjNHtJ3307FqRICNNw,2436 +matplotlib/dviread.py,sha256=aL1biMgTIPAn6bEheHGzgC24rOPidGpEILBLlrXu2CU,40232 +matplotlib/figure.py,sha256=yikI-g88aoXBnvt8njnhuWNgwwB1sDVGb0hZZKZViZg,105934 +matplotlib/font_manager.py,sha256=tVO1nIYeabQE4HuE58NOOsin6AIJ2rr0NYbl7uEMHS0,48786 +matplotlib/fontconfig_pattern.py,sha256=KAW895GkX4BlctRfb8Vp3bLCO_h4_ChDRjeGqFlAhHg,6651 +matplotlib/ft2font.cpython-36m-x86_64-linux-gnu.so,sha256=Y6JiyyeHoFFwmCN4kcoRhULns6a8k1tou7BfiCrsLbM,1483667 
+matplotlib/gridspec.py,sha256=qzgFPb6hOkBCM1HP_cUy4sxmzHlgeRkOtAmdEiVvN3k,33054 +matplotlib/hatch.py,sha256=vcaOXM0nxNkZxZ_rsLuNRmefcSDXm4-_9zFILNn66ps,6888 +matplotlib/image.py,sha256=dcHk1GJoDx1mdTh6pgG0yY7ItQ-UXQoa8uJ6n4OjDq0,66954 +matplotlib/legend.py,sha256=mBUcau_vF6iQV2Dte5yd25WYKt3Wc_3zA9NO3Iqa23g,47617 +matplotlib/legend_handler.py,sha256=RA_wy27ZHKlXBhSFX3YIXnyewMuTctj3tsf9qPg6_rs,26211 +matplotlib/lines.py,sha256=F_OFGitR7HoYDIHQotrPdqSgjMl2T76FFQioZvBO4EI,51549 +matplotlib/markers.py,sha256=ujsSAJR-rM6O5HfSaEjBJuYkjA7Sbm7keoxohRjCV-4,31988 +matplotlib/mathtext.py,sha256=8LMlPbNZDSO3jHTqs_bh75mt-PqN9ObyRPbc1yuywq4,120227 +matplotlib/mlab.py,sha256=68kp1sHGqZws9ItPVJXkf6HqWN8W1v9CP1cCqwW5p0s,35666 +matplotlib/mpl-data/fonts/afm/cmex10.afm,sha256=blR3ERmrVBV5XKkAnDCj4NMeYVgzH7cXtJ3u59u9GuE,12070 +matplotlib/mpl-data/fonts/afm/cmmi10.afm,sha256=5qwEOpedEo76bDUahyuuF1q0cD84tRrX-VQ4p3MlfBo,10416 +matplotlib/mpl-data/fonts/afm/cmr10.afm,sha256=WDvgC_D3UkGJg9u-J0U6RaT02lF4oz3lQxHtg1r3lYw,10101 +matplotlib/mpl-data/fonts/afm/cmsy10.afm,sha256=AbmzvCVWBceHRfmRfeJ9E6xzOQTFLk0U1zDfpf3_MaM,8295 +matplotlib/mpl-data/fonts/afm/cmtt10.afm,sha256=4ji7_mTpeWMa93o_UHBWPKCnqsBfhJJNllat1lJArP4,6501 +matplotlib/mpl-data/fonts/afm/pagd8a.afm,sha256=jjFrigwkTpYLqa26cpzZvKQNBo-PuF4bmDVqaM4pMWw,17183 +matplotlib/mpl-data/fonts/afm/pagdo8a.afm,sha256=sgNQdeYyx8J-itGw9h31y95aMBiTCRvmNSPTXwwS7xg,17255 +matplotlib/mpl-data/fonts/afm/pagk8a.afm,sha256=ZUtfHPloNqcvGMHMxaKDSlshhOcjwheUx143RwpGdIU,17241 +matplotlib/mpl-data/fonts/afm/pagko8a.afm,sha256=Yj1wBg6Jsqqz1KBfhRoJ3ACR-CMQol8Fj_ZM5NZ1gDk,17346 +matplotlib/mpl-data/fonts/afm/pbkd8a.afm,sha256=Zl5o6J_di9Y5j2EpHtjew-_sfg7-WoeVmO9PzOYSTUc,15157 +matplotlib/mpl-data/fonts/afm/pbkdi8a.afm,sha256=JAOno930iTyfZILMf11vWtiaTgrJcPpP6FRTRhEMMD4,15278 +matplotlib/mpl-data/fonts/afm/pbkl8a.afm,sha256=UJqJjOJ6xQDgDBLX157mKpohIJFVmHM-N6x2-DiGv14,15000 +matplotlib/mpl-data/fonts/afm/pbkli8a.afm,sha256=AWislZ2hDbs0ox_qOWREugsbS8_8lpL48LPMR40qpi0,15181 +matplotlib/mpl-data/fonts/afm/pcrb8a.afm,sha256=6j1TS2Uc7DWSc-8l42TGDc1u0Fg8JspeWfxFayjUwi8,15352 +matplotlib/mpl-data/fonts/afm/pcrbo8a.afm,sha256=smg3mjl9QaBDtQIt06ko5GvaxLsO9QtTvYANuE5hfG0,15422 +matplotlib/mpl-data/fonts/afm/pcrr8a.afm,sha256=7nxFr0Ehz4E5KG_zSE5SZOhxRH8MyfnCbw-7x5wu7tw,15339 +matplotlib/mpl-data/fonts/afm/pcrro8a.afm,sha256=NKEz7XtdFkh9cA8MvY-S3UOZlV2Y_J3tMEWFFxj7QSg,15443 +matplotlib/mpl-data/fonts/afm/phvb8a.afm,sha256=NAx4M4HjL7vANCJbc-tk04Vkol-T0oaXeQ3T2h-XUvM,17155 +matplotlib/mpl-data/fonts/afm/phvb8an.afm,sha256=8e_myD-AQkNF7q9XNLb2m76_lX2TUr3a5wog_LIE1sk,17086 +matplotlib/mpl-data/fonts/afm/phvbo8a.afm,sha256=8fkBRmJ-SWY2YrBg8fFyjJyrJp8daQ6JPO6LvhM8xPI,17230 +matplotlib/mpl-data/fonts/afm/phvbo8an.afm,sha256=aeVRvV4r15BBvxuRJ0MG8ZHuH2HViuIiCYkvuapmkmM,17195 +matplotlib/mpl-data/fonts/afm/phvl8a.afm,sha256=IyMYM-bgl-gI6rG0EuZZ2OLzlxJfGeSh8xqsh0t-eJQ,15627 +matplotlib/mpl-data/fonts/afm/phvlo8a.afm,sha256=s12C-eNnIDHJ_UVbuiprjxBjCiHIbS3Y8ORTC-qTpuI,15729 +matplotlib/mpl-data/fonts/afm/phvr8a.afm,sha256=Kt8KaRidts89EBIK29X2JomDUEDxvroeaJz_RNTi6r4,17839 +matplotlib/mpl-data/fonts/afm/phvr8an.afm,sha256=lL5fAHTRwODl-sB5mH7IfsD1tnnea4yRUK-_Ca2bQHM,17781 +matplotlib/mpl-data/fonts/afm/phvro8a.afm,sha256=3KqK3eejiR4hIFBUynuSX_4lMdE2V2T58xOF8lX-fwc,17919 +matplotlib/mpl-data/fonts/afm/phvro8an.afm,sha256=Vx9rRf3YfasMY7tz-njSxz67xHKk-fNkN7yBi0X2IP0,17877 +matplotlib/mpl-data/fonts/afm/pncb8a.afm,sha256=aoXepTcDQtQa_mspflMJkEFKefzXHoyjz6ioJVI0YNc,16028 
+matplotlib/mpl-data/fonts/afm/pncbi8a.afm,sha256=pCWW1MYgy0EmvwaYsaYJaAI_LfrsKmDANHu7Pk0RaiU,17496 +matplotlib/mpl-data/fonts/afm/pncr8a.afm,sha256=0CIB2BLe9r-6_Wl5ObRTTf98UOrezmGQ8ZOuBX5kLks,16665 +matplotlib/mpl-data/fonts/afm/pncri8a.afm,sha256=5R-pLZOnaHNG8pjV6MP3Ai-d2OTQYR_cYCb5zQhzfSU,16920 +matplotlib/mpl-data/fonts/afm/pplb8a.afm,sha256=3EzUbNnXr5Ft5eFLY00W9oWu59rHORgDXUuJaOoKN58,15662 +matplotlib/mpl-data/fonts/afm/pplbi8a.afm,sha256=X_9tVspvrcMer3OS8qvdwjFFqpAXYZneyCL2NHA902g,15810 +matplotlib/mpl-data/fonts/afm/pplr8a.afm,sha256=ijMb497FDJ9nVdVMb21F7W3-cu9sb_9nF0oriFpSn8k,15752 +matplotlib/mpl-data/fonts/afm/pplri8a.afm,sha256=8KITbarcUUMi_hdoRLLmNHtlqs0TtOSKqtPFft7X5nY,15733 +matplotlib/mpl-data/fonts/afm/psyr.afm,sha256=Iyt8ajE4B2Tm34oBj2pKtctIf9kPfq05suQefq8p3Ro,9644 +matplotlib/mpl-data/fonts/afm/ptmb8a.afm,sha256=bL1fA1NC4_nW14Zrnxz4nHlXJb4dzELJPvodqKnYeMg,17983 +matplotlib/mpl-data/fonts/afm/ptmbi8a.afm,sha256=-_Ui6XlKaFTHEnkoS_-1GtIr5VtGa3gFQ2ezLOYHs08,18070 +matplotlib/mpl-data/fonts/afm/ptmr8a.afm,sha256=IEcsWcmzJyjCwkgsw4o6hIMmzlyXUglJat9s1PZNnEU,17942 +matplotlib/mpl-data/fonts/afm/ptmri8a.afm,sha256=49fQMg5fIGguZ7rgc_2styMK55Pv5bPTs7wCzqpcGpk,18068 +matplotlib/mpl-data/fonts/afm/putb8a.afm,sha256=qMaHTdpkrNL-m4DWhjpxJCSmgYkCv1qIzLlFfM0rl40,21532 +matplotlib/mpl-data/fonts/afm/putbi8a.afm,sha256=g7AVJyiTxeMpNk_1cSfmYgM09uNUfPlZyWGv3D1vcAk,21931 +matplotlib/mpl-data/fonts/afm/putr8a.afm,sha256=XYmNC5GQgSVAZKTIYdYeNksE6znNm9GF_0SmQlriqx0,22148 +matplotlib/mpl-data/fonts/afm/putri8a.afm,sha256=i7fVe-iLyLtQxCfAa4IxdxH-ufcHmMk7hbCGG5TxAY4,21891 +matplotlib/mpl-data/fonts/afm/pzcmi8a.afm,sha256=wyuoIWEZOcoXrSl1tPzLkEahik7kGi91JJj-tkFRG4A,16250 +matplotlib/mpl-data/fonts/afm/pzdr.afm,sha256=MyjLAnzKYRdQBfof1W3k_hf30MvqOkqL__G22mQ5xww,9467 +matplotlib/mpl-data/fonts/pdfcorefonts/Courier-Bold.afm,sha256=sIDDI-B82VZ3C0mI_mHFITCZ7PVn37AIYMv1CrHX4sE,15333 +matplotlib/mpl-data/fonts/pdfcorefonts/Courier-BoldOblique.afm,sha256=zg61QobD3YU9UBfCXmvmhBNaFKno-xj8sY0b2RpgfLw,15399 +matplotlib/mpl-data/fonts/pdfcorefonts/Courier-Oblique.afm,sha256=vRQm5j1sTUN4hicT1PcVZ9P9DTTUHhEzfPXqUUzVZhE,15441 +matplotlib/mpl-data/fonts/pdfcorefonts/Courier.afm,sha256=Mdcq2teZEBJrIqVXnsnhee7oZnTs6-P8_292kWGTrw4,15335 +matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica-Bold.afm,sha256=i2l4gcjuYXoXf28uK7yIVwuf0rnw6J7PwPVQeHj5iPw,69269 +matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica-BoldOblique.afm,sha256=Um5O6qK11DXLt8uj_0IoWkc84TKqHK3bObSKUswQqvY,69365 +matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica-Oblique.afm,sha256=hVYDg2b52kqtbVeCzmiv25bW1yYdpkZS-LXlGREN2Rs,74392 +matplotlib/mpl-data/fonts/pdfcorefonts/Helvetica.afm,sha256=23cvKDD7bQAJB3kdjSahJSTZaUOppznlIO6FXGslyW8,74292 +matplotlib/mpl-data/fonts/pdfcorefonts/Symbol.afm,sha256=P5UaoXr4y0qh4SiMa5uqijDT6ZDr2-jPmj1ayry593E,9740 +matplotlib/mpl-data/fonts/pdfcorefonts/Times-Bold.afm,sha256=cQTmr2LFPwKQE_sGQageMcmFicjye16mKJslsJLHQyE,64251 +matplotlib/mpl-data/fonts/pdfcorefonts/Times-BoldItalic.afm,sha256=pzWOdycm6RqocBWgAVY5Jq0z3Fp7LuqWgLNMx4q6OFw,59642 +matplotlib/mpl-data/fonts/pdfcorefonts/Times-Italic.afm,sha256=bK5puSMpGT_YUILwyJrXoxjfj7XJOdfv5TQ_iKsJRzw,66328 +matplotlib/mpl-data/fonts/pdfcorefonts/Times-Roman.afm,sha256=hhNrUnpazuDDKD1WpraPxqPWCYLrO7D7bMVOg-zI13o,60460 +matplotlib/mpl-data/fonts/pdfcorefonts/ZapfDingbats.afm,sha256=ZuOmt9GcKofjdOq8kqhPhtAIhOwkL2rTJTmZxAjFakA,9527 +matplotlib/mpl-data/fonts/pdfcorefonts/readme.txt,sha256=MRv8ppSITYYAb7lt5EOw9DWWNZIblfxsFhu5TQE7cpI,828 
+matplotlib/mpl-data/fonts/ttf/DejaVuSans-Bold.ttf,sha256=sYS4njwQdfIva3FXW2_CDUlys8_TsjMiym_Vltyu8Wc,704128 +matplotlib/mpl-data/fonts/ttf/DejaVuSans-BoldOblique.ttf,sha256=bt8CgxYBhq9FHL7nHnuEXy5Mq_Jku5ks5mjIPCVGXm8,641720 +matplotlib/mpl-data/fonts/ttf/DejaVuSans-Oblique.ttf,sha256=zN90s1DxH9PdV3TeUOXmNGoaXaH1t9X7g1kGZel6UhM,633840 +matplotlib/mpl-data/fonts/ttf/DejaVuSans.ttf,sha256=P99pyr8GBJ6nCgC1kZNA4s4ebQKwzDxLRPtoAb0eDSI,756072 +matplotlib/mpl-data/fonts/ttf/DejaVuSansDisplay.ttf,sha256=ggmdz7paqGjN_CdFGYlSX-MpL3N_s8ngMozpzvWWUvY,25712 +matplotlib/mpl-data/fonts/ttf/DejaVuSansMono-Bold.ttf,sha256=uq2ppRcv4giGJRr_BDP8OEYZEtXa8HKH577lZiCo2pY,331536 +matplotlib/mpl-data/fonts/ttf/DejaVuSansMono-BoldOblique.ttf,sha256=ppCBwVx2yCfgonpaf1x0thNchDSZlVSV_6jCDTqYKIs,253116 +matplotlib/mpl-data/fonts/ttf/DejaVuSansMono-Oblique.ttf,sha256=KAUoE_enCfyJ9S0ZLcmV708P3Fw9e3OknWhJsZFtDNA,251472 +matplotlib/mpl-data/fonts/ttf/DejaVuSansMono.ttf,sha256=YC7Ia4lIz82VZIL-ZPlMNshndwFJ7y95HUYT9EO87LM,340240 +matplotlib/mpl-data/fonts/ttf/DejaVuSerif-Bold.ttf,sha256=w3U_Lta8Zz8VhG3EWt2-s7nIcvMvsY_VOiHxvvHtdnY,355692 +matplotlib/mpl-data/fonts/ttf/DejaVuSerif-BoldItalic.ttf,sha256=2T7-x6nS6CZ2jRou6VuVhw4V4pWZqE80hK8d4c7C4YE,347064 +matplotlib/mpl-data/fonts/ttf/DejaVuSerif-Italic.ttf,sha256=PnmU-8VPoQzjNSpC1Uj63X2crbacsRCbydlg9trFfwQ,345612 +matplotlib/mpl-data/fonts/ttf/DejaVuSerif.ttf,sha256=EHJElW6ZYrnpb6zNxVGCXgrgiYrhNzcTPhuSGi_TX_o,379740 +matplotlib/mpl-data/fonts/ttf/DejaVuSerifDisplay.ttf,sha256=KRTzLkfHd8J75Wd6-ufbTeefnkXeb8kJfZlJwjwU99U,14300 +matplotlib/mpl-data/fonts/ttf/LICENSE_DEJAVU,sha256=11k43sCY8G8Kw8AIUwZdlPAgvhw8Yu8dwpdboVtNmw4,4816 +matplotlib/mpl-data/fonts/ttf/LICENSE_STIX,sha256=cxFOZdp1AxNhXR6XxCzf5iJpNcu-APm-geOHhD-s0h8,5475 +matplotlib/mpl-data/fonts/ttf/STIXGeneral.ttf,sha256=FnN4Ax4t3cYhbWeBnJJg6aBv_ExHjk4jy5im_USxg8I,448228 +matplotlib/mpl-data/fonts/ttf/STIXGeneralBol.ttf,sha256=6FM9xwg_o0a9oZM9YOpKg7Z9CUW86vGzVB-CtKDixqA,237360 +matplotlib/mpl-data/fonts/ttf/STIXGeneralBolIta.ttf,sha256=mHiP1LpI37sr0CbA4gokeosGxzcoeWKLemuw1bsJc2w,181152 +matplotlib/mpl-data/fonts/ttf/STIXGeneralItalic.ttf,sha256=bPyzM9IrfDxiO9_UAXTxTIXD1nMcphZsHtyAFA6uhSc,175040 +matplotlib/mpl-data/fonts/ttf/STIXNonUni.ttf,sha256=Ulb34CEzWsSFTRgPDovxmJZOwvyCAXYnbhaqvGU3u1c,59108 +matplotlib/mpl-data/fonts/ttf/STIXNonUniBol.ttf,sha256=XRBqW3jR_8MBdFU0ObhiV7-kXwiBIMs7QVClHcT5tgs,30512 +matplotlib/mpl-data/fonts/ttf/STIXNonUniBolIta.ttf,sha256=pb22DnbDf2yQqizotc3wBDqFGC_g27YcCGJivH9-Le8,41272 +matplotlib/mpl-data/fonts/ttf/STIXNonUniIta.ttf,sha256=BMr9pWiBv2YIZdq04X4c3CgL6NPLUPrl64aV1N4w9Ug,46752 +matplotlib/mpl-data/fonts/ttf/STIXSizFiveSymReg.ttf,sha256=wYuH1gYUpCuusqItRH5kf9p_s6mUD-9X3L5RvRtKSxs,13656 +matplotlib/mpl-data/fonts/ttf/STIXSizFourSymBol.ttf,sha256=yNdvjUoSmsZCULmD7SVq9HabndG9P4dPhboL1JpAf0s,12228 +matplotlib/mpl-data/fonts/ttf/STIXSizFourSymReg.ttf,sha256=-9xVMYL4_1rcO8FiCKrCfR4PaSmKtA42ddLGqwtei1w,15972 +matplotlib/mpl-data/fonts/ttf/STIXSizOneSymBol.ttf,sha256=cYexyo8rZcdqMlpa9fNF5a2IoXLUTZuIvh0JD1Qp0i4,12556 +matplotlib/mpl-data/fonts/ttf/STIXSizOneSymReg.ttf,sha256=0lbHzpndzJmO8S42mlkhsz5NbvJLQCaH5Mcc7QZRDzc,19760 +matplotlib/mpl-data/fonts/ttf/STIXSizThreeSymBol.ttf,sha256=3eBc-VtYbhQU3BnxiypfO6eAzEu8BdDvtIJSFbkS2oY,12192 +matplotlib/mpl-data/fonts/ttf/STIXSizThreeSymReg.ttf,sha256=XFSKCptbESM8uxHtUFSAV2cybwxhSjd8dWVByq6f3w0,15836 +matplotlib/mpl-data/fonts/ttf/STIXSizTwoSymBol.ttf,sha256=MUCYHrA0ZqFiSE_PjIGlJZgMuv79aUgQqE7Dtu3kuo0,12116 
+matplotlib/mpl-data/fonts/ttf/STIXSizTwoSymReg.ttf,sha256=_sdxDuEwBDtADpu9CyIXQxV7sIqA2TZVBCUiUjq5UCk,15704 +matplotlib/mpl-data/fonts/ttf/cmb10.ttf,sha256=B0SXtQxD6ldZcYFZH5iT04_BKofpUQT1ZX_CSB9hojo,25680 +matplotlib/mpl-data/fonts/ttf/cmex10.ttf,sha256=ryjwwXByOsd2pxv6WVrKCemNFa5cPVTOGa_VYZyWqQU,21092 +matplotlib/mpl-data/fonts/ttf/cmmi10.ttf,sha256=MJKWW4gR_WpnZXmWZIRRgfwd0TMLk3-RWAjEhdMWI00,32560 +matplotlib/mpl-data/fonts/ttf/cmr10.ttf,sha256=Tdl2GwWMAJ25shRfVe5mF9CTwnPdPWxbPkP_YRD6m_Y,26348 +matplotlib/mpl-data/fonts/ttf/cmss10.ttf,sha256=ffkag9BbLkcexjjLC0NaNgo8eSsJ_EKn2mfpHy55EVo,20376 +matplotlib/mpl-data/fonts/ttf/cmsy10.ttf,sha256=uyJu2TLz8QDNDlL15JEu5VO0G2nnv9uNOFTbDrZgUjI,29396 +matplotlib/mpl-data/fonts/ttf/cmtt10.ttf,sha256=YhHwmuk1mZka_alwwkZp2tGnfiU9kVYk-_IS9wLwcdc,28136 +matplotlib/mpl-data/images/back-symbolic.svg,sha256=yRdMiKsa-awUm2x_JE_rEV20rNTa7FInbFBEoMo-6ik,1512 +matplotlib/mpl-data/images/back.gif,sha256=sdkxFRAh-Mgs44DTvruO5OxcI3Av9CS1g5MqMA_DDkQ,608 +matplotlib/mpl-data/images/back.pdf,sha256=ZR7CJo_dAeCM-KlaGvskgtHQyRtrPIolc8REOmcoqJk,1623 +matplotlib/mpl-data/images/back.png,sha256=E4dGf4Gnz1xJ1v2tMygHV0YNQgShreDeVApaMb-74mU,380 +matplotlib/mpl-data/images/back.svg,sha256=yRdMiKsa-awUm2x_JE_rEV20rNTa7FInbFBEoMo-6ik,1512 +matplotlib/mpl-data/images/back_large.gif,sha256=tqCtecrxNrPuDCUj7FGs8UXWftljKcwgp5cSBBhXwiQ,799 +matplotlib/mpl-data/images/back_large.png,sha256=9A6hUSQeszhYONE4ZuH3kvOItM0JfDVu6tkfromCbsQ,620 +matplotlib/mpl-data/images/filesave-symbolic.svg,sha256=oxPVbLS9Pzelz71C1GCJWB34DZ0sx_pUVPRHBrCZrGs,2029 +matplotlib/mpl-data/images/filesave.gif,sha256=wAyNwOPd9c-EIPwcUAlqHSfLmxq167nhDVppOWPy9UA,723 +matplotlib/mpl-data/images/filesave.pdf,sha256=P1EPPV2g50WTt8UaX-6kFoTZM1xVqo6S2H6FJ6Zd1ec,1734 +matplotlib/mpl-data/images/filesave.png,sha256=b7ctucrM_F2mG-DycTedG_a_y4pHkx3F-zM7l18GLhk,458 +matplotlib/mpl-data/images/filesave.svg,sha256=oxPVbLS9Pzelz71C1GCJWB34DZ0sx_pUVPRHBrCZrGs,2029 +matplotlib/mpl-data/images/filesave_large.gif,sha256=IXrenlwu3wwO8WTRvxHt_q62NF6ZWyqk3jZhm6GE-G8,1498 +matplotlib/mpl-data/images/filesave_large.png,sha256=LNbRD5KZ3Kf7nbp-stx_a1_6XfGBSWUfDdpgmnzoRvk,720 +matplotlib/mpl-data/images/forward-symbolic.svg,sha256=NnQDOenfjsn-o0aJMUfErrP320Zcx9XHZkLh0cjMHsk,1531 +matplotlib/mpl-data/images/forward.gif,sha256=VNL9R-dECOX7wUAYPtU_DWn5hwi3SwLR17DhmBvUIxE,590 +matplotlib/mpl-data/images/forward.pdf,sha256=KIqIL4YId43LkcOxV_TT5uvz1SP8k5iUNUeJmAElMV8,1630 +matplotlib/mpl-data/images/forward.png,sha256=pKbLepgGiGeyY2TCBl8svjvm7Z4CS3iysFxcq4GR-wk,357 +matplotlib/mpl-data/images/forward.svg,sha256=NnQDOenfjsn-o0aJMUfErrP320Zcx9XHZkLh0cjMHsk,1531 +matplotlib/mpl-data/images/forward_large.gif,sha256=H6Jbcc7qJwHJAE294YqI5Bm-5irofX40cKRvYdrG_Ig,786 +matplotlib/mpl-data/images/forward_large.png,sha256=36h7m7DZDHql6kkdpNPckyi2LKCe_xhhyavWARz_2kQ,593 +matplotlib/mpl-data/images/hand.gif,sha256=3lRfmAqQU7A2t1YXXsB9IbwzK7FaRh-IZO84D5-xCrw,1267 +matplotlib/mpl-data/images/hand.pdf,sha256=hspwkNY915KPD7AMWnVQs7LFPOtlcj0VUiLu76dMabQ,4172 +matplotlib/mpl-data/images/hand.png,sha256=2cchRETGKa0hYNKUxnJABwkyYXEBPqJy_VqSPlT0W2Q,979 +matplotlib/mpl-data/images/hand.svg,sha256=tsVIES_nINrAbH4FqdsCGOx0SVE37vcofSYBhnnaOP0,4888 +matplotlib/mpl-data/images/hand_large.gif,sha256=H5IHmVTvOqHQb9FZ_7g7AlPt9gv-zRq0L5_Q9B7OuvU,973 +matplotlib/mpl-data/images/help-symbolic.svg,sha256=KXabvQhqIWen_t2SvZuddFYa3S0iI3W8cAKm3s1fI8Q,1870 +matplotlib/mpl-data/images/help.gif,sha256=3Cjr7YqfH7HFmYCmrJKxnoLPkbUfUcxQOW7RI2-4Cpo,564 
+matplotlib/mpl-data/images/help.pdf,sha256=CeE978IMi0YWznWKjIT1R8IrP4KhZ0S7usPUvreSgcA,1813 +matplotlib/mpl-data/images/help.png,sha256=s4pQrqaQ0py8I7vc9hv3BI3DO_tky-7YBMpaHuBDCBY,472 +matplotlib/mpl-data/images/help.ppm,sha256=mVPvgwcddzCM-nGZd8Lnl_CorzDkRIXQE17b7qo8vlU,1741 +matplotlib/mpl-data/images/help.svg,sha256=KXabvQhqIWen_t2SvZuddFYa3S0iI3W8cAKm3s1fI8Q,1870 +matplotlib/mpl-data/images/help_large.png,sha256=1IwEyWfGRgnoCWM-r9CJHEogTJVD5n1c8LXTK4AJ4RE,747 +matplotlib/mpl-data/images/help_large.ppm,sha256=MiCSKp1Su88FXOi9MTtkQDA2srwbX3w5navi6cneAi4,6925 +matplotlib/mpl-data/images/home-symbolic.svg,sha256=n_AosjJVXET3McymFuHgXbUr5vMLdXK2PDgghX8Cch4,1891 +matplotlib/mpl-data/images/home.gif,sha256=NKuFM7tTtFngdfsOpJ4AxYTL8PYS5GWKAoiJjBMwLlU,666 +matplotlib/mpl-data/images/home.pdf,sha256=e0e0pI-XRtPmvUCW2VTKL1DeYu1pvPmUUeRSgEbWmik,1737 +matplotlib/mpl-data/images/home.png,sha256=IcFdAAUa6_A0qt8IO3I8p4rpXpQgAlJ8ndBECCh7C1w,468 +matplotlib/mpl-data/images/home.svg,sha256=n_AosjJVXET3McymFuHgXbUr5vMLdXK2PDgghX8Cch4,1891 +matplotlib/mpl-data/images/home_large.gif,sha256=k86PJCgED46sCFkOlUYHA0s5U7OjRsc517bpAtU2JSw,1422 +matplotlib/mpl-data/images/home_large.png,sha256=uxS2O3tWOHh1iau7CaVV4ermIJaZ007ibm5Z3i8kXYg,790 +matplotlib/mpl-data/images/matplotlib.pdf,sha256=BkSUf-2xoij-eXfpV2t7y1JFKG1zD1gtV6aAg3Xi_wE,22852 +matplotlib/mpl-data/images/matplotlib.png,sha256=w8KLRYVa-voUZXa41hgJauQuoois23f3NFfdc72pUYY,1283 +matplotlib/mpl-data/images/matplotlib.svg,sha256=QiTIcqlQwGaVPtHsEk-vtmJk1wxwZSvijhqBe_b9VCI,62087 +matplotlib/mpl-data/images/matplotlib_128.ppm,sha256=IHPRWXpLFRq3Vb7UjiCkFrN_N86lSPcfrEGunST08d8,49167 +matplotlib/mpl-data/images/matplotlib_large.png,sha256=ElRoue9grUqkZXJngk-nvh4GKfpvJ4gE69WryjCbX5U,3088 +matplotlib/mpl-data/images/move-symbolic.svg,sha256=_ZKpcwGD6DMTkZlbyj0nQbT8Ygt5vslEZ0OqXaXGd4E,2509 +matplotlib/mpl-data/images/move.gif,sha256=FN52MptH4FZiwmV2rQgYCO2FvO3m5LtqYv8jk6Xbeyk,679 +matplotlib/mpl-data/images/move.pdf,sha256=CXk3PGK9WL5t-5J-G2X5Tl-nb6lcErTBS5oUj2St6aU,1867 +matplotlib/mpl-data/images/move.png,sha256=TmjR41IzSzxGbhiUcV64X0zx2BjrxbWH3cSKvnG2vzc,481 +matplotlib/mpl-data/images/move.svg,sha256=_ZKpcwGD6DMTkZlbyj0nQbT8Ygt5vslEZ0OqXaXGd4E,2509 +matplotlib/mpl-data/images/move_large.gif,sha256=RMIAr-G9OOY7vWC04oN6qv5TAHJxhQGhLsw_bNsvWbg,951 +matplotlib/mpl-data/images/move_large.png,sha256=Skjz2nW_RTA5s_0g88gdq2hrVbm6DOcfYW4Fu42Fn9U,767 +matplotlib/mpl-data/images/qt4_editor_options.pdf,sha256=2qu6GVyBrJvVHxychQoJUiXPYxBylbH2j90QnytXs_w,1568 +matplotlib/mpl-data/images/qt4_editor_options.png,sha256=EryQjQ5hh2dwmIxtzCFiMN1U6Tnd11p1CDfgH5ZHjNM,380 +matplotlib/mpl-data/images/qt4_editor_options.svg,sha256=E00YoX7u4NrxMHm_L1TM8PDJ88bX5qRdCrO-Uj59CEA,1244 +matplotlib/mpl-data/images/qt4_editor_options_large.png,sha256=-Pd-9Vh5aIr3PZa8O6Ge_BLo41kiEnpmkdDj8a11JkY,619 +matplotlib/mpl-data/images/subplots-symbolic.svg,sha256=8acBogXIr9OWGn1iD6mUkgahdFZgDybww385zLCLoIs,2130 +matplotlib/mpl-data/images/subplots.gif,sha256=QfhmUdcrko08-WtrzCJUjrVFDTvUZCJEXpARNtzEwkg,691 +matplotlib/mpl-data/images/subplots.pdf,sha256=Q0syPMI5EvtgM-CE-YXKOkL9eFUAZnj_X2Ihoj6R4p4,1714 +matplotlib/mpl-data/images/subplots.png,sha256=MUfCItq3_yzb9yRieGOglpn0Y74h8IA7m5i70B63iRc,445 +matplotlib/mpl-data/images/subplots.svg,sha256=8acBogXIr9OWGn1iD6mUkgahdFZgDybww385zLCLoIs,2130 +matplotlib/mpl-data/images/subplots_large.gif,sha256=Ff3ERmtVAaGP9i1QGUNnIIKac6LGuSW2Qf4DrockZSI,1350 +matplotlib/mpl-data/images/subplots_large.png,sha256=Edu9SwVMQEXJZ5ogU5cyW7VLcwXJdhdf-EtxxmxdkIs,662 
+matplotlib/mpl-data/images/zoom_to_rect-symbolic.svg,sha256=1vRxr3cl8QTwTuRlQzD1jxu0fXZofTJ2PMgG97E7Bco,1479 +matplotlib/mpl-data/images/zoom_to_rect.gif,sha256=mTX6h9fh2W9zmvUYqeibK0TZ7qIMKOB1nAXMpD_jDys,696 +matplotlib/mpl-data/images/zoom_to_rect.pdf,sha256=SEvPc24gfZRpl-dHv7nx8KkxPyU66Kq4zgQTvGFm9KA,1609 +matplotlib/mpl-data/images/zoom_to_rect.png,sha256=aNz3QZBrIgxu9E-fFfaQweCVNitGuDUFoC27e5NU2L4,530 +matplotlib/mpl-data/images/zoom_to_rect.svg,sha256=1vRxr3cl8QTwTuRlQzD1jxu0fXZofTJ2PMgG97E7Bco,1479 +matplotlib/mpl-data/images/zoom_to_rect_large.gif,sha256=nx5LUpTAH6ZynM3ZfZDS-wR87jbMUsUnyQ27NGkV0_c,1456 +matplotlib/mpl-data/images/zoom_to_rect_large.png,sha256=V6pkxmm6VwFExdg_PEJWdK37HB7k3cE_corLa7RbUMk,1016 +matplotlib/mpl-data/matplotlibrc,sha256=QwmkRCCHLTbVgwKy5myHxqjjILbwI1WGg5DgePCFA5E,39901 +matplotlib/mpl-data/sample_data/Minduka_Present_Blue_Pack.png,sha256=XnKGiCanpDKalQ5anvo5NZSAeDP7fyflzQAaivuc0IE,13634 +matplotlib/mpl-data/sample_data/None_vs_nearest-pdf.png,sha256=5CPvcG3SDNfOXx39CMKHCNS9JKZ-fmOUwIfpppNXsQ0,106228 +matplotlib/mpl-data/sample_data/README.txt,sha256=ABz19VBKfGewdY39QInG9Qccgn1MTYV3bT5Ph7TCy2Y,128 +matplotlib/mpl-data/sample_data/aapl.npz,sha256=GssVYka_EccteiXbNRJJ5GsuqU7G8F597qX7srYXZsw,107503 +matplotlib/mpl-data/sample_data/ada.png,sha256=X1hjJK1_1Nc8DN-EEhey3G7Sq8jBwQDKNSl4cCAE0uY,308313 +matplotlib/mpl-data/sample_data/axes_grid/bivariate_normal.npy,sha256=DpWZ9udAh6ospYqneEa27D6EkRgORFwHosacZXVu98U,1880 +matplotlib/mpl-data/sample_data/ct.raw.gz,sha256=LDvvgH-mycRQF2D29-w5MW94ZI0opvwKUoFI8euNpMk,256159 +matplotlib/mpl-data/sample_data/data_x_x2_x3.csv,sha256=A0SU3buOUGhT-NI_6LQ6p70fFSIU3iLFdgzvzrKR6SE,132 +matplotlib/mpl-data/sample_data/demodata.csv,sha256=MRybziqnyrqMCH9qG7Mr6BwcohIhftVG5dejXV2AX2M,659 +matplotlib/mpl-data/sample_data/eeg.dat,sha256=KGVjFt8ABKz7p6XZirNfcxSTOpGGNuyA8JYErRKLRBc,25600 +matplotlib/mpl-data/sample_data/embedding_in_wx3.xrc,sha256=cUqVw5vDHNSZoaO4J0ebZUf5SrJP36775abs7R9Bclg,2186 +matplotlib/mpl-data/sample_data/goog.npz,sha256=QAkXzzDmtmT3sNqT18dFhg06qQCNqLfxYNLdEuajGLE,22845 +matplotlib/mpl-data/sample_data/grace_hopper.jpg,sha256=qMptc0dlcDsJcoq0f-WfRz2Trjln_CTHwCiMPHrbcTA,61306 +matplotlib/mpl-data/sample_data/grace_hopper.png,sha256=MCf0ju2kpC40srQ0xw4HEyOoKhLL4khP3jHfU9_dR7s,628280 +matplotlib/mpl-data/sample_data/jacksboro_fault_dem.npz,sha256=1JP1CjPoKkQgSUxU0fyhU50Xe9wnqxkLxf5ukvYvtjc,174061 +matplotlib/mpl-data/sample_data/logo2.png,sha256=ITxkJUsan2oqXgJDy6DJvwJ4aHviKeWGnxPkTjXUt7A,33541 +matplotlib/mpl-data/sample_data/membrane.dat,sha256=q3lbQpIBpbtXXGNw1eFwkN_PwxdDGqk4L46IE2b0M1c,48000 +matplotlib/mpl-data/sample_data/msft.csv,sha256=GArKb0O3DgKZRsKdJf6lX3rMSf-PCekIiBoLNdgF7Mk,3211 +matplotlib/mpl-data/sample_data/percent_bachelors_degrees_women_usa.csv,sha256=TzoqamsV_N3d3lW7SKmj14zZVX4FOOg9jJcsC5U9pbA,5681 +matplotlib/mpl-data/sample_data/s1045.ima.gz,sha256=MrQk1k9it-ccsk0p_VOTitVmTWCAVaZ6srKvQ2n4uJ4,33229 +matplotlib/mpl-data/sample_data/topobathy.npz,sha256=AkTgMpFwLfRQJNy1ysvE89TLMNct-n_TccSsYcQrT78,45224 +matplotlib/mpl-data/stylelib/Solarize_Light2.mplstyle,sha256=PECeO60wwJe2sSDvxapBJRuKGek0qLcoaN8qOX6tgNQ,1255 +matplotlib/mpl-data/stylelib/_classic_test_patch.mplstyle,sha256=iopHpMaM3im_AK2aiHGuM2DKM5i9Kc84v6NQEoSb10Q,167 +matplotlib/mpl-data/stylelib/bmh.mplstyle,sha256=-KbhaI859BITHIoyUZIfpQDjfckgLAlDAS_ydKsm6mc,712 +matplotlib/mpl-data/stylelib/classic.mplstyle,sha256=GW1PkcxZ0PwnU3bCF4p4v4yGrGySRiMBSuvVzyuuL8Y,24228 
+matplotlib/mpl-data/stylelib/dark_background.mplstyle,sha256=Ht6phZUy3zNRdcfHKcSb1uh3O8DunSPX8HPt9xTyzuo,658 +matplotlib/mpl-data/stylelib/fast.mplstyle,sha256=yTa2YEIIP9xi5V_G0p2vSlxghuhNwjRi9gPECMxyRiM,288 +matplotlib/mpl-data/stylelib/fivethirtyeight.mplstyle,sha256=WNUmAFuBPcqQPVgt6AS1ldy8Be2XO01N-1YQL__Q6ZY,832 +matplotlib/mpl-data/stylelib/ggplot.mplstyle,sha256=xhjLwr8hiikEXKy8APMy0Bmvtz1g0WnG84gX7e9lArs,957 +matplotlib/mpl-data/stylelib/grayscale.mplstyle,sha256=KCLg-pXpns9cnKDXKN2WH6mV41OH-6cbT-5zKQotSdw,526 +matplotlib/mpl-data/stylelib/seaborn-bright.mplstyle,sha256=pDqn3-NUyVLvlfkYs8n8HzNZvmslVMChkeH-HtZuJIc,144 +matplotlib/mpl-data/stylelib/seaborn-colorblind.mplstyle,sha256=eCSzFj5_2vR6n5qu1rHE46wvSVGZcdVqz85ov40ZsH8,148 +matplotlib/mpl-data/stylelib/seaborn-dark-palette.mplstyle,sha256=p5ABKNQHRG7bk4HXqMQrRBjDlxGAo3RCXHdQmP7g-Ng,142 +matplotlib/mpl-data/stylelib/seaborn-dark.mplstyle,sha256=I4xQ75vE5_9X4k0cNDiqhhnF3OcrZ2xlPX8Ll7OCkoE,667 +matplotlib/mpl-data/stylelib/seaborn-darkgrid.mplstyle,sha256=2bXOSzS5gmPzRBrRmzVWyhg_7ZaBRQ6t_-O-cRuyZoA,670 +matplotlib/mpl-data/stylelib/seaborn-deep.mplstyle,sha256=44dLcXjjRgR-6yaopgGRInaVgz3jk8VJVQTbBIcxRB0,142 +matplotlib/mpl-data/stylelib/seaborn-muted.mplstyle,sha256=T4o3jvqKD_ImXDkp66XFOV_xrBVFUolJU34JDFk1Xkk,143 +matplotlib/mpl-data/stylelib/seaborn-notebook.mplstyle,sha256=PcvZQbYrDdducrNlavBPmQ1g2minio_9GkUUFRdgtoM,382 +matplotlib/mpl-data/stylelib/seaborn-paper.mplstyle,sha256=n0mboUp2C4Usq2j6tNWcu4TZ_YT4-kKgrYO0t-rz1yw,393 +matplotlib/mpl-data/stylelib/seaborn-pastel.mplstyle,sha256=8nV8qRpbUrnFZeyE6VcQ1oRuZPLil2W74M2U37DNMOE,144 +matplotlib/mpl-data/stylelib/seaborn-poster.mplstyle,sha256=dUaKqTE4MRfUq2rWVXbbou7kzD7Z9PE9Ko8aXLza8JA,403 +matplotlib/mpl-data/stylelib/seaborn-talk.mplstyle,sha256=7FnBaBEdWBbncTm6_ER-EQVa_bZgU7dncgez-ez8R74,403 +matplotlib/mpl-data/stylelib/seaborn-ticks.mplstyle,sha256=CITZmZFUFp40MK2Oz8tI8a7WRoCizQU9Z4J172YWfWw,665 +matplotlib/mpl-data/stylelib/seaborn-white.mplstyle,sha256=WjJ6LEU6rlCwUugToawciAbKP9oERFHr9rfFlUrdTx0,665 +matplotlib/mpl-data/stylelib/seaborn-whitegrid.mplstyle,sha256=ec4BjsNzmOvHptcJ3mdPxULF3S1_U1EUocuqfIpw-Nk,664 +matplotlib/mpl-data/stylelib/seaborn.mplstyle,sha256=_Xu6qXKzi4b3GymCOB1b1-ykKTQ8xhDliZ8ezHGTiAs,1130 +matplotlib/mpl-data/stylelib/tableau-colorblind10.mplstyle,sha256=BsirZVd1LmPWT4tBIz6loZPjZcInoQrIGfC7rvzqmJw,190 +matplotlib/offsetbox.py,sha256=jl9fv9tiXIGKySyn8lpJ2YTgMmssTUgIeMWOrnzpzgM,59932 +matplotlib/patches.py,sha256=G1WfAmBZqIyyxHhcXjA-UkVcOaXzU76BdrUws492zPA,148905 +matplotlib/path.py,sha256=_uB4k4Q3pug51DpGj1s_qnqdHuv61CF_AjOP5Wvsn8Y,39443 +matplotlib/patheffects.py,sha256=Rv4znXNBQIAdv4BWEqSYc4RKE2c80QLaiZ2hhAW6xJA,13600 +matplotlib/projections/__init__.py,sha256=fd9jVOdrhQQMSDwRRsqd3cM-6y9DkYmVjpL0j1chQnI,1668 +matplotlib/projections/__pycache__/__init__.cpython-36.pyc,, +matplotlib/projections/__pycache__/geo.cpython-36.pyc,, +matplotlib/projections/__pycache__/polar.cpython-36.pyc,, +matplotlib/projections/geo.py,sha256=7p4Xde6eEZVGVnoCX6QcfWJm-76I5O3VzN0E-Rxm7qA,17457 +matplotlib/projections/polar.py,sha256=RpO-I6B1Qv46VAs-wobDDqwxC1_2g2aM3j3Ox9qTqvo,54200 +matplotlib/pylab.py,sha256=q3UYAFKHw4_QImBim6f0M3lNvn0phmQFm7y3o_mzTvk,1691 +matplotlib/pyplot.py,sha256=fnm0vOMTTd4i1boSzFJLq83EzKoorWVncWhtK8-2lQY,115709 +matplotlib/quiver.py,sha256=VIFTlGPdwOh_JsT7F1YKs2ak3bFV1yCc4Orh65gPeuE,47284 +matplotlib/rcsetup.py,sha256=rC6ZAr_77doqpvX7Oh7n_3wYXwxA2CxFIYrEUqJ38AA,57256 +matplotlib/sankey.py,sha256=mlEsQgppYW6vyRHS6HPBxkrd4SKWnCU2l6Aji0nJafA,36338 
+matplotlib/scale.py,sha256=DGOmpKpjPt5uKbATq76mtG7TpDhMkMPxO6QxqbtGksU,23623 +matplotlib/sphinxext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib/sphinxext/__pycache__/__init__.cpython-36.pyc,, +matplotlib/sphinxext/__pycache__/mathmpl.cpython-36.pyc,, +matplotlib/sphinxext/__pycache__/plot_directive.cpython-36.pyc,, +matplotlib/sphinxext/mathmpl.py,sha256=dUQg1fazNBSBH4-VawOMBeiQnqCK7eQtkSEkXklAW_8,3759 +matplotlib/sphinxext/plot_directive.py,sha256=Ios2cPLcRw0q31JKm_A901rCj7N3QlkMzOXx5xfe0wo,26230 +matplotlib/spines.py,sha256=aQNIkdauITmsLUSSamYvnp_mBkThD9w9jjb0NDYtGvg,20223 +matplotlib/stackplot.py,sha256=CVmWXtEu1gvypR0H4sXqapKpH8zakW0VNHvdxuiRKYo,3908 +matplotlib/streamplot.py,sha256=PWiFd6_EipDDvhcUEfoCwyAWnVKb5BrbwYSLQfRcpmw,22916 +matplotlib/style/__init__.py,sha256=EExOAUAq3u_rscUwkfKtZoEgLA5npmltCrYZOP9ftjw,67 +matplotlib/style/__pycache__/__init__.cpython-36.pyc,, +matplotlib/style/__pycache__/core.cpython-36.pyc,, +matplotlib/style/core.py,sha256=hNuFln6CA3tZQVnbc1lq_KnkWI1xAaniMDU8tABH0uU,8486 +matplotlib/table.py,sha256=EteA95gpi0dkPFFROnRhEXL-t8rrB1Uuv48nyOnlP_4,26560 +matplotlib/testing/__init__.py,sha256=6OFh9UBnCgvcMElvYfl45QwO25JstN8R6Sc-gm8NFSk,1402 +matplotlib/testing/__pycache__/__init__.cpython-36.pyc,, +matplotlib/testing/__pycache__/compare.cpython-36.pyc,, +matplotlib/testing/__pycache__/conftest.cpython-36.pyc,, +matplotlib/testing/__pycache__/decorators.cpython-36.pyc,, +matplotlib/testing/__pycache__/disable_internet.cpython-36.pyc,, +matplotlib/testing/__pycache__/exceptions.cpython-36.pyc,, +matplotlib/testing/__pycache__/widgets.cpython-36.pyc,, +matplotlib/testing/compare.py,sha256=18R0aDuasxy9OTPTkf0rxPfNIRdKkGvHyxkymRH4Pgs,16981 +matplotlib/testing/conftest.py,sha256=LeTKHn7XJiHmnEPY7ZfsUndvoOH69d-_L906Yp3o6gA,5567 +matplotlib/testing/decorators.py,sha256=PG-bOz-0F14oHh5K_6q8gGN6vce3_rbfVRKH07ezGZo,18726 +matplotlib/testing/disable_internet.py,sha256=SkQvibGuw2Ey5mXcVrNayIc2MiKopomaAgGQd_OBss8,4911 +matplotlib/testing/exceptions.py,sha256=72QmjiHG7DwxSvlJf8mei-hRit5AH3NKh0-osBo4YbY,138 +matplotlib/testing/jpl_units/Duration.py,sha256=dmC4GMu1reBR5N5b8bEUxWuoBQPQJ3p1tZEcEtMcn-8,4458 +matplotlib/testing/jpl_units/Epoch.py,sha256=6KVs5AVJtxRhLxbEwvr4RO3SmWIRX3NK8-6jY1mzGRg,6350 +matplotlib/testing/jpl_units/EpochConverter.py,sha256=LVvvSWxlyhpWzaWP6qIM-J1pT_NavtRxKeFL0tPQmNI,3165 +matplotlib/testing/jpl_units/StrConverter.py,sha256=qJQW3lwFdTTu6VbhuR4woJdvRWwMX-Yc5HyQrHFr_hU,2953 +matplotlib/testing/jpl_units/UnitDbl.py,sha256=nXFWa8Oloovg4gi4rbTRgMC36vedJjxJsXAuizr1LYA,7611 +matplotlib/testing/jpl_units/UnitDblConverter.py,sha256=3GqeyY8rdw0osr66QNd9PwxqsrOfRM0B_tLXemcptnE,3099 +matplotlib/testing/jpl_units/UnitDblFormatter.py,sha256=CRcbPtE3K0FlFJ4hkhi-SgQl1MUV-VlmIeOPIEPNwuI,681 +matplotlib/testing/jpl_units/__init__.py,sha256=p__9RUwrt2LJ2eoT2JPM-42XLxSJrfA4az3rN5uP6d4,2684 +matplotlib/testing/jpl_units/__pycache__/Duration.cpython-36.pyc,, +matplotlib/testing/jpl_units/__pycache__/Epoch.cpython-36.pyc,, +matplotlib/testing/jpl_units/__pycache__/EpochConverter.cpython-36.pyc,, +matplotlib/testing/jpl_units/__pycache__/StrConverter.cpython-36.pyc,, +matplotlib/testing/jpl_units/__pycache__/UnitDbl.cpython-36.pyc,, +matplotlib/testing/jpl_units/__pycache__/UnitDblConverter.cpython-36.pyc,, +matplotlib/testing/jpl_units/__pycache__/UnitDblFormatter.cpython-36.pyc,, +matplotlib/testing/jpl_units/__pycache__/__init__.cpython-36.pyc,, +matplotlib/testing/widgets.py,sha256=GXZIU41tAPvyJ3VkTqT64rZA582zX1vDTFs_jA2ZT-g,1509 
+matplotlib/tests/__init__.py,sha256=ns6SIKdszYNXD5h5PqKRCR06Z45H-sXrUX2VwujSRIM,366 +matplotlib/tests/__pycache__/__init__.cpython-36.pyc,, +matplotlib/tests/__pycache__/conftest.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_afm.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_agg.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_agg_filter.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_animation.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_arrow_patches.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_artist.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_axes.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_bases.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_cairo.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_nbagg.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_pdf.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_pgf.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_ps.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_qt.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_svg.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_tk.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_tools.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backend_webagg.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_backends_interactive.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_basic.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_bbox_tight.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_category.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_cbook.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_collections.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_colorbar.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_colors.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_compare_images.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_constrainedlayout.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_container.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_contour.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_cycles.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_dates.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_determinism.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_dviread.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_figure.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_font_manager.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_fontconfig_pattern.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_gridspec.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_image.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_legend.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_lines.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_marker.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_mathtext.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_matplotlib.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_mlab.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_offsetbox.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_patches.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_path.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_patheffects.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_pickle.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_png.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_polar.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_preprocess_data.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_pyplot.cpython-36.pyc,, 
+matplotlib/tests/__pycache__/test_quiver.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_rcparams.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_sankey.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_scale.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_simplification.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_skew.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_sphinxext.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_spines.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_streamplot.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_style.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_subplots.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_table.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_testing.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_texmanager.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_text.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_ticker.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_tightlayout.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_transforms.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_triangulation.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_ttconv.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_type1font.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_units.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_usetex.cpython-36.pyc,, +matplotlib/tests/__pycache__/test_widgets.cpython-36.pyc,, +matplotlib/tests/conftest.py,sha256=QtpdWPUoXL_9F8WIytDc3--h0nPjbo8PToig7svIT1Y,258 +matplotlib/tests/test_afm.py,sha256=DGVfvABg6FRmbAq2ldRhM2wlqNfVrmRtSz12MCyqDXk,3710 +matplotlib/tests/test_agg.py,sha256=Rkm7_LGXyKrzW3GwKwN0mUBFdlAzUZyHplK7fHwTZRI,7429 +matplotlib/tests/test_agg_filter.py,sha256=sfntvGVUuCaGqU3DdOnTRXqq0P1afVqWuaV_ZEYh8kQ,969 +matplotlib/tests/test_animation.py,sha256=JCjOkj2faWxRxzvs2dXg4djvKTzQvcSEHy2Q-hyKSZs,8354 +matplotlib/tests/test_arrow_patches.py,sha256=ZzLXyQQ3c33BopDLEu0WL5UpS2nohSqzvDnFcWEiQg8,5660 +matplotlib/tests/test_artist.py,sha256=JUjHpSr52wq28ax9n9VuXlKsbp3UvJ0e_n9j5EtZhOA,9061 +matplotlib/tests/test_axes.py,sha256=gNXr0BCDOwWdrEvZVymtNzOiyd_y_pmpXs7kYpb9Kq4,211255 +matplotlib/tests/test_backend_bases.py,sha256=xK0X4-U1zptpB0yuE0XYibZazJ0kfumsHBgZZR9g1TE,5935 +matplotlib/tests/test_backend_cairo.py,sha256=O2LTYjsfPn__bKtTz4MGGBodpSshoPkzu0INsc18xmI,1821 +matplotlib/tests/test_backend_nbagg.py,sha256=ydzDqVZEPJNdGBoyrORhkV73B3RbGvDNLGyJqpz2XtE,907 +matplotlib/tests/test_backend_pdf.py,sha256=vkiBnItAQ4pBE7JT3I5r2O3hISgCP1sn9w7i5WYtjWo,8668 +matplotlib/tests/test_backend_pgf.py,sha256=f6XzRLXTTKl25EWINt4nVydecrz12nFNfaNe4lQf2J8,10320 +matplotlib/tests/test_backend_ps.py,sha256=66ekBUkVCcIJnk1G15_8D2HTkRnLWuO6XLwal6RYe1A,4588 +matplotlib/tests/test_backend_qt.py,sha256=KAZGcYW3KP8HdI0R0U_ufihq8X21NhPaHsle2Fwb7IM,9211 +matplotlib/tests/test_backend_svg.py,sha256=LLtBDkEZdu-EwqWBz8eZAPqEKoKdoYHTepNNpdpaNSI,12443 +matplotlib/tests/test_backend_tk.py,sha256=kQRNkil_iGDN2fykGOdLogY6oYqylIsuOWBPRnKqeWk,1402 +matplotlib/tests/test_backend_tools.py,sha256=C-B7NCkyWsQ5KzQEnI5Be16DsAHHZJU9P5v9--wsF-o,501 +matplotlib/tests/test_backend_webagg.py,sha256=u-UvO04iqxZbFlUrNIfyD0KIAxEhSOyqlrOqxO4DnDw,702 +matplotlib/tests/test_backends_interactive.py,sha256=bab-LUJxQmnxLLdNyBh0jPdiDpJDRmg2PKHQNprLBT4,7875 +matplotlib/tests/test_basic.py,sha256=cYBVq3xiXayYibUunhXmGOLQ7p9v33LjCB0mSDxull4,1201 +matplotlib/tests/test_bbox_tight.py,sha256=0Oh66vu8R1o2Zid9G97H6wa_ezUuegz_V5ubYmkxOXw,4949 
+matplotlib/tests/test_category.py,sha256=qvYupN49Eo-GQeglvBoE9VjOmVamZ3JBI30fXaNhTjM,10223 +matplotlib/tests/test_cbook.py,sha256=Bx9DBq5Aa7ybydjIcKRnlsPiwK61qz5RAfnoxT5Rkdg,24628 +matplotlib/tests/test_collections.py,sha256=4xx5fi9DuyYP9jMjy2j0tYfWhWjhPcSNJ5vgAzXrDHA,23008 +matplotlib/tests/test_colorbar.py,sha256=jyBsfRk4fxuWdass5dEMi43YkkaHuIUa_iQ0VeQV8-U,22652 +matplotlib/tests/test_colors.py,sha256=IyU0S_tw4jSSlxJ0GW3hzXj37YydcELax9HGdZVXL7Y,40696 +matplotlib/tests/test_compare_images.py,sha256=L5-LlwjqGqZG5pxpiXKcoF2K1X4d0fTe3ZtaE8ITwZk,3294 +matplotlib/tests/test_constrainedlayout.py,sha256=IDEEg76sM28n2M8vfVcBYTwpsKsbeWflqvLE9yAGHRc,13052 +matplotlib/tests/test_container.py,sha256=ijZ2QDT_GLdMwKW393K4WU6R7A5jJNaH1uhLRDwoXXI,550 +matplotlib/tests/test_contour.py,sha256=z60n5jCxlFEDFOcMcVNyxGNP_oPytf5eQzZ6tQMzWo4,13831 +matplotlib/tests/test_cycles.py,sha256=_R4kd5eVok6QimW8NTtk65oNG9mNHCOIz1AFSFBSgx4,5660 +matplotlib/tests/test_dates.py,sha256=pDXEFsbedmY3ocAPbL8MJNVVL2v3B5H-jvk-_0tsaKE,38932 +matplotlib/tests/test_determinism.py,sha256=bu8E_NMdeN0X2KRMStjAgK3tklL5SG_IqYu4hov6Z-4,4586 +matplotlib/tests/test_dviread.py,sha256=XawbcMIJrT5N1AyA_c3VaaOAT5b6Ym6KyqIVo3MQlRg,2313 +matplotlib/tests/test_figure.py,sha256=XR16rrkuNP2-nbi1R3U-ZOYFg-oSnveaTF0gwj9gQ9w,25599 +matplotlib/tests/test_font_manager.py,sha256=dlshasWEzysAXM53Ba4vqgMF9B-clFPdva6odX7_rpY,7615 +matplotlib/tests/test_fontconfig_pattern.py,sha256=JI2E0Jz1P_ByzhP5pEUm7Eu6szyWePvoJr86_QP0sAY,2021 +matplotlib/tests/test_gridspec.py,sha256=OOCSE5R6xq3XI8Bstm0xMR1hv4guHFBCAwfCXrk_Td0,960 +matplotlib/tests/test_image.py,sha256=m13IG0PxT7B20B8-CtR-jUgp8ZCiQZnYIp0kBI9Tckk,35943 +matplotlib/tests/test_legend.py,sha256=aVztwy1LjsXhG2htqFmL9SqNGAENwbBoZnfpPN-MXwQ,22891 +matplotlib/tests/test_lines.py,sha256=gwjxoqaV0XBhsQvyjdxIrygfZSQtd7yXrPt1ToYRIqc,8337 +matplotlib/tests/test_marker.py,sha256=ejCW_5hY778KhuU9C_P6WKUZGoa59kozok__VESL7GE,6436 +matplotlib/tests/test_mathtext.py,sha256=xnfd8GR34hYqhiaEJXPEnYHJUZXlLnsxjug6OvUQMLg,14629 +matplotlib/tests/test_matplotlib.py,sha256=ubT3PYR5foBLDJoRo0BaRpBAWrwuz-qTWVOsSWtPSLQ,1455 +matplotlib/tests/test_mlab.py,sha256=cdhtXCBn2W-TSXbHPM1RJgQG72v4iaeqeUFlNPox5Mo,65684 +matplotlib/tests/test_offsetbox.py,sha256=xRIOdVPDWz2gTGdncmf4vq1qDo2ievgQw3sF9exOUvI,10828 +matplotlib/tests/test_patches.py,sha256=dyQDtw4buHEYxMR8F0jvQrO-nVXINX1T_F9k9BwJQOQ,19273 +matplotlib/tests/test_path.py,sha256=Ment4_Y6SKJ5ZBeUqLAhlBd1Udue2RtVXCU2lF_YJ3s,16254 +matplotlib/tests/test_patheffects.py,sha256=FvzVNN3BlgYzCI7Jcsf0GqnUvUhHF6Zg7S2QW2JXJHE,5202 +matplotlib/tests/test_pickle.py,sha256=1lDbWJcPpAK3rVGQG9KLWUb8qmAT9q8FmXmf6zKyG6c,5626 +matplotlib/tests/test_png.py,sha256=AnAGf7l-Nauh5mB-6sNQpI5NyAFwiQ06wj6DTgAdzK8,1300 +matplotlib/tests/test_polar.py,sha256=nVnuPrUO1suPN_1lRHrrW-EYug40clk6ywf_tjkeO-A,11730 +matplotlib/tests/test_preprocess_data.py,sha256=BmIpKQGlP5CBiJ9fQIToLKKEwj_g-zrCanUwEzSYcUI,10321 +matplotlib/tests/test_pyplot.py,sha256=H8TVYx0c4jebucHHx5JpybjPDPdsnNVhH90mmwOfjnw,2481 +matplotlib/tests/test_quiver.py,sha256=CcOtckFlLdWKfNbC6y84P_UWM3AciBWsOqHKwrx12yU,8060 +matplotlib/tests/test_rcparams.py,sha256=XuaWHNwmYmvPbHrgyXrOlNLejmPORRSRma3_HGqCzO4,19271 +matplotlib/tests/test_sankey.py,sha256=KdWhgZpEiCJ8o7yuFjnCgqFeodwKSCuuhV_mhIc04dM,310 +matplotlib/tests/test_scale.py,sha256=d0fNtLZw-cMkv6h2_0FJSwdreMY-_8pknATmntmXKzM,5659 +matplotlib/tests/test_simplification.py,sha256=_dIImjiA0VHBEbOrGQluQKCS6SQFwEVvfL9pqIn_SHI,11048 
+matplotlib/tests/test_skew.py,sha256=quEWYGkr2oPbTq0cnU9uydaZakninKCX08HX2aRFwYI,6306 +matplotlib/tests/test_sphinxext.py,sha256=5rKrJNwt90aT-dSftCDG_2M6wmg91g4eK5sAQLhnYaw,1969 +matplotlib/tests/test_spines.py,sha256=5jGy42khH7ndaHRCgJONXBOvM5ZnrzOhPSR42IAUAA0,3132 +matplotlib/tests/test_streamplot.py,sha256=LW7JMfYvlwznp4bLEDp7oAK-xZN7e7O5fqEcNxOUW4U,3823 +matplotlib/tests/test_style.py,sha256=xuj6w7q49hgID0bWJp0FRKnzOVzcIhmZi3VWOMia004,5727 +matplotlib/tests/test_subplots.py,sha256=ZIivQ4SME_kNWS2I5c2g5jcQ9ZFGx1A3DfMyixECBzk,5980 +matplotlib/tests/test_table.py,sha256=SC3HPD07aUThw6RR4hohsMYVlDaqwMBE9-XV-bsgQjY,5729 +matplotlib/tests/test_testing.py,sha256=qXLI2etvF964zK9tviBk9XOQywzV5mtiNk7bVY02DG8,629 +matplotlib/tests/test_texmanager.py,sha256=EXoFPnBJ2hI2tPI1DRsP9CNzqzonzJOEQbJ4TQAoo5I,457 +matplotlib/tests/test_text.py,sha256=AQ30w8uTRFWxmxEAu_yfesPBcInHIoXkvIFrt5Jphqg,22408 +matplotlib/tests/test_ticker.py,sha256=VX3qv_PJMBXVLoXojWf2hd66wIiS53B4pSCnHDpb9uI,51441 +matplotlib/tests/test_tightlayout.py,sha256=OerbmOkNl5f12pLcHnJDSt-pm1njdUSOacuPx9I9QM0,10307 +matplotlib/tests/test_transforms.py,sha256=FqqjsCyv7wCCvlFgd18i1ZMz72_SGmB9im-6qyNOEJw,27288 +matplotlib/tests/test_triangulation.py,sha256=wXdBV3Uo0EKi0ct4JG-wct2VzjigAdx2ZDQvnOKe3gQ,45946 +matplotlib/tests/test_ttconv.py,sha256=yW3YkGWTh-h9KXcyy70dEH4ytVuhMlzl-HMsNbK_gX8,540 +matplotlib/tests/test_type1font.py,sha256=29r5f9PEPTDaWiJ8X3KMVAMPuL2zqFxcVNtnuAC5emI,2088 +matplotlib/tests/test_units.py,sha256=BmRHlRdxQYcen5QYRFc6bMM6auHp7nTMPKg644U-S0M,5715 +matplotlib/tests/test_usetex.py,sha256=wvQAFmv8zIFU1amXEtHwk4s-SFGoc3U5tyoU_yG3FOs,2899 +matplotlib/tests/test_widgets.py,sha256=Sbbk245D5hnWmdfB6Z971ouLO6GfNlbgEHlcyXbnCKY,16208 +matplotlib/texmanager.py,sha256=yK-n17gjWh1sktKXfkw_gNxJiqQMOPCmsTq_2c39AUs,15868 +matplotlib/text.py,sha256=7DFZLul_AL-XhBwIU4X6S3gF_1kKp2KLgtwOBEyd1PQ,66092 +matplotlib/textpath.py,sha256=FXZRkSYAUvRGKyOph7ftlDl7Fa8MHNgvmXDCUgy1sWA,14982 +matplotlib/ticker.py,sha256=rRB4w_4sYYuoTLb7KC7_BJHoZ1EUWSSyQ5HHTNlPX1c,104585 +matplotlib/tight_bbox.py,sha256=uSTY0z5feK2ef6OSR2MQAW0UgobRk4qhq5mNvOOxfaA,2935 +matplotlib/tight_layout.py,sha256=PMspr2TVY5y9p1LxxXyjVabwo885A6FGn3uiGteqUkk,13156 +matplotlib/transforms.py,sha256=Q-jAY54C8PMgGyajJEwZlFtUGfux_l9uexxYSFTgiow,96797 +matplotlib/tri/__init__.py,sha256=XMaejh88uov7Neu7MuYMyaNQqaxg49nXaiJfvjifrRM,256 +matplotlib/tri/__pycache__/__init__.cpython-36.pyc,, +matplotlib/tri/__pycache__/triangulation.cpython-36.pyc,, +matplotlib/tri/__pycache__/tricontour.cpython-36.pyc,, +matplotlib/tri/__pycache__/trifinder.cpython-36.pyc,, +matplotlib/tri/__pycache__/triinterpolate.cpython-36.pyc,, +matplotlib/tri/__pycache__/tripcolor.cpython-36.pyc,, +matplotlib/tri/__pycache__/triplot.cpython-36.pyc,, +matplotlib/tri/__pycache__/trirefine.cpython-36.pyc,, +matplotlib/tri/__pycache__/tritools.cpython-36.pyc,, +matplotlib/tri/triangulation.py,sha256=MLYgUszhjc4Ns8PU-nqJooYhjHkQAIhO8PoYmvmig_k,8326 +matplotlib/tri/tricontour.py,sha256=73zQDubtylXr2-Q2MQT04uH4l5Xsa8OkgKwvcUrxyEk,11314 +matplotlib/tri/trifinder.py,sha256=juA5OPBiK2v58Hm9_h2Z93X88hWMvuzbqSN-var7fFg,3468 +matplotlib/tri/triinterpolate.py,sha256=6Sct0C6ebySPm1Df_n_uZTnQYK9mX4rUHMawsRQi5q0,64375 +matplotlib/tri/tripcolor.py,sha256=tOWZYjwe16QhkzEU_zoPyATjT3y2LjXwT2_zz9S8kJ4,5007 +matplotlib/tri/triplot.py,sha256=bFMLKxPqw4GDzja0iZE6NId0QXpx8JFBkoNl0eGx9F8,2763 +matplotlib/tri/trirefine.py,sha256=82eMuX5pXcAD5gVmfmjCv9A5rlhtlFVip9ZLL25TTuw,13222 
+matplotlib/tri/tritools.py,sha256=fZSYLphtOV1Gd2F07a_WQyEdQrkbfwm18NehQP6GDXo,10579 +matplotlib/ttconv.py,sha256=YHcvd5xEeHrvPBLwCmwUruWtVlY8xM6-TbA5mQpNUxE,239 +matplotlib/type1font.py,sha256=GiD06VYI-CP4jjbqshyHXdMDXztkctNjfQa-r_wZnIY,12330 +matplotlib/units.py,sha256=oD3Vz9QZuwzN9xMQRBzqxhfK-43SrR-I1rOY0mh2xYk,7275 +matplotlib/widgets.py,sha256=ocwoFqsxtADASvvtLnRYFNfC8HxjhPebNStx9aKJsJU,93210 +mpl_toolkits/axes_grid/__init__.py,sha256=VLlc0DaOkr9JumPa8W4zt9lGHp180ie8_WLPZVNSJMw,537 +mpl_toolkits/axes_grid/__pycache__/__init__.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/anchored_artists.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/angle_helper.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/axes_divider.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/axes_grid.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/axes_rgb.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/axes_size.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/axis_artist.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/axisline_style.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/axislines.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/clip_path.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/colorbar.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/floating_axes.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/grid_finder.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/grid_helper_curvelinear.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/inset_locator.cpython-36.pyc,, +mpl_toolkits/axes_grid/__pycache__/parasite_axes.cpython-36.pyc,, +mpl_toolkits/axes_grid/anchored_artists.py,sha256=_F6-9iacZidb5JpJ8jCOZ9PdiZaR5qpfBjf-3VjTzNc,291 +mpl_toolkits/axes_grid/angle_helper.py,sha256=Tb4Mb_NGkUdkisebe2dqfBdFmUZiSmGyUnftiSeSIls,51 +mpl_toolkits/axes_grid/axes_divider.py,sha256=tJlPia3Z8xLq6uXehBwAlD_4ywMvRTTkM73qNnCpo7Q,178 +mpl_toolkits/axes_grid/axes_grid.py,sha256=UPlVDwsze_w2aZeLaMg4WZVK3q2EvWePXTFZFvjCQz4,89 +mpl_toolkits/axes_grid/axes_rgb.py,sha256=LFo4FEXTM2E-zxE8cuYRFFzDADdLoKyzm-VNOkSX7AU,47 +mpl_toolkits/axes_grid/axes_size.py,sha256=v4Nhxe7DVp1FkKX03DqJJ1aevDanDvgKT9r0ouDzTxw,48 +mpl_toolkits/axes_grid/axis_artist.py,sha256=zUlJFUHueDsMtzLi_mK2_Wf-nSBQgiTsMOFpo_SngZ0,50 +mpl_toolkits/axes_grid/axisline_style.py,sha256=lNVHXkFWhSWPXOOfF-wlVkDPzmzuStJyJzF-NS5Wf_g,53 +mpl_toolkits/axes_grid/axislines.py,sha256=kVyhb6laiImmuNE53QTQh3kgxz0sO1mcSMpnqIdjylA,48 +mpl_toolkits/axes_grid/clip_path.py,sha256=s-d36hUiy9I9BSr9wpxjgoAACCQrczHjw072JvArNvE,48 +mpl_toolkits/axes_grid/colorbar.py,sha256=DckRf6tadLeTNjx-Zk1u3agnSGZgizDjd0Dxw1-GRdw,171 +mpl_toolkits/axes_grid/floating_axes.py,sha256=i35OfV1ZMF-DkLo4bKmzFZP6LgCwXfdDKxYlGqjyKOM,52 +mpl_toolkits/axes_grid/grid_finder.py,sha256=Y221c-Jh_AFd3Oolzvr0B1Zrz9MoXPatUABQdLsFdpw,50 +mpl_toolkits/axes_grid/grid_helper_curvelinear.py,sha256=nRl_B-755X7UpVqqdwkqc_IwiTmM48z3eOMHuvJT5HI,62 +mpl_toolkits/axes_grid/inset_locator.py,sha256=qqXlT8JWokP0kV-8NHknZDINtK-jbXfkutH_1tcRe_o,216 +mpl_toolkits/axes_grid/parasite_axes.py,sha256=rOJ8sBoR-stf1Y2PsZBDJOqk_76yN9MHHU8MYzBkOHI,438 +mpl_toolkits/axes_grid1/__init__.py,sha256=-lw0ZfG4XUpuAolCpXKFwtS3w1LJ1ZToSEC9OSmB-4Q,204 +mpl_toolkits/axes_grid1/__pycache__/__init__.cpython-36.pyc,, +mpl_toolkits/axes_grid1/__pycache__/anchored_artists.cpython-36.pyc,, +mpl_toolkits/axes_grid1/__pycache__/axes_divider.cpython-36.pyc,, +mpl_toolkits/axes_grid1/__pycache__/axes_grid.cpython-36.pyc,, +mpl_toolkits/axes_grid1/__pycache__/axes_rgb.cpython-36.pyc,, 
+mpl_toolkits/axes_grid1/__pycache__/axes_size.cpython-36.pyc,, +mpl_toolkits/axes_grid1/__pycache__/colorbar.cpython-36.pyc,, +mpl_toolkits/axes_grid1/__pycache__/inset_locator.cpython-36.pyc,, +mpl_toolkits/axes_grid1/__pycache__/mpl_axes.cpython-36.pyc,, +mpl_toolkits/axes_grid1/__pycache__/parasite_axes.cpython-36.pyc,, +mpl_toolkits/axes_grid1/anchored_artists.py,sha256=8vkx5HIRj6mfxuKigC0Dg_GTNmDCIt6kUbhre4vOSkg,20166 +mpl_toolkits/axes_grid1/axes_divider.py,sha256=p0FnHJdtU0TKOsY5C2vWJwmkLC8t9o_EHPlyh_Gl9a4,25804 +mpl_toolkits/axes_grid1/axes_grid.py,sha256=5-P14KAlDz2gOTDjd9FVEfVsQvBcy1VJ6xfAdzT91I8,23602 +mpl_toolkits/axes_grid1/axes_rgb.py,sha256=PhVPj1798xETTBkdW0mSOSePrx5C180PLfgOWBS6h1Y,5168 +mpl_toolkits/axes_grid1/axes_size.py,sha256=b4wos0_YJgpa8qv3ysyUuIkYgtNQEW0ryOrW3XWj0jU,7550 +mpl_toolkits/axes_grid1/colorbar.py,sha256=xOIxNLXACuBg-bOGqrTddtg-jhaNvxQPMoaRgegD0qM,27914 +mpl_toolkits/axes_grid1/inset_locator.py,sha256=Wb1n8_DB8iiOpb70lN7bef4sPh0wHFTh5AUbd77ApMA,23112 +mpl_toolkits/axes_grid1/mpl_axes.py,sha256=MJVYUN4YRtTWrq1wmyv_y61O002tiyFESmBSaJ8xkG4,4380 +mpl_toolkits/axes_grid1/parasite_axes.py,sha256=V4oZHxbTn_Dg-UShbELB5r1v2RANT99rLg3_V_yIEog,14399 +mpl_toolkits/axisartist/__init__.py,sha256=N-qlxUsIBuEcYaRNEEEzVDjHFFdIIDxJCMScIObcdNs,715 +mpl_toolkits/axisartist/__pycache__/__init__.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/angle_helper.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/axes_divider.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/axes_grid.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/axes_rgb.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/axis_artist.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/axisline_style.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/axislines.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/clip_path.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/floating_axes.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/grid_finder.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/grid_helper_curvelinear.cpython-36.pyc,, +mpl_toolkits/axisartist/__pycache__/parasite_axes.cpython-36.pyc,, +mpl_toolkits/axisartist/angle_helper.py,sha256=qTg0PVLbblshwC0iQI5aTqEODLGY35BqPP3AZIZBCkY,13213 +mpl_toolkits/axisartist/axes_divider.py,sha256=baPCBjM20SvAUeMjhvlS_cccRSM1y7ZKybtoW8upo2k,127 +mpl_toolkits/axisartist/axes_grid.py,sha256=d1gBlfPI592V5MjOOj-a5pM6RmF2LDoJpLloP7CJ-oo,347 +mpl_toolkits/axisartist/axes_rgb.py,sha256=cybzNZApLXFM_oZ922j7eBBZFW_qHTyieKCf5VKHAkM,183 +mpl_toolkits/axisartist/axis_artist.py,sha256=uyJ_Hd-EDjsYnLOnb_ZyYO_WlWuYtZhYmxBRlHyWydU,42066 +mpl_toolkits/axisartist/axisline_style.py,sha256=20_j6V4tJRTEKJAKeTsG8Oxc5O4mc7u2dNYYLrzGMEU,5039 +mpl_toolkits/axisartist/axislines.py,sha256=FekRY5Nl6MpChNekBU9kVlu1TyGT403QRAq6xoLAskk,19779 +mpl_toolkits/axisartist/clip_path.py,sha256=LE_IIP0byNr5ELJlD8_8fsAh215MUDoK19-BISuFB80,3777 +mpl_toolkits/axisartist/floating_axes.py,sha256=D2M4qAIsdfozLUMiF0gkckFC6yg5WVOGeuG_JtORdfY,12861 +mpl_toolkits/axisartist/grid_finder.py,sha256=zJP1WiMww6uMJz0lhKTBVoyPzxmLZz1klMuzf8C8jFU,10967 +mpl_toolkits/axisartist/grid_helper_curvelinear.py,sha256=ge7S2Xdr7XGizvG1D7feqeb5hXOUBP5AjKn_tz6a2E0,14258 +mpl_toolkits/axisartist/parasite_axes.py,sha256=BOUAQOChzmyvXUakE1cVjEjq5gmzMKXlQ4HfKQaMmoo,415 +mpl_toolkits/mplot3d/__init__.py,sha256=V2iPIP9VyRhoJsFWnQf5AkfyI1GSSP9H6hICEe9edJo,27 +mpl_toolkits/mplot3d/__pycache__/__init__.cpython-36.pyc,, +mpl_toolkits/mplot3d/__pycache__/art3d.cpython-36.pyc,, 
+mpl_toolkits/mplot3d/__pycache__/axes3d.cpython-36.pyc,, +mpl_toolkits/mplot3d/__pycache__/axis3d.cpython-36.pyc,, +mpl_toolkits/mplot3d/__pycache__/proj3d.cpython-36.pyc,, +mpl_toolkits/mplot3d/art3d.py,sha256=zlc2Yi1g2W16H27hhq1dmrqjdagSCvi0aM0ZlNHMmyY,29244 +mpl_toolkits/mplot3d/axes3d.py,sha256=06QHfxU0jFfHkGyljaAD-bpV8YbR7H0RdnyUkNo1Xew,103538 +mpl_toolkits/mplot3d/axis3d.py,sha256=eN-4LZDksVer0Et5qvcBWp_vVxPVE0b0mJtIBshgmUM,18811 +mpl_toolkits/mplot3d/proj3d.py,sha256=_hVJ_LhfXA6d9svtX8n3IWmRAFEfzKkFD-z150_L5oY,4266 +mpl_toolkits/tests/__init__.py,sha256=Ox41zElZt1Po-41lx14-gMFr9R1DEK6Amt64Hn5d6sY,365 +mpl_toolkits/tests/__pycache__/__init__.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/conftest.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/test_axes_grid.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/test_axes_grid1.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/test_axisartist_angle_helper.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/test_axisartist_axis_artist.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/test_axisartist_axislines.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/test_axisartist_clip_path.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/test_axisartist_floating_axes.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/test_axisartist_grid_finder.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/test_axisartist_grid_helper_curvelinear.cpython-36.pyc,, +mpl_toolkits/tests/__pycache__/test_mplot3d.cpython-36.pyc,, +mpl_toolkits/tests/conftest.py,sha256=Ph6QZKdfAnkPwU52StddC-uwtCHfANKX1dDXgtX122g,213 +mpl_toolkits/tests/test_axes_grid.py,sha256=oYXRFhdRo1MMjFxaoI3Bx-6w4lkiS8VkCKYk1DiGk0U,2451 +mpl_toolkits/tests/test_axes_grid1.py,sha256=6kdEeSooNT0Kj7oGsbP0z2CjOpvpiEMG8tcFpQZ_HFs,18091 +mpl_toolkits/tests/test_axisartist_angle_helper.py,sha256=PwhJwBm2kk4uMyhdO5arQs8IlqSX2vN0hvUzI7YHqrw,5670 +mpl_toolkits/tests/test_axisartist_axis_artist.py,sha256=N4Khx8jSxkoiMz3KvumodmFKHZUtdwtjkzxLWPSdyuw,3008 +mpl_toolkits/tests/test_axisartist_axislines.py,sha256=XRlLT8Hx0X3Pn0JnmXFP3hnnImiB1RIKt_17MyOTnyA,2442 +mpl_toolkits/tests/test_axisartist_clip_path.py,sha256=afS3nvNqCgvDpJdg_MvbwydtSWv5b6ciP-Iq2aNcNFQ,1004 +mpl_toolkits/tests/test_axisartist_floating_axes.py,sha256=xENnUpFU8EHPgnON6W1xqMVWIq8qxIzuGf1oMmSMFJo,4127 +mpl_toolkits/tests/test_axisartist_grid_finder.py,sha256=e65sLudWFIXeU08Sis3_SI1JEI6eq8YqKj-80F_Nohk,325 +mpl_toolkits/tests/test_axisartist_grid_helper_curvelinear.py,sha256=w0jJQs1uTSQEmEi4sMUkFdm6rvgD-T1TVM7Ck3p3U8E,7516 +mpl_toolkits/tests/test_mplot3d.py,sha256=Va1upO2Fj90ilRGaHx4wog6q6Ukec_HSSQRzL1HrBzQ,35996 +pylab.py,sha256=u_By3CHla-rBMg57egFXIxZ3P_J6zEkSu_dNpBcH5pw,90 diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/REQUESTED b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/WHEEL b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/WHEEL new file mode 100644 index 0000000..a5338f5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: false +Tag: cp36-cp36m-manylinux1_x86_64 + diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/namespace_packages.txt b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/namespace_packages.txt new file mode 100644 index 
0000000..ba2e3ed --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/namespace_packages.txt @@ -0,0 +1 @@ +mpl_toolkits diff --git a/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/top_level.txt b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/top_level.txt new file mode 100644 index 0000000..0eb77e4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib-3.3.3.dist-info/top_level.txt @@ -0,0 +1,3 @@ +matplotlib +mpl_toolkits +pylab diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__init__.py b/minor_project/lib/python3.6/site-packages/matplotlib/__init__.py new file mode 100644 index 0000000..1c47973 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/__init__.py @@ -0,0 +1,1483 @@ +""" +An object-oriented plotting library. + +A procedural interface is provided by the companion pyplot module, +which may be imported directly, e.g.:: + + import matplotlib.pyplot as plt + +or using ipython:: + + ipython + +at your terminal, followed by:: + + In [1]: %matplotlib + In [2]: import matplotlib.pyplot as plt + +at the ipython shell prompt. + +For the most part, direct use of the object-oriented library is encouraged when +programming; pyplot is primarily for working interactively. The exceptions are +the pyplot functions `.pyplot.figure`, `.pyplot.subplot`, `.pyplot.subplots`, +and `.pyplot.savefig`, which can greatly simplify scripting. + +Modules include: + + :mod:`matplotlib.axes` + The `~.axes.Axes` class. Most pyplot functions are wrappers for + `~.axes.Axes` methods. The axes module is the highest level of OO + access to the library. + + :mod:`matplotlib.figure` + The `.Figure` class. + + :mod:`matplotlib.artist` + The `.Artist` base class for all classes that draw things. + + :mod:`matplotlib.lines` + The `.Line2D` class for drawing lines and markers. + + :mod:`matplotlib.patches` + Classes for drawing polygons. + + :mod:`matplotlib.text` + The `.Text` and `.Annotation` classes. + + :mod:`matplotlib.image` + The `.AxesImage` and `.FigureImage` classes. + + :mod:`matplotlib.collections` + Classes for efficient drawing of groups of lines or polygons. + + :mod:`matplotlib.colors` + Color specifications and making colormaps. + + :mod:`matplotlib.cm` + Colormaps, and the `.ScalarMappable` mixin class for providing color + mapping functionality to other classes. + + :mod:`matplotlib.ticker` + Calculation of tick mark locations and formatting of tick labels. + + :mod:`matplotlib.backends` + A subpackage with modules for various GUI libraries and output formats. + +The base matplotlib namespace includes: + + `~matplotlib.rcParams` + Default configuration settings; their defaults may be overridden using + a :file:`matplotlibrc` file. + + `~matplotlib.use` + Setting the Matplotlib backend. This should be called before any + figure is created, because it is not possible to switch between + different GUI backends after that. + +Matplotlib was initially written by John D. Hunter (1968-2012) and is now +developed and maintained by a host of others. + +Occasionally the internal documentation (python docstrings) will refer +to MATLAB®, a registered trademark of The MathWorks, Inc. 
+""" + +import atexit +from collections import namedtuple +from collections.abc import MutableMapping +import contextlib +from distutils.version import LooseVersion +import functools +import importlib +import inspect +from inspect import Parameter +import locale +import logging +import os +from pathlib import Path +import pprint +import re +import shutil +import subprocess +import sys +import tempfile +import warnings + +# cbook must import matplotlib only within function +# definitions, so it is safe to import from it here. +from . import cbook, rcsetup +from matplotlib.cbook import MatplotlibDeprecationWarning, sanitize_sequence +from matplotlib.cbook import mplDeprecation # deprecated +from matplotlib.rcsetup import validate_backend, cycler + +import numpy + +# Get the version from the _version.py versioneer file. For a git checkout, +# this is computed based on the number of commits since the last tag. +from ._version import get_versions +__version__ = str(get_versions()['version']) +del get_versions + +_log = logging.getLogger(__name__) + +__bibtex__ = r"""@Article{Hunter:2007, + Author = {Hunter, J. D.}, + Title = {Matplotlib: A 2D graphics environment}, + Journal = {Computing in Science \& Engineering}, + Volume = {9}, + Number = {3}, + Pages = {90--95}, + abstract = {Matplotlib is a 2D graphics package used for Python + for application development, interactive scripting, and + publication-quality image generation across user + interfaces and operating systems.}, + publisher = {IEEE COMPUTER SOC}, + year = 2007 +}""" + + +@cbook.deprecated("3.2") +def compare_versions(a, b): + """Return whether version *a* is greater than or equal to version *b*.""" + if isinstance(a, bytes): + cbook.warn_deprecated( + "3.0", message="compare_versions arguments should be strs.") + a = a.decode('ascii') + if isinstance(b, bytes): + cbook.warn_deprecated( + "3.0", message="compare_versions arguments should be strs.") + b = b.decode('ascii') + if a: + return LooseVersion(a) >= LooseVersion(b) + else: + return False + + +def _check_versions(): + + # Quickfix to ensure Microsoft Visual C++ redistributable + # DLLs are loaded before importing kiwisolver + from . import ft2font + + for modname, minver in [ + ("cycler", "0.10"), + ("dateutil", "2.1"), + ("kiwisolver", "1.0.1"), + ("numpy", "1.15"), + ("pyparsing", "2.0.1"), + ]: + module = importlib.import_module(modname) + if LooseVersion(module.__version__) < minver: + raise ImportError("Matplotlib requires {}>={}; you have {}" + .format(modname, minver, module.__version__)) + + +_check_versions() + + +# The decorator ensures this always returns the same handler (and it is only +# attached once). +@functools.lru_cache() +def _ensure_handler(): + """ + The first time this function is called, attach a `StreamHandler` using the + same format as `logging.basicConfig` to the Matplotlib root logger. + + Return this handler every time this function is called. + """ + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT)) + _log.addHandler(handler) + return handler + + +def set_loglevel(level): + """ + Set Matplotlib's root logger and root logger handler level, creating + the handler if it does not exist yet. + + Typically, one should call ``set_loglevel("info")`` or + ``set_loglevel("debug")`` to get additional debugging information. + + Parameters + ---------- + level : {"notset", "debug", "info", "warning", "error", "critical"} + The log level of the handler. 
+ + Notes + ----- + The first time this function is called, an additional handler is attached + to Matplotlib's root handler; this handler is reused every time and this + function simply manipulates the logger and handler's level. + """ + _log.setLevel(level.upper()) + _ensure_handler().setLevel(level.upper()) + + +def _logged_cached(fmt, func=None): + """ + Decorator that logs a function's return value, and memoizes that value. + + After :: + + @_logged_cached(fmt) + def func(): ... + + the first call to *func* will log its return value at the DEBUG level using + %-format string *fmt*, and memoize it; later calls to *func* will directly + return that value. + """ + if func is None: # Return the actual decorator. + return functools.partial(_logged_cached, fmt) + + called = False + ret = None + + @functools.wraps(func) + def wrapper(**kwargs): + nonlocal called, ret + if not called: + ret = func(**kwargs) + called = True + _log.debug(fmt, ret) + return ret + + return wrapper + + +_ExecInfo = namedtuple("_ExecInfo", "executable version") + + +class ExecutableNotFoundError(FileNotFoundError): + """ + Error raised when an executable that Matplotlib optionally + depends on can't be found. + """ + pass + + +@functools.lru_cache() +def _get_executable_info(name): + """ + Get the version of some executable that Matplotlib optionally depends on. + + .. warning: + The list of executables that this function supports is set according to + Matplotlib's internal needs, and may change without notice. + + Parameters + ---------- + name : str + The executable to query. The following values are currently supported: + "dvipng", "gs", "inkscape", "magick", "pdftops". This list is subject + to change without notice. + + Returns + ------- + If the executable is found, a namedtuple with fields ``executable`` (`str`) + and ``version`` (`distutils.version.LooseVersion`, or ``None`` if the + version cannot be determined). + + Raises + ------ + ExecutableNotFoundError + If the executable is not found or older than the oldest version + supported by Matplotlib. + ValueError + If the executable is not one that we know how to query. + """ + + def impl(args, regex, min_ver=None, ignore_exit_code=False): + # Execute the subprocess specified by args; capture stdout and stderr. + # Search for a regex match in the output; if the match succeeds, the + # first group of the match is the version. + # Return an _ExecInfo if the executable exists, and has a version of + # at least min_ver (if set); else, raise ExecutableNotFoundError. + try: + output = subprocess.check_output( + args, stderr=subprocess.STDOUT, + universal_newlines=True, errors="replace") + except subprocess.CalledProcessError as _cpe: + if ignore_exit_code: + output = _cpe.output + else: + raise ExecutableNotFoundError(str(_cpe)) from _cpe + except OSError as _ose: + raise ExecutableNotFoundError(str(_ose)) from _ose + match = re.search(regex, output) + if match: + version = LooseVersion(match.group(1)) + if min_ver is not None and version < min_ver: + raise ExecutableNotFoundError( + f"You have {args[0]} version {version} but the minimum " + f"version supported by Matplotlib is {min_ver}") + return _ExecInfo(args[0], version) + else: + raise ExecutableNotFoundError( + f"Failed to determine the version of {args[0]} from " + f"{' '.join(args)}, which output {output}") + + if name == "dvipng": + return impl(["dvipng", "-version"], "(?m)^dvipng(?: .*)? (.+)", "1.6") + elif name == "gs": + execs = (["gswin32c", "gswin64c", "mgs", "gs"] # "mgs" for miktex. 
+ if sys.platform == "win32" else + ["gs"]) + for e in execs: + try: + return impl([e, "--version"], "(.*)", "9") + except ExecutableNotFoundError: + pass + message = "Failed to find a Ghostscript installation" + raise ExecutableNotFoundError(message) + elif name == "inkscape": + try: + # Try headless option first (needed for Inkscape version < 1.0): + return impl(["inkscape", "--without-gui", "-V"], + "Inkscape ([^ ]*)") + except ExecutableNotFoundError: + pass # Suppress exception chaining. + # If --without-gui is not accepted, we may be using Inkscape >= 1.0 so + # try without it: + return impl(["inkscape", "-V"], "Inkscape ([^ ]*)") + elif name == "magick": + path = None + if sys.platform == "win32": + # Check the registry to avoid confusing ImageMagick's convert with + # Windows's builtin convert.exe. + import winreg + binpath = "" + for flag in [0, winreg.KEY_WOW64_32KEY, winreg.KEY_WOW64_64KEY]: + try: + with winreg.OpenKeyEx( + winreg.HKEY_LOCAL_MACHINE, + r"Software\Imagemagick\Current", + 0, winreg.KEY_QUERY_VALUE | flag) as hkey: + binpath = winreg.QueryValueEx(hkey, "BinPath")[0] + except OSError: + pass + if binpath: + for name in ["convert.exe", "magick.exe"]: + candidate = Path(binpath, name) + if candidate.exists(): + path = str(candidate) + break + else: + path = "convert" + if path is None: + raise ExecutableNotFoundError( + "Failed to find an ImageMagick installation") + return impl([path, "--version"], r"^Version: ImageMagick (\S*)") + elif name == "pdftops": + info = impl(["pdftops", "-v"], "^pdftops version (.*)", + ignore_exit_code=True) + if info and not ("3.0" <= info.version + # poppler version numbers. + or "0.9" <= info.version <= "1.0"): + raise ExecutableNotFoundError( + f"You have pdftops version {info.version} but the minimum " + f"version supported by Matplotlib is 3.0") + return info + else: + raise ValueError("Unknown executable: {!r}".format(name)) + + +@cbook.deprecated("3.2") +def checkdep_ps_distiller(s): + if not s: + return False + try: + _get_executable_info("gs") + except ExecutableNotFoundError: + _log.warning( + "Setting rcParams['ps.usedistiller'] requires ghostscript.") + return False + if s == "xpdf": + try: + _get_executable_info("pdftops") + except ExecutableNotFoundError: + _log.warning( + "Setting rcParams['ps.usedistiller'] to 'xpdf' requires xpdf.") + return False + return s + + +def checkdep_usetex(s): + if not s: + return False + if not shutil.which("tex"): + _log.warning("usetex mode requires TeX.") + return False + try: + _get_executable_info("dvipng") + except ExecutableNotFoundError: + _log.warning("usetex mode requires dvipng.") + return False + try: + _get_executable_info("gs") + except ExecutableNotFoundError: + _log.warning("usetex mode requires ghostscript.") + return False + return True + + +@cbook.deprecated("3.2", alternative="os.path.expanduser('~')") +@_logged_cached('$HOME=%s') +def get_home(): + """ + Return the user's home directory. + + If the user's home directory cannot be found, return None. 
+ """ + try: + return str(Path.home()) + except Exception: + return None + + +def _get_xdg_config_dir(): + """ + Return the XDG configuration directory, according to the XDG base + directory spec: + + https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + """ + return os.environ.get('XDG_CONFIG_HOME') or str(Path.home() / ".config") + + +def _get_xdg_cache_dir(): + """ + Return the XDG cache directory, according to the XDG base directory spec: + + https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + """ + return os.environ.get('XDG_CACHE_HOME') or str(Path.home() / ".cache") + + +def _get_config_or_cache_dir(xdg_base): + configdir = os.environ.get('MPLCONFIGDIR') + if configdir: + configdir = Path(configdir).resolve() + elif sys.platform.startswith(('linux', 'freebsd')) and xdg_base: + configdir = Path(xdg_base, "matplotlib") + else: + configdir = Path.home() / ".matplotlib" + try: + configdir.mkdir(parents=True, exist_ok=True) + except OSError: + pass + else: + if os.access(str(configdir), os.W_OK) and configdir.is_dir(): + return str(configdir) + # If the config or cache directory cannot be created or is not a writable + # directory, create a temporary one. + tmpdir = os.environ["MPLCONFIGDIR"] = \ + tempfile.mkdtemp(prefix="matplotlib-") + atexit.register(shutil.rmtree, tmpdir) + _log.warning( + "Matplotlib created a temporary config/cache directory at %s because " + "the default path (%s) is not a writable directory; it is highly " + "recommended to set the MPLCONFIGDIR environment variable to a " + "writable directory, in particular to speed up the import of " + "Matplotlib and to better support multiprocessing.", + tmpdir, configdir) + return tmpdir + + +@_logged_cached('CONFIGDIR=%s') +def get_configdir(): + """ + Return the string path of the the configuration directory. + + The directory is chosen as follows: + + 1. If the MPLCONFIGDIR environment variable is supplied, choose that. + 2. On Linux, follow the XDG specification and look first in + ``$XDG_CONFIG_HOME``, if defined, or ``$HOME/.config``. On other + platforms, choose ``$HOME/.matplotlib``. + 3. If the chosen directory exists and is writable, use that as the + configuration directory. + 4. Else, create a temporary directory, and use it as the configuration + directory. + """ + return _get_config_or_cache_dir(_get_xdg_config_dir()) + + +@_logged_cached('CACHEDIR=%s') +def get_cachedir(): + """ + Return the string path of the cache directory. + + The procedure used to find the directory is the same as for + _get_config_dir, except using ``$XDG_CACHE_HOME``/``$HOME/.cache`` instead. + """ + return _get_config_or_cache_dir(_get_xdg_cache_dir()) + + +@_logged_cached('matplotlib data path: %s') +def get_data_path(*, _from_rc=None): + """Return the path to Matplotlib data.""" + if _from_rc is not None: + cbook.warn_deprecated( + "3.2", + message=("Setting the datapath via matplotlibrc is deprecated " + "%(since)s and will be removed %(removal)s."), + removal='3.4') + path = Path(_from_rc) + if path.is_dir(): + return str(path) + else: + warnings.warn(f"You passed datapath: {_from_rc!r} in your " + f"matplotribrc file ({matplotlib_fname()}). 
" + "However this path does not exist, falling back " + "to standard paths.") + + return _get_data_path() + + +@_logged_cached('(private) matplotlib data path: %s') +def _get_data_path(): + path = Path(__file__).with_name("mpl-data") + if path.is_dir(): + return str(path) + + cbook.warn_deprecated( + "3.2", message="Matplotlib installs where the data is not in the " + "mpl-data subdirectory of the package are deprecated since %(since)s " + "and support for them will be removed %(removal)s.") + + def get_candidate_paths(): + # setuptools' namespace_packages may hijack this init file + # so need to try something known to be in Matplotlib, not basemap. + import matplotlib.afm + yield Path(matplotlib.afm.__file__).with_name('mpl-data') + # py2exe zips pure python, so still need special check. + if getattr(sys, 'frozen', None): + yield Path(sys.executable).with_name('mpl-data') + # Try again assuming we need to step up one more directory. + yield Path(sys.executable).parent.with_name('mpl-data') + # Try again assuming sys.path[0] is a dir not a exe. + yield Path(sys.path[0]) / 'mpl-data' + + for path in get_candidate_paths(): + if path.is_dir(): + defaultParams['datapath'][0] = str(path) + return str(path) + + raise RuntimeError('Could not find the matplotlib data files') + + +def matplotlib_fname(): + """ + Get the location of the config file. + + The file location is determined in the following order + + - ``$PWD/matplotlibrc`` + - ``$MATPLOTLIBRC`` if it is not a directory + - ``$MATPLOTLIBRC/matplotlibrc`` + - ``$MPLCONFIGDIR/matplotlibrc`` + - On Linux, + - ``$XDG_CONFIG_HOME/matplotlib/matplotlibrc`` (if ``$XDG_CONFIG_HOME`` + is defined) + - or ``$HOME/.config/matplotlib/matplotlibrc`` (if ``$XDG_CONFIG_HOME`` + is not defined) + - On other platforms, + - ``$HOME/.matplotlib/matplotlibrc`` if ``$HOME`` is defined + - Lastly, it looks in ``$MATPLOTLIBDATA/matplotlibrc``, which should always + exist. + """ + + def gen_candidates(): + yield os.path.join(os.getcwd(), 'matplotlibrc') + try: + matplotlibrc = os.environ['MATPLOTLIBRC'] + except KeyError: + pass + else: + yield matplotlibrc + yield os.path.join(matplotlibrc, 'matplotlibrc') + yield os.path.join(get_configdir(), 'matplotlibrc') + yield os.path.join(_get_data_path(), 'matplotlibrc') + + for fname in gen_candidates(): + if os.path.exists(fname) and not os.path.isdir(fname): + return fname + + raise RuntimeError("Could not find matplotlibrc file; your Matplotlib " + "install is broken") + + +# rcParams deprecated and automatically mapped to another key. +# Values are tuples of (version, new_name, f_old2new, f_new2old). +_deprecated_map = {} + +# rcParams deprecated; some can manually be mapped to another key. +# Values are tuples of (version, new_name_or_None). +_deprecated_ignore_map = { +} + +# rcParams deprecated; can use None to suppress warnings; remain actually +# listed in the rcParams (not included in _all_deprecated). +# Values are tuples of (version,) +_deprecated_remain_as_none = { + 'datapath': ('3.2.1',), + 'animation.avconv_path': ('3.3',), + 'animation.avconv_args': ('3.3',), + 'animation.html_args': ('3.3',), + 'mathtext.fallback_to_cm': ('3.3',), + 'keymap.all_axes': ('3.3',), + 'savefig.jpeg_quality': ('3.3',), + 'text.latex.preview': ('3.3',), +} + + +_all_deprecated = {*_deprecated_map, *_deprecated_ignore_map} + + +class RcParams(MutableMapping, dict): + """ + A dictionary object including validation. + + Validating functions are defined and associated with rc parameters in + :mod:`matplotlib.rcsetup`. 
+ + See Also + -------- + :ref:`customizing-with-matplotlibrc-files` + """ + + validate = rcsetup._validators + + # validate values on the way in + def __init__(self, *args, **kwargs): + self.update(*args, **kwargs) + + def __setitem__(self, key, val): + try: + if key in _deprecated_map: + version, alt_key, alt_val, inverse_alt = _deprecated_map[key] + cbook.warn_deprecated( + version, name=key, obj_type="rcparam", alternative=alt_key) + key = alt_key + val = alt_val(val) + elif key in _deprecated_remain_as_none and val is not None: + version, = _deprecated_remain_as_none[key] + cbook.warn_deprecated( + version, name=key, obj_type="rcparam") + elif key in _deprecated_ignore_map: + version, alt_key = _deprecated_ignore_map[key] + cbook.warn_deprecated( + version, name=key, obj_type="rcparam", alternative=alt_key) + return + elif key == 'backend': + if val is rcsetup._auto_backend_sentinel: + if 'backend' in self: + return + try: + cval = self.validate[key](val) + except ValueError as ve: + raise ValueError(f"Key {key}: {ve}") from None + dict.__setitem__(self, key, cval) + except KeyError as err: + raise KeyError( + f"{key} is not a valid rc parameter (see rcParams.keys() for " + f"a list of valid parameters)") from err + + def __getitem__(self, key): + if key in _deprecated_map: + version, alt_key, alt_val, inverse_alt = _deprecated_map[key] + cbook.warn_deprecated( + version, name=key, obj_type="rcparam", alternative=alt_key) + return inverse_alt(dict.__getitem__(self, alt_key)) + + elif key in _deprecated_ignore_map: + version, alt_key = _deprecated_ignore_map[key] + cbook.warn_deprecated( + version, name=key, obj_type="rcparam", alternative=alt_key) + return dict.__getitem__(self, alt_key) if alt_key else None + + elif key == "backend": + val = dict.__getitem__(self, key) + if val is rcsetup._auto_backend_sentinel: + from matplotlib import pyplot as plt + plt.switch_backend(rcsetup._auto_backend_sentinel) + + elif key == "datapath": + return get_data_path() + + return dict.__getitem__(self, key) + + def __repr__(self): + class_name = self.__class__.__name__ + indent = len(class_name) + 1 + with cbook._suppress_matplotlib_deprecation_warning(): + repr_split = pprint.pformat(dict(self), indent=1, + width=80 - indent).split('\n') + repr_indented = ('\n' + ' ' * indent).join(repr_split) + return '{}({})'.format(class_name, repr_indented) + + def __str__(self): + return '\n'.join(map('{0[0]}: {0[1]}'.format, sorted(self.items()))) + + def __iter__(self): + """Yield sorted list of keys.""" + with cbook._suppress_matplotlib_deprecation_warning(): + yield from sorted(dict.__iter__(self)) + + def __len__(self): + return dict.__len__(self) + + def find_all(self, pattern): + """ + Return the subset of this RcParams dictionary whose keys match, + using :func:`re.search`, the given ``pattern``. + + .. note:: + + Changes to the returned dictionary are *not* propagated to + the parent RcParams dictionary. 
+ + """ + pattern_re = re.compile(pattern) + return RcParams((key, value) + for key, value in self.items() + if pattern_re.search(key)) + + def copy(self): + return {k: dict.__getitem__(self, k) for k in self} + + +def rc_params(fail_on_error=False): + """Construct a `RcParams` instance from the default Matplotlib rc file.""" + return rc_params_from_file(matplotlib_fname(), fail_on_error) + + +URL_REGEX = re.compile(r'^http://|^https://|^ftp://|^file:') + + +def is_url(filename): + """Return True if string is an http, ftp, or file URL path.""" + return URL_REGEX.match(filename) is not None + + +@functools.lru_cache() +def _get_ssl_context(): + try: + import certifi + except ImportError: + _log.debug("Could not import certifi.") + return None + import ssl + return ssl.create_default_context(cafile=certifi.where()) + + +@contextlib.contextmanager +def _open_file_or_url(fname): + if not isinstance(fname, Path) and is_url(fname): + import urllib.request + ssl_ctx = _get_ssl_context() + if ssl_ctx is None: + _log.debug( + "Could not get certifi ssl context, https may not work." + ) + with urllib.request.urlopen(fname, context=ssl_ctx) as f: + yield (line.decode('utf-8') for line in f) + else: + fname = os.path.expanduser(fname) + encoding = locale.getpreferredencoding(do_setlocale=False) + if encoding is None: + encoding = "utf-8" + with open(fname, encoding=encoding) as f: + yield f + + +def _rc_params_in_file(fname, transform=lambda x: x, fail_on_error=False): + """ + Construct a `RcParams` instance from file *fname*. + + Unlike `rc_params_from_file`, the configuration class only contains the + parameters specified in the file (i.e. default values are not filled in). + + Parameters + ---------- + fname : path-like + The loaded file. + transform : callable, default: the identity function + A function called on each individual line of the file to transform it, + before further parsing. + fail_on_error : bool, default: False + Whether invalid entries should result in an exception or a warning. 
+ """ + rc_temp = {} + with _open_file_or_url(fname) as fd: + try: + for line_no, line in enumerate(fd, 1): + line = transform(line) + strippedline = line.split('#', 1)[0].strip() + if not strippedline: + continue + tup = strippedline.split(':', 1) + if len(tup) != 2: + _log.warning('Missing colon in file %r, line %d (%r)', + fname, line_no, line.rstrip('\n')) + continue + key, val = tup + key = key.strip() + val = val.strip() + if key in rc_temp: + _log.warning('Duplicate key in file %r, line %d (%r)', + fname, line_no, line.rstrip('\n')) + rc_temp[key] = (val, line, line_no) + except UnicodeDecodeError: + _log.warning('Cannot decode configuration file %s with encoding ' + '%s, check LANG and LC_* variables.', + fname, + locale.getpreferredencoding(do_setlocale=False) + or 'utf-8 (default)') + raise + + config = RcParams() + + for key, (val, line, line_no) in rc_temp.items(): + if key in rcsetup._validators: + if fail_on_error: + config[key] = val # try to convert to proper type or raise + else: + try: + config[key] = val # try to convert to proper type or skip + except Exception as msg: + _log.warning('Bad value in file %r, line %d (%r): %s', + fname, line_no, line.rstrip('\n'), msg) + elif key in _deprecated_ignore_map: + version, alt_key = _deprecated_ignore_map[key] + cbook.warn_deprecated( + version, name=key, alternative=alt_key, + addendum="Please update your matplotlibrc.") + else: + version = 'master' if '.post' in __version__ else f'v{__version__}' + _log.warning(""" +Bad key %(key)s in file %(fname)s, line %(line_no)s (%(line)r) +You probably need to get an updated matplotlibrc file from +https://github.com/matplotlib/matplotlib/blob/%(version)s/matplotlibrc.template +or from the matplotlib source distribution""", + dict(key=key, fname=fname, line_no=line_no, + line=line.rstrip('\n'), version=version)) + return config + + +def rc_params_from_file(fname, fail_on_error=False, use_default_template=True): + """ + Construct a `RcParams` from file *fname*. + + Parameters + ---------- + fname : str or path-like + A file with Matplotlib rc settings. + fail_on_error : bool + If True, raise an error when the parser fails to convert a parameter. + use_default_template : bool + If True, initialize with default parameters before updating with those + in the given file. If False, the configuration class only contains the + parameters specified in the file. (Useful for updating dicts.) + """ + config_from_file = _rc_params_in_file(fname, fail_on_error=fail_on_error) + + if not use_default_template: + return config_from_file + + with cbook._suppress_matplotlib_deprecation_warning(): + config = RcParams({**rcParamsDefault, **config_from_file}) + + with cbook._suppress_matplotlib_deprecation_warning(): + if config['datapath'] is None: + config['datapath'] = _get_data_path() + else: + config['datapath'] = get_data_path(_from_rc=config['datapath']) + + if "".join(config['text.latex.preamble']): + _log.info(""" +***************************************************************** +You have the following UNSUPPORTED LaTeX preamble customizations: +%s +Please do not ask for support with these customizations active. +***************************************************************** +""", '\n'.join(config['text.latex.preamble'])) + _log.debug('loaded rc file %s', fname) + + return config + + +# When constructing the global instances, we need to perform certain updates +# by explicitly calling the superclass (dict.update, dict.items) to avoid +# triggering resolution of _auto_backend_sentinel. 
+rcParamsDefault = _rc_params_in_file( + cbook._get_data_path("matplotlibrc"), + # Strip leading comment. + transform=lambda line: line[1:] if line.startswith("#") else line, + fail_on_error=True) +dict.update(rcParamsDefault, rcsetup._hardcoded_defaults) +rcParams = RcParams() # The global instance. +dict.update(rcParams, dict.items(rcParamsDefault)) +dict.update(rcParams, _rc_params_in_file(matplotlib_fname())) +with cbook._suppress_matplotlib_deprecation_warning(): + rcParamsOrig = RcParams(rcParams.copy()) + # This also checks that all rcParams are indeed listed in the template. + # Assiging to rcsetup.defaultParams is left only for backcompat. + defaultParams = rcsetup.defaultParams = { + # We want to resolve deprecated rcParams, but not backend... + key: [(rcsetup._auto_backend_sentinel if key == "backend" else + rcParamsDefault[key]), + validator] + for key, validator in rcsetup._validators.items()} +if rcParams['axes.formatter.use_locale']: + locale.setlocale(locale.LC_ALL, '') + + +def rc(group, **kwargs): + """ + Set the current `.rcParams`. *group* is the grouping for the rc, e.g., + for ``lines.linewidth`` the group is ``lines``, for + ``axes.facecolor``, the group is ``axes``, and so on. Group may + also be a list or tuple of group names, e.g., (*xtick*, *ytick*). + *kwargs* is a dictionary attribute name/value pairs, e.g.,:: + + rc('lines', linewidth=2, color='r') + + sets the current `.rcParams` and is equivalent to:: + + rcParams['lines.linewidth'] = 2 + rcParams['lines.color'] = 'r' + + The following aliases are available to save typing for interactive users: + + ===== ================= + Alias Property + ===== ================= + 'lw' 'linewidth' + 'ls' 'linestyle' + 'c' 'color' + 'fc' 'facecolor' + 'ec' 'edgecolor' + 'mew' 'markeredgewidth' + 'aa' 'antialiased' + ===== ================= + + Thus you could abbreviate the above call as:: + + rc('lines', lw=2, c='r') + + Note you can use python's kwargs dictionary facility to store + dictionaries of default parameters. e.g., you can customize the + font rc as follows:: + + font = {'family' : 'monospace', + 'weight' : 'bold', + 'size' : 'larger'} + rc('font', **font) # pass in the font dict as kwargs + + This enables you to easily switch between several configurations. Use + ``matplotlib.style.use('default')`` or :func:`~matplotlib.rcdefaults` to + restore the default `.rcParams` after changes. + + Notes + ----- + Similar functionality is available by using the normal dict interface, i.e. + ``rcParams.update({"lines.linewidth": 2, ...})`` (but ``rcParams.update`` + does not support abbreviations or grouping). + """ + + aliases = { + 'lw': 'linewidth', + 'ls': 'linestyle', + 'c': 'color', + 'fc': 'facecolor', + 'ec': 'edgecolor', + 'mew': 'markeredgewidth', + 'aa': 'antialiased', + } + + if isinstance(group, str): + group = (group,) + for g in group: + for k, v in kwargs.items(): + name = aliases.get(k) or k + key = '%s.%s' % (g, name) + try: + rcParams[key] = v + except KeyError as err: + raise KeyError(('Unrecognized key "%s" for group "%s" and ' + 'name "%s"') % (key, g, name)) from err + + +def rcdefaults(): + """ + Restore the `.rcParams` from Matplotlib's internal default style. + + Style-blacklisted `.rcParams` (defined in + `matplotlib.style.core.STYLE_BLACKLIST`) are not updated. + + See Also + -------- + matplotlib.rc_file_defaults + Restore the `.rcParams` from the rc file originally loaded by + Matplotlib. + matplotlib.style.use + Use a specific style file. 
Call ``style.use('default')`` to restore + the default style. + """ + # Deprecation warnings were already handled when creating rcParamsDefault, + # no need to reemit them here. + with cbook._suppress_matplotlib_deprecation_warning(): + from .style.core import STYLE_BLACKLIST + rcParams.clear() + rcParams.update({k: v for k, v in rcParamsDefault.items() + if k not in STYLE_BLACKLIST}) + + +def rc_file_defaults(): + """ + Restore the `.rcParams` from the original rc file loaded by Matplotlib. + + Style-blacklisted `.rcParams` (defined in + `matplotlib.style.core.STYLE_BLACKLIST`) are not updated. + """ + # Deprecation warnings were already handled when creating rcParamsOrig, no + # need to reemit them here. + with cbook._suppress_matplotlib_deprecation_warning(): + from .style.core import STYLE_BLACKLIST + rcParams.update({k: rcParamsOrig[k] for k in rcParamsOrig + if k not in STYLE_BLACKLIST}) + + +def rc_file(fname, *, use_default_template=True): + """ + Update `.rcParams` from file. + + Style-blacklisted `.rcParams` (defined in + `matplotlib.style.core.STYLE_BLACKLIST`) are not updated. + + Parameters + ---------- + fname : str or path-like + A file with Matplotlib rc settings. + + use_default_template : bool + If True, initialize with default parameters before updating with those + in the given file. If False, the current configuration persists + and only the parameters specified in the file are updated. + """ + # Deprecation warnings were already handled in rc_params_from_file, no need + # to reemit them here. + with cbook._suppress_matplotlib_deprecation_warning(): + from .style.core import STYLE_BLACKLIST + rc_from_file = rc_params_from_file( + fname, use_default_template=use_default_template) + rcParams.update({k: rc_from_file[k] for k in rc_from_file + if k not in STYLE_BLACKLIST}) + + +@contextlib.contextmanager +def rc_context(rc=None, fname=None): + """ + Return a context manager for temporarily changing rcParams. + + Parameters + ---------- + rc : dict + The rcParams to temporarily set. + fname : str or path-like + A file with Matplotlib rc settings. If both *fname* and *rc* are given, + settings from *rc* take precedence. + + See Also + -------- + :ref:`customizing-with-matplotlibrc-files` + + Examples + -------- + Passing explicit values via a dict:: + + with mpl.rc_context({'interactive': False}): + fig, ax = plt.subplots() + ax.plot(range(3), range(3)) + fig.savefig('example.png') + plt.close(fig) + + Loading settings from a file:: + + with mpl.rc_context(fname='print.rc'): + plt.plot(x, y) # uses 'print.rc' + + """ + orig = rcParams.copy() + try: + if fname: + rc_file(fname) + if rc: + rcParams.update(rc) + yield + finally: + dict.update(rcParams, orig) # Revert to the original rcs. + + +def use(backend, *, force=True): + """ + Select the backend used for rendering and GUI integration. + + Parameters + ---------- + backend : str + The backend to switch to. This can either be one of the standard + backend names, which are case-insensitive: + + - interactive backends: + GTK3Agg, GTK3Cairo, MacOSX, nbAgg, + Qt4Agg, Qt4Cairo, Qt5Agg, Qt5Cairo, + TkAgg, TkCairo, WebAgg, WX, WXAgg, WXCairo + + - non-interactive backends: + agg, cairo, pdf, pgf, ps, svg, template + + or a string of the form: ``module://my.module.name``. + + force : bool, default: True + If True (the default), raise an `ImportError` if the backend cannot be + set up (either because it fails to import, or because an incompatible + GUI interactive framework is already running); if False, ignore the + failure. 
+ + See Also + -------- + :ref:`backends` + matplotlib.get_backend + """ + name = validate_backend(backend) + # we need to use the base-class method here to avoid (prematurely) + # resolving the "auto" backend setting + if dict.__getitem__(rcParams, 'backend') == name: + # Nothing to do if the requested backend is already set + pass + else: + # if pyplot is not already imported, do not import it. Doing + # so may trigger a `plt.switch_backend` to the _default_ backend + # before we get a chance to change to the one the user just requested + plt = sys.modules.get('matplotlib.pyplot') + # if pyplot is imported, then try to change backends + if plt is not None: + try: + # we need this import check here to re-raise if the + # user does not have the libraries to support their + # chosen backend installed. + plt.switch_backend(name) + except ImportError: + if force: + raise + # if we have not imported pyplot, then we can set the rcParam + # value which will be respected when the user finally imports + # pyplot + else: + rcParams['backend'] = backend + # if the user has asked for a given backend, do not helpfully + # fallback + rcParams['backend_fallback'] = False + + +if os.environ.get('MPLBACKEND'): + rcParams['backend'] = os.environ.get('MPLBACKEND') + + +def get_backend(): + """ + Return the name of the current backend. + + See Also + -------- + matplotlib.use + """ + return rcParams['backend'] + + +def interactive(b): + """ + Set whether to redraw after every plotting command (e.g. `.pyplot.xlabel`). + """ + rcParams['interactive'] = b + + +def is_interactive(): + """Return whether to redraw after every plotting command.""" + return rcParams['interactive'] + + +default_test_modules = [ + 'matplotlib.tests', + 'mpl_toolkits.tests', +] + + +def _init_tests(): + # The version of FreeType to install locally for running the + # tests. This must match the value in `setupext.py` + LOCAL_FREETYPE_VERSION = '2.6.1' + + from matplotlib import ft2font + if (ft2font.__freetype_version__ != LOCAL_FREETYPE_VERSION or + ft2font.__freetype_build_type__ != 'local'): + _log.warning( + f"Matplotlib is not built with the correct FreeType version to " + f"run tests. Rebuild without setting system_freetype=1 in " + f"setup.cfg. Expect many image comparison failures below. " + f"Expected freetype version {LOCAL_FREETYPE_VERSION}. " + f"Found freetype version {ft2font.__freetype_version__}. 
" + "Freetype build type is {}local".format( + "" if ft2font.__freetype_build_type__ == 'local' else "not ")) + + +@cbook._delete_parameter("3.2", "switch_backend_warn") +@cbook._delete_parameter("3.3", "recursionlimit") +def test(verbosity=None, coverage=False, switch_backend_warn=True, + recursionlimit=0, **kwargs): + """Run the matplotlib test suite.""" + + try: + import pytest + except ImportError: + print("matplotlib.test requires pytest to run.") + return -1 + + if not os.path.isdir(os.path.join(os.path.dirname(__file__), 'tests')): + print("Matplotlib test data is not installed") + return -1 + + old_backend = get_backend() + old_recursionlimit = sys.getrecursionlimit() + try: + use('agg') + if recursionlimit: + sys.setrecursionlimit(recursionlimit) + + args = kwargs.pop('argv', []) + provide_default_modules = True + use_pyargs = True + for arg in args: + if any(arg.startswith(module_path) + for module_path in default_test_modules): + provide_default_modules = False + break + if os.path.exists(arg): + provide_default_modules = False + use_pyargs = False + break + if use_pyargs: + args += ['--pyargs'] + if provide_default_modules: + args += default_test_modules + + if coverage: + args += ['--cov'] + + if verbosity: + args += ['-' + 'v' * verbosity] + + retcode = pytest.main(args, **kwargs) + finally: + if old_backend.lower() != 'agg': + use(old_backend) + if recursionlimit: + sys.setrecursionlimit(old_recursionlimit) + + return retcode + + +test.__test__ = False # pytest: this function is not a test + + +def _replacer(data, value): + """ + Either returns ``data[value]`` or passes ``data`` back, converts either to + a sequence. + """ + try: + # if key isn't a string don't bother + if isinstance(value, str): + # try to use __getitem__ + value = data[value] + except Exception: + # key does not exist, silently fall back to key + pass + return sanitize_sequence(value) + + +def _label_from_arg(y, default_name): + try: + return y.name + except AttributeError: + if isinstance(default_name, str): + return default_name + return None + + +_DATA_DOC_TITLE = """ + +Notes +----- +""" + +_DATA_DOC_APPENDIX = """ + +.. note:: + In addition to the above described arguments, this function can take + a *data* keyword argument. If such a *data* argument is given, +{replaced} + + Objects passed as **data** must support item access (``data[s]``) and + membership test (``s in data``). +""" + + +def _add_data_doc(docstring, replace_names): + """ + Add documentation for a *data* field to the given docstring. + + Parameters + ---------- + docstring : str + The input docstring. + replace_names : list of str or None + The list of parameter names which arguments should be replaced by + ``data[name]`` (if ``data[name]`` does not throw an exception). If + None, replacement is attempted for all arguments. + + Returns + ------- + str + The augmented docstring. 
+ """ + if (docstring is None + or replace_names is not None and len(replace_names) == 0): + return docstring + docstring = inspect.cleandoc(docstring) + repl = ( + (" every other argument can also be string ``s``, which is\n" + " interpreted as ``data[s]`` (unless this raises an exception).") + if replace_names is None else + (" the following arguments can also be string ``s``, which is\n" + " interpreted as ``data[s]`` (unless this raises an exception):\n" + " " + ", ".join(map("*{}*".format, replace_names))) + ".") + addendum = _DATA_DOC_APPENDIX.format(replaced=repl) + if _DATA_DOC_TITLE not in docstring: + addendum = _DATA_DOC_TITLE + addendum + return docstring + addendum + + +def _preprocess_data(func=None, *, replace_names=None, label_namer=None): + """ + A decorator to add a 'data' kwarg to a function. + + When applied:: + + @_preprocess_data() + def func(ax, *args, **kwargs): ... + + the signature is modified to ``decorated(ax, *args, data=None, **kwargs)`` + with the following behavior: + + - if called with ``data=None``, forward the other arguments to ``func``; + - otherwise, *data* must be a mapping; for any argument passed in as a + string ``name``, replace the argument by ``data[name]`` (if this does not + throw an exception), then forward the arguments to ``func``. + + In either case, any argument that is a `MappingView` is also converted to a + list. + + Parameters + ---------- + replace_names : list of str or None, default: None + The list of parameter names for which lookup into *data* should be + attempted. If None, replacement is attempted for all arguments. + label_namer : str, default: None + If set e.g. to "namer" (which must be a kwarg in the function's + signature -- not as ``**kwargs``), if the *namer* argument passed in is + a (string) key of *data* and no *label* kwarg is passed, then use the + (string) value of the *namer* as *label*. :: + + @_preprocess_data(label_namer="foo") + def func(foo, label=None): ... + + func("key", data={"key": value}) + # is equivalent to + func.__wrapped__(value, label="key") + """ + + if func is None: # Return the actual decorator. 
+ return functools.partial( + _preprocess_data, + replace_names=replace_names, label_namer=label_namer) + + sig = inspect.signature(func) + varargs_name = None + varkwargs_name = None + arg_names = [] + params = list(sig.parameters.values()) + for p in params: + if p.kind is Parameter.VAR_POSITIONAL: + varargs_name = p.name + elif p.kind is Parameter.VAR_KEYWORD: + varkwargs_name = p.name + else: + arg_names.append(p.name) + data_param = Parameter("data", Parameter.KEYWORD_ONLY, default=None) + if varkwargs_name: + params.insert(-1, data_param) + else: + params.append(data_param) + new_sig = sig.replace(parameters=params) + arg_names = arg_names[1:] # remove the first "ax" / self arg + + assert {*arg_names}.issuperset(replace_names or []) or varkwargs_name, ( + "Matplotlib internal error: invalid replace_names ({!r}) for {!r}" + .format(replace_names, func.__name__)) + assert label_namer is None or label_namer in arg_names, ( + "Matplotlib internal error: invalid label_namer ({!r}) for {!r}" + .format(label_namer, func.__name__)) + + @functools.wraps(func) + def inner(ax, *args, data=None, **kwargs): + if data is None: + return func(ax, *map(sanitize_sequence, args), **kwargs) + + bound = new_sig.bind(ax, *args, **kwargs) + auto_label = (bound.arguments.get(label_namer) + or bound.kwargs.get(label_namer)) + + for k, v in bound.arguments.items(): + if k == varkwargs_name: + for k1, v1 in v.items(): + if replace_names is None or k1 in replace_names: + v[k1] = _replacer(data, v1) + elif k == varargs_name: + if replace_names is None: + bound.arguments[k] = tuple(_replacer(data, v1) for v1 in v) + else: + if replace_names is None or k in replace_names: + bound.arguments[k] = _replacer(data, v) + + new_args = bound.args + new_kwargs = bound.kwargs + + args_and_kwargs = {**bound.arguments, **bound.kwargs} + if label_namer and "label" not in args_and_kwargs: + new_kwargs["label"] = _label_from_arg( + args_and_kwargs.get(label_namer), auto_label) + + return func(*new_args, **new_kwargs) + + inner.__doc__ = _add_data_doc(inner.__doc__, replace_names) + inner.__signature__ = new_sig + return inner + + +_log.debug('matplotlib version %s', __version__) +_log.debug('interactive is %s', is_interactive()) +_log.debug('platform is %s', sys.platform) +_log.debug('loaded modules: %s', list(sys.modules)) diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/__init__.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..5d284ed Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/__init__.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_animation_data.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_animation_data.cpython-36.pyc new file mode 100644 index 0000000..df606ca Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_animation_data.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_cm.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_cm.cpython-36.pyc new file mode 100644 index 0000000..3a5ff0f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_cm.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_cm_listed.cpython-36.pyc 
b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_cm_listed.cpython-36.pyc new file mode 100644 index 0000000..570a3b0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_cm_listed.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_color_data.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_color_data.cpython-36.pyc new file mode 100644 index 0000000..97a7731 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_color_data.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_constrained_layout.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_constrained_layout.cpython-36.pyc new file mode 100644 index 0000000..e1fd01f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_constrained_layout.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_internal_utils.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_internal_utils.cpython-36.pyc new file mode 100644 index 0000000..eea7005 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_internal_utils.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_layoutbox.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_layoutbox.cpython-36.pyc new file mode 100644 index 0000000..9de83cc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_layoutbox.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_mathtext_data.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_mathtext_data.cpython-36.pyc new file mode 100644 index 0000000..e33587e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_mathtext_data.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_pylab_helpers.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_pylab_helpers.cpython-36.pyc new file mode 100644 index 0000000..cf2187f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_pylab_helpers.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_text_layout.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_text_layout.cpython-36.pyc new file mode 100644 index 0000000..88f5623 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_text_layout.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_version.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_version.cpython-36.pyc new file mode 100644 index 0000000..4bc53e6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/_version.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/afm.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/afm.cpython-36.pyc new file mode 100644 index 0000000..6bb618b Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/afm.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/animation.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/animation.cpython-36.pyc new file mode 100644 index 0000000..2366bab Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/animation.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/artist.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/artist.cpython-36.pyc new file mode 100644 index 0000000..ec0fe1d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/artist.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/axis.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/axis.cpython-36.pyc new file mode 100644 index 0000000..a06ea7b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/axis.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/backend_bases.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/backend_bases.cpython-36.pyc new file mode 100644 index 0000000..8f62496 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/backend_bases.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/backend_managers.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/backend_managers.cpython-36.pyc new file mode 100644 index 0000000..663dccb Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/backend_managers.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/backend_tools.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/backend_tools.cpython-36.pyc new file mode 100644 index 0000000..ef4a61e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/backend_tools.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/bezier.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/bezier.cpython-36.pyc new file mode 100644 index 0000000..9be0996 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/bezier.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/blocking_input.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/blocking_input.cpython-36.pyc new file mode 100644 index 0000000..5753634 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/blocking_input.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/category.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/category.cpython-36.pyc new file mode 100644 index 0000000..d9f9a2a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/category.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/cm.cpython-36.pyc 
b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/cm.cpython-36.pyc new file mode 100644 index 0000000..2574eca Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/cm.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/collections.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/collections.cpython-36.pyc new file mode 100644 index 0000000..9b3137a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/collections.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/colorbar.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/colorbar.cpython-36.pyc new file mode 100644 index 0000000..e824236 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/colorbar.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/colors.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/colors.cpython-36.pyc new file mode 100644 index 0000000..d0157a1 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/colors.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/container.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/container.cpython-36.pyc new file mode 100644 index 0000000..cb3a2bc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/container.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/contour.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/contour.cpython-36.pyc new file mode 100644 index 0000000..d6c82df Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/contour.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/dates.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/dates.cpython-36.pyc new file mode 100644 index 0000000..535a8fa Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/dates.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/docstring.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/docstring.cpython-36.pyc new file mode 100644 index 0000000..dad2f9b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/docstring.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/dviread.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/dviread.cpython-36.pyc new file mode 100644 index 0000000..42964f0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/dviread.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/figure.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/figure.cpython-36.pyc new file mode 100644 index 0000000..ceec0db Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/figure.cpython-36.pyc differ diff --git 
a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/font_manager.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/font_manager.cpython-36.pyc new file mode 100644 index 0000000..e76f5c6 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/font_manager.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/fontconfig_pattern.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/fontconfig_pattern.cpython-36.pyc new file mode 100644 index 0000000..70d7da4 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/fontconfig_pattern.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/gridspec.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/gridspec.cpython-36.pyc new file mode 100644 index 0000000..42c258b Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/gridspec.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/hatch.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/hatch.cpython-36.pyc new file mode 100644 index 0000000..b76830d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/hatch.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/image.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/image.cpython-36.pyc new file mode 100644 index 0000000..6c217ef Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/image.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/legend.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/legend.cpython-36.pyc new file mode 100644 index 0000000..7ac7218 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/legend.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/legend_handler.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/legend_handler.cpython-36.pyc new file mode 100644 index 0000000..54bb781 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/legend_handler.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/lines.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/lines.cpython-36.pyc new file mode 100644 index 0000000..ea2c0e4 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/lines.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/markers.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/markers.cpython-36.pyc new file mode 100644 index 0000000..035c105 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/markers.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/mathtext.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/mathtext.cpython-36.pyc new file mode 100644 index 0000000..daeb95e Binary files /dev/null and 
b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/mathtext.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/mlab.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/mlab.cpython-36.pyc new file mode 100644 index 0000000..081b94d Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/mlab.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/offsetbox.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/offsetbox.cpython-36.pyc new file mode 100644 index 0000000..7ed0a01 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/offsetbox.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/patches.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/patches.cpython-36.pyc new file mode 100644 index 0000000..515fe9e Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/patches.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/path.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/path.cpython-36.pyc new file mode 100644 index 0000000..14689fc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/path.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/patheffects.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/patheffects.cpython-36.pyc new file mode 100644 index 0000000..f0ad953 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/patheffects.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/pylab.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/pylab.cpython-36.pyc new file mode 100644 index 0000000..7a50624 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/pylab.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/pyplot.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/pyplot.cpython-36.pyc new file mode 100644 index 0000000..2fc594f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/pyplot.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/quiver.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/quiver.cpython-36.pyc new file mode 100644 index 0000000..0e5c9fe Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/quiver.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/rcsetup.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/rcsetup.cpython-36.pyc new file mode 100644 index 0000000..1291656 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/rcsetup.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/sankey.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/sankey.cpython-36.pyc new file mode 100644 index 0000000..10a8f2f 
Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/sankey.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/scale.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/scale.cpython-36.pyc new file mode 100644 index 0000000..f1daafd Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/scale.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/spines.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/spines.cpython-36.pyc new file mode 100644 index 0000000..bf35a33 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/spines.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/stackplot.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/stackplot.cpython-36.pyc new file mode 100644 index 0000000..dab5b54 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/stackplot.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/streamplot.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/streamplot.cpython-36.pyc new file mode 100644 index 0000000..1988b58 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/streamplot.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/table.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/table.cpython-36.pyc new file mode 100644 index 0000000..6722f4f Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/table.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/texmanager.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/texmanager.cpython-36.pyc new file mode 100644 index 0000000..9c16144 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/texmanager.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/text.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/text.cpython-36.pyc new file mode 100644 index 0000000..6874610 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/text.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/textpath.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/textpath.cpython-36.pyc new file mode 100644 index 0000000..614c651 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/textpath.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/ticker.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/ticker.cpython-36.pyc new file mode 100644 index 0000000..5e7df84 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/ticker.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/tight_bbox.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/tight_bbox.cpython-36.pyc new 
file mode 100644 index 0000000..33cab03 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/tight_bbox.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/tight_layout.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/tight_layout.cpython-36.pyc new file mode 100644 index 0000000..dd336b4 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/tight_layout.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/transforms.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/transforms.cpython-36.pyc new file mode 100644 index 0000000..4e8eebc Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/transforms.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/ttconv.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/ttconv.cpython-36.pyc new file mode 100644 index 0000000..f284d06 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/ttconv.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/type1font.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/type1font.cpython-36.pyc new file mode 100644 index 0000000..af1f1f8 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/type1font.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/units.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/units.cpython-36.pyc new file mode 100644 index 0000000..e476128 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/units.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/widgets.cpython-36.pyc b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/widgets.cpython-36.pyc new file mode 100644 index 0000000..fcedf7a Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/__pycache__/widgets.cpython-36.pyc differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_animation_data.py b/minor_project/lib/python3.6/site-packages/matplotlib/_animation_data.py new file mode 100644 index 0000000..b7d3acc --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_animation_data.py @@ -0,0 +1,262 @@ +# Javascript template for HTMLWriter +JS_INCLUDE = """ + + +""" + + +# Style definitions for the HTML template +STYLE_INCLUDE = """ + +""" + + +# HTML template for HTMLWriter +DISPLAY_TEMPLATE = """ +
[omitted: the HTML/JS/CSS bodies of JS_INCLUDE, STYLE_INCLUDE, and DISPLAY_TEMPLATE (the HTMLWriter animation-player markup) were stripped from this dump and are not reproduced]
+ + + +""" + + +INCLUDED_FRAMES = """ + for (var i=0; i<{Nframes}; i++){{ + frames[i] = "{frame_dir}/frame" + ("0000000" + i).slice(-7) + + ".{frame_format}"; + }} +""" diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_cm.py b/minor_project/lib/python3.6/site-packages/matplotlib/_cm.py new file mode 100644 index 0000000..f51b759 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_cm.py @@ -0,0 +1,1434 @@ +""" +Nothing here but dictionaries for generating LinearSegmentedColormaps, +and a dictionary of these dictionaries. + +Documentation for each is in pyplot.colormaps(). Please update this +with the purpose and type of your colormap if you add data for one here. +""" + +from functools import partial + +import numpy as np + +_binary_data = { + 'red': ((0., 1., 1.), (1., 0., 0.)), + 'green': ((0., 1., 1.), (1., 0., 0.)), + 'blue': ((0., 1., 1.), (1., 0., 0.)) + } + +_autumn_data = {'red': ((0., 1.0, 1.0), (1.0, 1.0, 1.0)), + 'green': ((0., 0., 0.), (1.0, 1.0, 1.0)), + 'blue': ((0., 0., 0.), (1.0, 0., 0.))} + +_bone_data = {'red': ((0., 0., 0.), + (0.746032, 0.652778, 0.652778), + (1.0, 1.0, 1.0)), + 'green': ((0., 0., 0.), + (0.365079, 0.319444, 0.319444), + (0.746032, 0.777778, 0.777778), + (1.0, 1.0, 1.0)), + 'blue': ((0., 0., 0.), + (0.365079, 0.444444, 0.444444), + (1.0, 1.0, 1.0))} + +_cool_data = {'red': ((0., 0., 0.), (1.0, 1.0, 1.0)), + 'green': ((0., 1., 1.), (1.0, 0., 0.)), + 'blue': ((0., 1., 1.), (1.0, 1., 1.))} + +_copper_data = {'red': ((0., 0., 0.), + (0.809524, 1.000000, 1.000000), + (1.0, 1.0, 1.0)), + 'green': ((0., 0., 0.), + (1.0, 0.7812, 0.7812)), + 'blue': ((0., 0., 0.), + (1.0, 0.4975, 0.4975))} + +def _flag_red(x): return 0.75 * np.sin((x * 31.5 + 0.25) * np.pi) + 0.5 +def _flag_green(x): return np.sin(x * 31.5 * np.pi) +def _flag_blue(x): return 0.75 * np.sin((x * 31.5 - 0.25) * np.pi) + 0.5 +_flag_data = {'red': _flag_red, 'green': _flag_green, 'blue': _flag_blue} + +def _prism_red(x): return 0.75 * np.sin((x * 20.9 + 0.25) * np.pi) + 0.67 +def _prism_green(x): return 0.75 * np.sin((x * 20.9 - 0.25) * np.pi) + 0.33 +def _prism_blue(x): return -1.1 * np.sin((x * 20.9) * np.pi) +_prism_data = {'red': _prism_red, 'green': _prism_green, 'blue': _prism_blue} + +def _ch_helper(gamma, s, r, h, p0, p1, x): + """Helper function for generating picklable cubehelix color maps.""" + # Apply gamma factor to emphasise low or high intensity values + xg = x ** gamma + # Calculate amplitude and angle of deviation from the black to white + # diagonal in the plane of constant perceived intensity. + a = h * xg * (1 - xg) / 2 + phi = 2 * np.pi * (s / 3 + r * x) + return xg + a * (p0 * np.cos(phi) + p1 * np.sin(phi)) + +def cubehelix(gamma=1.0, s=0.5, r=-1.5, h=1.0): + """ + Return custom data dictionary of (r, g, b) conversion functions, which can + be used with :func:`register_cmap`, for the cubehelix color scheme. + + Unlike most other color schemes cubehelix was designed by D.A. Green to + be monotonically increasing in terms of perceived brightness. + Also, when printed on a black and white postscript printer, the scheme + results in a greyscale with monotonically increasing brightness. + This color scheme is named cubehelix because the (r, g, b) values produced + can be visualised as a squashed helix around the diagonal in the + (r, g, b) color cube. + + For a unit color cube (i.e. 3-D coordinates for (r, g, b) each in the + range 0 to 1) the color scheme starts at (r, g, b) = (0, 0, 0), i.e. black, + and finishes at (r, g, b) = (1, 1, 1), i.e. 
white. For some fraction *x*, + between 0 and 1, the color is the corresponding grey value at that + fraction along the black to white diagonal (x, x, x) plus a color + element. This color element is calculated in a plane of constant + perceived intensity and controlled by the following parameters. + + Parameters + ---------- + gamma : float, default: 1 + Gamma factor emphasizing either low intensity values (gamma < 1), or + high intensity values (gamma > 1). + s : float, default: 0.5 (purple) + The starting color. + r : float, default: -1.5 + The number of r, g, b rotations in color that are made from the start + to the end of the color scheme. The default of -1.5 corresponds to -> + B -> G -> R -> B. + h : float, default: 1 + The hue, i.e. how saturated the colors are. If this parameter is zero + then the color scheme is purely a greyscale. + """ + return {'red': partial(_ch_helper, gamma, s, r, h, -0.14861, 1.78277), + 'green': partial(_ch_helper, gamma, s, r, h, -0.29227, -0.90649), + 'blue': partial(_ch_helper, gamma, s, r, h, 1.97294, 0.0)} + +_cubehelix_data = cubehelix() + +_bwr_data = ((0.0, 0.0, 1.0), (1.0, 1.0, 1.0), (1.0, 0.0, 0.0)) +_brg_data = ((0.0, 0.0, 1.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0)) + +# Gnuplot palette functions +def _g0(x): return 0 +def _g1(x): return 0.5 +def _g2(x): return 1 +def _g3(x): return x +def _g4(x): return x ** 2 +def _g5(x): return x ** 3 +def _g6(x): return x ** 4 +def _g7(x): return np.sqrt(x) +def _g8(x): return np.sqrt(np.sqrt(x)) +def _g9(x): return np.sin(x * np.pi / 2) +def _g10(x): return np.cos(x * np.pi / 2) +def _g11(x): return np.abs(x - 0.5) +def _g12(x): return (2 * x - 1) ** 2 +def _g13(x): return np.sin(x * np.pi) +def _g14(x): return np.abs(np.cos(x * np.pi)) +def _g15(x): return np.sin(x * 2 * np.pi) +def _g16(x): return np.cos(x * 2 * np.pi) +def _g17(x): return np.abs(np.sin(x * 2 * np.pi)) +def _g18(x): return np.abs(np.cos(x * 2 * np.pi)) +def _g19(x): return np.abs(np.sin(x * 4 * np.pi)) +def _g20(x): return np.abs(np.cos(x * 4 * np.pi)) +def _g21(x): return 3 * x +def _g22(x): return 3 * x - 1 +def _g23(x): return 3 * x - 2 +def _g24(x): return np.abs(3 * x - 1) +def _g25(x): return np.abs(3 * x - 2) +def _g26(x): return (3 * x - 1) / 2 +def _g27(x): return (3 * x - 2) / 2 +def _g28(x): return np.abs((3 * x - 1) / 2) +def _g29(x): return np.abs((3 * x - 2) / 2) +def _g30(x): return x / 0.32 - 0.78125 +def _g31(x): return 2 * x - 0.84 +def _g32(x): + ret = np.zeros(len(x)) + m = (x < 0.25) + ret[m] = 4 * x[m] + m = (x >= 0.25) & (x < 0.92) + ret[m] = -2 * x[m] + 1.84 + m = (x >= 0.92) + ret[m] = x[m] / 0.08 - 11.5 + return ret +def _g33(x): return np.abs(2 * x - 0.5) +def _g34(x): return 2 * x +def _g35(x): return 2 * x - 0.5 +def _g36(x): return 2 * x - 1 + +gfunc = {i: globals()["_g{}".format(i)] for i in range(37)} + +_gnuplot_data = { + 'red': gfunc[7], + 'green': gfunc[5], + 'blue': gfunc[15], +} + +_gnuplot2_data = { + 'red': gfunc[30], + 'green': gfunc[31], + 'blue': gfunc[32], +} + +_ocean_data = { + 'red': gfunc[23], + 'green': gfunc[28], + 'blue': gfunc[3], +} + +_afmhot_data = { + 'red': gfunc[34], + 'green': gfunc[35], + 'blue': gfunc[36], +} + +_rainbow_data = { + 'red': gfunc[33], + 'green': gfunc[13], + 'blue': gfunc[10], +} + +_seismic_data = ( + (0.0, 0.0, 0.3), (0.0, 0.0, 1.0), + (1.0, 1.0, 1.0), (1.0, 0.0, 0.0), + (0.5, 0.0, 0.0)) + +_terrain_data = ( + (0.00, (0.2, 0.2, 0.6)), + (0.15, (0.0, 0.6, 1.0)), + (0.25, (0.0, 0.8, 0.4)), + (0.50, (1.0, 1.0, 0.6)), + (0.75, (0.5, 0.36, 0.33)), + (1.00, (1.0, 1.0, 
1.0))) + +_gray_data = {'red': ((0., 0, 0), (1., 1, 1)), + 'green': ((0., 0, 0), (1., 1, 1)), + 'blue': ((0., 0, 0), (1., 1, 1))} + +_hot_data = {'red': ((0., 0.0416, 0.0416), + (0.365079, 1.000000, 1.000000), + (1.0, 1.0, 1.0)), + 'green': ((0., 0., 0.), + (0.365079, 0.000000, 0.000000), + (0.746032, 1.000000, 1.000000), + (1.0, 1.0, 1.0)), + 'blue': ((0., 0., 0.), + (0.746032, 0.000000, 0.000000), + (1.0, 1.0, 1.0))} + +_hsv_data = {'red': ((0., 1., 1.), + (0.158730, 1.000000, 1.000000), + (0.174603, 0.968750, 0.968750), + (0.333333, 0.031250, 0.031250), + (0.349206, 0.000000, 0.000000), + (0.666667, 0.000000, 0.000000), + (0.682540, 0.031250, 0.031250), + (0.841270, 0.968750, 0.968750), + (0.857143, 1.000000, 1.000000), + (1.0, 1.0, 1.0)), + 'green': ((0., 0., 0.), + (0.158730, 0.937500, 0.937500), + (0.174603, 1.000000, 1.000000), + (0.507937, 1.000000, 1.000000), + (0.666667, 0.062500, 0.062500), + (0.682540, 0.000000, 0.000000), + (1.0, 0., 0.)), + 'blue': ((0., 0., 0.), + (0.333333, 0.000000, 0.000000), + (0.349206, 0.062500, 0.062500), + (0.507937, 1.000000, 1.000000), + (0.841270, 1.000000, 1.000000), + (0.857143, 0.937500, 0.937500), + (1.0, 0.09375, 0.09375))} + +_jet_data = {'red': ((0.00, 0, 0), + (0.35, 0, 0), + (0.66, 1, 1), + (0.89, 1, 1), + (1.00, 0.5, 0.5)), + 'green': ((0.000, 0, 0), + (0.125, 0, 0), + (0.375, 1, 1), + (0.640, 1, 1), + (0.910, 0, 0), + (1.000, 0, 0)), + 'blue': ((0.00, 0.5, 0.5), + (0.11, 1, 1), + (0.34, 1, 1), + (0.65, 0, 0), + (1.00, 0, 0))} + +_pink_data = {'red': ((0., 0.1178, 0.1178), (0.015873, 0.195857, 0.195857), + (0.031746, 0.250661, 0.250661), + (0.047619, 0.295468, 0.295468), + (0.063492, 0.334324, 0.334324), + (0.079365, 0.369112, 0.369112), + (0.095238, 0.400892, 0.400892), + (0.111111, 0.430331, 0.430331), + (0.126984, 0.457882, 0.457882), + (0.142857, 0.483867, 0.483867), + (0.158730, 0.508525, 0.508525), + (0.174603, 0.532042, 0.532042), + (0.190476, 0.554563, 0.554563), + (0.206349, 0.576204, 0.576204), + (0.222222, 0.597061, 0.597061), + (0.238095, 0.617213, 0.617213), + (0.253968, 0.636729, 0.636729), + (0.269841, 0.655663, 0.655663), + (0.285714, 0.674066, 0.674066), + (0.301587, 0.691980, 0.691980), + (0.317460, 0.709441, 0.709441), + (0.333333, 0.726483, 0.726483), + (0.349206, 0.743134, 0.743134), + (0.365079, 0.759421, 0.759421), + (0.380952, 0.766356, 0.766356), + (0.396825, 0.773229, 0.773229), + (0.412698, 0.780042, 0.780042), + (0.428571, 0.786796, 0.786796), + (0.444444, 0.793492, 0.793492), + (0.460317, 0.800132, 0.800132), + (0.476190, 0.806718, 0.806718), + (0.492063, 0.813250, 0.813250), + (0.507937, 0.819730, 0.819730), + (0.523810, 0.826160, 0.826160), + (0.539683, 0.832539, 0.832539), + (0.555556, 0.838870, 0.838870), + (0.571429, 0.845154, 0.845154), + (0.587302, 0.851392, 0.851392), + (0.603175, 0.857584, 0.857584), + (0.619048, 0.863731, 0.863731), + (0.634921, 0.869835, 0.869835), + (0.650794, 0.875897, 0.875897), + (0.666667, 0.881917, 0.881917), + (0.682540, 0.887896, 0.887896), + (0.698413, 0.893835, 0.893835), + (0.714286, 0.899735, 0.899735), + (0.730159, 0.905597, 0.905597), + (0.746032, 0.911421, 0.911421), + (0.761905, 0.917208, 0.917208), + (0.777778, 0.922958, 0.922958), + (0.793651, 0.928673, 0.928673), + (0.809524, 0.934353, 0.934353), + (0.825397, 0.939999, 0.939999), + (0.841270, 0.945611, 0.945611), + (0.857143, 0.951190, 0.951190), + (0.873016, 0.956736, 0.956736), + (0.888889, 0.962250, 0.962250), + (0.904762, 0.967733, 0.967733), + (0.920635, 0.973185, 0.973185), + (0.936508, 0.978607, 0.978607), 
+ (0.952381, 0.983999, 0.983999), + (0.968254, 0.989361, 0.989361), + (0.984127, 0.994695, 0.994695), (1.0, 1.0, 1.0)), + 'green': ((0., 0., 0.), (0.015873, 0.102869, 0.102869), + (0.031746, 0.145479, 0.145479), + (0.047619, 0.178174, 0.178174), + (0.063492, 0.205738, 0.205738), + (0.079365, 0.230022, 0.230022), + (0.095238, 0.251976, 0.251976), + (0.111111, 0.272166, 0.272166), + (0.126984, 0.290957, 0.290957), + (0.142857, 0.308607, 0.308607), + (0.158730, 0.325300, 0.325300), + (0.174603, 0.341178, 0.341178), + (0.190476, 0.356348, 0.356348), + (0.206349, 0.370899, 0.370899), + (0.222222, 0.384900, 0.384900), + (0.238095, 0.398410, 0.398410), + (0.253968, 0.411476, 0.411476), + (0.269841, 0.424139, 0.424139), + (0.285714, 0.436436, 0.436436), + (0.301587, 0.448395, 0.448395), + (0.317460, 0.460044, 0.460044), + (0.333333, 0.471405, 0.471405), + (0.349206, 0.482498, 0.482498), + (0.365079, 0.493342, 0.493342), + (0.380952, 0.517549, 0.517549), + (0.396825, 0.540674, 0.540674), + (0.412698, 0.562849, 0.562849), + (0.428571, 0.584183, 0.584183), + (0.444444, 0.604765, 0.604765), + (0.460317, 0.624669, 0.624669), + (0.476190, 0.643958, 0.643958), + (0.492063, 0.662687, 0.662687), + (0.507937, 0.680900, 0.680900), + (0.523810, 0.698638, 0.698638), + (0.539683, 0.715937, 0.715937), + (0.555556, 0.732828, 0.732828), + (0.571429, 0.749338, 0.749338), + (0.587302, 0.765493, 0.765493), + (0.603175, 0.781313, 0.781313), + (0.619048, 0.796819, 0.796819), + (0.634921, 0.812029, 0.812029), + (0.650794, 0.826960, 0.826960), + (0.666667, 0.841625, 0.841625), + (0.682540, 0.856040, 0.856040), + (0.698413, 0.870216, 0.870216), + (0.714286, 0.884164, 0.884164), + (0.730159, 0.897896, 0.897896), + (0.746032, 0.911421, 0.911421), + (0.761905, 0.917208, 0.917208), + (0.777778, 0.922958, 0.922958), + (0.793651, 0.928673, 0.928673), + (0.809524, 0.934353, 0.934353), + (0.825397, 0.939999, 0.939999), + (0.841270, 0.945611, 0.945611), + (0.857143, 0.951190, 0.951190), + (0.873016, 0.956736, 0.956736), + (0.888889, 0.962250, 0.962250), + (0.904762, 0.967733, 0.967733), + (0.920635, 0.973185, 0.973185), + (0.936508, 0.978607, 0.978607), + (0.952381, 0.983999, 0.983999), + (0.968254, 0.989361, 0.989361), + (0.984127, 0.994695, 0.994695), (1.0, 1.0, 1.0)), + 'blue': ((0., 0., 0.), (0.015873, 0.102869, 0.102869), + (0.031746, 0.145479, 0.145479), + (0.047619, 0.178174, 0.178174), + (0.063492, 0.205738, 0.205738), + (0.079365, 0.230022, 0.230022), + (0.095238, 0.251976, 0.251976), + (0.111111, 0.272166, 0.272166), + (0.126984, 0.290957, 0.290957), + (0.142857, 0.308607, 0.308607), + (0.158730, 0.325300, 0.325300), + (0.174603, 0.341178, 0.341178), + (0.190476, 0.356348, 0.356348), + (0.206349, 0.370899, 0.370899), + (0.222222, 0.384900, 0.384900), + (0.238095, 0.398410, 0.398410), + (0.253968, 0.411476, 0.411476), + (0.269841, 0.424139, 0.424139), + (0.285714, 0.436436, 0.436436), + (0.301587, 0.448395, 0.448395), + (0.317460, 0.460044, 0.460044), + (0.333333, 0.471405, 0.471405), + (0.349206, 0.482498, 0.482498), + (0.365079, 0.493342, 0.493342), + (0.380952, 0.503953, 0.503953), + (0.396825, 0.514344, 0.514344), + (0.412698, 0.524531, 0.524531), + (0.428571, 0.534522, 0.534522), + (0.444444, 0.544331, 0.544331), + (0.460317, 0.553966, 0.553966), + (0.476190, 0.563436, 0.563436), + (0.492063, 0.572750, 0.572750), + (0.507937, 0.581914, 0.581914), + (0.523810, 0.590937, 0.590937), + (0.539683, 0.599824, 0.599824), + (0.555556, 0.608581, 0.608581), + (0.571429, 0.617213, 0.617213), + (0.587302, 0.625727, 0.625727), + 
(0.603175, 0.634126, 0.634126), + (0.619048, 0.642416, 0.642416), + (0.634921, 0.650600, 0.650600), + (0.650794, 0.658682, 0.658682), + (0.666667, 0.666667, 0.666667), + (0.682540, 0.674556, 0.674556), + (0.698413, 0.682355, 0.682355), + (0.714286, 0.690066, 0.690066), + (0.730159, 0.697691, 0.697691), + (0.746032, 0.705234, 0.705234), + (0.761905, 0.727166, 0.727166), + (0.777778, 0.748455, 0.748455), + (0.793651, 0.769156, 0.769156), + (0.809524, 0.789314, 0.789314), + (0.825397, 0.808969, 0.808969), + (0.841270, 0.828159, 0.828159), + (0.857143, 0.846913, 0.846913), + (0.873016, 0.865261, 0.865261), + (0.888889, 0.883229, 0.883229), + (0.904762, 0.900837, 0.900837), + (0.920635, 0.918109, 0.918109), + (0.936508, 0.935061, 0.935061), + (0.952381, 0.951711, 0.951711), + (0.968254, 0.968075, 0.968075), + (0.984127, 0.984167, 0.984167), (1.0, 1.0, 1.0))} + +_spring_data = {'red': ((0., 1., 1.), (1.0, 1.0, 1.0)), + 'green': ((0., 0., 0.), (1.0, 1.0, 1.0)), + 'blue': ((0., 1., 1.), (1.0, 0.0, 0.0))} + + +_summer_data = {'red': ((0., 0., 0.), (1.0, 1.0, 1.0)), + 'green': ((0., 0.5, 0.5), (1.0, 1.0, 1.0)), + 'blue': ((0., 0.4, 0.4), (1.0, 0.4, 0.4))} + + +_winter_data = {'red': ((0., 0., 0.), (1.0, 0.0, 0.0)), + 'green': ((0., 0., 0.), (1.0, 1.0, 1.0)), + 'blue': ((0., 1., 1.), (1.0, 0.5, 0.5))} + +_nipy_spectral_data = { + 'red': [(0.0, 0.0, 0.0), (0.05, 0.4667, 0.4667), + (0.10, 0.5333, 0.5333), (0.15, 0.0, 0.0), + (0.20, 0.0, 0.0), (0.25, 0.0, 0.0), + (0.30, 0.0, 0.0), (0.35, 0.0, 0.0), + (0.40, 0.0, 0.0), (0.45, 0.0, 0.0), + (0.50, 0.0, 0.0), (0.55, 0.0, 0.0), + (0.60, 0.0, 0.0), (0.65, 0.7333, 0.7333), + (0.70, 0.9333, 0.9333), (0.75, 1.0, 1.0), + (0.80, 1.0, 1.0), (0.85, 1.0, 1.0), + (0.90, 0.8667, 0.8667), (0.95, 0.80, 0.80), + (1.0, 0.80, 0.80)], + 'green': [(0.0, 0.0, 0.0), (0.05, 0.0, 0.0), + (0.10, 0.0, 0.0), (0.15, 0.0, 0.0), + (0.20, 0.0, 0.0), (0.25, 0.4667, 0.4667), + (0.30, 0.6000, 0.6000), (0.35, 0.6667, 0.6667), + (0.40, 0.6667, 0.6667), (0.45, 0.6000, 0.6000), + (0.50, 0.7333, 0.7333), (0.55, 0.8667, 0.8667), + (0.60, 1.0, 1.0), (0.65, 1.0, 1.0), + (0.70, 0.9333, 0.9333), (0.75, 0.8000, 0.8000), + (0.80, 0.6000, 0.6000), (0.85, 0.0, 0.0), + (0.90, 0.0, 0.0), (0.95, 0.0, 0.0), + (1.0, 0.80, 0.80)], + 'blue': [(0.0, 0.0, 0.0), (0.05, 0.5333, 0.5333), + (0.10, 0.6000, 0.6000), (0.15, 0.6667, 0.6667), + (0.20, 0.8667, 0.8667), (0.25, 0.8667, 0.8667), + (0.30, 0.8667, 0.8667), (0.35, 0.6667, 0.6667), + (0.40, 0.5333, 0.5333), (0.45, 0.0, 0.0), + (0.5, 0.0, 0.0), (0.55, 0.0, 0.0), + (0.60, 0.0, 0.0), (0.65, 0.0, 0.0), + (0.70, 0.0, 0.0), (0.75, 0.0, 0.0), + (0.80, 0.0, 0.0), (0.85, 0.0, 0.0), + (0.90, 0.0, 0.0), (0.95, 0.0, 0.0), + (1.0, 0.80, 0.80)], +} + + +# 34 colormaps based on color specifications and designs +# developed by Cynthia Brewer (http://colorbrewer.org). +# The ColorBrewer palettes have been included under the terms +# of an Apache-stype license (for details, see the file +# LICENSE_COLORBREWER in the license directory of the matplotlib +# source distribution). 
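# Illustrative sketch (not from the vendored matplotlib source): the
# ColorBrewer palettes below, e.g. _Blues_data, are plain tuples of
# (r, g, b) floats already scaled to the 0-1 range.  A minimal example,
# assuming matplotlib is installed, of turning such a list into a usable
# colormap via the public API; `blues_sample` and `demo_cmap` are
# illustrative names only and do not appear in this diff.
from matplotlib.colors import LinearSegmentedColormap

blues_sample = [                 # illustrative subset of the 9-entry _Blues_data
    (0.9686, 0.9843, 1.0000),    # lightest blue
    (0.4196, 0.6824, 0.8392),    # mid blue
    (0.0314, 0.1882, 0.4196),    # darkest blue
]

# from_list() interpolates the listed colors evenly across [0, 1].
demo_cmap = LinearSegmentedColormap.from_list("demo_blues", blues_sample, N=256)
print(demo_cmap(0.5))            # sampling the map returns an RGBA tuple

# The qualitative palettes further down (the entries wrapped as
# {'listed': ...} in datad) are instead fed to ListedColormap, which keeps
# the colors discrete rather than interpolating between them.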
+ +# RGB values taken from Brewer's Excel sheet, divided by 255 + +_Blues_data = ( + (0.96862745098039216, 0.98431372549019602, 1.0 ), + (0.87058823529411766, 0.92156862745098034, 0.96862745098039216), + (0.77647058823529413, 0.85882352941176465, 0.93725490196078431), + (0.61960784313725492, 0.792156862745098 , 0.88235294117647056), + (0.41960784313725491, 0.68235294117647061, 0.83921568627450982), + (0.25882352941176473, 0.5725490196078431 , 0.77647058823529413), + (0.12941176470588237, 0.44313725490196076, 0.70980392156862748), + (0.03137254901960784, 0.31764705882352939, 0.61176470588235299), + (0.03137254901960784, 0.18823529411764706, 0.41960784313725491) + ) + +_BrBG_data = ( + (0.32941176470588235, 0.18823529411764706, 0.0196078431372549 ), + (0.5490196078431373 , 0.31764705882352939, 0.0392156862745098 ), + (0.74901960784313726, 0.50588235294117645, 0.17647058823529413), + (0.87450980392156863, 0.76078431372549016, 0.49019607843137253), + (0.96470588235294119, 0.90980392156862744, 0.76470588235294112), + (0.96078431372549022, 0.96078431372549022, 0.96078431372549022), + (0.7803921568627451 , 0.91764705882352937, 0.89803921568627454), + (0.50196078431372548, 0.80392156862745101, 0.75686274509803919), + (0.20784313725490197, 0.59215686274509804, 0.5607843137254902 ), + (0.00392156862745098, 0.4 , 0.36862745098039218), + (0.0 , 0.23529411764705882, 0.18823529411764706) + ) + +_BuGn_data = ( + (0.96862745098039216, 0.9882352941176471 , 0.99215686274509807), + (0.89803921568627454, 0.96078431372549022, 0.97647058823529409), + (0.8 , 0.92549019607843142, 0.90196078431372551), + (0.6 , 0.84705882352941175, 0.78823529411764703), + (0.4 , 0.76078431372549016, 0.64313725490196083), + (0.25490196078431371, 0.68235294117647061, 0.46274509803921571), + (0.13725490196078433, 0.54509803921568623, 0.27058823529411763), + (0.0 , 0.42745098039215684, 0.17254901960784313), + (0.0 , 0.26666666666666666, 0.10588235294117647) + ) + +_BuPu_data = ( + (0.96862745098039216, 0.9882352941176471 , 0.99215686274509807), + (0.8784313725490196 , 0.92549019607843142, 0.95686274509803926), + (0.74901960784313726, 0.82745098039215681, 0.90196078431372551), + (0.61960784313725492, 0.73725490196078436, 0.85490196078431369), + (0.5490196078431373 , 0.58823529411764708, 0.77647058823529413), + (0.5490196078431373 , 0.41960784313725491, 0.69411764705882351), + (0.53333333333333333, 0.25490196078431371, 0.61568627450980395), + (0.50588235294117645, 0.05882352941176471, 0.48627450980392156), + (0.30196078431372547, 0.0 , 0.29411764705882354) + ) + +_GnBu_data = ( + (0.96862745098039216, 0.9882352941176471 , 0.94117647058823528), + (0.8784313725490196 , 0.95294117647058818, 0.85882352941176465), + (0.8 , 0.92156862745098034, 0.77254901960784317), + (0.6588235294117647 , 0.8666666666666667 , 0.70980392156862748), + (0.4823529411764706 , 0.8 , 0.7686274509803922 ), + (0.30588235294117649, 0.70196078431372544, 0.82745098039215681), + (0.16862745098039217, 0.5490196078431373 , 0.74509803921568629), + (0.03137254901960784, 0.40784313725490196, 0.67450980392156867), + (0.03137254901960784, 0.25098039215686274, 0.50588235294117645) + ) + +_Greens_data = ( + (0.96862745098039216, 0.9882352941176471 , 0.96078431372549022), + (0.89803921568627454, 0.96078431372549022, 0.8784313725490196 ), + (0.7803921568627451 , 0.9137254901960784 , 0.75294117647058822), + (0.63137254901960782, 0.85098039215686272, 0.60784313725490191), + (0.45490196078431372, 0.7686274509803922 , 0.46274509803921571), + (0.25490196078431371, 0.6705882352941176 , 
0.36470588235294116), + (0.13725490196078433, 0.54509803921568623, 0.27058823529411763), + (0.0 , 0.42745098039215684, 0.17254901960784313), + (0.0 , 0.26666666666666666, 0.10588235294117647) + ) + +_Greys_data = ( + (1.0 , 1.0 , 1.0 ), + (0.94117647058823528, 0.94117647058823528, 0.94117647058823528), + (0.85098039215686272, 0.85098039215686272, 0.85098039215686272), + (0.74117647058823533, 0.74117647058823533, 0.74117647058823533), + (0.58823529411764708, 0.58823529411764708, 0.58823529411764708), + (0.45098039215686275, 0.45098039215686275, 0.45098039215686275), + (0.32156862745098042, 0.32156862745098042, 0.32156862745098042), + (0.14509803921568629, 0.14509803921568629, 0.14509803921568629), + (0.0 , 0.0 , 0.0 ) + ) + +_Oranges_data = ( + (1.0 , 0.96078431372549022, 0.92156862745098034), + (0.99607843137254903, 0.90196078431372551, 0.80784313725490198), + (0.99215686274509807, 0.81568627450980391, 0.63529411764705879), + (0.99215686274509807, 0.68235294117647061, 0.41960784313725491), + (0.99215686274509807, 0.55294117647058827, 0.23529411764705882), + (0.94509803921568625, 0.41176470588235292, 0.07450980392156863), + (0.85098039215686272, 0.28235294117647058, 0.00392156862745098), + (0.65098039215686276, 0.21176470588235294, 0.01176470588235294), + (0.49803921568627452, 0.15294117647058825, 0.01568627450980392) + ) + +_OrRd_data = ( + (1.0 , 0.96862745098039216, 0.92549019607843142), + (0.99607843137254903, 0.90980392156862744, 0.78431372549019607), + (0.99215686274509807, 0.83137254901960789, 0.61960784313725492), + (0.99215686274509807, 0.73333333333333328, 0.51764705882352946), + (0.9882352941176471 , 0.55294117647058827, 0.34901960784313724), + (0.93725490196078431, 0.396078431372549 , 0.28235294117647058), + (0.84313725490196079, 0.18823529411764706, 0.12156862745098039), + (0.70196078431372544, 0.0 , 0.0 ), + (0.49803921568627452, 0.0 , 0.0 ) + ) + +_PiYG_data = ( + (0.55686274509803924, 0.00392156862745098, 0.32156862745098042), + (0.77254901960784317, 0.10588235294117647, 0.49019607843137253), + (0.87058823529411766, 0.46666666666666667, 0.68235294117647061), + (0.94509803921568625, 0.71372549019607845, 0.85490196078431369), + (0.99215686274509807, 0.8784313725490196 , 0.93725490196078431), + (0.96862745098039216, 0.96862745098039216, 0.96862745098039216), + (0.90196078431372551, 0.96078431372549022, 0.81568627450980391), + (0.72156862745098038, 0.88235294117647056, 0.52549019607843139), + (0.49803921568627452, 0.73725490196078436, 0.25490196078431371), + (0.30196078431372547, 0.5725490196078431 , 0.12941176470588237), + (0.15294117647058825, 0.39215686274509803, 0.09803921568627451) + ) + +_PRGn_data = ( + (0.25098039215686274, 0.0 , 0.29411764705882354), + (0.46274509803921571, 0.16470588235294117, 0.51372549019607838), + (0.6 , 0.4392156862745098 , 0.6705882352941176 ), + (0.76078431372549016, 0.6470588235294118 , 0.81176470588235294), + (0.90588235294117647, 0.83137254901960789, 0.90980392156862744), + (0.96862745098039216, 0.96862745098039216, 0.96862745098039216), + (0.85098039215686272, 0.94117647058823528, 0.82745098039215681), + (0.65098039215686276, 0.85882352941176465, 0.62745098039215685), + (0.35294117647058826, 0.68235294117647061, 0.38039215686274508), + (0.10588235294117647, 0.47058823529411764, 0.21568627450980393), + (0.0 , 0.26666666666666666, 0.10588235294117647) + ) + +_PuBu_data = ( + (1.0 , 0.96862745098039216, 0.98431372549019602), + (0.92549019607843142, 0.90588235294117647, 0.94901960784313721), + (0.81568627450980391, 0.81960784313725488, 
0.90196078431372551), + (0.65098039215686276, 0.74117647058823533, 0.85882352941176465), + (0.45490196078431372, 0.66274509803921566, 0.81176470588235294), + (0.21176470588235294, 0.56470588235294117, 0.75294117647058822), + (0.0196078431372549 , 0.4392156862745098 , 0.69019607843137254), + (0.01568627450980392, 0.35294117647058826, 0.55294117647058827), + (0.00784313725490196, 0.2196078431372549 , 0.34509803921568627) + ) + +_PuBuGn_data = ( + (1.0 , 0.96862745098039216, 0.98431372549019602), + (0.92549019607843142, 0.88627450980392153, 0.94117647058823528), + (0.81568627450980391, 0.81960784313725488, 0.90196078431372551), + (0.65098039215686276, 0.74117647058823533, 0.85882352941176465), + (0.40392156862745099, 0.66274509803921566, 0.81176470588235294), + (0.21176470588235294, 0.56470588235294117, 0.75294117647058822), + (0.00784313725490196, 0.50588235294117645, 0.54117647058823526), + (0.00392156862745098, 0.42352941176470588, 0.34901960784313724), + (0.00392156862745098, 0.27450980392156865, 0.21176470588235294) + ) + +_PuOr_data = ( + (0.49803921568627452, 0.23137254901960785, 0.03137254901960784), + (0.70196078431372544, 0.34509803921568627, 0.02352941176470588), + (0.8784313725490196 , 0.50980392156862742, 0.07843137254901961), + (0.99215686274509807, 0.72156862745098038, 0.38823529411764707), + (0.99607843137254903, 0.8784313725490196 , 0.71372549019607845), + (0.96862745098039216, 0.96862745098039216, 0.96862745098039216), + (0.84705882352941175, 0.85490196078431369, 0.92156862745098034), + (0.69803921568627447, 0.6705882352941176 , 0.82352941176470584), + (0.50196078431372548, 0.45098039215686275, 0.67450980392156867), + (0.32941176470588235, 0.15294117647058825, 0.53333333333333333), + (0.17647058823529413, 0.0 , 0.29411764705882354) + ) + +_PuRd_data = ( + (0.96862745098039216, 0.95686274509803926, 0.97647058823529409), + (0.90588235294117647, 0.88235294117647056, 0.93725490196078431), + (0.83137254901960789, 0.72549019607843135, 0.85490196078431369), + (0.78823529411764703, 0.58039215686274515, 0.7803921568627451 ), + (0.87450980392156863, 0.396078431372549 , 0.69019607843137254), + (0.90588235294117647, 0.16078431372549021, 0.54117647058823526), + (0.80784313725490198, 0.07058823529411765, 0.33725490196078434), + (0.59607843137254901, 0.0 , 0.2627450980392157 ), + (0.40392156862745099, 0.0 , 0.12156862745098039) + ) + +_Purples_data = ( + (0.9882352941176471 , 0.98431372549019602, 0.99215686274509807), + (0.93725490196078431, 0.92941176470588238, 0.96078431372549022), + (0.85490196078431369, 0.85490196078431369, 0.92156862745098034), + (0.73725490196078436, 0.74117647058823533, 0.86274509803921573), + (0.61960784313725492, 0.60392156862745094, 0.78431372549019607), + (0.50196078431372548, 0.49019607843137253, 0.72941176470588232), + (0.41568627450980394, 0.31764705882352939, 0.63921568627450975), + (0.32941176470588235, 0.15294117647058825, 0.5607843137254902 ), + (0.24705882352941178, 0.0 , 0.49019607843137253) + ) + +_RdBu_data = ( + (0.40392156862745099, 0.0 , 0.12156862745098039), + (0.69803921568627447, 0.09411764705882353, 0.16862745098039217), + (0.83921568627450982, 0.37647058823529411, 0.30196078431372547), + (0.95686274509803926, 0.6470588235294118 , 0.50980392156862742), + (0.99215686274509807, 0.85882352941176465, 0.7803921568627451 ), + (0.96862745098039216, 0.96862745098039216, 0.96862745098039216), + (0.81960784313725488, 0.89803921568627454, 0.94117647058823528), + (0.5725490196078431 , 0.77254901960784317, 0.87058823529411766), + (0.2627450980392157 , 
0.57647058823529407, 0.76470588235294112), + (0.12941176470588237, 0.4 , 0.67450980392156867), + (0.0196078431372549 , 0.18823529411764706, 0.38039215686274508) + ) + +_RdGy_data = ( + (0.40392156862745099, 0.0 , 0.12156862745098039), + (0.69803921568627447, 0.09411764705882353, 0.16862745098039217), + (0.83921568627450982, 0.37647058823529411, 0.30196078431372547), + (0.95686274509803926, 0.6470588235294118 , 0.50980392156862742), + (0.99215686274509807, 0.85882352941176465, 0.7803921568627451 ), + (1.0 , 1.0 , 1.0 ), + (0.8784313725490196 , 0.8784313725490196 , 0.8784313725490196 ), + (0.72941176470588232, 0.72941176470588232, 0.72941176470588232), + (0.52941176470588236, 0.52941176470588236, 0.52941176470588236), + (0.30196078431372547, 0.30196078431372547, 0.30196078431372547), + (0.10196078431372549, 0.10196078431372549, 0.10196078431372549) + ) + +_RdPu_data = ( + (1.0 , 0.96862745098039216, 0.95294117647058818), + (0.99215686274509807, 0.8784313725490196 , 0.86666666666666667), + (0.9882352941176471 , 0.77254901960784317, 0.75294117647058822), + (0.98039215686274506, 0.62352941176470589, 0.70980392156862748), + (0.96862745098039216, 0.40784313725490196, 0.63137254901960782), + (0.86666666666666667, 0.20392156862745098, 0.59215686274509804), + (0.68235294117647061, 0.00392156862745098, 0.49411764705882355), + (0.47843137254901963, 0.00392156862745098, 0.46666666666666667), + (0.28627450980392155, 0.0 , 0.41568627450980394) + ) + +_RdYlBu_data = ( + (0.6470588235294118 , 0.0 , 0.14901960784313725), + (0.84313725490196079, 0.18823529411764706 , 0.15294117647058825), + (0.95686274509803926, 0.42745098039215684 , 0.2627450980392157 ), + (0.99215686274509807, 0.68235294117647061 , 0.38039215686274508), + (0.99607843137254903, 0.8784313725490196 , 0.56470588235294117), + (1.0 , 1.0 , 0.74901960784313726), + (0.8784313725490196 , 0.95294117647058818 , 0.97254901960784312), + (0.6705882352941176 , 0.85098039215686272 , 0.9137254901960784 ), + (0.45490196078431372, 0.67843137254901964 , 0.81960784313725488), + (0.27058823529411763, 0.45882352941176469 , 0.70588235294117652), + (0.19215686274509805, 0.21176470588235294 , 0.58431372549019611) + ) + +_RdYlGn_data = ( + (0.6470588235294118 , 0.0 , 0.14901960784313725), + (0.84313725490196079, 0.18823529411764706 , 0.15294117647058825), + (0.95686274509803926, 0.42745098039215684 , 0.2627450980392157 ), + (0.99215686274509807, 0.68235294117647061 , 0.38039215686274508), + (0.99607843137254903, 0.8784313725490196 , 0.54509803921568623), + (1.0 , 1.0 , 0.74901960784313726), + (0.85098039215686272, 0.93725490196078431 , 0.54509803921568623), + (0.65098039215686276, 0.85098039215686272 , 0.41568627450980394), + (0.4 , 0.74117647058823533 , 0.38823529411764707), + (0.10196078431372549, 0.59607843137254901 , 0.31372549019607843), + (0.0 , 0.40784313725490196 , 0.21568627450980393) + ) + +_Reds_data = ( + (1.0 , 0.96078431372549022 , 0.94117647058823528), + (0.99607843137254903, 0.8784313725490196 , 0.82352941176470584), + (0.9882352941176471 , 0.73333333333333328 , 0.63137254901960782), + (0.9882352941176471 , 0.5725490196078431 , 0.44705882352941179), + (0.98431372549019602, 0.41568627450980394 , 0.29019607843137257), + (0.93725490196078431, 0.23137254901960785 , 0.17254901960784313), + (0.79607843137254897, 0.094117647058823528, 0.11372549019607843), + (0.6470588235294118 , 0.058823529411764705, 0.08235294117647058), + (0.40392156862745099, 0.0 , 0.05098039215686274) + ) + +_Spectral_data = ( + (0.61960784313725492, 0.003921568627450980, 
0.25882352941176473), + (0.83529411764705885, 0.24313725490196078 , 0.30980392156862746), + (0.95686274509803926, 0.42745098039215684 , 0.2627450980392157 ), + (0.99215686274509807, 0.68235294117647061 , 0.38039215686274508), + (0.99607843137254903, 0.8784313725490196 , 0.54509803921568623), + (1.0 , 1.0 , 0.74901960784313726), + (0.90196078431372551, 0.96078431372549022 , 0.59607843137254901), + (0.6705882352941176 , 0.8666666666666667 , 0.64313725490196083), + (0.4 , 0.76078431372549016 , 0.6470588235294118 ), + (0.19607843137254902, 0.53333333333333333 , 0.74117647058823533), + (0.36862745098039218, 0.30980392156862746 , 0.63529411764705879) + ) + +_YlGn_data = ( + (1.0 , 1.0 , 0.89803921568627454), + (0.96862745098039216, 0.9882352941176471 , 0.72549019607843135), + (0.85098039215686272, 0.94117647058823528 , 0.63921568627450975), + (0.67843137254901964, 0.8666666666666667 , 0.55686274509803924), + (0.47058823529411764, 0.77647058823529413 , 0.47450980392156861), + (0.25490196078431371, 0.6705882352941176 , 0.36470588235294116), + (0.13725490196078433, 0.51764705882352946 , 0.2627450980392157 ), + (0.0 , 0.40784313725490196 , 0.21568627450980393), + (0.0 , 0.27058823529411763 , 0.16078431372549021) + ) + +_YlGnBu_data = ( + (1.0 , 1.0 , 0.85098039215686272), + (0.92941176470588238, 0.97254901960784312 , 0.69411764705882351), + (0.7803921568627451 , 0.9137254901960784 , 0.70588235294117652), + (0.49803921568627452, 0.80392156862745101 , 0.73333333333333328), + (0.25490196078431371, 0.71372549019607845 , 0.7686274509803922 ), + (0.11372549019607843, 0.56862745098039214 , 0.75294117647058822), + (0.13333333333333333, 0.36862745098039218 , 0.6588235294117647 ), + (0.14509803921568629, 0.20392156862745098 , 0.58039215686274515), + (0.03137254901960784, 0.11372549019607843 , 0.34509803921568627) + ) + +_YlOrBr_data = ( + (1.0 , 1.0 , 0.89803921568627454), + (1.0 , 0.96862745098039216 , 0.73725490196078436), + (0.99607843137254903, 0.8901960784313725 , 0.56862745098039214), + (0.99607843137254903, 0.7686274509803922 , 0.30980392156862746), + (0.99607843137254903, 0.6 , 0.16078431372549021), + (0.92549019607843142, 0.4392156862745098 , 0.07843137254901961), + (0.8 , 0.29803921568627451 , 0.00784313725490196), + (0.6 , 0.20392156862745098 , 0.01568627450980392), + (0.4 , 0.14509803921568629 , 0.02352941176470588) + ) + +_YlOrRd_data = ( + (1.0 , 1.0 , 0.8 ), + (1.0 , 0.92941176470588238 , 0.62745098039215685), + (0.99607843137254903, 0.85098039215686272 , 0.46274509803921571), + (0.99607843137254903, 0.69803921568627447 , 0.29803921568627451), + (0.99215686274509807, 0.55294117647058827 , 0.23529411764705882), + (0.9882352941176471 , 0.30588235294117649 , 0.16470588235294117), + (0.8901960784313725 , 0.10196078431372549 , 0.10980392156862745), + (0.74117647058823533, 0.0 , 0.14901960784313725), + (0.50196078431372548, 0.0 , 0.14901960784313725) + ) + + +# ColorBrewer's qualitative maps, implemented using ListedColormap +# for use with mpl.colors.NoNorm + +_Accent_data = ( + (0.49803921568627452, 0.78823529411764703, 0.49803921568627452), + (0.74509803921568629, 0.68235294117647061, 0.83137254901960789), + (0.99215686274509807, 0.75294117647058822, 0.52549019607843139), + (1.0, 1.0, 0.6 ), + (0.2196078431372549, 0.42352941176470588, 0.69019607843137254), + (0.94117647058823528, 0.00784313725490196, 0.49803921568627452), + (0.74901960784313726, 0.35686274509803922, 0.09019607843137254), + (0.4, 0.4, 0.4 ), + ) + +_Dark2_data = ( + (0.10588235294117647, 0.61960784313725492, 0.46666666666666667), + 
(0.85098039215686272, 0.37254901960784315, 0.00784313725490196), + (0.45882352941176469, 0.4392156862745098, 0.70196078431372544), + (0.90588235294117647, 0.16078431372549021, 0.54117647058823526), + (0.4, 0.65098039215686276, 0.11764705882352941), + (0.90196078431372551, 0.6705882352941176, 0.00784313725490196), + (0.65098039215686276, 0.46274509803921571, 0.11372549019607843), + (0.4, 0.4, 0.4 ), + ) + +_Paired_data = ( + (0.65098039215686276, 0.80784313725490198, 0.8901960784313725 ), + (0.12156862745098039, 0.47058823529411764, 0.70588235294117652), + (0.69803921568627447, 0.87450980392156863, 0.54117647058823526), + (0.2, 0.62745098039215685, 0.17254901960784313), + (0.98431372549019602, 0.60392156862745094, 0.6 ), + (0.8901960784313725, 0.10196078431372549, 0.10980392156862745), + (0.99215686274509807, 0.74901960784313726, 0.43529411764705883), + (1.0, 0.49803921568627452, 0.0 ), + (0.792156862745098, 0.69803921568627447, 0.83921568627450982), + (0.41568627450980394, 0.23921568627450981, 0.60392156862745094), + (1.0, 1.0, 0.6 ), + (0.69411764705882351, 0.34901960784313724, 0.15686274509803921), + ) + +_Pastel1_data = ( + (0.98431372549019602, 0.70588235294117652, 0.68235294117647061), + (0.70196078431372544, 0.80392156862745101, 0.8901960784313725 ), + (0.8, 0.92156862745098034, 0.77254901960784317), + (0.87058823529411766, 0.79607843137254897, 0.89411764705882357), + (0.99607843137254903, 0.85098039215686272, 0.65098039215686276), + (1.0, 1.0, 0.8 ), + (0.89803921568627454, 0.84705882352941175, 0.74117647058823533), + (0.99215686274509807, 0.85490196078431369, 0.92549019607843142), + (0.94901960784313721, 0.94901960784313721, 0.94901960784313721), + ) + +_Pastel2_data = ( + (0.70196078431372544, 0.88627450980392153, 0.80392156862745101), + (0.99215686274509807, 0.80392156862745101, 0.67450980392156867), + (0.79607843137254897, 0.83529411764705885, 0.90980392156862744), + (0.95686274509803926, 0.792156862745098, 0.89411764705882357), + (0.90196078431372551, 0.96078431372549022, 0.78823529411764703), + (1.0, 0.94901960784313721, 0.68235294117647061), + (0.94509803921568625, 0.88627450980392153, 0.8 ), + (0.8, 0.8, 0.8 ), + ) + +_Set1_data = ( + (0.89411764705882357, 0.10196078431372549, 0.10980392156862745), + (0.21568627450980393, 0.49411764705882355, 0.72156862745098038), + (0.30196078431372547, 0.68627450980392157, 0.29019607843137257), + (0.59607843137254901, 0.30588235294117649, 0.63921568627450975), + (1.0, 0.49803921568627452, 0.0 ), + (1.0, 1.0, 0.2 ), + (0.65098039215686276, 0.33725490196078434, 0.15686274509803921), + (0.96862745098039216, 0.50588235294117645, 0.74901960784313726), + (0.6, 0.6, 0.6), + ) + +_Set2_data = ( + (0.4, 0.76078431372549016, 0.6470588235294118 ), + (0.9882352941176471, 0.55294117647058827, 0.3843137254901961 ), + (0.55294117647058827, 0.62745098039215685, 0.79607843137254897), + (0.90588235294117647, 0.54117647058823526, 0.76470588235294112), + (0.65098039215686276, 0.84705882352941175, 0.32941176470588235), + (1.0, 0.85098039215686272, 0.18431372549019609), + (0.89803921568627454, 0.7686274509803922, 0.58039215686274515), + (0.70196078431372544, 0.70196078431372544, 0.70196078431372544), + ) + +_Set3_data = ( + (0.55294117647058827, 0.82745098039215681, 0.7803921568627451 ), + (1.0, 1.0, 0.70196078431372544), + (0.74509803921568629, 0.72941176470588232, 0.85490196078431369), + (0.98431372549019602, 0.50196078431372548, 0.44705882352941179), + (0.50196078431372548, 0.69411764705882351, 0.82745098039215681), + (0.99215686274509807, 
0.70588235294117652, 0.3843137254901961 ), + (0.70196078431372544, 0.87058823529411766, 0.41176470588235292), + (0.9882352941176471, 0.80392156862745101, 0.89803921568627454), + (0.85098039215686272, 0.85098039215686272, 0.85098039215686272), + (0.73725490196078436, 0.50196078431372548, 0.74117647058823533), + (0.8, 0.92156862745098034, 0.77254901960784317), + (1.0, 0.92941176470588238, 0.43529411764705883), + ) + + +# The next 7 palettes are from the Yorick scientific visualization package, +# an evolution of the GIST package, both by David H. Munro. +# They are released under a BSD-like license (see LICENSE_YORICK in +# the license directory of the matplotlib source distribution). +# +# Most palette functions have been reduced to simple function descriptions +# by Reinier Heeres, since the rgb components were mostly straight lines. +# gist_earth_data and gist_ncar_data were simplified by a script and some +# manual effort. + +_gist_earth_data = \ +{'red': ( +(0.0, 0.0, 0.0000), +(0.2824, 0.1882, 0.1882), +(0.4588, 0.2714, 0.2714), +(0.5490, 0.4719, 0.4719), +(0.6980, 0.7176, 0.7176), +(0.7882, 0.7553, 0.7553), +(1.0000, 0.9922, 0.9922), +), 'green': ( +(0.0, 0.0, 0.0000), +(0.0275, 0.0000, 0.0000), +(0.1098, 0.1893, 0.1893), +(0.1647, 0.3035, 0.3035), +(0.2078, 0.3841, 0.3841), +(0.2824, 0.5020, 0.5020), +(0.5216, 0.6397, 0.6397), +(0.6980, 0.7171, 0.7171), +(0.7882, 0.6392, 0.6392), +(0.7922, 0.6413, 0.6413), +(0.8000, 0.6447, 0.6447), +(0.8078, 0.6481, 0.6481), +(0.8157, 0.6549, 0.6549), +(0.8667, 0.6991, 0.6991), +(0.8745, 0.7103, 0.7103), +(0.8824, 0.7216, 0.7216), +(0.8902, 0.7323, 0.7323), +(0.8980, 0.7430, 0.7430), +(0.9412, 0.8275, 0.8275), +(0.9569, 0.8635, 0.8635), +(0.9647, 0.8816, 0.8816), +(0.9961, 0.9733, 0.9733), +(1.0000, 0.9843, 0.9843), +), 'blue': ( +(0.0, 0.0, 0.0000), +(0.0039, 0.1684, 0.1684), +(0.0078, 0.2212, 0.2212), +(0.0275, 0.4329, 0.4329), +(0.0314, 0.4549, 0.4549), +(0.2824, 0.5004, 0.5004), +(0.4667, 0.2748, 0.2748), +(0.5451, 0.3205, 0.3205), +(0.7843, 0.3961, 0.3961), +(0.8941, 0.6651, 0.6651), +(1.0000, 0.9843, 0.9843), +)} + +_gist_gray_data = { + 'red': gfunc[3], + 'green': gfunc[3], + 'blue': gfunc[3], +} + +def _gist_heat_red(x): return 1.5 * x +def _gist_heat_green(x): return 2 * x - 1 +def _gist_heat_blue(x): return 4 * x - 3 +_gist_heat_data = { + 'red': _gist_heat_red, 'green': _gist_heat_green, 'blue': _gist_heat_blue} + +_gist_ncar_data = \ +{'red': ( +(0.0, 0.0, 0.0000), +(0.3098, 0.0000, 0.0000), +(0.3725, 0.3993, 0.3993), +(0.4235, 0.5003, 0.5003), +(0.5333, 1.0000, 1.0000), +(0.7922, 1.0000, 1.0000), +(0.8471, 0.6218, 0.6218), +(0.8980, 0.9235, 0.9235), +(1.0000, 0.9961, 0.9961), +), 'green': ( +(0.0, 0.0, 0.0000), +(0.0510, 0.3722, 0.3722), +(0.1059, 0.0000, 0.0000), +(0.1569, 0.7202, 0.7202), +(0.1608, 0.7537, 0.7537), +(0.1647, 0.7752, 0.7752), +(0.2157, 1.0000, 1.0000), +(0.2588, 0.9804, 0.9804), +(0.2706, 0.9804, 0.9804), +(0.3176, 1.0000, 1.0000), +(0.3686, 0.8081, 0.8081), +(0.4275, 1.0000, 1.0000), +(0.5216, 1.0000, 1.0000), +(0.6314, 0.7292, 0.7292), +(0.6863, 0.2796, 0.2796), +(0.7451, 0.0000, 0.0000), +(0.7922, 0.0000, 0.0000), +(0.8431, 0.1753, 0.1753), +(0.8980, 0.5000, 0.5000), +(1.0000, 0.9725, 0.9725), +), 'blue': ( +(0.0, 0.5020, 0.5020), +(0.0510, 0.0222, 0.0222), +(0.1098, 1.0000, 1.0000), +(0.2039, 1.0000, 1.0000), +(0.2627, 0.6145, 0.6145), +(0.3216, 0.0000, 0.0000), +(0.4157, 0.0000, 0.0000), +(0.4745, 0.2342, 0.2342), +(0.5333, 0.0000, 0.0000), +(0.5804, 0.0000, 0.0000), +(0.6314, 0.0549, 0.0549), +(0.6902, 0.0000, 
0.0000), +(0.7373, 0.0000, 0.0000), +(0.7922, 0.9738, 0.9738), +(0.8000, 1.0000, 1.0000), +(0.8431, 1.0000, 1.0000), +(0.8980, 0.9341, 0.9341), +(1.0000, 0.9961, 0.9961), +)} + +_gist_rainbow_data = ( + (0.000, (1.00, 0.00, 0.16)), + (0.030, (1.00, 0.00, 0.00)), + (0.215, (1.00, 1.00, 0.00)), + (0.400, (0.00, 1.00, 0.00)), + (0.586, (0.00, 1.00, 1.00)), + (0.770, (0.00, 0.00, 1.00)), + (0.954, (1.00, 0.00, 1.00)), + (1.000, (1.00, 0.00, 0.75)) +) + +_gist_stern_data = { + 'red': ( + (0.000, 0.000, 0.000), (0.0547, 1.000, 1.000), + (0.250, 0.027, 0.250), # (0.2500, 0.250, 0.250), + (1.000, 1.000, 1.000)), + 'green': ((0, 0, 0), (1, 1, 1)), + 'blue': ( + (0.000, 0.000, 0.000), (0.500, 1.000, 1.000), + (0.735, 0.000, 0.000), (1.000, 1.000, 1.000)) +} + +def _gist_yarg(x): return 1 - x +_gist_yarg_data = {'red': _gist_yarg, 'green': _gist_yarg, 'blue': _gist_yarg} + +# This bipolar color map was generated from CoolWarmFloat33.csv of +# "Diverging Color Maps for Scientific Visualization" by Kenneth Moreland. +# +_coolwarm_data = { + 'red': [ + (0.0, 0.2298057, 0.2298057), + (0.03125, 0.26623388, 0.26623388), + (0.0625, 0.30386891, 0.30386891), + (0.09375, 0.342804478, 0.342804478), + (0.125, 0.38301334, 0.38301334), + (0.15625, 0.424369608, 0.424369608), + (0.1875, 0.46666708, 0.46666708), + (0.21875, 0.509635204, 0.509635204), + (0.25, 0.552953156, 0.552953156), + (0.28125, 0.596262162, 0.596262162), + (0.3125, 0.639176211, 0.639176211), + (0.34375, 0.681291281, 0.681291281), + (0.375, 0.722193294, 0.722193294), + (0.40625, 0.761464949, 0.761464949), + (0.4375, 0.798691636, 0.798691636), + (0.46875, 0.833466556, 0.833466556), + (0.5, 0.865395197, 0.865395197), + (0.53125, 0.897787179, 0.897787179), + (0.5625, 0.924127593, 0.924127593), + (0.59375, 0.944468518, 0.944468518), + (0.625, 0.958852946, 0.958852946), + (0.65625, 0.96732803, 0.96732803), + (0.6875, 0.969954137, 0.969954137), + (0.71875, 0.966811177, 0.966811177), + (0.75, 0.958003065, 0.958003065), + (0.78125, 0.943660866, 0.943660866), + (0.8125, 0.923944917, 0.923944917), + (0.84375, 0.89904617, 0.89904617), + (0.875, 0.869186849, 0.869186849), + (0.90625, 0.834620542, 0.834620542), + (0.9375, 0.795631745, 0.795631745), + (0.96875, 0.752534934, 0.752534934), + (1.0, 0.705673158, 0.705673158)], + 'green': [ + (0.0, 0.298717966, 0.298717966), + (0.03125, 0.353094838, 0.353094838), + (0.0625, 0.406535296, 0.406535296), + (0.09375, 0.458757618, 0.458757618), + (0.125, 0.50941904, 0.50941904), + (0.15625, 0.558148092, 0.558148092), + (0.1875, 0.604562568, 0.604562568), + (0.21875, 0.648280772, 0.648280772), + (0.25, 0.688929332, 0.688929332), + (0.28125, 0.726149107, 0.726149107), + (0.3125, 0.759599947, 0.759599947), + (0.34375, 0.788964712, 0.788964712), + (0.375, 0.813952739, 0.813952739), + (0.40625, 0.834302879, 0.834302879), + (0.4375, 0.849786142, 0.849786142), + (0.46875, 0.860207984, 0.860207984), + (0.5, 0.86541021, 0.86541021), + (0.53125, 0.848937047, 0.848937047), + (0.5625, 0.827384882, 0.827384882), + (0.59375, 0.800927443, 0.800927443), + (0.625, 0.769767752, 0.769767752), + (0.65625, 0.734132809, 0.734132809), + (0.6875, 0.694266682, 0.694266682), + (0.71875, 0.650421156, 0.650421156), + (0.75, 0.602842431, 0.602842431), + (0.78125, 0.551750968, 0.551750968), + (0.8125, 0.49730856, 0.49730856), + (0.84375, 0.439559467, 0.439559467), + (0.875, 0.378313092, 0.378313092), + (0.90625, 0.312874446, 0.312874446), + (0.9375, 0.24128379, 0.24128379), + (0.96875, 0.157246067, 0.157246067), + (1.0, 0.01555616, 0.01555616)], + 
'blue': [ + (0.0, 0.753683153, 0.753683153), + (0.03125, 0.801466763, 0.801466763), + (0.0625, 0.84495867, 0.84495867), + (0.09375, 0.883725899, 0.883725899), + (0.125, 0.917387822, 0.917387822), + (0.15625, 0.945619588, 0.945619588), + (0.1875, 0.968154911, 0.968154911), + (0.21875, 0.98478814, 0.98478814), + (0.25, 0.995375608, 0.995375608), + (0.28125, 0.999836203, 0.999836203), + (0.3125, 0.998151185, 0.998151185), + (0.34375, 0.990363227, 0.990363227), + (0.375, 0.976574709, 0.976574709), + (0.40625, 0.956945269, 0.956945269), + (0.4375, 0.931688648, 0.931688648), + (0.46875, 0.901068838, 0.901068838), + (0.5, 0.865395561, 0.865395561), + (0.53125, 0.820880546, 0.820880546), + (0.5625, 0.774508472, 0.774508472), + (0.59375, 0.726736146, 0.726736146), + (0.625, 0.678007945, 0.678007945), + (0.65625, 0.628751763, 0.628751763), + (0.6875, 0.579375448, 0.579375448), + (0.71875, 0.530263762, 0.530263762), + (0.75, 0.481775914, 0.481775914), + (0.78125, 0.434243684, 0.434243684), + (0.8125, 0.387970225, 0.387970225), + (0.84375, 0.343229596, 0.343229596), + (0.875, 0.300267182, 0.300267182), + (0.90625, 0.259301199, 0.259301199), + (0.9375, 0.220525627, 0.220525627), + (0.96875, 0.184115123, 0.184115123), + (1.0, 0.150232812, 0.150232812)] + } + +# Implementation of Carey Rappaport's CMRmap. +# See `A Color Map for Effective Black-and-White Rendering of Color-Scale +# Images' by Carey Rappaport +# http://www.mathworks.com/matlabcentral/fileexchange/2662-cmrmap-m +_CMRmap_data = {'red': ((0.000, 0.00, 0.00), + (0.125, 0.15, 0.15), + (0.250, 0.30, 0.30), + (0.375, 0.60, 0.60), + (0.500, 1.00, 1.00), + (0.625, 0.90, 0.90), + (0.750, 0.90, 0.90), + (0.875, 0.90, 0.90), + (1.000, 1.00, 1.00)), + 'green': ((0.000, 0.00, 0.00), + (0.125, 0.15, 0.15), + (0.250, 0.15, 0.15), + (0.375, 0.20, 0.20), + (0.500, 0.25, 0.25), + (0.625, 0.50, 0.50), + (0.750, 0.75, 0.75), + (0.875, 0.90, 0.90), + (1.000, 1.00, 1.00)), + 'blue': ((0.000, 0.00, 0.00), + (0.125, 0.50, 0.50), + (0.250, 0.75, 0.75), + (0.375, 0.50, 0.50), + (0.500, 0.15, 0.15), + (0.625, 0.00, 0.00), + (0.750, 0.10, 0.10), + (0.875, 0.50, 0.50), + (1.000, 1.00, 1.00))} + + +# An MIT licensed, colorblind-friendly heatmap from Wistia: +# https://github.com/wistia/heatmap-palette +# http://wistia.com/blog/heatmaps-for-colorblindness +# +# >>> import matplotlib.colors as c +# >>> colors = ["#e4ff7a", "#ffe81a", "#ffbd00", "#ffa000", "#fc7f00"] +# >>> cm = c.LinearSegmentedColormap.from_list('wistia', colors) +# >>> _wistia_data = cm._segmentdata +# >>> del _wistia_data['alpha'] +# +_wistia_data = { + 'red': [(0.0, 0.8941176470588236, 0.8941176470588236), + (0.25, 1.0, 1.0), + (0.5, 1.0, 1.0), + (0.75, 1.0, 1.0), + (1.0, 0.9882352941176471, 0.9882352941176471)], + 'green': [(0.0, 1.0, 1.0), + (0.25, 0.9098039215686274, 0.9098039215686274), + (0.5, 0.7411764705882353, 0.7411764705882353), + (0.75, 0.6274509803921569, 0.6274509803921569), + (1.0, 0.4980392156862745, 0.4980392156862745)], + 'blue': [(0.0, 0.47843137254901963, 0.47843137254901963), + (0.25, 0.10196078431372549, 0.10196078431372549), + (0.5, 0.0, 0.0), + (0.75, 0.0, 0.0), + (1.0, 0.0, 0.0)], +} + + +# Categorical palettes from Vega: +# https://github.com/vega/vega/wiki/Scales +# (divided by 255) +# + +_tab10_data = ( + (0.12156862745098039, 0.4666666666666667, 0.7058823529411765 ), # 1f77b4 + (1.0, 0.4980392156862745, 0.054901960784313725), # ff7f0e + (0.17254901960784313, 0.6274509803921569, 0.17254901960784313 ), # 2ca02c + (0.8392156862745098, 0.15294117647058825, 0.1568627450980392 
), # d62728 + (0.5803921568627451, 0.403921568627451, 0.7411764705882353 ), # 9467bd + (0.5490196078431373, 0.33725490196078434, 0.29411764705882354 ), # 8c564b + (0.8901960784313725, 0.4666666666666667, 0.7607843137254902 ), # e377c2 + (0.4980392156862745, 0.4980392156862745, 0.4980392156862745 ), # 7f7f7f + (0.7372549019607844, 0.7411764705882353, 0.13333333333333333 ), # bcbd22 + (0.09019607843137255, 0.7450980392156863, 0.8117647058823529), # 17becf +) + +_tab20_data = ( + (0.12156862745098039, 0.4666666666666667, 0.7058823529411765 ), # 1f77b4 + (0.6823529411764706, 0.7803921568627451, 0.9098039215686274 ), # aec7e8 + (1.0, 0.4980392156862745, 0.054901960784313725), # ff7f0e + (1.0, 0.7333333333333333, 0.47058823529411764 ), # ffbb78 + (0.17254901960784313, 0.6274509803921569, 0.17254901960784313 ), # 2ca02c + (0.596078431372549, 0.8745098039215686, 0.5411764705882353 ), # 98df8a + (0.8392156862745098, 0.15294117647058825, 0.1568627450980392 ), # d62728 + (1.0, 0.596078431372549, 0.5882352941176471 ), # ff9896 + (0.5803921568627451, 0.403921568627451, 0.7411764705882353 ), # 9467bd + (0.7725490196078432, 0.6901960784313725, 0.8352941176470589 ), # c5b0d5 + (0.5490196078431373, 0.33725490196078434, 0.29411764705882354 ), # 8c564b + (0.7686274509803922, 0.611764705882353, 0.5803921568627451 ), # c49c94 + (0.8901960784313725, 0.4666666666666667, 0.7607843137254902 ), # e377c2 + (0.9686274509803922, 0.7137254901960784, 0.8235294117647058 ), # f7b6d2 + (0.4980392156862745, 0.4980392156862745, 0.4980392156862745 ), # 7f7f7f + (0.7803921568627451, 0.7803921568627451, 0.7803921568627451 ), # c7c7c7 + (0.7372549019607844, 0.7411764705882353, 0.13333333333333333 ), # bcbd22 + (0.8588235294117647, 0.8588235294117647, 0.5529411764705883 ), # dbdb8d + (0.09019607843137255, 0.7450980392156863, 0.8117647058823529 ), # 17becf + (0.6196078431372549, 0.8549019607843137, 0.8980392156862745), # 9edae5 +) + +_tab20b_data = ( + (0.2235294117647059, 0.23137254901960785, 0.4745098039215686 ), # 393b79 + (0.3215686274509804, 0.32941176470588235, 0.6392156862745098 ), # 5254a3 + (0.4196078431372549, 0.43137254901960786, 0.8117647058823529 ), # 6b6ecf + (0.611764705882353, 0.6196078431372549, 0.8705882352941177 ), # 9c9ede + (0.38823529411764707, 0.4745098039215686, 0.2235294117647059 ), # 637939 + (0.5490196078431373, 0.6352941176470588, 0.3215686274509804 ), # 8ca252 + (0.7098039215686275, 0.8117647058823529, 0.4196078431372549 ), # b5cf6b + (0.807843137254902, 0.8588235294117647, 0.611764705882353 ), # cedb9c + (0.5490196078431373, 0.42745098039215684, 0.19215686274509805), # 8c6d31 + (0.7411764705882353, 0.6196078431372549, 0.2235294117647059 ), # bd9e39 + (0.9058823529411765, 0.7294117647058823, 0.3215686274509804 ), # e7ba52 + (0.9058823529411765, 0.796078431372549, 0.5803921568627451 ), # e7cb94 + (0.5176470588235295, 0.23529411764705882, 0.2235294117647059 ), # 843c39 + (0.6784313725490196, 0.28627450980392155, 0.2901960784313726 ), # ad494a + (0.8392156862745098, 0.3803921568627451, 0.4196078431372549 ), # d6616b + (0.9058823529411765, 0.5882352941176471, 0.611764705882353 ), # e7969c + (0.4823529411764706, 0.2549019607843137, 0.45098039215686275), # 7b4173 + (0.6470588235294118, 0.3176470588235294, 0.5803921568627451 ), # a55194 + (0.807843137254902, 0.42745098039215684, 0.7411764705882353 ), # ce6dbd + (0.8705882352941177, 0.6196078431372549, 0.8392156862745098 ), # de9ed6 +) + +_tab20c_data = ( + (0.19215686274509805, 0.5098039215686274, 0.7411764705882353 ), # 3182bd + (0.4196078431372549, 
0.6823529411764706, 0.8392156862745098 ), # 6baed6 + (0.6196078431372549, 0.792156862745098, 0.8823529411764706 ), # 9ecae1 + (0.7764705882352941, 0.8588235294117647, 0.9372549019607843 ), # c6dbef + (0.9019607843137255, 0.3333333333333333, 0.050980392156862744), # e6550d + (0.9921568627450981, 0.5529411764705883, 0.23529411764705882 ), # fd8d3c + (0.9921568627450981, 0.6823529411764706, 0.4196078431372549 ), # fdae6b + (0.9921568627450981, 0.8156862745098039, 0.6352941176470588 ), # fdd0a2 + (0.19215686274509805, 0.6392156862745098, 0.32941176470588235 ), # 31a354 + (0.4549019607843137, 0.7686274509803922, 0.4627450980392157 ), # 74c476 + (0.6313725490196078, 0.8509803921568627, 0.6078431372549019 ), # a1d99b + (0.7803921568627451, 0.9137254901960784, 0.7529411764705882 ), # c7e9c0 + (0.4588235294117647, 0.4196078431372549, 0.6941176470588235 ), # 756bb1 + (0.6196078431372549, 0.6039215686274509, 0.7843137254901961 ), # 9e9ac8 + (0.7372549019607844, 0.7411764705882353, 0.8627450980392157 ), # bcbddc + (0.8549019607843137, 0.8549019607843137, 0.9215686274509803 ), # dadaeb + (0.38823529411764707, 0.38823529411764707, 0.38823529411764707 ), # 636363 + (0.5882352941176471, 0.5882352941176471, 0.5882352941176471 ), # 969696 + (0.7411764705882353, 0.7411764705882353, 0.7411764705882353 ), # bdbdbd + (0.8509803921568627, 0.8509803921568627, 0.8509803921568627 ), # d9d9d9 +) + + +datad = { + 'Blues': _Blues_data, + 'BrBG': _BrBG_data, + 'BuGn': _BuGn_data, + 'BuPu': _BuPu_data, + 'CMRmap': _CMRmap_data, + 'GnBu': _GnBu_data, + 'Greens': _Greens_data, + 'Greys': _Greys_data, + 'OrRd': _OrRd_data, + 'Oranges': _Oranges_data, + 'PRGn': _PRGn_data, + 'PiYG': _PiYG_data, + 'PuBu': _PuBu_data, + 'PuBuGn': _PuBuGn_data, + 'PuOr': _PuOr_data, + 'PuRd': _PuRd_data, + 'Purples': _Purples_data, + 'RdBu': _RdBu_data, + 'RdGy': _RdGy_data, + 'RdPu': _RdPu_data, + 'RdYlBu': _RdYlBu_data, + 'RdYlGn': _RdYlGn_data, + 'Reds': _Reds_data, + 'Spectral': _Spectral_data, + 'Wistia': _wistia_data, + 'YlGn': _YlGn_data, + 'YlGnBu': _YlGnBu_data, + 'YlOrBr': _YlOrBr_data, + 'YlOrRd': _YlOrRd_data, + 'afmhot': _afmhot_data, + 'autumn': _autumn_data, + 'binary': _binary_data, + 'bone': _bone_data, + 'brg': _brg_data, + 'bwr': _bwr_data, + 'cool': _cool_data, + 'coolwarm': _coolwarm_data, + 'copper': _copper_data, + 'cubehelix': _cubehelix_data, + 'flag': _flag_data, + 'gist_earth': _gist_earth_data, + 'gist_gray': _gist_gray_data, + 'gist_heat': _gist_heat_data, + 'gist_ncar': _gist_ncar_data, + 'gist_rainbow': _gist_rainbow_data, + 'gist_stern': _gist_stern_data, + 'gist_yarg': _gist_yarg_data, + 'gnuplot': _gnuplot_data, + 'gnuplot2': _gnuplot2_data, + 'gray': _gray_data, + 'hot': _hot_data, + 'hsv': _hsv_data, + 'jet': _jet_data, + 'nipy_spectral': _nipy_spectral_data, + 'ocean': _ocean_data, + 'pink': _pink_data, + 'prism': _prism_data, + 'rainbow': _rainbow_data, + 'seismic': _seismic_data, + 'spring': _spring_data, + 'summer': _summer_data, + 'terrain': _terrain_data, + 'winter': _winter_data, + # Qualitative + 'Accent': {'listed': _Accent_data}, + 'Dark2': {'listed': _Dark2_data}, + 'Paired': {'listed': _Paired_data}, + 'Pastel1': {'listed': _Pastel1_data}, + 'Pastel2': {'listed': _Pastel2_data}, + 'Set1': {'listed': _Set1_data}, + 'Set2': {'listed': _Set2_data}, + 'Set3': {'listed': _Set3_data}, + 'tab10': {'listed': _tab10_data}, + 'tab20': {'listed': _tab20_data}, + 'tab20b': {'listed': _tab20b_data}, + 'tab20c': {'listed': _tab20c_data}, +} diff --git 
a/minor_project/lib/python3.6/site-packages/matplotlib/_cm_listed.py b/minor_project/lib/python3.6/site-packages/matplotlib/_cm_listed.py new file mode 100644 index 0000000..a331ad7 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_cm_listed.py @@ -0,0 +1,2071 @@ +from .colors import ListedColormap + +_magma_data = [[0.001462, 0.000466, 0.013866], + [0.002258, 0.001295, 0.018331], + [0.003279, 0.002305, 0.023708], + [0.004512, 0.003490, 0.029965], + [0.005950, 0.004843, 0.037130], + [0.007588, 0.006356, 0.044973], + [0.009426, 0.008022, 0.052844], + [0.011465, 0.009828, 0.060750], + [0.013708, 0.011771, 0.068667], + [0.016156, 0.013840, 0.076603], + [0.018815, 0.016026, 0.084584], + [0.021692, 0.018320, 0.092610], + [0.024792, 0.020715, 0.100676], + [0.028123, 0.023201, 0.108787], + [0.031696, 0.025765, 0.116965], + [0.035520, 0.028397, 0.125209], + [0.039608, 0.031090, 0.133515], + [0.043830, 0.033830, 0.141886], + [0.048062, 0.036607, 0.150327], + [0.052320, 0.039407, 0.158841], + [0.056615, 0.042160, 0.167446], + [0.060949, 0.044794, 0.176129], + [0.065330, 0.047318, 0.184892], + [0.069764, 0.049726, 0.193735], + [0.074257, 0.052017, 0.202660], + [0.078815, 0.054184, 0.211667], + [0.083446, 0.056225, 0.220755], + [0.088155, 0.058133, 0.229922], + [0.092949, 0.059904, 0.239164], + [0.097833, 0.061531, 0.248477], + [0.102815, 0.063010, 0.257854], + [0.107899, 0.064335, 0.267289], + [0.113094, 0.065492, 0.276784], + [0.118405, 0.066479, 0.286321], + [0.123833, 0.067295, 0.295879], + [0.129380, 0.067935, 0.305443], + [0.135053, 0.068391, 0.315000], + [0.140858, 0.068654, 0.324538], + [0.146785, 0.068738, 0.334011], + [0.152839, 0.068637, 0.343404], + [0.159018, 0.068354, 0.352688], + [0.165308, 0.067911, 0.361816], + [0.171713, 0.067305, 0.370771], + [0.178212, 0.066576, 0.379497], + [0.184801, 0.065732, 0.387973], + [0.191460, 0.064818, 0.396152], + [0.198177, 0.063862, 0.404009], + [0.204935, 0.062907, 0.411514], + [0.211718, 0.061992, 0.418647], + [0.218512, 0.061158, 0.425392], + [0.225302, 0.060445, 0.431742], + [0.232077, 0.059889, 0.437695], + [0.238826, 0.059517, 0.443256], + [0.245543, 0.059352, 0.448436], + [0.252220, 0.059415, 0.453248], + [0.258857, 0.059706, 0.457710], + [0.265447, 0.060237, 0.461840], + [0.271994, 0.060994, 0.465660], + [0.278493, 0.061978, 0.469190], + [0.284951, 0.063168, 0.472451], + [0.291366, 0.064553, 0.475462], + [0.297740, 0.066117, 0.478243], + [0.304081, 0.067835, 0.480812], + [0.310382, 0.069702, 0.483186], + [0.316654, 0.071690, 0.485380], + [0.322899, 0.073782, 0.487408], + [0.329114, 0.075972, 0.489287], + [0.335308, 0.078236, 0.491024], + [0.341482, 0.080564, 0.492631], + [0.347636, 0.082946, 0.494121], + [0.353773, 0.085373, 0.495501], + [0.359898, 0.087831, 0.496778], + [0.366012, 0.090314, 0.497960], + [0.372116, 0.092816, 0.499053], + [0.378211, 0.095332, 0.500067], + [0.384299, 0.097855, 0.501002], + [0.390384, 0.100379, 0.501864], + [0.396467, 0.102902, 0.502658], + [0.402548, 0.105420, 0.503386], + [0.408629, 0.107930, 0.504052], + [0.414709, 0.110431, 0.504662], + [0.420791, 0.112920, 0.505215], + [0.426877, 0.115395, 0.505714], + [0.432967, 0.117855, 0.506160], + [0.439062, 0.120298, 0.506555], + [0.445163, 0.122724, 0.506901], + [0.451271, 0.125132, 0.507198], + [0.457386, 0.127522, 0.507448], + [0.463508, 0.129893, 0.507652], + [0.469640, 0.132245, 0.507809], + [0.475780, 0.134577, 0.507921], + [0.481929, 0.136891, 0.507989], + [0.488088, 0.139186, 0.508011], + [0.494258, 0.141462, 0.507988], + [0.500438, 
0.143719, 0.507920], + [0.506629, 0.145958, 0.507806], + [0.512831, 0.148179, 0.507648], + [0.519045, 0.150383, 0.507443], + [0.525270, 0.152569, 0.507192], + [0.531507, 0.154739, 0.506895], + [0.537755, 0.156894, 0.506551], + [0.544015, 0.159033, 0.506159], + [0.550287, 0.161158, 0.505719], + [0.556571, 0.163269, 0.505230], + [0.562866, 0.165368, 0.504692], + [0.569172, 0.167454, 0.504105], + [0.575490, 0.169530, 0.503466], + [0.581819, 0.171596, 0.502777], + [0.588158, 0.173652, 0.502035], + [0.594508, 0.175701, 0.501241], + [0.600868, 0.177743, 0.500394], + [0.607238, 0.179779, 0.499492], + [0.613617, 0.181811, 0.498536], + [0.620005, 0.183840, 0.497524], + [0.626401, 0.185867, 0.496456], + [0.632805, 0.187893, 0.495332], + [0.639216, 0.189921, 0.494150], + [0.645633, 0.191952, 0.492910], + [0.652056, 0.193986, 0.491611], + [0.658483, 0.196027, 0.490253], + [0.664915, 0.198075, 0.488836], + [0.671349, 0.200133, 0.487358], + [0.677786, 0.202203, 0.485819], + [0.684224, 0.204286, 0.484219], + [0.690661, 0.206384, 0.482558], + [0.697098, 0.208501, 0.480835], + [0.703532, 0.210638, 0.479049], + [0.709962, 0.212797, 0.477201], + [0.716387, 0.214982, 0.475290], + [0.722805, 0.217194, 0.473316], + [0.729216, 0.219437, 0.471279], + [0.735616, 0.221713, 0.469180], + [0.742004, 0.224025, 0.467018], + [0.748378, 0.226377, 0.464794], + [0.754737, 0.228772, 0.462509], + [0.761077, 0.231214, 0.460162], + [0.767398, 0.233705, 0.457755], + [0.773695, 0.236249, 0.455289], + [0.779968, 0.238851, 0.452765], + [0.786212, 0.241514, 0.450184], + [0.792427, 0.244242, 0.447543], + [0.798608, 0.247040, 0.444848], + [0.804752, 0.249911, 0.442102], + [0.810855, 0.252861, 0.439305], + [0.816914, 0.255895, 0.436461], + [0.822926, 0.259016, 0.433573], + [0.828886, 0.262229, 0.430644], + [0.834791, 0.265540, 0.427671], + [0.840636, 0.268953, 0.424666], + [0.846416, 0.272473, 0.421631], + [0.852126, 0.276106, 0.418573], + [0.857763, 0.279857, 0.415496], + [0.863320, 0.283729, 0.412403], + [0.868793, 0.287728, 0.409303], + [0.874176, 0.291859, 0.406205], + [0.879464, 0.296125, 0.403118], + [0.884651, 0.300530, 0.400047], + [0.889731, 0.305079, 0.397002], + [0.894700, 0.309773, 0.393995], + [0.899552, 0.314616, 0.391037], + [0.904281, 0.319610, 0.388137], + [0.908884, 0.324755, 0.385308], + [0.913354, 0.330052, 0.382563], + [0.917689, 0.335500, 0.379915], + [0.921884, 0.341098, 0.377376], + [0.925937, 0.346844, 0.374959], + [0.929845, 0.352734, 0.372677], + [0.933606, 0.358764, 0.370541], + [0.937221, 0.364929, 0.368567], + [0.940687, 0.371224, 0.366762], + [0.944006, 0.377643, 0.365136], + [0.947180, 0.384178, 0.363701], + [0.950210, 0.390820, 0.362468], + [0.953099, 0.397563, 0.361438], + [0.955849, 0.404400, 0.360619], + [0.958464, 0.411324, 0.360014], + [0.960949, 0.418323, 0.359630], + [0.963310, 0.425390, 0.359469], + [0.965549, 0.432519, 0.359529], + [0.967671, 0.439703, 0.359810], + [0.969680, 0.446936, 0.360311], + [0.971582, 0.454210, 0.361030], + [0.973381, 0.461520, 0.361965], + [0.975082, 0.468861, 0.363111], + [0.976690, 0.476226, 0.364466], + [0.978210, 0.483612, 0.366025], + [0.979645, 0.491014, 0.367783], + [0.981000, 0.498428, 0.369734], + [0.982279, 0.505851, 0.371874], + [0.983485, 0.513280, 0.374198], + [0.984622, 0.520713, 0.376698], + [0.985693, 0.528148, 0.379371], + [0.986700, 0.535582, 0.382210], + [0.987646, 0.543015, 0.385210], + [0.988533, 0.550446, 0.388365], + [0.989363, 0.557873, 0.391671], + [0.990138, 0.565296, 0.395122], + [0.990871, 0.572706, 0.398714], + [0.991558, 0.580107, 
0.402441], + [0.992196, 0.587502, 0.406299], + [0.992785, 0.594891, 0.410283], + [0.993326, 0.602275, 0.414390], + [0.993834, 0.609644, 0.418613], + [0.994309, 0.616999, 0.422950], + [0.994738, 0.624350, 0.427397], + [0.995122, 0.631696, 0.431951], + [0.995480, 0.639027, 0.436607], + [0.995810, 0.646344, 0.441361], + [0.996096, 0.653659, 0.446213], + [0.996341, 0.660969, 0.451160], + [0.996580, 0.668256, 0.456192], + [0.996775, 0.675541, 0.461314], + [0.996925, 0.682828, 0.466526], + [0.997077, 0.690088, 0.471811], + [0.997186, 0.697349, 0.477182], + [0.997254, 0.704611, 0.482635], + [0.997325, 0.711848, 0.488154], + [0.997351, 0.719089, 0.493755], + [0.997351, 0.726324, 0.499428], + [0.997341, 0.733545, 0.505167], + [0.997285, 0.740772, 0.510983], + [0.997228, 0.747981, 0.516859], + [0.997138, 0.755190, 0.522806], + [0.997019, 0.762398, 0.528821], + [0.996898, 0.769591, 0.534892], + [0.996727, 0.776795, 0.541039], + [0.996571, 0.783977, 0.547233], + [0.996369, 0.791167, 0.553499], + [0.996162, 0.798348, 0.559820], + [0.995932, 0.805527, 0.566202], + [0.995680, 0.812706, 0.572645], + [0.995424, 0.819875, 0.579140], + [0.995131, 0.827052, 0.585701], + [0.994851, 0.834213, 0.592307], + [0.994524, 0.841387, 0.598983], + [0.994222, 0.848540, 0.605696], + [0.993866, 0.855711, 0.612482], + [0.993545, 0.862859, 0.619299], + [0.993170, 0.870024, 0.626189], + [0.992831, 0.877168, 0.633109], + [0.992440, 0.884330, 0.640099], + [0.992089, 0.891470, 0.647116], + [0.991688, 0.898627, 0.654202], + [0.991332, 0.905763, 0.661309], + [0.990930, 0.912915, 0.668481], + [0.990570, 0.920049, 0.675675], + [0.990175, 0.927196, 0.682926], + [0.989815, 0.934329, 0.690198], + [0.989434, 0.941470, 0.697519], + [0.989077, 0.948604, 0.704863], + [0.988717, 0.955742, 0.712242], + [0.988367, 0.962878, 0.719649], + [0.988033, 0.970012, 0.727077], + [0.987691, 0.977154, 0.734536], + [0.987387, 0.984288, 0.742002], + [0.987053, 0.991438, 0.749504]] + +_inferno_data = [[0.001462, 0.000466, 0.013866], + [0.002267, 0.001270, 0.018570], + [0.003299, 0.002249, 0.024239], + [0.004547, 0.003392, 0.030909], + [0.006006, 0.004692, 0.038558], + [0.007676, 0.006136, 0.046836], + [0.009561, 0.007713, 0.055143], + [0.011663, 0.009417, 0.063460], + [0.013995, 0.011225, 0.071862], + [0.016561, 0.013136, 0.080282], + [0.019373, 0.015133, 0.088767], + [0.022447, 0.017199, 0.097327], + [0.025793, 0.019331, 0.105930], + [0.029432, 0.021503, 0.114621], + [0.033385, 0.023702, 0.123397], + [0.037668, 0.025921, 0.132232], + [0.042253, 0.028139, 0.141141], + [0.046915, 0.030324, 0.150164], + [0.051644, 0.032474, 0.159254], + [0.056449, 0.034569, 0.168414], + [0.061340, 0.036590, 0.177642], + [0.066331, 0.038504, 0.186962], + [0.071429, 0.040294, 0.196354], + [0.076637, 0.041905, 0.205799], + [0.081962, 0.043328, 0.215289], + [0.087411, 0.044556, 0.224813], + [0.092990, 0.045583, 0.234358], + [0.098702, 0.046402, 0.243904], + [0.104551, 0.047008, 0.253430], + [0.110536, 0.047399, 0.262912], + [0.116656, 0.047574, 0.272321], + [0.122908, 0.047536, 0.281624], + [0.129285, 0.047293, 0.290788], + [0.135778, 0.046856, 0.299776], + [0.142378, 0.046242, 0.308553], + [0.149073, 0.045468, 0.317085], + [0.155850, 0.044559, 0.325338], + [0.162689, 0.043554, 0.333277], + [0.169575, 0.042489, 0.340874], + [0.176493, 0.041402, 0.348111], + [0.183429, 0.040329, 0.354971], + [0.190367, 0.039309, 0.361447], + [0.197297, 0.038400, 0.367535], + [0.204209, 0.037632, 0.373238], + [0.211095, 0.037030, 0.378563], + [0.217949, 0.036615, 0.383522], + [0.224763, 0.036405, 
0.388129], + [0.231538, 0.036405, 0.392400], + [0.238273, 0.036621, 0.396353], + [0.244967, 0.037055, 0.400007], + [0.251620, 0.037705, 0.403378], + [0.258234, 0.038571, 0.406485], + [0.264810, 0.039647, 0.409345], + [0.271347, 0.040922, 0.411976], + [0.277850, 0.042353, 0.414392], + [0.284321, 0.043933, 0.416608], + [0.290763, 0.045644, 0.418637], + [0.297178, 0.047470, 0.420491], + [0.303568, 0.049396, 0.422182], + [0.309935, 0.051407, 0.423721], + [0.316282, 0.053490, 0.425116], + [0.322610, 0.055634, 0.426377], + [0.328921, 0.057827, 0.427511], + [0.335217, 0.060060, 0.428524], + [0.341500, 0.062325, 0.429425], + [0.347771, 0.064616, 0.430217], + [0.354032, 0.066925, 0.430906], + [0.360284, 0.069247, 0.431497], + [0.366529, 0.071579, 0.431994], + [0.372768, 0.073915, 0.432400], + [0.379001, 0.076253, 0.432719], + [0.385228, 0.078591, 0.432955], + [0.391453, 0.080927, 0.433109], + [0.397674, 0.083257, 0.433183], + [0.403894, 0.085580, 0.433179], + [0.410113, 0.087896, 0.433098], + [0.416331, 0.090203, 0.432943], + [0.422549, 0.092501, 0.432714], + [0.428768, 0.094790, 0.432412], + [0.434987, 0.097069, 0.432039], + [0.441207, 0.099338, 0.431594], + [0.447428, 0.101597, 0.431080], + [0.453651, 0.103848, 0.430498], + [0.459875, 0.106089, 0.429846], + [0.466100, 0.108322, 0.429125], + [0.472328, 0.110547, 0.428334], + [0.478558, 0.112764, 0.427475], + [0.484789, 0.114974, 0.426548], + [0.491022, 0.117179, 0.425552], + [0.497257, 0.119379, 0.424488], + [0.503493, 0.121575, 0.423356], + [0.509730, 0.123769, 0.422156], + [0.515967, 0.125960, 0.420887], + [0.522206, 0.128150, 0.419549], + [0.528444, 0.130341, 0.418142], + [0.534683, 0.132534, 0.416667], + [0.540920, 0.134729, 0.415123], + [0.547157, 0.136929, 0.413511], + [0.553392, 0.139134, 0.411829], + [0.559624, 0.141346, 0.410078], + [0.565854, 0.143567, 0.408258], + [0.572081, 0.145797, 0.406369], + [0.578304, 0.148039, 0.404411], + [0.584521, 0.150294, 0.402385], + [0.590734, 0.152563, 0.400290], + [0.596940, 0.154848, 0.398125], + [0.603139, 0.157151, 0.395891], + [0.609330, 0.159474, 0.393589], + [0.615513, 0.161817, 0.391219], + [0.621685, 0.164184, 0.388781], + [0.627847, 0.166575, 0.386276], + [0.633998, 0.168992, 0.383704], + [0.640135, 0.171438, 0.381065], + [0.646260, 0.173914, 0.378359], + [0.652369, 0.176421, 0.375586], + [0.658463, 0.178962, 0.372748], + [0.664540, 0.181539, 0.369846], + [0.670599, 0.184153, 0.366879], + [0.676638, 0.186807, 0.363849], + [0.682656, 0.189501, 0.360757], + [0.688653, 0.192239, 0.357603], + [0.694627, 0.195021, 0.354388], + [0.700576, 0.197851, 0.351113], + [0.706500, 0.200728, 0.347777], + [0.712396, 0.203656, 0.344383], + [0.718264, 0.206636, 0.340931], + [0.724103, 0.209670, 0.337424], + [0.729909, 0.212759, 0.333861], + [0.735683, 0.215906, 0.330245], + [0.741423, 0.219112, 0.326576], + [0.747127, 0.222378, 0.322856], + [0.752794, 0.225706, 0.319085], + [0.758422, 0.229097, 0.315266], + [0.764010, 0.232554, 0.311399], + [0.769556, 0.236077, 0.307485], + [0.775059, 0.239667, 0.303526], + [0.780517, 0.243327, 0.299523], + [0.785929, 0.247056, 0.295477], + [0.791293, 0.250856, 0.291390], + [0.796607, 0.254728, 0.287264], + [0.801871, 0.258674, 0.283099], + [0.807082, 0.262692, 0.278898], + [0.812239, 0.266786, 0.274661], + [0.817341, 0.270954, 0.270390], + [0.822386, 0.275197, 0.266085], + [0.827372, 0.279517, 0.261750], + [0.832299, 0.283913, 0.257383], + [0.837165, 0.288385, 0.252988], + [0.841969, 0.292933, 0.248564], + [0.846709, 0.297559, 0.244113], + [0.851384, 0.302260, 0.239636], + 
[0.855992, 0.307038, 0.235133], + [0.860533, 0.311892, 0.230606], + [0.865006, 0.316822, 0.226055], + [0.869409, 0.321827, 0.221482], + [0.873741, 0.326906, 0.216886], + [0.878001, 0.332060, 0.212268], + [0.882188, 0.337287, 0.207628], + [0.886302, 0.342586, 0.202968], + [0.890341, 0.347957, 0.198286], + [0.894305, 0.353399, 0.193584], + [0.898192, 0.358911, 0.188860], + [0.902003, 0.364492, 0.184116], + [0.905735, 0.370140, 0.179350], + [0.909390, 0.375856, 0.174563], + [0.912966, 0.381636, 0.169755], + [0.916462, 0.387481, 0.164924], + [0.919879, 0.393389, 0.160070], + [0.923215, 0.399359, 0.155193], + [0.926470, 0.405389, 0.150292], + [0.929644, 0.411479, 0.145367], + [0.932737, 0.417627, 0.140417], + [0.935747, 0.423831, 0.135440], + [0.938675, 0.430091, 0.130438], + [0.941521, 0.436405, 0.125409], + [0.944285, 0.442772, 0.120354], + [0.946965, 0.449191, 0.115272], + [0.949562, 0.455660, 0.110164], + [0.952075, 0.462178, 0.105031], + [0.954506, 0.468744, 0.099874], + [0.956852, 0.475356, 0.094695], + [0.959114, 0.482014, 0.089499], + [0.961293, 0.488716, 0.084289], + [0.963387, 0.495462, 0.079073], + [0.965397, 0.502249, 0.073859], + [0.967322, 0.509078, 0.068659], + [0.969163, 0.515946, 0.063488], + [0.970919, 0.522853, 0.058367], + [0.972590, 0.529798, 0.053324], + [0.974176, 0.536780, 0.048392], + [0.975677, 0.543798, 0.043618], + [0.977092, 0.550850, 0.039050], + [0.978422, 0.557937, 0.034931], + [0.979666, 0.565057, 0.031409], + [0.980824, 0.572209, 0.028508], + [0.981895, 0.579392, 0.026250], + [0.982881, 0.586606, 0.024661], + [0.983779, 0.593849, 0.023770], + [0.984591, 0.601122, 0.023606], + [0.985315, 0.608422, 0.024202], + [0.985952, 0.615750, 0.025592], + [0.986502, 0.623105, 0.027814], + [0.986964, 0.630485, 0.030908], + [0.987337, 0.637890, 0.034916], + [0.987622, 0.645320, 0.039886], + [0.987819, 0.652773, 0.045581], + [0.987926, 0.660250, 0.051750], + [0.987945, 0.667748, 0.058329], + [0.987874, 0.675267, 0.065257], + [0.987714, 0.682807, 0.072489], + [0.987464, 0.690366, 0.079990], + [0.987124, 0.697944, 0.087731], + [0.986694, 0.705540, 0.095694], + [0.986175, 0.713153, 0.103863], + [0.985566, 0.720782, 0.112229], + [0.984865, 0.728427, 0.120785], + [0.984075, 0.736087, 0.129527], + [0.983196, 0.743758, 0.138453], + [0.982228, 0.751442, 0.147565], + [0.981173, 0.759135, 0.156863], + [0.980032, 0.766837, 0.166353], + [0.978806, 0.774545, 0.176037], + [0.977497, 0.782258, 0.185923], + [0.976108, 0.789974, 0.196018], + [0.974638, 0.797692, 0.206332], + [0.973088, 0.805409, 0.216877], + [0.971468, 0.813122, 0.227658], + [0.969783, 0.820825, 0.238686], + [0.968041, 0.828515, 0.249972], + [0.966243, 0.836191, 0.261534], + [0.964394, 0.843848, 0.273391], + [0.962517, 0.851476, 0.285546], + [0.960626, 0.859069, 0.298010], + [0.958720, 0.866624, 0.310820], + [0.956834, 0.874129, 0.323974], + [0.954997, 0.881569, 0.337475], + [0.953215, 0.888942, 0.351369], + [0.951546, 0.896226, 0.365627], + [0.950018, 0.903409, 0.380271], + [0.948683, 0.910473, 0.395289], + [0.947594, 0.917399, 0.410665], + [0.946809, 0.924168, 0.426373], + [0.946392, 0.930761, 0.442367], + [0.946403, 0.937159, 0.458592], + [0.946903, 0.943348, 0.474970], + [0.947937, 0.949318, 0.491426], + [0.949545, 0.955063, 0.507860], + [0.951740, 0.960587, 0.524203], + [0.954529, 0.965896, 0.540361], + [0.957896, 0.971003, 0.556275], + [0.961812, 0.975924, 0.571925], + [0.966249, 0.980678, 0.587206], + [0.971162, 0.985282, 0.602154], + [0.976511, 0.989753, 0.616760], + [0.982257, 0.994109, 0.631017], + [0.988362, 
0.998364, 0.644924]] + +_plasma_data = [[0.050383, 0.029803, 0.527975], + [0.063536, 0.028426, 0.533124], + [0.075353, 0.027206, 0.538007], + [0.086222, 0.026125, 0.542658], + [0.096379, 0.025165, 0.547103], + [0.105980, 0.024309, 0.551368], + [0.115124, 0.023556, 0.555468], + [0.123903, 0.022878, 0.559423], + [0.132381, 0.022258, 0.563250], + [0.140603, 0.021687, 0.566959], + [0.148607, 0.021154, 0.570562], + [0.156421, 0.020651, 0.574065], + [0.164070, 0.020171, 0.577478], + [0.171574, 0.019706, 0.580806], + [0.178950, 0.019252, 0.584054], + [0.186213, 0.018803, 0.587228], + [0.193374, 0.018354, 0.590330], + [0.200445, 0.017902, 0.593364], + [0.207435, 0.017442, 0.596333], + [0.214350, 0.016973, 0.599239], + [0.221197, 0.016497, 0.602083], + [0.227983, 0.016007, 0.604867], + [0.234715, 0.015502, 0.607592], + [0.241396, 0.014979, 0.610259], + [0.248032, 0.014439, 0.612868], + [0.254627, 0.013882, 0.615419], + [0.261183, 0.013308, 0.617911], + [0.267703, 0.012716, 0.620346], + [0.274191, 0.012109, 0.622722], + [0.280648, 0.011488, 0.625038], + [0.287076, 0.010855, 0.627295], + [0.293478, 0.010213, 0.629490], + [0.299855, 0.009561, 0.631624], + [0.306210, 0.008902, 0.633694], + [0.312543, 0.008239, 0.635700], + [0.318856, 0.007576, 0.637640], + [0.325150, 0.006915, 0.639512], + [0.331426, 0.006261, 0.641316], + [0.337683, 0.005618, 0.643049], + [0.343925, 0.004991, 0.644710], + [0.350150, 0.004382, 0.646298], + [0.356359, 0.003798, 0.647810], + [0.362553, 0.003243, 0.649245], + [0.368733, 0.002724, 0.650601], + [0.374897, 0.002245, 0.651876], + [0.381047, 0.001814, 0.653068], + [0.387183, 0.001434, 0.654177], + [0.393304, 0.001114, 0.655199], + [0.399411, 0.000859, 0.656133], + [0.405503, 0.000678, 0.656977], + [0.411580, 0.000577, 0.657730], + [0.417642, 0.000564, 0.658390], + [0.423689, 0.000646, 0.658956], + [0.429719, 0.000831, 0.659425], + [0.435734, 0.001127, 0.659797], + [0.441732, 0.001540, 0.660069], + [0.447714, 0.002080, 0.660240], + [0.453677, 0.002755, 0.660310], + [0.459623, 0.003574, 0.660277], + [0.465550, 0.004545, 0.660139], + [0.471457, 0.005678, 0.659897], + [0.477344, 0.006980, 0.659549], + [0.483210, 0.008460, 0.659095], + [0.489055, 0.010127, 0.658534], + [0.494877, 0.011990, 0.657865], + [0.500678, 0.014055, 0.657088], + [0.506454, 0.016333, 0.656202], + [0.512206, 0.018833, 0.655209], + [0.517933, 0.021563, 0.654109], + [0.523633, 0.024532, 0.652901], + [0.529306, 0.027747, 0.651586], + [0.534952, 0.031217, 0.650165], + [0.540570, 0.034950, 0.648640], + [0.546157, 0.038954, 0.647010], + [0.551715, 0.043136, 0.645277], + [0.557243, 0.047331, 0.643443], + [0.562738, 0.051545, 0.641509], + [0.568201, 0.055778, 0.639477], + [0.573632, 0.060028, 0.637349], + [0.579029, 0.064296, 0.635126], + [0.584391, 0.068579, 0.632812], + [0.589719, 0.072878, 0.630408], + [0.595011, 0.077190, 0.627917], + [0.600266, 0.081516, 0.625342], + [0.605485, 0.085854, 0.622686], + [0.610667, 0.090204, 0.619951], + [0.615812, 0.094564, 0.617140], + [0.620919, 0.098934, 0.614257], + [0.625987, 0.103312, 0.611305], + [0.631017, 0.107699, 0.608287], + [0.636008, 0.112092, 0.605205], + [0.640959, 0.116492, 0.602065], + [0.645872, 0.120898, 0.598867], + [0.650746, 0.125309, 0.595617], + [0.655580, 0.129725, 0.592317], + [0.660374, 0.134144, 0.588971], + [0.665129, 0.138566, 0.585582], + [0.669845, 0.142992, 0.582154], + [0.674522, 0.147419, 0.578688], + [0.679160, 0.151848, 0.575189], + [0.683758, 0.156278, 0.571660], + [0.688318, 0.160709, 0.568103], + [0.692840, 0.165141, 0.564522], + [0.697324, 
0.169573, 0.560919], + [0.701769, 0.174005, 0.557296], + [0.706178, 0.178437, 0.553657], + [0.710549, 0.182868, 0.550004], + [0.714883, 0.187299, 0.546338], + [0.719181, 0.191729, 0.542663], + [0.723444, 0.196158, 0.538981], + [0.727670, 0.200586, 0.535293], + [0.731862, 0.205013, 0.531601], + [0.736019, 0.209439, 0.527908], + [0.740143, 0.213864, 0.524216], + [0.744232, 0.218288, 0.520524], + [0.748289, 0.222711, 0.516834], + [0.752312, 0.227133, 0.513149], + [0.756304, 0.231555, 0.509468], + [0.760264, 0.235976, 0.505794], + [0.764193, 0.240396, 0.502126], + [0.768090, 0.244817, 0.498465], + [0.771958, 0.249237, 0.494813], + [0.775796, 0.253658, 0.491171], + [0.779604, 0.258078, 0.487539], + [0.783383, 0.262500, 0.483918], + [0.787133, 0.266922, 0.480307], + [0.790855, 0.271345, 0.476706], + [0.794549, 0.275770, 0.473117], + [0.798216, 0.280197, 0.469538], + [0.801855, 0.284626, 0.465971], + [0.805467, 0.289057, 0.462415], + [0.809052, 0.293491, 0.458870], + [0.812612, 0.297928, 0.455338], + [0.816144, 0.302368, 0.451816], + [0.819651, 0.306812, 0.448306], + [0.823132, 0.311261, 0.444806], + [0.826588, 0.315714, 0.441316], + [0.830018, 0.320172, 0.437836], + [0.833422, 0.324635, 0.434366], + [0.836801, 0.329105, 0.430905], + [0.840155, 0.333580, 0.427455], + [0.843484, 0.338062, 0.424013], + [0.846788, 0.342551, 0.420579], + [0.850066, 0.347048, 0.417153], + [0.853319, 0.351553, 0.413734], + [0.856547, 0.356066, 0.410322], + [0.859750, 0.360588, 0.406917], + [0.862927, 0.365119, 0.403519], + [0.866078, 0.369660, 0.400126], + [0.869203, 0.374212, 0.396738], + [0.872303, 0.378774, 0.393355], + [0.875376, 0.383347, 0.389976], + [0.878423, 0.387932, 0.386600], + [0.881443, 0.392529, 0.383229], + [0.884436, 0.397139, 0.379860], + [0.887402, 0.401762, 0.376494], + [0.890340, 0.406398, 0.373130], + [0.893250, 0.411048, 0.369768], + [0.896131, 0.415712, 0.366407], + [0.898984, 0.420392, 0.363047], + [0.901807, 0.425087, 0.359688], + [0.904601, 0.429797, 0.356329], + [0.907365, 0.434524, 0.352970], + [0.910098, 0.439268, 0.349610], + [0.912800, 0.444029, 0.346251], + [0.915471, 0.448807, 0.342890], + [0.918109, 0.453603, 0.339529], + [0.920714, 0.458417, 0.336166], + [0.923287, 0.463251, 0.332801], + [0.925825, 0.468103, 0.329435], + [0.928329, 0.472975, 0.326067], + [0.930798, 0.477867, 0.322697], + [0.933232, 0.482780, 0.319325], + [0.935630, 0.487712, 0.315952], + [0.937990, 0.492667, 0.312575], + [0.940313, 0.497642, 0.309197], + [0.942598, 0.502639, 0.305816], + [0.944844, 0.507658, 0.302433], + [0.947051, 0.512699, 0.299049], + [0.949217, 0.517763, 0.295662], + [0.951344, 0.522850, 0.292275], + [0.953428, 0.527960, 0.288883], + [0.955470, 0.533093, 0.285490], + [0.957469, 0.538250, 0.282096], + [0.959424, 0.543431, 0.278701], + [0.961336, 0.548636, 0.275305], + [0.963203, 0.553865, 0.271909], + [0.965024, 0.559118, 0.268513], + [0.966798, 0.564396, 0.265118], + [0.968526, 0.569700, 0.261721], + [0.970205, 0.575028, 0.258325], + [0.971835, 0.580382, 0.254931], + [0.973416, 0.585761, 0.251540], + [0.974947, 0.591165, 0.248151], + [0.976428, 0.596595, 0.244767], + [0.977856, 0.602051, 0.241387], + [0.979233, 0.607532, 0.238013], + [0.980556, 0.613039, 0.234646], + [0.981826, 0.618572, 0.231287], + [0.983041, 0.624131, 0.227937], + [0.984199, 0.629718, 0.224595], + [0.985301, 0.635330, 0.221265], + [0.986345, 0.640969, 0.217948], + [0.987332, 0.646633, 0.214648], + [0.988260, 0.652325, 0.211364], + [0.989128, 0.658043, 0.208100], + [0.989935, 0.663787, 0.204859], + [0.990681, 0.669558, 
0.201642], + [0.991365, 0.675355, 0.198453], + [0.991985, 0.681179, 0.195295], + [0.992541, 0.687030, 0.192170], + [0.993032, 0.692907, 0.189084], + [0.993456, 0.698810, 0.186041], + [0.993814, 0.704741, 0.183043], + [0.994103, 0.710698, 0.180097], + [0.994324, 0.716681, 0.177208], + [0.994474, 0.722691, 0.174381], + [0.994553, 0.728728, 0.171622], + [0.994561, 0.734791, 0.168938], + [0.994495, 0.740880, 0.166335], + [0.994355, 0.746995, 0.163821], + [0.994141, 0.753137, 0.161404], + [0.993851, 0.759304, 0.159092], + [0.993482, 0.765499, 0.156891], + [0.993033, 0.771720, 0.154808], + [0.992505, 0.777967, 0.152855], + [0.991897, 0.784239, 0.151042], + [0.991209, 0.790537, 0.149377], + [0.990439, 0.796859, 0.147870], + [0.989587, 0.803205, 0.146529], + [0.988648, 0.809579, 0.145357], + [0.987621, 0.815978, 0.144363], + [0.986509, 0.822401, 0.143557], + [0.985314, 0.828846, 0.142945], + [0.984031, 0.835315, 0.142528], + [0.982653, 0.841812, 0.142303], + [0.981190, 0.848329, 0.142279], + [0.979644, 0.854866, 0.142453], + [0.977995, 0.861432, 0.142808], + [0.976265, 0.868016, 0.143351], + [0.974443, 0.874622, 0.144061], + [0.972530, 0.881250, 0.144923], + [0.970533, 0.887896, 0.145919], + [0.968443, 0.894564, 0.147014], + [0.966271, 0.901249, 0.148180], + [0.964021, 0.907950, 0.149370], + [0.961681, 0.914672, 0.150520], + [0.959276, 0.921407, 0.151566], + [0.956808, 0.928152, 0.152409], + [0.954287, 0.934908, 0.152921], + [0.951726, 0.941671, 0.152925], + [0.949151, 0.948435, 0.152178], + [0.946602, 0.955190, 0.150328], + [0.944152, 0.961916, 0.146861], + [0.941896, 0.968590, 0.140956], + [0.940015, 0.975158, 0.131326]] + +_viridis_data = [[0.267004, 0.004874, 0.329415], + [0.268510, 0.009605, 0.335427], + [0.269944, 0.014625, 0.341379], + [0.271305, 0.019942, 0.347269], + [0.272594, 0.025563, 0.353093], + [0.273809, 0.031497, 0.358853], + [0.274952, 0.037752, 0.364543], + [0.276022, 0.044167, 0.370164], + [0.277018, 0.050344, 0.375715], + [0.277941, 0.056324, 0.381191], + [0.278791, 0.062145, 0.386592], + [0.279566, 0.067836, 0.391917], + [0.280267, 0.073417, 0.397163], + [0.280894, 0.078907, 0.402329], + [0.281446, 0.084320, 0.407414], + [0.281924, 0.089666, 0.412415], + [0.282327, 0.094955, 0.417331], + [0.282656, 0.100196, 0.422160], + [0.282910, 0.105393, 0.426902], + [0.283091, 0.110553, 0.431554], + [0.283197, 0.115680, 0.436115], + [0.283229, 0.120777, 0.440584], + [0.283187, 0.125848, 0.444960], + [0.283072, 0.130895, 0.449241], + [0.282884, 0.135920, 0.453427], + [0.282623, 0.140926, 0.457517], + [0.282290, 0.145912, 0.461510], + [0.281887, 0.150881, 0.465405], + [0.281412, 0.155834, 0.469201], + [0.280868, 0.160771, 0.472899], + [0.280255, 0.165693, 0.476498], + [0.279574, 0.170599, 0.479997], + [0.278826, 0.175490, 0.483397], + [0.278012, 0.180367, 0.486697], + [0.277134, 0.185228, 0.489898], + [0.276194, 0.190074, 0.493001], + [0.275191, 0.194905, 0.496005], + [0.274128, 0.199721, 0.498911], + [0.273006, 0.204520, 0.501721], + [0.271828, 0.209303, 0.504434], + [0.270595, 0.214069, 0.507052], + [0.269308, 0.218818, 0.509577], + [0.267968, 0.223549, 0.512008], + [0.266580, 0.228262, 0.514349], + [0.265145, 0.232956, 0.516599], + [0.263663, 0.237631, 0.518762], + [0.262138, 0.242286, 0.520837], + [0.260571, 0.246922, 0.522828], + [0.258965, 0.251537, 0.524736], + [0.257322, 0.256130, 0.526563], + [0.255645, 0.260703, 0.528312], + [0.253935, 0.265254, 0.529983], + [0.252194, 0.269783, 0.531579], + [0.250425, 0.274290, 0.533103], + [0.248629, 0.278775, 0.534556], + [0.246811, 0.283237, 
0.535941], + [0.244972, 0.287675, 0.537260], + [0.243113, 0.292092, 0.538516], + [0.241237, 0.296485, 0.539709], + [0.239346, 0.300855, 0.540844], + [0.237441, 0.305202, 0.541921], + [0.235526, 0.309527, 0.542944], + [0.233603, 0.313828, 0.543914], + [0.231674, 0.318106, 0.544834], + [0.229739, 0.322361, 0.545706], + [0.227802, 0.326594, 0.546532], + [0.225863, 0.330805, 0.547314], + [0.223925, 0.334994, 0.548053], + [0.221989, 0.339161, 0.548752], + [0.220057, 0.343307, 0.549413], + [0.218130, 0.347432, 0.550038], + [0.216210, 0.351535, 0.550627], + [0.214298, 0.355619, 0.551184], + [0.212395, 0.359683, 0.551710], + [0.210503, 0.363727, 0.552206], + [0.208623, 0.367752, 0.552675], + [0.206756, 0.371758, 0.553117], + [0.204903, 0.375746, 0.553533], + [0.203063, 0.379716, 0.553925], + [0.201239, 0.383670, 0.554294], + [0.199430, 0.387607, 0.554642], + [0.197636, 0.391528, 0.554969], + [0.195860, 0.395433, 0.555276], + [0.194100, 0.399323, 0.555565], + [0.192357, 0.403199, 0.555836], + [0.190631, 0.407061, 0.556089], + [0.188923, 0.410910, 0.556326], + [0.187231, 0.414746, 0.556547], + [0.185556, 0.418570, 0.556753], + [0.183898, 0.422383, 0.556944], + [0.182256, 0.426184, 0.557120], + [0.180629, 0.429975, 0.557282], + [0.179019, 0.433756, 0.557430], + [0.177423, 0.437527, 0.557565], + [0.175841, 0.441290, 0.557685], + [0.174274, 0.445044, 0.557792], + [0.172719, 0.448791, 0.557885], + [0.171176, 0.452530, 0.557965], + [0.169646, 0.456262, 0.558030], + [0.168126, 0.459988, 0.558082], + [0.166617, 0.463708, 0.558119], + [0.165117, 0.467423, 0.558141], + [0.163625, 0.471133, 0.558148], + [0.162142, 0.474838, 0.558140], + [0.160665, 0.478540, 0.558115], + [0.159194, 0.482237, 0.558073], + [0.157729, 0.485932, 0.558013], + [0.156270, 0.489624, 0.557936], + [0.154815, 0.493313, 0.557840], + [0.153364, 0.497000, 0.557724], + [0.151918, 0.500685, 0.557587], + [0.150476, 0.504369, 0.557430], + [0.149039, 0.508051, 0.557250], + [0.147607, 0.511733, 0.557049], + [0.146180, 0.515413, 0.556823], + [0.144759, 0.519093, 0.556572], + [0.143343, 0.522773, 0.556295], + [0.141935, 0.526453, 0.555991], + [0.140536, 0.530132, 0.555659], + [0.139147, 0.533812, 0.555298], + [0.137770, 0.537492, 0.554906], + [0.136408, 0.541173, 0.554483], + [0.135066, 0.544853, 0.554029], + [0.133743, 0.548535, 0.553541], + [0.132444, 0.552216, 0.553018], + [0.131172, 0.555899, 0.552459], + [0.129933, 0.559582, 0.551864], + [0.128729, 0.563265, 0.551229], + [0.127568, 0.566949, 0.550556], + [0.126453, 0.570633, 0.549841], + [0.125394, 0.574318, 0.549086], + [0.124395, 0.578002, 0.548287], + [0.123463, 0.581687, 0.547445], + [0.122606, 0.585371, 0.546557], + [0.121831, 0.589055, 0.545623], + [0.121148, 0.592739, 0.544641], + [0.120565, 0.596422, 0.543611], + [0.120092, 0.600104, 0.542530], + [0.119738, 0.603785, 0.541400], + [0.119512, 0.607464, 0.540218], + [0.119423, 0.611141, 0.538982], + [0.119483, 0.614817, 0.537692], + [0.119699, 0.618490, 0.536347], + [0.120081, 0.622161, 0.534946], + [0.120638, 0.625828, 0.533488], + [0.121380, 0.629492, 0.531973], + [0.122312, 0.633153, 0.530398], + [0.123444, 0.636809, 0.528763], + [0.124780, 0.640461, 0.527068], + [0.126326, 0.644107, 0.525311], + [0.128087, 0.647749, 0.523491], + [0.130067, 0.651384, 0.521608], + [0.132268, 0.655014, 0.519661], + [0.134692, 0.658636, 0.517649], + [0.137339, 0.662252, 0.515571], + [0.140210, 0.665859, 0.513427], + [0.143303, 0.669459, 0.511215], + [0.146616, 0.673050, 0.508936], + [0.150148, 0.676631, 0.506589], + [0.153894, 0.680203, 0.504172], + 
[0.157851, 0.683765, 0.501686], + [0.162016, 0.687316, 0.499129], + [0.166383, 0.690856, 0.496502], + [0.170948, 0.694384, 0.493803], + [0.175707, 0.697900, 0.491033], + [0.180653, 0.701402, 0.488189], + [0.185783, 0.704891, 0.485273], + [0.191090, 0.708366, 0.482284], + [0.196571, 0.711827, 0.479221], + [0.202219, 0.715272, 0.476084], + [0.208030, 0.718701, 0.472873], + [0.214000, 0.722114, 0.469588], + [0.220124, 0.725509, 0.466226], + [0.226397, 0.728888, 0.462789], + [0.232815, 0.732247, 0.459277], + [0.239374, 0.735588, 0.455688], + [0.246070, 0.738910, 0.452024], + [0.252899, 0.742211, 0.448284], + [0.259857, 0.745492, 0.444467], + [0.266941, 0.748751, 0.440573], + [0.274149, 0.751988, 0.436601], + [0.281477, 0.755203, 0.432552], + [0.288921, 0.758394, 0.428426], + [0.296479, 0.761561, 0.424223], + [0.304148, 0.764704, 0.419943], + [0.311925, 0.767822, 0.415586], + [0.319809, 0.770914, 0.411152], + [0.327796, 0.773980, 0.406640], + [0.335885, 0.777018, 0.402049], + [0.344074, 0.780029, 0.397381], + [0.352360, 0.783011, 0.392636], + [0.360741, 0.785964, 0.387814], + [0.369214, 0.788888, 0.382914], + [0.377779, 0.791781, 0.377939], + [0.386433, 0.794644, 0.372886], + [0.395174, 0.797475, 0.367757], + [0.404001, 0.800275, 0.362552], + [0.412913, 0.803041, 0.357269], + [0.421908, 0.805774, 0.351910], + [0.430983, 0.808473, 0.346476], + [0.440137, 0.811138, 0.340967], + [0.449368, 0.813768, 0.335384], + [0.458674, 0.816363, 0.329727], + [0.468053, 0.818921, 0.323998], + [0.477504, 0.821444, 0.318195], + [0.487026, 0.823929, 0.312321], + [0.496615, 0.826376, 0.306377], + [0.506271, 0.828786, 0.300362], + [0.515992, 0.831158, 0.294279], + [0.525776, 0.833491, 0.288127], + [0.535621, 0.835785, 0.281908], + [0.545524, 0.838039, 0.275626], + [0.555484, 0.840254, 0.269281], + [0.565498, 0.842430, 0.262877], + [0.575563, 0.844566, 0.256415], + [0.585678, 0.846661, 0.249897], + [0.595839, 0.848717, 0.243329], + [0.606045, 0.850733, 0.236712], + [0.616293, 0.852709, 0.230052], + [0.626579, 0.854645, 0.223353], + [0.636902, 0.856542, 0.216620], + [0.647257, 0.858400, 0.209861], + [0.657642, 0.860219, 0.203082], + [0.668054, 0.861999, 0.196293], + [0.678489, 0.863742, 0.189503], + [0.688944, 0.865448, 0.182725], + [0.699415, 0.867117, 0.175971], + [0.709898, 0.868751, 0.169257], + [0.720391, 0.870350, 0.162603], + [0.730889, 0.871916, 0.156029], + [0.741388, 0.873449, 0.149561], + [0.751884, 0.874951, 0.143228], + [0.762373, 0.876424, 0.137064], + [0.772852, 0.877868, 0.131109], + [0.783315, 0.879285, 0.125405], + [0.793760, 0.880678, 0.120005], + [0.804182, 0.882046, 0.114965], + [0.814576, 0.883393, 0.110347], + [0.824940, 0.884720, 0.106217], + [0.835270, 0.886029, 0.102646], + [0.845561, 0.887322, 0.099702], + [0.855810, 0.888601, 0.097452], + [0.866013, 0.889868, 0.095953], + [0.876168, 0.891125, 0.095250], + [0.886271, 0.892374, 0.095374], + [0.896320, 0.893616, 0.096335], + [0.906311, 0.894855, 0.098125], + [0.916242, 0.896091, 0.100717], + [0.926106, 0.897330, 0.104071], + [0.935904, 0.898570, 0.108131], + [0.945636, 0.899815, 0.112838], + [0.955300, 0.901065, 0.118128], + [0.964894, 0.902323, 0.123941], + [0.974417, 0.903590, 0.130215], + [0.983868, 0.904867, 0.136897], + [0.993248, 0.906157, 0.143936]] + +_cividis_data = [[0.000000, 0.135112, 0.304751], + [0.000000, 0.138068, 0.311105], + [0.000000, 0.141013, 0.317579], + [0.000000, 0.143951, 0.323982], + [0.000000, 0.146877, 0.330479], + [0.000000, 0.149791, 0.337065], + [0.000000, 0.152673, 0.343704], + [0.000000, 0.155377, 0.350500], + 
[0.000000, 0.157932, 0.357521], + [0.000000, 0.160495, 0.364534], + [0.000000, 0.163058, 0.371608], + [0.000000, 0.165621, 0.378769], + [0.000000, 0.168204, 0.385902], + [0.000000, 0.170800, 0.393100], + [0.000000, 0.173420, 0.400353], + [0.000000, 0.176082, 0.407577], + [0.000000, 0.178802, 0.414764], + [0.000000, 0.181610, 0.421859], + [0.000000, 0.184550, 0.428802], + [0.000000, 0.186915, 0.435532], + [0.000000, 0.188769, 0.439563], + [0.000000, 0.190950, 0.441085], + [0.000000, 0.193366, 0.441561], + [0.003602, 0.195911, 0.441564], + [0.017852, 0.198528, 0.441248], + [0.032110, 0.201199, 0.440785], + [0.046205, 0.203903, 0.440196], + [0.058378, 0.206629, 0.439531], + [0.068968, 0.209372, 0.438863], + [0.078624, 0.212122, 0.438105], + [0.087465, 0.214879, 0.437342], + [0.095645, 0.217643, 0.436593], + [0.103401, 0.220406, 0.435790], + [0.110658, 0.223170, 0.435067], + [0.117612, 0.225935, 0.434308], + [0.124291, 0.228697, 0.433547], + [0.130669, 0.231458, 0.432840], + [0.136830, 0.234216, 0.432148], + [0.142852, 0.236972, 0.431404], + [0.148638, 0.239724, 0.430752], + [0.154261, 0.242475, 0.430120], + [0.159733, 0.245221, 0.429528], + [0.165113, 0.247965, 0.428908], + [0.170362, 0.250707, 0.428325], + [0.175490, 0.253444, 0.427790], + [0.180503, 0.256180, 0.427299], + [0.185453, 0.258914, 0.426788], + [0.190303, 0.261644, 0.426329], + [0.195057, 0.264372, 0.425924], + [0.199764, 0.267099, 0.425497], + [0.204385, 0.269823, 0.425126], + [0.208926, 0.272546, 0.424809], + [0.213431, 0.275266, 0.424480], + [0.217863, 0.277985, 0.424206], + [0.222264, 0.280702, 0.423914], + [0.226598, 0.283419, 0.423678], + [0.230871, 0.286134, 0.423498], + [0.235120, 0.288848, 0.423304], + [0.239312, 0.291562, 0.423167], + [0.243485, 0.294274, 0.423014], + [0.247605, 0.296986, 0.422917], + [0.251675, 0.299698, 0.422873], + [0.255731, 0.302409, 0.422814], + [0.259740, 0.305120, 0.422810], + [0.263738, 0.307831, 0.422789], + [0.267693, 0.310542, 0.422821], + [0.271639, 0.313253, 0.422837], + [0.275513, 0.315965, 0.422979], + [0.279411, 0.318677, 0.423031], + [0.283240, 0.321390, 0.423211], + [0.287065, 0.324103, 0.423373], + [0.290884, 0.326816, 0.423517], + [0.294669, 0.329531, 0.423716], + [0.298421, 0.332247, 0.423973], + [0.302169, 0.334963, 0.424213], + [0.305886, 0.337681, 0.424512], + [0.309601, 0.340399, 0.424790], + [0.313287, 0.343120, 0.425120], + [0.316941, 0.345842, 0.425512], + [0.320595, 0.348565, 0.425889], + [0.324250, 0.351289, 0.426250], + [0.327875, 0.354016, 0.426670], + [0.331474, 0.356744, 0.427144], + [0.335073, 0.359474, 0.427605], + [0.338673, 0.362206, 0.428053], + [0.342246, 0.364939, 0.428559], + [0.345793, 0.367676, 0.429127], + [0.349341, 0.370414, 0.429685], + [0.352892, 0.373153, 0.430226], + [0.356418, 0.375896, 0.430823], + [0.359916, 0.378641, 0.431501], + [0.363446, 0.381388, 0.432075], + [0.366923, 0.384139, 0.432796], + [0.370430, 0.386890, 0.433428], + [0.373884, 0.389646, 0.434209], + [0.377371, 0.392404, 0.434890], + [0.380830, 0.395164, 0.435653], + [0.384268, 0.397928, 0.436475], + [0.387705, 0.400694, 0.437305], + [0.391151, 0.403464, 0.438096], + [0.394568, 0.406236, 0.438986], + [0.397991, 0.409011, 0.439848], + [0.401418, 0.411790, 0.440708], + [0.404820, 0.414572, 0.441642], + [0.408226, 0.417357, 0.442570], + [0.411607, 0.420145, 0.443577], + [0.414992, 0.422937, 0.444578], + [0.418383, 0.425733, 0.445560], + [0.421748, 0.428531, 0.446640], + [0.425120, 0.431334, 0.447692], + [0.428462, 0.434140, 0.448864], + [0.431817, 0.436950, 0.449982], + [0.435168, 
0.439763, 0.451134], + [0.438504, 0.442580, 0.452341], + [0.441810, 0.445402, 0.453659], + [0.445148, 0.448226, 0.454885], + [0.448447, 0.451053, 0.456264], + [0.451759, 0.453887, 0.457582], + [0.455072, 0.456718, 0.458976], + [0.458366, 0.459552, 0.460457], + [0.461616, 0.462405, 0.461969], + [0.464947, 0.465241, 0.463395], + [0.468254, 0.468083, 0.464908], + [0.471501, 0.470960, 0.466357], + [0.474812, 0.473832, 0.467681], + [0.478186, 0.476699, 0.468845], + [0.481622, 0.479573, 0.469767], + [0.485141, 0.482451, 0.470384], + [0.488697, 0.485318, 0.471008], + [0.492278, 0.488198, 0.471453], + [0.495913, 0.491076, 0.471751], + [0.499552, 0.493960, 0.472032], + [0.503185, 0.496851, 0.472305], + [0.506866, 0.499743, 0.472432], + [0.510540, 0.502643, 0.472550], + [0.514226, 0.505546, 0.472640], + [0.517920, 0.508454, 0.472707], + [0.521643, 0.511367, 0.472639], + [0.525348, 0.514285, 0.472660], + [0.529086, 0.517207, 0.472543], + [0.532829, 0.520135, 0.472401], + [0.536553, 0.523067, 0.472352], + [0.540307, 0.526005, 0.472163], + [0.544069, 0.528948, 0.471947], + [0.547840, 0.531895, 0.471704], + [0.551612, 0.534849, 0.471439], + [0.555393, 0.537807, 0.471147], + [0.559181, 0.540771, 0.470829], + [0.562972, 0.543741, 0.470488], + [0.566802, 0.546715, 0.469988], + [0.570607, 0.549695, 0.469593], + [0.574417, 0.552682, 0.469172], + [0.578236, 0.555673, 0.468724], + [0.582087, 0.558670, 0.468118], + [0.585916, 0.561674, 0.467618], + [0.589753, 0.564682, 0.467090], + [0.593622, 0.567697, 0.466401], + [0.597469, 0.570718, 0.465821], + [0.601354, 0.573743, 0.465074], + [0.605211, 0.576777, 0.464441], + [0.609105, 0.579816, 0.463638], + [0.612977, 0.582861, 0.462950], + [0.616852, 0.585913, 0.462237], + [0.620765, 0.588970, 0.461351], + [0.624654, 0.592034, 0.460583], + [0.628576, 0.595104, 0.459641], + [0.632506, 0.598180, 0.458668], + [0.636412, 0.601264, 0.457818], + [0.640352, 0.604354, 0.456791], + [0.644270, 0.607450, 0.455886], + [0.648222, 0.610553, 0.454801], + [0.652178, 0.613664, 0.453689], + [0.656114, 0.616780, 0.452702], + [0.660082, 0.619904, 0.451534], + [0.664055, 0.623034, 0.450338], + [0.668008, 0.626171, 0.449270], + [0.671991, 0.629316, 0.448018], + [0.675981, 0.632468, 0.446736], + [0.679979, 0.635626, 0.445424], + [0.683950, 0.638793, 0.444251], + [0.687957, 0.641966, 0.442886], + [0.691971, 0.645145, 0.441491], + [0.695985, 0.648334, 0.440072], + [0.700008, 0.651529, 0.438624], + [0.704037, 0.654731, 0.437147], + [0.708067, 0.657942, 0.435647], + [0.712105, 0.661160, 0.434117], + [0.716177, 0.664384, 0.432386], + [0.720222, 0.667618, 0.430805], + [0.724274, 0.670859, 0.429194], + [0.728334, 0.674107, 0.427554], + [0.732422, 0.677364, 0.425717], + [0.736488, 0.680629, 0.424028], + [0.740589, 0.683900, 0.422131], + [0.744664, 0.687181, 0.420393], + [0.748772, 0.690470, 0.418448], + [0.752886, 0.693766, 0.416472], + [0.756975, 0.697071, 0.414659], + [0.761096, 0.700384, 0.412638], + [0.765223, 0.703705, 0.410587], + [0.769353, 0.707035, 0.408516], + [0.773486, 0.710373, 0.406422], + [0.777651, 0.713719, 0.404112], + [0.781795, 0.717074, 0.401966], + [0.785965, 0.720438, 0.399613], + [0.790116, 0.723810, 0.397423], + [0.794298, 0.727190, 0.395016], + [0.798480, 0.730580, 0.392597], + [0.802667, 0.733978, 0.390153], + [0.806859, 0.737385, 0.387684], + [0.811054, 0.740801, 0.385198], + [0.815274, 0.744226, 0.382504], + [0.819499, 0.747659, 0.379785], + [0.823729, 0.751101, 0.377043], + [0.827959, 0.754553, 0.374292], + [0.832192, 0.758014, 0.371529], + [0.836429, 0.761483, 
0.368747], + [0.840693, 0.764962, 0.365746], + [0.844957, 0.768450, 0.362741], + [0.849223, 0.771947, 0.359729], + [0.853515, 0.775454, 0.356500], + [0.857809, 0.778969, 0.353259], + [0.862105, 0.782494, 0.350011], + [0.866421, 0.786028, 0.346571], + [0.870717, 0.789572, 0.343333], + [0.875057, 0.793125, 0.339685], + [0.879378, 0.796687, 0.336241], + [0.883720, 0.800258, 0.332599], + [0.888081, 0.803839, 0.328770], + [0.892440, 0.807430, 0.324968], + [0.896818, 0.811030, 0.320982], + [0.901195, 0.814639, 0.317021], + [0.905589, 0.818257, 0.312889], + [0.910000, 0.821885, 0.308594], + [0.914407, 0.825522, 0.304348], + [0.918828, 0.829168, 0.299960], + [0.923279, 0.832822, 0.295244], + [0.927724, 0.836486, 0.290611], + [0.932180, 0.840159, 0.285880], + [0.936660, 0.843841, 0.280876], + [0.941147, 0.847530, 0.275815], + [0.945654, 0.851228, 0.270532], + [0.950178, 0.854933, 0.265085], + [0.954725, 0.858646, 0.259365], + [0.959284, 0.862365, 0.253563], + [0.963872, 0.866089, 0.247445], + [0.968469, 0.869819, 0.241310], + [0.973114, 0.873550, 0.234677], + [0.977780, 0.877281, 0.227954], + [0.982497, 0.881008, 0.220878], + [0.987293, 0.884718, 0.213336], + [0.992218, 0.888385, 0.205468], + [0.994847, 0.892954, 0.203445], + [0.995249, 0.898384, 0.207561], + [0.995503, 0.903866, 0.212370], + [0.995737, 0.909344, 0.217772]] + +_twilight_data = [ + [0.88575015840754434, 0.85000924943067835, 0.8879736506427196], + [0.88378520195539056, 0.85072940540310626, 0.88723222096949894], + [0.88172231059285788, 0.85127594077653468, 0.88638056925514819], + [0.8795410528270573, 0.85165675407495722, 0.8854143767924102], + [0.87724880858965482, 0.85187028338870274, 0.88434120381311432], + [0.87485347508575972, 0.85191526123023187, 0.88316926967613829], + [0.87233134085124076, 0.85180165478080894, 0.88189704355001619], + [0.86970474853509816, 0.85152403004797894, 0.88053883390003362], + [0.86696015505333579, 0.8510896085314068, 0.87909766977173343], + [0.86408985081463996, 0.85050391167507788, 0.87757925784892632], + [0.86110245436899846, 0.84976754857001258, 0.87599242923439569], + [0.85798259245670372, 0.84888934810281835, 0.87434038553446281], + [0.85472593189256985, 0.84787488124672816, 0.8726282980930582], + [0.85133714570857189, 0.84672735796116472, 0.87086081657350445], + [0.84780710702577922, 0.8454546229209523, 0.86904036783694438], + [0.8441261828674842, 0.84406482711037389, 0.86716973322690072], + [0.84030420805957784, 0.8425605950855084, 0.865250882410458], + [0.83634031809191178, 0.84094796518951942, 0.86328528001070159], + [0.83222705712934408, 0.83923490627754482, 0.86127563500427884], + [0.82796894316013536, 0.83742600751395202, 0.85922399451306786], + [0.82357429680252847, 0.83552487764795436, 0.85713191328514948], + [0.81904654677937527, 0.8335364929949034, 0.85500206287010105], + [0.81438982121143089, 0.83146558694197847, 0.85283759062147024], + [0.8095999819094809, 0.82931896673505456, 0.85064441601050367], + [0.80469164429814577, 0.82709838780560663, 0.84842449296974021], + [0.79967075421267997, 0.82480781812080928, 0.84618210029578533], + [0.79454305089231114, 0.82245116226304615, 0.84392184786827984], + [0.78931445564608915, 0.82003213188702007, 0.8416486380471222], + [0.78399101042764918, 0.81755426400533426, 0.83936747464036732], + [0.77857892008227592, 0.81502089378742548, 0.8370834463093898], + [0.77308416590170936, 0.81243524735466011, 0.83480172950579679], + [0.76751108504417864, 0.8098007598713145, 0.83252816638059668], + [0.76186907937980286, 0.80711949387647486, 0.830266486168872], + 
[0.75616443584381976, 0.80439408733477935, 0.82802138994719998], + [0.75040346765406696, 0.80162699008965321, 0.82579737851082424], + [0.74459247771890169, 0.79882047719583249, 0.82359867586156521], + [0.73873771700494939, 0.79597665735031009, 0.82142922780433014], + [0.73284543645523459, 0.79309746468844067, 0.81929263384230377], + [0.72692177512829703, 0.7901846863592763, 0.81719217466726379], + [0.72097280665536778, 0.78723995923452639, 0.81513073920879264], + [0.71500403076252128, 0.78426487091581187, 0.81311116559949914], + [0.70902078134539304, 0.78126088716070907, 0.81113591855117928], + [0.7030297722540817, 0.77822904973358131, 0.80920618848056969], + [0.6970365443886174, 0.77517050008066057, 0.80732335380063447], + [0.69104641009309098, 0.77208629460678091, 0.80548841690679074], + [0.68506446154395928, 0.7689774029354699, 0.80370206267176914], + [0.67909554499882152, 0.76584472131395898, 0.8019646617300199], + [0.67314422559426212, 0.76268908733890484, 0.80027628545809526], + [0.66721479803752815, 0.7595112803730375, 0.79863674654537764], + [0.6613112930078745, 0.75631202708719025, 0.7970456043491897], + [0.65543692326454717, 0.75309208756768431, 0.79550271129031047], + [0.64959573004253479, 0.74985201221941766, 0.79400674021499107], + [0.6437910831099849, 0.7465923800833657, 0.79255653201306053], + [0.63802586828545982, 0.74331376714033193, 0.79115100459573173], + [0.6323027138710603, 0.74001672160131404, 0.78978892762640429], + [0.62662402022604591, 0.73670175403699445, 0.78846901316334561], + [0.62099193064817548, 0.73336934798923203, 0.78718994624696581], + [0.61540846411770478, 0.73001995232739691, 0.78595022706750484], + [0.60987543176093062, 0.72665398759758293, 0.78474835732694714], + [0.60439434200274855, 0.7232718614323369, 0.78358295593535587], + [0.5989665814482068, 0.71987394892246725, 0.78245259899346642], + [0.59359335696837223, 0.7164606049658685, 0.78135588237640097], + [0.58827579780555495, 0.71303214646458135, 0.78029141405636515], + [0.58301487036932409, 0.70958887676997473, 0.77925781820476592], + [0.5778116438998202, 0.70613106157153982, 0.77825345121025524], + [0.5726668948158774, 0.7026589535425779, 0.77727702680911992], + [0.56758117853861967, 0.69917279302646274, 0.77632748534275298], + [0.56255515357219343, 0.69567278381629649, 0.77540359142309845], + [0.55758940419605174, 0.69215911458254054, 0.7745041337932782], + [0.55268450589347129, 0.68863194515166382, 0.7736279426902245], + [0.54784098153018634, 0.68509142218509878, 0.77277386473440868], + [0.54305932424018233, 0.68153767253065878, 0.77194079697835083], + [0.53834015575176275, 0.67797081129095405, 0.77112734439057717], + [0.53368389147728401, 0.67439093705212727, 0.7703325054879735], + [0.529090861832473, 0.67079812302806219, 0.76955552292313134], + [0.52456151470593582, 0.66719242996142225, 0.76879541714230948], + [0.52009627392235558, 0.66357391434030388, 0.76805119403344102], + [0.5156955988596057, 0.65994260812897998, 0.76732191489596169], + [0.51135992541601927, 0.65629853981831865, 0.76660663780645333], + [0.50708969576451657, 0.65264172403146448, 0.76590445660835849], + [0.5028853540415561, 0.64897216734095264, 0.76521446718174913], + [0.49874733661356069, 0.6452898684900934, 0.76453578734180083], + [0.4946761847863938, 0.64159484119504429, 0.76386719002130909], + [0.49067224938561221, 0.63788704858847078, 0.76320812763163837], + [0.4867359599430568, 0.63416646251100506, 0.76255780085924041], + [0.4828677867260272, 0.6304330455306234, 0.76191537149895305], + [0.47906816236197386, 
0.62668676251860134, 0.76128000375662419], + [0.47533752394906287, 0.62292757283835809, 0.76065085571817748], + [0.47167629518877091, 0.61915543242884641, 0.76002709227883047], + [0.46808490970531597, 0.61537028695790286, 0.75940789891092741], + [0.46456376716303932, 0.61157208822864151, 0.75879242623025811], + [0.46111326647023881, 0.607760777169989, 0.75817986436807139], + [0.45773377230160567, 0.60393630046586455, 0.75756936901859162], + [0.45442563977552913, 0.60009859503858665, 0.75696013660606487], + [0.45118918687617743, 0.59624762051353541, 0.75635120643246645], + [0.44802470933589172, 0.59238331452146575, 0.75574176474107924], + [0.44493246854215379, 0.5885055998308617, 0.7551311041857901], + [0.44191271766696399, 0.58461441100175571, 0.75451838884410671], + [0.43896563958048396, 0.58070969241098491, 0.75390276208285945], + [0.43609138958356369, 0.57679137998186081, 0.7532834105961016], + [0.43329008867358393, 0.57285941625606673, 0.75265946532566674], + [0.43056179073057571, 0.56891374572457176, 0.75203008099312696], + [0.42790652284925834, 0.5649543060909209, 0.75139443521914839], + [0.42532423665011354, 0.56098104959950301, 0.75075164989005116], + [0.42281485675772662, 0.55699392126996583, 0.75010086988227642], + [0.42037822361396326, 0.55299287158108168, 0.7494412559451894], + [0.41801414079233629, 0.54897785421888889, 0.74877193167001121], + [0.4157223260454232, 0.54494882715350401, 0.74809204459000522], + [0.41350245743314729, 0.54090574771098476, 0.74740073297543086], + [0.41135414697304568, 0.53684857765005933, 0.74669712855065784], + [0.4092768899914751, 0.53277730177130322, 0.74598030635707824], + [0.40727018694219069, 0.52869188011057411, 0.74524942637581271], + [0.40533343789303178, 0.52459228174983119, 0.74450365836708132], + [0.40346600333905397, 0.52047847653840029, 0.74374215223567086], + [0.40166714010896104, 0.51635044969688759, 0.7429640345324835], + [0.39993606933454834, 0.51220818143218516, 0.74216844571317986], + [0.3982719152586337, 0.50805166539276136, 0.74135450918099721], + [0.39667374905665609, 0.50388089053847973, 0.74052138580516735], + [0.39514058808207631, 0.49969585326377758, 0.73966820211715711], + [0.39367135736822567, 0.49549655777451179, 0.738794102296364], + [0.39226494876209317, 0.49128300332899261, 0.73789824784475078], + [0.39092017571994903, 0.48705520251223039, 0.73697977133881254], + [0.38963580160340855, 0.48281316715123496, 0.73603782546932739], + [0.38841053300842432, 0.47855691131792805, 0.73507157641157261], + [0.38724301459330251, 0.47428645933635388, 0.73408016787854391], + [0.38613184178892102, 0.4700018340988123, 0.7330627749243106], + [0.38507556793651387, 0.46570306719930193, 0.73201854033690505], + [0.38407269378943537, 0.46139018782416635, 0.73094665432902683], + [0.38312168084402748, 0.45706323581407199, 0.72984626791353258], + [0.38222094988570376, 0.45272225034283325, 0.72871656144003782], + [0.38136887930454161, 0.44836727669277859, 0.72755671317141346], + [0.38056380696565623, 0.44399837208633719, 0.72636587045135315], + [0.37980403744848751, 0.43961558821222629, 0.72514323778761092], + [0.37908789283110761, 0.43521897612544935, 0.72388798691323131], + [0.378413635091359, 0.43080859411413064, 0.72259931993061044], + [0.37777949753513729, 0.4263845142616835, 0.72127639993530235], + [0.37718371844251231, 0.42194680223454828, 0.71991841524475775], + [0.37662448930806297, 0.41749553747893614, 0.71852454736176108], + [0.37610001286385814, 0.41303079952477062, 0.71709396919920232], + [0.37560846919442398, 0.40855267638072096, 
0.71562585091587549], + [0.37514802505380473, 0.4040612609993941, 0.7141193695725726], + [0.37471686019302231, 0.3995566498711684, 0.71257368516500463], + [0.37431313199312338, 0.39503894828283309, 0.71098796522377461], + [0.37393499330475782, 0.39050827529375831, 0.70936134293478448], + [0.3735806215098284, 0.38596474386057539, 0.70769297607310577], + [0.37324816143326384, 0.38140848555753937, 0.70598200974806036], + [0.37293578646665032, 0.37683963835219841, 0.70422755780589941], + [0.37264166757849604, 0.37225835004836849, 0.7024287314570723], + [0.37236397858465387, 0.36766477862108266, 0.70058463496520773], + [0.37210089702443822, 0.36305909736982378, 0.69869434615073722], + [0.3718506155898596, 0.35844148285875221, 0.69675695810256544], + [0.37161133234400479, 0.3538121372967869, 0.69477149919380887], + [0.37138124223736607, 0.34917126878479027, 0.69273703471928827], + [0.37115856636209105, 0.34451911410230168, 0.69065253586464992], + [0.37094151551337329, 0.33985591488818123, 0.68851703379505125], + [0.37072833279422668, 0.33518193808489577, 0.68632948169606767], + [0.37051738634484427, 0.33049741244307851, 0.68408888788857214], + [0.37030682071842685, 0.32580269697872455, 0.68179411684486679], + [0.37009487130772695, 0.3210981375964933, 0.67944405399056851], + [0.36987980329025361, 0.31638410101153364, 0.67703755438090574], + [0.36965987626565955, 0.31166098762951971, 0.67457344743419545], + [0.36943334591276228, 0.30692923551862339, 0.67205052849120617], + [0.36919847837592484, 0.30218932176507068, 0.66946754331614522], + [0.36895355306596778, 0.29744175492366276, 0.66682322089824264], + [0.36869682231895268, 0.29268709856150099, 0.66411625298236909], + [0.36842655638020444, 0.28792596437778462, 0.66134526910944602], + [0.36814101479899719, 0.28315901221182987, 0.65850888806972308], + [0.36783843696531082, 0.27838697181297761, 0.65560566838453704], + [0.36751707094367697, 0.27361063317090978, 0.65263411711618635], + [0.36717513650699446, 0.26883085667326956, 0.64959272297892245], + [0.36681085540107988, 0.26404857724525643, 0.64647991652908243], + [0.36642243251550632, 0.25926481158628106, 0.64329409140765537], + [0.36600853966739794, 0.25448043878086224, 0.64003361803368586], + [0.36556698373538982, 0.24969683475296395, 0.63669675187488584], + [0.36509579845886808, 0.24491536803550484, 0.63328173520055586], + [0.36459308890125008, 0.24013747024823828, 0.62978680155026101], + [0.36405693022088509, 0.23536470386204195, 0.62621013451953023], + [0.36348537610385145, 0.23059876218396419, 0.62254988622392882], + [0.36287643560041027, 0.22584149293287031, 0.61880417410823019], + [0.36222809558295926, 0.22109488427338303, 0.61497112346096128], + [0.36153829010998356, 0.21636111429594002, 0.61104880679640927], + [0.36080493826624654, 0.21164251793458128, 0.60703532172064711], + [0.36002681809096376, 0.20694122817889948, 0.60292845431916875], + [0.35920088560930186, 0.20226037920758122, 0.5987265295935138], + [0.35832489966617809, 0.197602942459778, 0.59442768517501066], + [0.35739663292915563, 0.19297208197842461, 0.59003011251063131], + [0.35641381143126327, 0.18837119869242164, 0.5855320765920552], + [0.35537415306906722, 0.18380392577704466, 0.58093191431832802], + [0.35427534960663759, 0.17927413271618647, 0.57622809660668717], + [0.35311574421123737, 0.17478570377561287, 0.57141871523555288], + [0.35189248608873791, 0.17034320478524959, 0.56650284911216653], + [0.35060304441931012, 0.16595129984720861, 0.56147964703993225], + [0.34924513554955644, 0.16161477763045118, 
0.55634837474163779], + [0.34781653238777782, 0.15733863511152979, 0.55110853452703257], + [0.34631507175793091, 0.15312802296627787, 0.5457599924248665], + [0.34473901574536375, 0.14898820589826409, 0.54030245920406539], + [0.34308600291572294, 0.14492465359918028, 0.53473704282067103], + [0.34135411074506483, 0.1409427920655632, 0.52906500940336754], + [0.33954168752669694, 0.13704801896718169, 0.52328797535085236], + [0.33764732090671112, 0.13324562282438077, 0.51740807573979475], + [0.33566978565015315, 0.12954074251271822, 0.51142807215168951], + [0.33360804901486002, 0.12593818301005921, 0.50535164796654897], + [0.33146154891145124, 0.12244245263391232, 0.49918274588431072], + [0.32923005203231409, 0.11905764321981127, 0.49292595612342666], + [0.3269137124539796, 0.1157873496841953, 0.48658646495697461], + [0.32451307931207785, 0.11263459791730848, 0.48017007211645196], + [0.32202882276069322, 0.10960114111258401, 0.47368494725726878], + [0.31946262395497965, 0.10668879882392659, 0.46713728801395243], + [0.31681648089023501, 0.10389861387653518, 0.46053414662739794], + [0.31409278414755532, 0.10123077676403242, 0.45388335612058467], + [0.31129434479712365, 0.098684771934052201, 0.44719313715161618], + [0.30842444457210105, 0.096259385340577736, 0.44047194882050544], + [0.30548675819945936, 0.093952764840823738, 0.43372849999361113], + [0.30248536364574252, 0.091761187397303601, 0.42697404043749887], + [0.29942483960214772, 0.089682253716750038, 0.42021619665853854], + [0.29631000388905288, 0.087713250960463951, 0.41346259134143476], + [0.29314593096985248, 0.085850656889620708, 0.40672178082365834], + [0.28993792445176608, 0.08409078829085731, 0.40000214725256295], + [0.28669151388283165, 0.082429873848480689, 0.39331182532243375], + [0.28341239797185225, 0.080864153365499375, 0.38665868550105914], + [0.28010638576975472, 0.079389994802261526, 0.38005028528138707], + [0.27677939615815589, 0.078003941033788216, 0.37349382846504675], + [0.27343739342450812, 0.076702800237496066, 0.36699616136347685], + [0.27008637749114051, 0.075483675584275545, 0.36056376228111864], + [0.26673233211995284, 0.074344018028546205, 0.35420276066240958], + [0.26338121807151404, 0.073281657939897077, 0.34791888996380105], + [0.26003895187439957, 0.072294781043362205, 0.3417175669546984], + [0.25671191651083902, 0.071380106242082242, 0.33560648984600089], + [0.25340685873736807, 0.070533582926851829, 0.3295945757321303], + [0.25012845306199383, 0.069758206429106989, 0.32368100685760637], + [0.24688226237958999, 0.069053639449204451, 0.31786993834254956], + [0.24367372557466271, 0.068419855150922693, 0.31216524050888372], + [0.24050813332295939, 0.067857103814855602, 0.30657054493678321], + [0.23739062429054825, 0.067365888050555517, 0.30108922184065873], + [0.23433055727563878, 0.066935599661639394, 0.29574009929867601], + [0.23132955273021344, 0.066576186939090592, 0.29051361067988485], + [0.2283917709422868, 0.06628997924139618, 0.28541074411068496], + [0.22552164337737857, 0.066078173119395595, 0.28043398847505197], + [0.22272706739121817, 0.065933790675651943, 0.27559714652053702], + [0.22001251100779617, 0.065857918918907604, 0.27090279994325861], + [0.21737845072382705, 0.065859661233562045, 0.26634209349669508], + [0.21482843531473683, 0.065940385613778491, 0.26191675992376573], + [0.21237411048541005, 0.066085024661758446, 0.25765165093569542], + [0.21001214221188125, 0.066308573918947178, 0.2535289048041211], + [0.2077442377448806, 0.06661453200418091, 0.24954644291943817], + [0.20558051999470117, 
0.066990462397868739, 0.24572497420147632], + [0.20352007949514977, 0.067444179612424215, 0.24205576625191821], + [0.20156133764129841, 0.067983271026200248, 0.23852974228695395], + [0.19971571438603364, 0.068592710553704722, 0.23517094067076993], + [0.19794834061899208, 0.069314066071660657, 0.23194647381302336], + [0.1960826032659409, 0.070321227242423623, 0.22874673279569585], + [0.19410351363791453, 0.071608304856891569, 0.22558727307410353], + [0.19199449184606268, 0.073182830649273306, 0.22243385243433622], + [0.18975853639094634, 0.075019861862143766, 0.2193005075652994], + [0.18739228342697645, 0.077102096899588329, 0.21618875376309582], + [0.18488035509396164, 0.079425730279723883, 0.21307651648984993], + [0.18774482037046955, 0.077251588468039312, 0.21387448578597812], + [0.19049578401722037, 0.075311278416787641, 0.2146562337112265], + [0.1931548636579131, 0.073606819040117955, 0.21542362939081539], + [0.19571853588267552, 0.072157781039602742, 0.21617499187076789], + [0.19819343656336558, 0.070974625252738788, 0.21690975060032436], + [0.20058760685133747, 0.070064576149984209, 0.21762721310371608], + [0.20290365333558247, 0.069435248580458964, 0.21833167885096033], + [0.20531725273301316, 0.068919592266397572, 0.21911516689288835], + [0.20785704662965598, 0.068484398797025281, 0.22000133917653536], + [0.21052882914958676, 0.06812195249816172, 0.22098759107715404], + [0.2133313859647627, 0.067830148426026665, 0.22207043213024291], + [0.21625279838647882, 0.067616330270516389, 0.22324568672294431], + [0.21930503925136402, 0.067465786362940039, 0.22451023616807558], + [0.22247308588973624, 0.067388214053092838, 0.22585960379408354], + [0.2257539681670791, 0.067382132300147474, 0.22728984778098055], + [0.22915620278592841, 0.067434730871152565, 0.22879681433956656], + [0.23266299920501882, 0.067557104388479783, 0.23037617493752832], + [0.23627495835774248, 0.06774359820987802, 0.23202360805926608], + [0.23999586188690308, 0.067985029964779953, 0.23373434258507808], + [0.24381149720247919, 0.068289851529011875, 0.23550427698321885], + [0.24772092990501099, 0.068653337909486523, 0.2373288009471749], + [0.25172899728289466, 0.069064630826035506, 0.23920260612763083], + [0.25582135547481771, 0.06953231029187984, 0.24112190491594204], + [0.25999463887892144, 0.070053855603861875, 0.24308218808684579], + [0.26425512207060942, 0.070616595622995437, 0.24507758869355967], + [0.26859095948172862, 0.071226716277922458, 0.24710443563450618], + [0.27299701518897301, 0.071883555446163511, 0.24915847093232929], + [0.27747150809142801, 0.072582969899254779, 0.25123493995942769], + [0.28201746297366942, 0.073315693214040967, 0.25332800295084507], + [0.28662309235899847, 0.074088460826808866, 0.25543478673717029], + [0.29128515387578635, 0.074899049847466703, 0.25755101595750435], + [0.2960004726065818, 0.075745336000958424, 0.25967245030364566], + [0.30077276812918691, 0.076617824336164764, 0.26179294097819672], + [0.30559226007249934, 0.077521963107537312, 0.26391006692119662], + [0.31045520848595526, 0.078456871676182177, 0.2660200572779356], + [0.31535870009205808, 0.079420997315243186, 0.26811904076941961], + [0.32029986557994061, 0.080412994737554838, 0.27020322893039511], + [0.32527888860401261, 0.081428390076546092, 0.27226772884656186], + [0.33029174471181438, 0.08246763389003825, 0.27430929404579435], + [0.33533353224455448, 0.083532434119003962, 0.27632534356790039], + [0.34040164359597463, 0.084622236191702671, 0.27831254595259397], + [0.34549355713871799, 0.085736654965126335, 
0.28026769921081435], + [0.35060678246032478, 0.08687555176033529, 0.28218770540182386], + [0.35573889947341125, 0.088038974350243354, 0.2840695897279818], + [0.36088752387578377, 0.089227194362745205, 0.28591050458531014], + [0.36605031412464006, 0.090440685427697898, 0.2877077458811747], + [0.37122508431309342, 0.091679997480262732, 0.28945865397633169], + [0.3764103053221462, 0.092945198093777909, 0.29116024157313919], + [0.38160247377467543, 0.094238731263712183, 0.29281107506269488], + [0.38679939079544168, 0.09556181960083443, 0.29440901248173756], + [0.39199887556812907, 0.09691583650296684, 0.29595212005509081], + [0.39719876876325577, 0.098302320968278623, 0.29743856476285779], + [0.40239692379737496, 0.099722930314950553, 0.29886674369733968], + [0.40759120392688708, 0.10117945586419633, 0.30023519507728602], + [0.41277985630360303, 0.1026734006932461, 0.30154226437468967], + [0.41796105205173684, 0.10420644885760968, 0.30278652039631843], + [0.42313214269556043, 0.10578120994917611, 0.3039675809469457], + [0.42829101315789753, 0.1073997763055258, 0.30508479060294547], + [0.4334355841041439, 0.1090642347484701, 0.30613767928289148], + [0.43856378187931538, 0.11077667828375456, 0.30712600062348083], + [0.44367358645071275, 0.11253912421257944, 0.30804973095465449], + [0.44876299173174822, 0.11435355574622549, 0.30890905921943196], + [0.45383005086999889, 0.11622183788331528, 0.30970441249844921], + [0.45887288947308297, 0.11814571137706886, 0.31043636979038808], + [0.46389102840284874, 0.12012561256850712, 0.31110343446582983], + [0.46888111384598413, 0.12216445576414045, 0.31170911458932665], + [0.473841437035254, 0.12426354237989065, 0.31225470169927194], + [0.47877034239726296, 0.12642401401409453, 0.31274172735821959], + [0.48366628618847957, 0.12864679022013889, 0.31317188565991266], + [0.48852847371852987, 0.13093210934893723, 0.31354553695453014], + [0.49335504375145617, 0.13328091630401023, 0.31386561956734976], + [0.49814435462074153, 0.13569380302451714, 0.314135190862664], + [0.50289524974970612, 0.13817086581280427, 0.31435662153833671], + [0.50760681181053691, 0.14071192654913128, 0.31453200120082569], + [0.51227835105321762, 0.14331656120063752, 0.3146630922831542], + [0.51690848800544464, 0.14598463068714407, 0.31475407592280041], + [0.52149652863229956, 0.14871544765633712, 0.31480767954534428], + [0.52604189625477482, 0.15150818660835483, 0.31482653406646727], + [0.53054420489856446, 0.15436183633886777, 0.31481299789187128], + [0.5350027976174474, 0.15727540775107324, 0.31477085207396532], + [0.53941736649199057, 0.16024769309971934, 0.31470295028655965], + [0.54378771313608565, 0.16327738551419116, 0.31461204226295625], + [0.54811370033467621, 0.1663630904279047, 0.31450102990914708], + [0.55239521572711914, 0.16950338809328983, 0.31437291554615371], + [0.55663229034969341, 0.17269677158182117, 0.31423043195101424], + [0.56082499039117173, 0.17594170887918095, 0.31407639883970623], + [0.56497343529017696, 0.17923664950367169, 0.3139136046337036], + [0.56907784784011428, 0.18258004462335425, 0.31374440956796529], + [0.57313845754107873, 0.18597036007065024, 0.31357126868520002], + [0.57715550812992045, 0.18940601489760422, 0.31339704333572083], + [0.58112932761586555, 0.19288548904692518, 0.31322399394183942], + [0.58506024396466882, 0.19640737049066315, 0.31305401163732732], + [0.58894861935544707, 0.19997020971775276, 0.31288922211590126], + [0.59279480536520257, 0.20357251410079796, 0.31273234839304942], + [0.59659918109122367, 0.207212956082026, 
0.31258523031121233], + [0.60036213010411577, 0.21089030138947745, 0.31244934410414688], + [0.60408401696732739, 0.21460331490206347, 0.31232652641170694], + [0.60776523994818654, 0.21835070166659282, 0.31221903291870201], + [0.6114062072731884, 0.22213124697023234, 0.31212881396435238], + [0.61500723236391375, 0.22594402043981826, 0.31205680685765741], + [0.61856865258877192, 0.22978799249179921, 0.31200463838728931], + [0.62209079821082613, 0.2336621873300741, 0.31197383273627388], + [0.62557416500434959, 0.23756535071152696, 0.31196698314912269], + [0.62901892016985872, 0.24149689191922535, 0.31198447195645718], + [0.63242534854210275, 0.24545598775548677, 0.31202765974624452], + [0.6357937104834237, 0.24944185818822678, 0.31209793953300591], + [0.6391243387840212, 0.25345365461983138, 0.31219689612063978], + [0.642417577481186, 0.257490519876798, 0.31232631707560987], + [0.64567349382645434, 0.26155203161615281, 0.31248673753935263], + [0.64889230169458245, 0.26563755336209077, 0.31267941819570189], + [0.65207417290277303, 0.26974650525236699, 0.31290560605819168], + [0.65521932609327127, 0.27387826652410152, 0.3131666792687211], + [0.6583280801134499, 0.27803210957665631, 0.3134643447952643], + [0.66140037532601781, 0.28220778870555907, 0.31379912926498488], + [0.66443632469878844, 0.28640483614256179, 0.31417223403606975], + [0.66743603766369131, 0.29062280081258873, 0.31458483752056837], + [0.67039959547676198, 0.29486126309253047, 0.31503813956872212], + [0.67332725564817331, 0.29911962764489264, 0.31553372323982209], + [0.67621897924409746, 0.30339762792450425, 0.3160724937230589], + [0.67907474028157344, 0.30769497879760166, 0.31665545668946665], + [0.68189457150944521, 0.31201133280550686, 0.31728380489244951], + [0.68467850942494535, 0.31634634821222207, 0.31795870784057567], + [0.68742656435169625, 0.32069970535138104, 0.31868137622277692], + [0.6901389321505248, 0.32507091815606004, 0.31945332332898302], + [0.69281544846764931, 0.32945984647042675, 0.3202754315314667], + [0.69545608346891119, 0.33386622163232865, 0.32114884306985791], + [0.6980608153581771, 0.33828976326048621, 0.32207478855218091], + [0.70062962477242097, 0.34273019305341756, 0.32305449047765694], + [0.70316249458814151, 0.34718723719597999, 0.32408913679491225], + [0.70565951122610093, 0.35166052978120937, 0.32518014084085567], + [0.70812059568420482, 0.35614985523380299, 0.32632861885644465], + [0.7105456546582587, 0.36065500290840113, 0.32753574162788762], + [0.71293466839773467, 0.36517570519856757, 0.3288027427038317], + [0.71528760614847287, 0.36971170225223449, 0.3301308728723546], + [0.71760444908133847, 0.37426272710686193, 0.33152138620958932], + [0.71988521490549851, 0.37882848839337313, 0.33297555200245399], + [0.7221299918421461, 0.38340864508963057, 0.33449469983585844], + [0.72433865647781592, 0.38800301593162145, 0.33607995965691828], + [0.72651122900227549, 0.3926113126792577, 0.3377325942005665], + [0.72864773856716547, 0.39723324476747235, 0.33945384341064017], + [0.73074820754845171, 0.401868526884681, 0.3412449533046818], + [0.73281270506268747, 0.4065168468778026, 0.34310715173410822], + [0.73484133598564938, 0.41117787004519513, 0.34504169470809071], + [0.73683422173585866, 0.41585125850290111, 0.34704978520758401], + [0.73879140024599266, 0.42053672992315327, 0.34913260148542435], + [0.74071301619506091, 0.4252339389526239, 0.35129130890802607], + [0.7425992159973317, 0.42994254036133867, 0.35352709245374592], + [0.74445018676570673, 0.43466217184617112, 0.35584108091122535], + 
[0.74626615789163442, 0.43939245044973502, 0.35823439142300639], + [0.74804739275559562, 0.44413297780351974, 0.36070813602540136], + [0.74979420547170472, 0.44888333481548809, 0.36326337558360278], + [0.75150685045891663, 0.45364314496866825, 0.36590112443835765], + [0.75318566369046569, 0.45841199172949604, 0.36862236642234769], + [0.75483105066959544, 0.46318942799460555, 0.3714280448394211], + [0.75644341577140706, 0.46797501437948458, 0.37431909037543515], + [0.75802325538455839, 0.4727682731566229, 0.37729635531096678], + [0.75957111105340058, 0.47756871222057079, 0.380360657784311], + [0.7610876378057071, 0.48237579130289127, 0.38351275723852291], + [0.76257333554052609, 0.48718906673415824, 0.38675335037837993], + [0.76402885609288662, 0.49200802533379656, 0.39008308392311997], + [0.76545492593330511, 0.49683212909727231, 0.39350254000115381], + [0.76685228950643891, 0.5016608471009063, 0.39701221751773474], + [0.76822176599735303, 0.50649362371287909, 0.40061257089416885], + [0.7695642334401418, 0.5113298901696085, 0.40430398069682483], + [0.77088091962302474, 0.51616892643469103, 0.40808667584648967], + [0.77217257229605551, 0.5210102658711383, 0.41196089987122869], + [0.77344021829889886, 0.52585332093451564, 0.41592679539764366], + [0.77468494746063199, 0.53069749384776732, 0.41998440356963762], + [0.77590790730685699, 0.53554217882461186, 0.42413367909988375], + [0.7771103295521099, 0.54038674910561235, 0.42837450371258479], + [0.77829345807633121, 0.54523059488426595, 0.432706647838971], + [0.77945862731506643, 0.55007308413977274, 0.43712979856444761], + [0.78060774749483774, 0.55491335744890613, 0.44164332426364639], + [0.78174180478981836, 0.55975098052594863, 0.44624687186865436], + [0.78286225264440912, 0.56458533111166875, 0.45093985823706345], + [0.78397060836414478, 0.56941578326710418, 0.45572154742892063], + [0.78506845019606841, 0.5742417003617839, 0.46059116206904965], + [0.78615737132332963, 0.5790624629815756, 0.46554778281918402], + [0.78723904108188347, 0.58387743744557208, 0.47059039582133383], + [0.78831514045623963, 0.58868600173562435, 0.47571791879076081], + [0.78938737766251943, 0.5934875421745599, 0.48092913815357724], + [0.79045776847727878, 0.59828134277062461, 0.48622257801969754], + [0.79152832843475607, 0.60306670593147205, 0.49159667021646397], + [0.79260034304237448, 0.60784322087037024, 0.49705020621532009], + [0.79367559698664958, 0.61261029334072192, 0.50258161291269432], + [0.79475585972654039, 0.61736734400220705, 0.50818921213102985], + [0.79584292379583765, 0.62211378808451145, 0.51387124091909786], + [0.79693854719951607, 0.62684905679296699, 0.5196258425240281], + [0.79804447815136637, 0.63157258225089552, 0.52545108144834785], + [0.7991624518501963, 0.63628379372029187, 0.53134495942561433], + [0.80029415389753977, 0.64098213306749863, 0.53730535185141037], + [0.80144124292560048, 0.64566703459218766, 0.5433300863249918], + [0.80260531146112946, 0.65033793748103852, 0.54941691584603647], + [0.80378792531077625, 0.65499426549472628, 0.55556350867083815], + [0.80499054790810298, 0.65963545027564163, 0.56176745110546977], + [0.80621460526927058, 0.66426089585282289, 0.56802629178649788], + [0.8074614045096935, 0.6688700095398864, 0.57433746373459582], + [0.80873219170089694, 0.67346216702194517, 0.58069834805576737], + [0.81002809466520687, 0.67803672673971815, 0.58710626908082753], + [0.81135014011763329, 0.68259301546243389, 0.59355848909050757], + [0.81269922039881493, 0.68713033714618876, 0.60005214820435104], + [0.81407611046993344, 
0.69164794791482131, 0.6065843782630862], + [0.81548146627279483, 0.69614505508308089, 0.61315221209322646], + [0.81691575775055891, 0.70062083014783982, 0.61975260637257923], + [0.81837931164498223, 0.70507438189635097, 0.62638245478933297], + [0.81987230650455289, 0.70950474978787481, 0.63303857040067113], + [0.8213947205565636, 0.7139109141951604, 0.63971766697672761], + [0.82294635110428427, 0.71829177331290062, 0.6464164243818421], + [0.8245268129450285, 0.72264614312088882, 0.65313137915422603], + [0.82613549710580259, 0.72697275518238258, 0.65985900156216504], + [0.8277716072353446, 0.73127023324078089, 0.66659570204682972], + [0.82943407816481474, 0.7355371221572935, 0.67333772009301907], + [0.83112163529096306, 0.73977184647638616, 0.68008125203631464], + [0.83283277185777982, 0.74397271817459876, 0.68682235874648545], + [0.8345656905566583, 0.7481379479992134, 0.69355697649863846], + [0.83631898844737929, 0.75226548952875261, 0.70027999028864962], + [0.83809123476131964, 0.75635314860808633, 0.70698561390212977], + [0.83987839884120874, 0.76039907199779677, 0.71367147811129228], + [0.84167750766845151, 0.76440101200982946, 0.72033299387284622], + [0.84348529222933699, 0.76835660399870176, 0.72696536998972039], + [0.84529810731955113, 0.77226338601044719, 0.73356368240541492], + [0.84711195507965098, 0.77611880236047159, 0.74012275762807056], + [0.84892245563117641, 0.77992021407650147, 0.74663719293664366], + [0.85072697023178789, 0.78366457342383888, 0.7530974636118285], + [0.85251907207708444, 0.78734936133548439, 0.7594994148789691], + [0.85429219611470464, 0.79097196777091994, 0.76583801477914104], + [0.85604022314725403, 0.79452963601550608, 0.77210610037674143], + [0.85775662943504905, 0.79801963142713928, 0.77829571667247499], + [0.8594346370300241, 0.8014392309950078, 0.78439788751383921], + [0.86107117027565516, 0.80478517909812231, 0.79039529663736285], + [0.86265601051127572, 0.80805523804261525, 0.796282666437655], + [0.86418343723941027, 0.81124644224653542, 0.80204612696863953], + [0.86564934325605325, 0.81435544067514909, 0.80766972324164554], + [0.86705314907048503, 0.81737804041911244, 0.81313419626911398], + [0.86839954695818633, 0.82030875512181523, 0.81841638963128993], + [0.86969131502613806, 0.82314158859569164, 0.82350476683173168], + [0.87093846717297507, 0.82586857889438514, 0.82838497261149613], + [0.87215331978454325, 0.82848052823709672, 0.8330486712880828], + [0.87335171360916275, 0.83096715251272624, 0.83748851001197089], + [0.87453793320260187, 0.83331972948645461, 0.84171925358069011], + [0.87571458709961403, 0.8355302318472394, 0.84575537519027078], + [0.87687848451614692, 0.83759238071186537, 0.84961373549150254], + [0.87802298436649007, 0.83950165618540074, 0.85330645352458923], + [0.87913244240792765, 0.84125554884475906, 0.85685572291039636], + [0.88019293315695812, 0.84285224824778615, 0.86027399927156634], + [0.88119169871341951, 0.84429066717717349, 0.86356595168669881], + [0.88211542489401606, 0.84557007254559347, 0.86673765046233331], + [0.88295168595448525, 0.84668970275699273, 0.86979617048190971], + [0.88369127145898041, 0.84764891761519268, 0.87274147101441557], + [0.88432713054113543, 0.84844741572055415, 0.87556785228242973], + [0.88485138159908572, 0.84908426422893801, 0.87828235285372469], + [0.88525897972630474, 0.84955892810989209, 0.88088414794024839], + [0.88554714811952384, 0.84987174283631584, 0.88336206121170946], + [0.88571155122845646, 0.85002186115856315, 0.88572538990087124]] + +_twilight_shifted_data = 
(_twilight_data[len(_twilight_data)//2:] + + _twilight_data[:len(_twilight_data)//2]) +_twilight_shifted_data.reverse() +_turbo_data = [[0.18995, 0.07176, 0.23217], + [0.19483, 0.08339, 0.26149], + [0.19956, 0.09498, 0.29024], + [0.20415, 0.10652, 0.31844], + [0.20860, 0.11802, 0.34607], + [0.21291, 0.12947, 0.37314], + [0.21708, 0.14087, 0.39964], + [0.22111, 0.15223, 0.42558], + [0.22500, 0.16354, 0.45096], + [0.22875, 0.17481, 0.47578], + [0.23236, 0.18603, 0.50004], + [0.23582, 0.19720, 0.52373], + [0.23915, 0.20833, 0.54686], + [0.24234, 0.21941, 0.56942], + [0.24539, 0.23044, 0.59142], + [0.24830, 0.24143, 0.61286], + [0.25107, 0.25237, 0.63374], + [0.25369, 0.26327, 0.65406], + [0.25618, 0.27412, 0.67381], + [0.25853, 0.28492, 0.69300], + [0.26074, 0.29568, 0.71162], + [0.26280, 0.30639, 0.72968], + [0.26473, 0.31706, 0.74718], + [0.26652, 0.32768, 0.76412], + [0.26816, 0.33825, 0.78050], + [0.26967, 0.34878, 0.79631], + [0.27103, 0.35926, 0.81156], + [0.27226, 0.36970, 0.82624], + [0.27334, 0.38008, 0.84037], + [0.27429, 0.39043, 0.85393], + [0.27509, 0.40072, 0.86692], + [0.27576, 0.41097, 0.87936], + [0.27628, 0.42118, 0.89123], + [0.27667, 0.43134, 0.90254], + [0.27691, 0.44145, 0.91328], + [0.27701, 0.45152, 0.92347], + [0.27698, 0.46153, 0.93309], + [0.27680, 0.47151, 0.94214], + [0.27648, 0.48144, 0.95064], + [0.27603, 0.49132, 0.95857], + [0.27543, 0.50115, 0.96594], + [0.27469, 0.51094, 0.97275], + [0.27381, 0.52069, 0.97899], + [0.27273, 0.53040, 0.98461], + [0.27106, 0.54015, 0.98930], + [0.26878, 0.54995, 0.99303], + [0.26592, 0.55979, 0.99583], + [0.26252, 0.56967, 0.99773], + [0.25862, 0.57958, 0.99876], + [0.25425, 0.58950, 0.99896], + [0.24946, 0.59943, 0.99835], + [0.24427, 0.60937, 0.99697], + [0.23874, 0.61931, 0.99485], + [0.23288, 0.62923, 0.99202], + [0.22676, 0.63913, 0.98851], + [0.22039, 0.64901, 0.98436], + [0.21382, 0.65886, 0.97959], + [0.20708, 0.66866, 0.97423], + [0.20021, 0.67842, 0.96833], + [0.19326, 0.68812, 0.96190], + [0.18625, 0.69775, 0.95498], + [0.17923, 0.70732, 0.94761], + [0.17223, 0.71680, 0.93981], + [0.16529, 0.72620, 0.93161], + [0.15844, 0.73551, 0.92305], + [0.15173, 0.74472, 0.91416], + [0.14519, 0.75381, 0.90496], + [0.13886, 0.76279, 0.89550], + [0.13278, 0.77165, 0.88580], + [0.12698, 0.78037, 0.87590], + [0.12151, 0.78896, 0.86581], + [0.11639, 0.79740, 0.85559], + [0.11167, 0.80569, 0.84525], + [0.10738, 0.81381, 0.83484], + [0.10357, 0.82177, 0.82437], + [0.10026, 0.82955, 0.81389], + [0.09750, 0.83714, 0.80342], + [0.09532, 0.84455, 0.79299], + [0.09377, 0.85175, 0.78264], + [0.09287, 0.85875, 0.77240], + [0.09267, 0.86554, 0.76230], + [0.09320, 0.87211, 0.75237], + [0.09451, 0.87844, 0.74265], + [0.09662, 0.88454, 0.73316], + [0.09958, 0.89040, 0.72393], + [0.10342, 0.89600, 0.71500], + [0.10815, 0.90142, 0.70599], + [0.11374, 0.90673, 0.69651], + [0.12014, 0.91193, 0.68660], + [0.12733, 0.91701, 0.67627], + [0.13526, 0.92197, 0.66556], + [0.14391, 0.92680, 0.65448], + [0.15323, 0.93151, 0.64308], + [0.16319, 0.93609, 0.63137], + [0.17377, 0.94053, 0.61938], + [0.18491, 0.94484, 0.60713], + [0.19659, 0.94901, 0.59466], + [0.20877, 0.95304, 0.58199], + [0.22142, 0.95692, 0.56914], + [0.23449, 0.96065, 0.55614], + [0.24797, 0.96423, 0.54303], + [0.26180, 0.96765, 0.52981], + [0.27597, 0.97092, 0.51653], + [0.29042, 0.97403, 0.50321], + [0.30513, 0.97697, 0.48987], + [0.32006, 0.97974, 0.47654], + [0.33517, 0.98234, 0.46325], + [0.35043, 0.98477, 0.45002], + [0.36581, 0.98702, 0.43688], + [0.38127, 0.98909, 0.42386], + 
[0.39678, 0.99098, 0.41098], + [0.41229, 0.99268, 0.39826], + [0.42778, 0.99419, 0.38575], + [0.44321, 0.99551, 0.37345], + [0.45854, 0.99663, 0.36140], + [0.47375, 0.99755, 0.34963], + [0.48879, 0.99828, 0.33816], + [0.50362, 0.99879, 0.32701], + [0.51822, 0.99910, 0.31622], + [0.53255, 0.99919, 0.30581], + [0.54658, 0.99907, 0.29581], + [0.56026, 0.99873, 0.28623], + [0.57357, 0.99817, 0.27712], + [0.58646, 0.99739, 0.26849], + [0.59891, 0.99638, 0.26038], + [0.61088, 0.99514, 0.25280], + [0.62233, 0.99366, 0.24579], + [0.63323, 0.99195, 0.23937], + [0.64362, 0.98999, 0.23356], + [0.65394, 0.98775, 0.22835], + [0.66428, 0.98524, 0.22370], + [0.67462, 0.98246, 0.21960], + [0.68494, 0.97941, 0.21602], + [0.69525, 0.97610, 0.21294], + [0.70553, 0.97255, 0.21032], + [0.71577, 0.96875, 0.20815], + [0.72596, 0.96470, 0.20640], + [0.73610, 0.96043, 0.20504], + [0.74617, 0.95593, 0.20406], + [0.75617, 0.95121, 0.20343], + [0.76608, 0.94627, 0.20311], + [0.77591, 0.94113, 0.20310], + [0.78563, 0.93579, 0.20336], + [0.79524, 0.93025, 0.20386], + [0.80473, 0.92452, 0.20459], + [0.81410, 0.91861, 0.20552], + [0.82333, 0.91253, 0.20663], + [0.83241, 0.90627, 0.20788], + [0.84133, 0.89986, 0.20926], + [0.85010, 0.89328, 0.21074], + [0.85868, 0.88655, 0.21230], + [0.86709, 0.87968, 0.21391], + [0.87530, 0.87267, 0.21555], + [0.88331, 0.86553, 0.21719], + [0.89112, 0.85826, 0.21880], + [0.89870, 0.85087, 0.22038], + [0.90605, 0.84337, 0.22188], + [0.91317, 0.83576, 0.22328], + [0.92004, 0.82806, 0.22456], + [0.92666, 0.82025, 0.22570], + [0.93301, 0.81236, 0.22667], + [0.93909, 0.80439, 0.22744], + [0.94489, 0.79634, 0.22800], + [0.95039, 0.78823, 0.22831], + [0.95560, 0.78005, 0.22836], + [0.96049, 0.77181, 0.22811], + [0.96507, 0.76352, 0.22754], + [0.96931, 0.75519, 0.22663], + [0.97323, 0.74682, 0.22536], + [0.97679, 0.73842, 0.22369], + [0.98000, 0.73000, 0.22161], + [0.98289, 0.72140, 0.21918], + [0.98549, 0.71250, 0.21650], + [0.98781, 0.70330, 0.21358], + [0.98986, 0.69382, 0.21043], + [0.99163, 0.68408, 0.20706], + [0.99314, 0.67408, 0.20348], + [0.99438, 0.66386, 0.19971], + [0.99535, 0.65341, 0.19577], + [0.99607, 0.64277, 0.19165], + [0.99654, 0.63193, 0.18738], + [0.99675, 0.62093, 0.18297], + [0.99672, 0.60977, 0.17842], + [0.99644, 0.59846, 0.17376], + [0.99593, 0.58703, 0.16899], + [0.99517, 0.57549, 0.16412], + [0.99419, 0.56386, 0.15918], + [0.99297, 0.55214, 0.15417], + [0.99153, 0.54036, 0.14910], + [0.98987, 0.52854, 0.14398], + [0.98799, 0.51667, 0.13883], + [0.98590, 0.50479, 0.13367], + [0.98360, 0.49291, 0.12849], + [0.98108, 0.48104, 0.12332], + [0.97837, 0.46920, 0.11817], + [0.97545, 0.45740, 0.11305], + [0.97234, 0.44565, 0.10797], + [0.96904, 0.43399, 0.10294], + [0.96555, 0.42241, 0.09798], + [0.96187, 0.41093, 0.09310], + [0.95801, 0.39958, 0.08831], + [0.95398, 0.38836, 0.08362], + [0.94977, 0.37729, 0.07905], + [0.94538, 0.36638, 0.07461], + [0.94084, 0.35566, 0.07031], + [0.93612, 0.34513, 0.06616], + [0.93125, 0.33482, 0.06218], + [0.92623, 0.32473, 0.05837], + [0.92105, 0.31489, 0.05475], + [0.91572, 0.30530, 0.05134], + [0.91024, 0.29599, 0.04814], + [0.90463, 0.28696, 0.04516], + [0.89888, 0.27824, 0.04243], + [0.89298, 0.26981, 0.03993], + [0.88691, 0.26152, 0.03753], + [0.88066, 0.25334, 0.03521], + [0.87422, 0.24526, 0.03297], + [0.86760, 0.23730, 0.03082], + [0.86079, 0.22945, 0.02875], + [0.85380, 0.22170, 0.02677], + [0.84662, 0.21407, 0.02487], + [0.83926, 0.20654, 0.02305], + [0.83172, 0.19912, 0.02131], + [0.82399, 0.19182, 0.01966], + [0.81608, 0.18462, 
0.01809], + [0.80799, 0.17753, 0.01660], + [0.79971, 0.17055, 0.01520], + [0.79125, 0.16368, 0.01387], + [0.78260, 0.15693, 0.01264], + [0.77377, 0.15028, 0.01148], + [0.76476, 0.14374, 0.01041], + [0.75556, 0.13731, 0.00942], + [0.74617, 0.13098, 0.00851], + [0.73661, 0.12477, 0.00769], + [0.72686, 0.11867, 0.00695], + [0.71692, 0.11268, 0.00629], + [0.70680, 0.10680, 0.00571], + [0.69650, 0.10102, 0.00522], + [0.68602, 0.09536, 0.00481], + [0.67535, 0.08980, 0.00449], + [0.66449, 0.08436, 0.00424], + [0.65345, 0.07902, 0.00408], + [0.64223, 0.07380, 0.00401], + [0.63082, 0.06868, 0.00401], + [0.61923, 0.06367, 0.00410], + [0.60746, 0.05878, 0.00427], + [0.59550, 0.05399, 0.00453], + [0.58336, 0.04931, 0.00486], + [0.57103, 0.04474, 0.00529], + [0.55852, 0.04028, 0.00579], + [0.54583, 0.03593, 0.00638], + [0.53295, 0.03169, 0.00705], + [0.51989, 0.02756, 0.00780], + [0.50664, 0.02354, 0.00863], + [0.49321, 0.01963, 0.00955], + [0.47960, 0.01583, 0.01055]] + + +cmaps = { + name: ListedColormap(data, name=name) for name, data in [ + ('magma', _magma_data), + ('inferno', _inferno_data), + ('plasma', _plasma_data), + ('viridis', _viridis_data), + ('cividis', _cividis_data), + ('twilight', _twilight_data), + ('twilight_shifted', _twilight_shifted_data), + ('turbo', _turbo_data), + ]} diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_color_data.py b/minor_project/lib/python3.6/site-packages/matplotlib/_color_data.py new file mode 100644 index 0000000..e50998b --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_color_data.py @@ -0,0 +1,1147 @@ +from collections import OrderedDict + + +BASE_COLORS = { + 'b': (0, 0, 1), # blue + 'g': (0, 0.5, 0), # green + 'r': (1, 0, 0), # red + 'c': (0, 0.75, 0.75), # cyan + 'm': (0.75, 0, 0.75), # magenta + 'y': (0.75, 0.75, 0), # yellow + 'k': (0, 0, 0), # black + 'w': (1, 1, 1), # white +} + + +# These colors are from Tableau +TABLEAU_COLORS = ( + ('blue', '#1f77b4'), + ('orange', '#ff7f0e'), + ('green', '#2ca02c'), + ('red', '#d62728'), + ('purple', '#9467bd'), + ('brown', '#8c564b'), + ('pink', '#e377c2'), + ('gray', '#7f7f7f'), + ('olive', '#bcbd22'), + ('cyan', '#17becf'), +) + +# Normalize name to "tab:" to avoid name collisions. +TABLEAU_COLORS = OrderedDict( + ('tab:' + name, value) for name, value in TABLEAU_COLORS) + +# This mapping of color names -> hex values is taken from +# a survey run by Randall Munroe see: +# https://blog.xkcd.com/2010/05/03/color-survey-results/ +# for more details. 
The results are hosted at +# https://xkcd.com/color/rgb +# and also available as a text file at +# https://xkcd.com/color/rgb.txt +# +# License: http://creativecommons.org/publicdomain/zero/1.0/ +XKCD_COLORS = { + 'cloudy blue': '#acc2d9', + 'dark pastel green': '#56ae57', + 'dust': '#b2996e', + 'electric lime': '#a8ff04', + 'fresh green': '#69d84f', + 'light eggplant': '#894585', + 'nasty green': '#70b23f', + 'really light blue': '#d4ffff', + 'tea': '#65ab7c', + 'warm purple': '#952e8f', + 'yellowish tan': '#fcfc81', + 'cement': '#a5a391', + 'dark grass green': '#388004', + 'dusty teal': '#4c9085', + 'grey teal': '#5e9b8a', + 'macaroni and cheese': '#efb435', + 'pinkish tan': '#d99b82', + 'spruce': '#0a5f38', + 'strong blue': '#0c06f7', + 'toxic green': '#61de2a', + 'windows blue': '#3778bf', + 'blue blue': '#2242c7', + 'blue with a hint of purple': '#533cc6', + 'booger': '#9bb53c', + 'bright sea green': '#05ffa6', + 'dark green blue': '#1f6357', + 'deep turquoise': '#017374', + 'green teal': '#0cb577', + 'strong pink': '#ff0789', + 'bland': '#afa88b', + 'deep aqua': '#08787f', + 'lavender pink': '#dd85d7', + 'light moss green': '#a6c875', + 'light seafoam green': '#a7ffb5', + 'olive yellow': '#c2b709', + 'pig pink': '#e78ea5', + 'deep lilac': '#966ebd', + 'desert': '#ccad60', + 'dusty lavender': '#ac86a8', + 'purpley grey': '#947e94', + 'purply': '#983fb2', + 'candy pink': '#ff63e9', + 'light pastel green': '#b2fba5', + 'boring green': '#63b365', + 'kiwi green': '#8ee53f', + 'light grey green': '#b7e1a1', + 'orange pink': '#ff6f52', + 'tea green': '#bdf8a3', + 'very light brown': '#d3b683', + 'egg shell': '#fffcc4', + 'eggplant purple': '#430541', + 'powder pink': '#ffb2d0', + 'reddish grey': '#997570', + 'baby shit brown': '#ad900d', + 'liliac': '#c48efd', + 'stormy blue': '#507b9c', + 'ugly brown': '#7d7103', + 'custard': '#fffd78', + 'darkish pink': '#da467d', + 'deep brown': '#410200', + 'greenish beige': '#c9d179', + 'manilla': '#fffa86', + 'off blue': '#5684ae', + 'battleship grey': '#6b7c85', + 'browny green': '#6f6c0a', + 'bruise': '#7e4071', + 'kelley green': '#009337', + 'sickly yellow': '#d0e429', + 'sunny yellow': '#fff917', + 'azul': '#1d5dec', + 'darkgreen': '#054907', + 'green/yellow': '#b5ce08', + 'lichen': '#8fb67b', + 'light light green': '#c8ffb0', + 'pale gold': '#fdde6c', + 'sun yellow': '#ffdf22', + 'tan green': '#a9be70', + 'burple': '#6832e3', + 'butterscotch': '#fdb147', + 'toupe': '#c7ac7d', + 'dark cream': '#fff39a', + 'indian red': '#850e04', + 'light lavendar': '#efc0fe', + 'poison green': '#40fd14', + 'baby puke green': '#b6c406', + 'bright yellow green': '#9dff00', + 'charcoal grey': '#3c4142', + 'squash': '#f2ab15', + 'cinnamon': '#ac4f06', + 'light pea green': '#c4fe82', + 'radioactive green': '#2cfa1f', + 'raw sienna': '#9a6200', + 'baby purple': '#ca9bf7', + 'cocoa': '#875f42', + 'light royal blue': '#3a2efe', + 'orangeish': '#fd8d49', + 'rust brown': '#8b3103', + 'sand brown': '#cba560', + 'swamp': '#698339', + 'tealish green': '#0cdc73', + 'burnt siena': '#b75203', + 'camo': '#7f8f4e', + 'dusk blue': '#26538d', + 'fern': '#63a950', + 'old rose': '#c87f89', + 'pale light green': '#b1fc99', + 'peachy pink': '#ff9a8a', + 'rosy pink': '#f6688e', + 'light bluish green': '#76fda8', + 'light bright green': '#53fe5c', + 'light neon green': '#4efd54', + 'light seafoam': '#a0febf', + 'tiffany blue': '#7bf2da', + 'washed out green': '#bcf5a6', + 'browny orange': '#ca6b02', + 'nice blue': '#107ab0', + 'sapphire': '#2138ab', + 'greyish teal': '#719f91', + 'orangey 
yellow': '#fdb915', + 'parchment': '#fefcaf', + 'straw': '#fcf679', + 'very dark brown': '#1d0200', + 'terracota': '#cb6843', + 'ugly blue': '#31668a', + 'clear blue': '#247afd', + 'creme': '#ffffb6', + 'foam green': '#90fda9', + 'grey/green': '#86a17d', + 'light gold': '#fddc5c', + 'seafoam blue': '#78d1b6', + 'topaz': '#13bbaf', + 'violet pink': '#fb5ffc', + 'wintergreen': '#20f986', + 'yellow tan': '#ffe36e', + 'dark fuchsia': '#9d0759', + 'indigo blue': '#3a18b1', + 'light yellowish green': '#c2ff89', + 'pale magenta': '#d767ad', + 'rich purple': '#720058', + 'sunflower yellow': '#ffda03', + 'green/blue': '#01c08d', + 'leather': '#ac7434', + 'racing green': '#014600', + 'vivid purple': '#9900fa', + 'dark royal blue': '#02066f', + 'hazel': '#8e7618', + 'muted pink': '#d1768f', + 'booger green': '#96b403', + 'canary': '#fdff63', + 'cool grey': '#95a3a6', + 'dark taupe': '#7f684e', + 'darkish purple': '#751973', + 'true green': '#089404', + 'coral pink': '#ff6163', + 'dark sage': '#598556', + 'dark slate blue': '#214761', + 'flat blue': '#3c73a8', + 'mushroom': '#ba9e88', + 'rich blue': '#021bf9', + 'dirty purple': '#734a65', + 'greenblue': '#23c48b', + 'icky green': '#8fae22', + 'light khaki': '#e6f2a2', + 'warm blue': '#4b57db', + 'dark hot pink': '#d90166', + 'deep sea blue': '#015482', + 'carmine': '#9d0216', + 'dark yellow green': '#728f02', + 'pale peach': '#ffe5ad', + 'plum purple': '#4e0550', + 'golden rod': '#f9bc08', + 'neon red': '#ff073a', + 'old pink': '#c77986', + 'very pale blue': '#d6fffe', + 'blood orange': '#fe4b03', + 'grapefruit': '#fd5956', + 'sand yellow': '#fce166', + 'clay brown': '#b2713d', + 'dark blue grey': '#1f3b4d', + 'flat green': '#699d4c', + 'light green blue': '#56fca2', + 'warm pink': '#fb5581', + 'dodger blue': '#3e82fc', + 'gross green': '#a0bf16', + 'ice': '#d6fffa', + 'metallic blue': '#4f738e', + 'pale salmon': '#ffb19a', + 'sap green': '#5c8b15', + 'algae': '#54ac68', + 'bluey grey': '#89a0b0', + 'greeny grey': '#7ea07a', + 'highlighter green': '#1bfc06', + 'light light blue': '#cafffb', + 'light mint': '#b6ffbb', + 'raw umber': '#a75e09', + 'vivid blue': '#152eff', + 'deep lavender': '#8d5eb7', + 'dull teal': '#5f9e8f', + 'light greenish blue': '#63f7b4', + 'mud green': '#606602', + 'pinky': '#fc86aa', + 'red wine': '#8c0034', + 'shit green': '#758000', + 'tan brown': '#ab7e4c', + 'darkblue': '#030764', + 'rosa': '#fe86a4', + 'lipstick': '#d5174e', + 'pale mauve': '#fed0fc', + 'claret': '#680018', + 'dandelion': '#fedf08', + 'orangered': '#fe420f', + 'poop green': '#6f7c00', + 'ruby': '#ca0147', + 'dark': '#1b2431', + 'greenish turquoise': '#00fbb0', + 'pastel red': '#db5856', + 'piss yellow': '#ddd618', + 'bright cyan': '#41fdfe', + 'dark coral': '#cf524e', + 'algae green': '#21c36f', + 'darkish red': '#a90308', + 'reddy brown': '#6e1005', + 'blush pink': '#fe828c', + 'camouflage green': '#4b6113', + 'lawn green': '#4da409', + 'putty': '#beae8a', + 'vibrant blue': '#0339f8', + 'dark sand': '#a88f59', + 'purple/blue': '#5d21d0', + 'saffron': '#feb209', + 'twilight': '#4e518b', + 'warm brown': '#964e02', + 'bluegrey': '#85a3b2', + 'bubble gum pink': '#ff69af', + 'duck egg blue': '#c3fbf4', + 'greenish cyan': '#2afeb7', + 'petrol': '#005f6a', + 'royal': '#0c1793', + 'butter': '#ffff81', + 'dusty orange': '#f0833a', + 'off yellow': '#f1f33f', + 'pale olive green': '#b1d27b', + 'orangish': '#fc824a', + 'leaf': '#71aa34', + 'light blue grey': '#b7c9e2', + 'dried blood': '#4b0101', + 'lightish purple': '#a552e6', + 'rusty red': '#af2f0d', + 'lavender 
blue': '#8b88f8', + 'light grass green': '#9af764', + 'light mint green': '#a6fbb2', + 'sunflower': '#ffc512', + 'velvet': '#750851', + 'brick orange': '#c14a09', + 'lightish red': '#fe2f4a', + 'pure blue': '#0203e2', + 'twilight blue': '#0a437a', + 'violet red': '#a50055', + 'yellowy brown': '#ae8b0c', + 'carnation': '#fd798f', + 'muddy yellow': '#bfac05', + 'dark seafoam green': '#3eaf76', + 'deep rose': '#c74767', + 'dusty red': '#b9484e', + 'grey/blue': '#647d8e', + 'lemon lime': '#bffe28', + 'purple/pink': '#d725de', + 'brown yellow': '#b29705', + 'purple brown': '#673a3f', + 'wisteria': '#a87dc2', + 'banana yellow': '#fafe4b', + 'lipstick red': '#c0022f', + 'water blue': '#0e87cc', + 'brown grey': '#8d8468', + 'vibrant purple': '#ad03de', + 'baby green': '#8cff9e', + 'barf green': '#94ac02', + 'eggshell blue': '#c4fff7', + 'sandy yellow': '#fdee73', + 'cool green': '#33b864', + 'pale': '#fff9d0', + 'blue/grey': '#758da3', + 'hot magenta': '#f504c9', + 'greyblue': '#77a1b5', + 'purpley': '#8756e4', + 'baby shit green': '#889717', + 'brownish pink': '#c27e79', + 'dark aquamarine': '#017371', + 'diarrhea': '#9f8303', + 'light mustard': '#f7d560', + 'pale sky blue': '#bdf6fe', + 'turtle green': '#75b84f', + 'bright olive': '#9cbb04', + 'dark grey blue': '#29465b', + 'greeny brown': '#696006', + 'lemon green': '#adf802', + 'light periwinkle': '#c1c6fc', + 'seaweed green': '#35ad6b', + 'sunshine yellow': '#fffd37', + 'ugly purple': '#a442a0', + 'medium pink': '#f36196', + 'puke brown': '#947706', + 'very light pink': '#fff4f2', + 'viridian': '#1e9167', + 'bile': '#b5c306', + 'faded yellow': '#feff7f', + 'very pale green': '#cffdbc', + 'vibrant green': '#0add08', + 'bright lime': '#87fd05', + 'spearmint': '#1ef876', + 'light aquamarine': '#7bfdc7', + 'light sage': '#bcecac', + 'yellowgreen': '#bbf90f', + 'baby poo': '#ab9004', + 'dark seafoam': '#1fb57a', + 'deep teal': '#00555a', + 'heather': '#a484ac', + 'rust orange': '#c45508', + 'dirty blue': '#3f829d', + 'fern green': '#548d44', + 'bright lilac': '#c95efb', + 'weird green': '#3ae57f', + 'peacock blue': '#016795', + 'avocado green': '#87a922', + 'faded orange': '#f0944d', + 'grape purple': '#5d1451', + 'hot green': '#25ff29', + 'lime yellow': '#d0fe1d', + 'mango': '#ffa62b', + 'shamrock': '#01b44c', + 'bubblegum': '#ff6cb5', + 'purplish brown': '#6b4247', + 'vomit yellow': '#c7c10c', + 'pale cyan': '#b7fffa', + 'key lime': '#aeff6e', + 'tomato red': '#ec2d01', + 'lightgreen': '#76ff7b', + 'merlot': '#730039', + 'night blue': '#040348', + 'purpleish pink': '#df4ec8', + 'apple': '#6ecb3c', + 'baby poop green': '#8f9805', + 'green apple': '#5edc1f', + 'heliotrope': '#d94ff5', + 'yellow/green': '#c8fd3d', + 'almost black': '#070d0d', + 'cool blue': '#4984b8', + 'leafy green': '#51b73b', + 'mustard brown': '#ac7e04', + 'dusk': '#4e5481', + 'dull brown': '#876e4b', + 'frog green': '#58bc08', + 'vivid green': '#2fef10', + 'bright light green': '#2dfe54', + 'fluro green': '#0aff02', + 'kiwi': '#9cef43', + 'seaweed': '#18d17b', + 'navy green': '#35530a', + 'ultramarine blue': '#1805db', + 'iris': '#6258c4', + 'pastel orange': '#ff964f', + 'yellowish orange': '#ffab0f', + 'perrywinkle': '#8f8ce7', + 'tealish': '#24bca8', + 'dark plum': '#3f012c', + 'pear': '#cbf85f', + 'pinkish orange': '#ff724c', + 'midnight purple': '#280137', + 'light urple': '#b36ff6', + 'dark mint': '#48c072', + 'greenish tan': '#bccb7a', + 'light burgundy': '#a8415b', + 'turquoise blue': '#06b1c4', + 'ugly pink': '#cd7584', + 'sandy': '#f1da7a', + 'electric pink': 
'#ff0490', + 'muted purple': '#805b87', + 'mid green': '#50a747', + 'greyish': '#a8a495', + 'neon yellow': '#cfff04', + 'banana': '#ffff7e', + 'carnation pink': '#ff7fa7', + 'tomato': '#ef4026', + 'sea': '#3c9992', + 'muddy brown': '#886806', + 'turquoise green': '#04f489', + 'buff': '#fef69e', + 'fawn': '#cfaf7b', + 'muted blue': '#3b719f', + 'pale rose': '#fdc1c5', + 'dark mint green': '#20c073', + 'amethyst': '#9b5fc0', + 'blue/green': '#0f9b8e', + 'chestnut': '#742802', + 'sick green': '#9db92c', + 'pea': '#a4bf20', + 'rusty orange': '#cd5909', + 'stone': '#ada587', + 'rose red': '#be013c', + 'pale aqua': '#b8ffeb', + 'deep orange': '#dc4d01', + 'earth': '#a2653e', + 'mossy green': '#638b27', + 'grassy green': '#419c03', + 'pale lime green': '#b1ff65', + 'light grey blue': '#9dbcd4', + 'pale grey': '#fdfdfe', + 'asparagus': '#77ab56', + 'blueberry': '#464196', + 'purple red': '#990147', + 'pale lime': '#befd73', + 'greenish teal': '#32bf84', + 'caramel': '#af6f09', + 'deep magenta': '#a0025c', + 'light peach': '#ffd8b1', + 'milk chocolate': '#7f4e1e', + 'ocher': '#bf9b0c', + 'off green': '#6ba353', + 'purply pink': '#f075e6', + 'lightblue': '#7bc8f6', + 'dusky blue': '#475f94', + 'golden': '#f5bf03', + 'light beige': '#fffeb6', + 'butter yellow': '#fffd74', + 'dusky purple': '#895b7b', + 'french blue': '#436bad', + 'ugly yellow': '#d0c101', + 'greeny yellow': '#c6f808', + 'orangish red': '#f43605', + 'shamrock green': '#02c14d', + 'orangish brown': '#b25f03', + 'tree green': '#2a7e19', + 'deep violet': '#490648', + 'gunmetal': '#536267', + 'blue/purple': '#5a06ef', + 'cherry': '#cf0234', + 'sandy brown': '#c4a661', + 'warm grey': '#978a84', + 'dark indigo': '#1f0954', + 'midnight': '#03012d', + 'bluey green': '#2bb179', + 'grey pink': '#c3909b', + 'soft purple': '#a66fb5', + 'blood': '#770001', + 'brown red': '#922b05', + 'medium grey': '#7d7f7c', + 'berry': '#990f4b', + 'poo': '#8f7303', + 'purpley pink': '#c83cb9', + 'light salmon': '#fea993', + 'snot': '#acbb0d', + 'easter purple': '#c071fe', + 'light yellow green': '#ccfd7f', + 'dark navy blue': '#00022e', + 'drab': '#828344', + 'light rose': '#ffc5cb', + 'rouge': '#ab1239', + 'purplish red': '#b0054b', + 'slime green': '#99cc04', + 'baby poop': '#937c00', + 'irish green': '#019529', + 'pink/purple': '#ef1de7', + 'dark navy': '#000435', + 'greeny blue': '#42b395', + 'light plum': '#9d5783', + 'pinkish grey': '#c8aca9', + 'dirty orange': '#c87606', + 'rust red': '#aa2704', + 'pale lilac': '#e4cbff', + 'orangey red': '#fa4224', + 'primary blue': '#0804f9', + 'kermit green': '#5cb200', + 'brownish purple': '#76424e', + 'murky green': '#6c7a0e', + 'wheat': '#fbdd7e', + 'very dark purple': '#2a0134', + 'bottle green': '#044a05', + 'watermelon': '#fd4659', + 'deep sky blue': '#0d75f8', + 'fire engine red': '#fe0002', + 'yellow ochre': '#cb9d06', + 'pumpkin orange': '#fb7d07', + 'pale olive': '#b9cc81', + 'light lilac': '#edc8ff', + 'lightish green': '#61e160', + 'carolina blue': '#8ab8fe', + 'mulberry': '#920a4e', + 'shocking pink': '#fe02a2', + 'auburn': '#9a3001', + 'bright lime green': '#65fe08', + 'celadon': '#befdb7', + 'pinkish brown': '#b17261', + 'poo brown': '#885f01', + 'bright sky blue': '#02ccfe', + 'celery': '#c1fd95', + 'dirt brown': '#836539', + 'strawberry': '#fb2943', + 'dark lime': '#84b701', + 'copper': '#b66325', + 'medium brown': '#7f5112', + 'muted green': '#5fa052', + "robin's egg": '#6dedfd', + 'bright aqua': '#0bf9ea', + 'bright lavender': '#c760ff', + 'ivory': '#ffffcb', + 'very light purple': '#f6cefc', + 
'light navy': '#155084', + 'pink red': '#f5054f', + 'olive brown': '#645403', + 'poop brown': '#7a5901', + 'mustard green': '#a8b504', + 'ocean green': '#3d9973', + 'very dark blue': '#000133', + 'dusty green': '#76a973', + 'light navy blue': '#2e5a88', + 'minty green': '#0bf77d', + 'adobe': '#bd6c48', + 'barney': '#ac1db8', + 'jade green': '#2baf6a', + 'bright light blue': '#26f7fd', + 'light lime': '#aefd6c', + 'dark khaki': '#9b8f55', + 'orange yellow': '#ffad01', + 'ocre': '#c69c04', + 'maize': '#f4d054', + 'faded pink': '#de9dac', + 'british racing green': '#05480d', + 'sandstone': '#c9ae74', + 'mud brown': '#60460f', + 'light sea green': '#98f6b0', + 'robin egg blue': '#8af1fe', + 'aqua marine': '#2ee8bb', + 'dark sea green': '#11875d', + 'soft pink': '#fdb0c0', + 'orangey brown': '#b16002', + 'cherry red': '#f7022a', + 'burnt yellow': '#d5ab09', + 'brownish grey': '#86775f', + 'camel': '#c69f59', + 'purplish grey': '#7a687f', + 'marine': '#042e60', + 'greyish pink': '#c88d94', + 'pale turquoise': '#a5fbd5', + 'pastel yellow': '#fffe71', + 'bluey purple': '#6241c7', + 'canary yellow': '#fffe40', + 'faded red': '#d3494e', + 'sepia': '#985e2b', + 'coffee': '#a6814c', + 'bright magenta': '#ff08e8', + 'mocha': '#9d7651', + 'ecru': '#feffca', + 'purpleish': '#98568d', + 'cranberry': '#9e003a', + 'darkish green': '#287c37', + 'brown orange': '#b96902', + 'dusky rose': '#ba6873', + 'melon': '#ff7855', + 'sickly green': '#94b21c', + 'silver': '#c5c9c7', + 'purply blue': '#661aee', + 'purpleish blue': '#6140ef', + 'hospital green': '#9be5aa', + 'shit brown': '#7b5804', + 'mid blue': '#276ab3', + 'amber': '#feb308', + 'easter green': '#8cfd7e', + 'soft blue': '#6488ea', + 'cerulean blue': '#056eee', + 'golden brown': '#b27a01', + 'bright turquoise': '#0ffef9', + 'red pink': '#fa2a55', + 'red purple': '#820747', + 'greyish brown': '#7a6a4f', + 'vermillion': '#f4320c', + 'russet': '#a13905', + 'steel grey': '#6f828a', + 'lighter purple': '#a55af4', + 'bright violet': '#ad0afd', + 'prussian blue': '#004577', + 'slate green': '#658d6d', + 'dirty pink': '#ca7b80', + 'dark blue green': '#005249', + 'pine': '#2b5d34', + 'yellowy green': '#bff128', + 'dark gold': '#b59410', + 'bluish': '#2976bb', + 'darkish blue': '#014182', + 'dull red': '#bb3f3f', + 'pinky red': '#fc2647', + 'bronze': '#a87900', + 'pale teal': '#82cbb2', + 'military green': '#667c3e', + 'barbie pink': '#fe46a5', + 'bubblegum pink': '#fe83cc', + 'pea soup green': '#94a617', + 'dark mustard': '#a88905', + 'shit': '#7f5f00', + 'medium purple': '#9e43a2', + 'very dark green': '#062e03', + 'dirt': '#8a6e45', + 'dusky pink': '#cc7a8b', + 'red violet': '#9e0168', + 'lemon yellow': '#fdff38', + 'pistachio': '#c0fa8b', + 'dull yellow': '#eedc5b', + 'dark lime green': '#7ebd01', + 'denim blue': '#3b5b92', + 'teal blue': '#01889f', + 'lightish blue': '#3d7afd', + 'purpley blue': '#5f34e7', + 'light indigo': '#6d5acf', + 'swamp green': '#748500', + 'brown green': '#706c11', + 'dark maroon': '#3c0008', + 'hot purple': '#cb00f5', + 'dark forest green': '#002d04', + 'faded blue': '#658cbb', + 'drab green': '#749551', + 'light lime green': '#b9ff66', + 'snot green': '#9dc100', + 'yellowish': '#faee66', + 'light blue green': '#7efbb3', + 'bordeaux': '#7b002c', + 'light mauve': '#c292a1', + 'ocean': '#017b92', + 'marigold': '#fcc006', + 'muddy green': '#657432', + 'dull orange': '#d8863b', + 'steel': '#738595', + 'electric purple': '#aa23ff', + 'fluorescent green': '#08ff08', + 'yellowish brown': '#9b7a01', + 'blush': '#f29e8e', + 'soft green': 
'#6fc276', + 'bright orange': '#ff5b00', + 'lemon': '#fdff52', + 'purple grey': '#866f85', + 'acid green': '#8ffe09', + 'pale lavender': '#eecffe', + 'violet blue': '#510ac9', + 'light forest green': '#4f9153', + 'burnt red': '#9f2305', + 'khaki green': '#728639', + 'cerise': '#de0c62', + 'faded purple': '#916e99', + 'apricot': '#ffb16d', + 'dark olive green': '#3c4d03', + 'grey brown': '#7f7053', + 'green grey': '#77926f', + 'true blue': '#010fcc', + 'pale violet': '#ceaefa', + 'periwinkle blue': '#8f99fb', + 'light sky blue': '#c6fcff', + 'blurple': '#5539cc', + 'green brown': '#544e03', + 'bluegreen': '#017a79', + 'bright teal': '#01f9c6', + 'brownish yellow': '#c9b003', + 'pea soup': '#929901', + 'forest': '#0b5509', + 'barney purple': '#a00498', + 'ultramarine': '#2000b1', + 'purplish': '#94568c', + 'puke yellow': '#c2be0e', + 'bluish grey': '#748b97', + 'dark periwinkle': '#665fd1', + 'dark lilac': '#9c6da5', + 'reddish': '#c44240', + 'light maroon': '#a24857', + 'dusty purple': '#825f87', + 'terra cotta': '#c9643b', + 'avocado': '#90b134', + 'marine blue': '#01386a', + 'teal green': '#25a36f', + 'slate grey': '#59656d', + 'lighter green': '#75fd63', + 'electric green': '#21fc0d', + 'dusty blue': '#5a86ad', + 'golden yellow': '#fec615', + 'bright yellow': '#fffd01', + 'light lavender': '#dfc5fe', + 'umber': '#b26400', + 'poop': '#7f5e00', + 'dark peach': '#de7e5d', + 'jungle green': '#048243', + 'eggshell': '#ffffd4', + 'denim': '#3b638c', + 'yellow brown': '#b79400', + 'dull purple': '#84597e', + 'chocolate brown': '#411900', + 'wine red': '#7b0323', + 'neon blue': '#04d9ff', + 'dirty green': '#667e2c', + 'light tan': '#fbeeac', + 'ice blue': '#d7fffe', + 'cadet blue': '#4e7496', + 'dark mauve': '#874c62', + 'very light blue': '#d5ffff', + 'grey purple': '#826d8c', + 'pastel pink': '#ffbacd', + 'very light green': '#d1ffbd', + 'dark sky blue': '#448ee4', + 'evergreen': '#05472a', + 'dull pink': '#d5869d', + 'aubergine': '#3d0734', + 'mahogany': '#4a0100', + 'reddish orange': '#f8481c', + 'deep green': '#02590f', + 'vomit green': '#89a203', + 'purple pink': '#e03fd8', + 'dusty pink': '#d58a94', + 'faded green': '#7bb274', + 'camo green': '#526525', + 'pinky purple': '#c94cbe', + 'pink purple': '#db4bda', + 'brownish red': '#9e3623', + 'dark rose': '#b5485d', + 'mud': '#735c12', + 'brownish': '#9c6d57', + 'emerald green': '#028f1e', + 'pale brown': '#b1916e', + 'dull blue': '#49759c', + 'burnt umber': '#a0450e', + 'medium green': '#39ad48', + 'clay': '#b66a50', + 'light aqua': '#8cffdb', + 'light olive green': '#a4be5c', + 'brownish orange': '#cb7723', + 'dark aqua': '#05696b', + 'purplish pink': '#ce5dae', + 'dark salmon': '#c85a53', + 'greenish grey': '#96ae8d', + 'jade': '#1fa774', + 'ugly green': '#7a9703', + 'dark beige': '#ac9362', + 'emerald': '#01a049', + 'pale red': '#d9544d', + 'light magenta': '#fa5ff7', + 'sky': '#82cafc', + 'light cyan': '#acfffc', + 'yellow orange': '#fcb001', + 'reddish purple': '#910951', + 'reddish pink': '#fe2c54', + 'orchid': '#c875c4', + 'dirty yellow': '#cdc50a', + 'orange red': '#fd411e', + 'deep red': '#9a0200', + 'orange brown': '#be6400', + 'cobalt blue': '#030aa7', + 'neon pink': '#fe019a', + 'rose pink': '#f7879a', + 'greyish purple': '#887191', + 'raspberry': '#b00149', + 'aqua green': '#12e193', + 'salmon pink': '#fe7b7c', + 'tangerine': '#ff9408', + 'brownish green': '#6a6e09', + 'red brown': '#8b2e16', + 'greenish brown': '#696112', + 'pumpkin': '#e17701', + 'pine green': '#0a481e', + 'charcoal': '#343837', + 'baby pink': '#ffb7ce', + 
'cornflower': '#6a79f7', + 'blue violet': '#5d06e9', + 'chocolate': '#3d1c02', + 'greyish green': '#82a67d', + 'scarlet': '#be0119', + 'green yellow': '#c9ff27', + 'dark olive': '#373e02', + 'sienna': '#a9561e', + 'pastel purple': '#caa0ff', + 'terracotta': '#ca6641', + 'aqua blue': '#02d8e9', + 'sage green': '#88b378', + 'blood red': '#980002', + 'deep pink': '#cb0162', + 'grass': '#5cac2d', + 'moss': '#769958', + 'pastel blue': '#a2bffe', + 'bluish green': '#10a674', + 'green blue': '#06b48b', + 'dark tan': '#af884a', + 'greenish blue': '#0b8b87', + 'pale orange': '#ffa756', + 'vomit': '#a2a415', + 'forrest green': '#154406', + 'dark lavender': '#856798', + 'dark violet': '#34013f', + 'purple blue': '#632de9', + 'dark cyan': '#0a888a', + 'olive drab': '#6f7632', + 'pinkish': '#d46a7e', + 'cobalt': '#1e488f', + 'neon purple': '#bc13fe', + 'light turquoise': '#7ef4cc', + 'apple green': '#76cd26', + 'dull green': '#74a662', + 'wine': '#80013f', + 'powder blue': '#b1d1fc', + 'off white': '#ffffe4', + 'electric blue': '#0652ff', + 'dark turquoise': '#045c5a', + 'blue purple': '#5729ce', + 'azure': '#069af3', + 'bright red': '#ff000d', + 'pinkish red': '#f10c45', + 'cornflower blue': '#5170d7', + 'light olive': '#acbf69', + 'grape': '#6c3461', + 'greyish blue': '#5e819d', + 'purplish blue': '#601ef9', + 'yellowish green': '#b0dd16', + 'greenish yellow': '#cdfd02', + 'medium blue': '#2c6fbb', + 'dusty rose': '#c0737a', + 'light violet': '#d6b4fc', + 'midnight blue': '#020035', + 'bluish purple': '#703be7', + 'red orange': '#fd3c06', + 'dark magenta': '#960056', + 'greenish': '#40a368', + 'ocean blue': '#03719c', + 'coral': '#fc5a50', + 'cream': '#ffffc2', + 'reddish brown': '#7f2b0a', + 'burnt sienna': '#b04e0f', + 'brick': '#a03623', + 'sage': '#87ae73', + 'grey green': '#789b73', + 'white': '#ffffff', + "robin's egg blue": '#98eff9', + 'moss green': '#658b38', + 'steel blue': '#5a7d9a', + 'eggplant': '#380835', + 'light yellow': '#fffe7a', + 'leaf green': '#5ca904', + 'light grey': '#d8dcd6', + 'puke': '#a5a502', + 'pinkish purple': '#d648d7', + 'sea blue': '#047495', + 'pale purple': '#b790d4', + 'slate blue': '#5b7c99', + 'blue grey': '#607c8e', + 'hunter green': '#0b4008', + 'fuchsia': '#ed0dd9', + 'crimson': '#8c000f', + 'pale yellow': '#ffff84', + 'ochre': '#bf9005', + 'mustard yellow': '#d2bd0a', + 'light red': '#ff474c', + 'cerulean': '#0485d1', + 'pale pink': '#ffcfdc', + 'deep blue': '#040273', + 'rust': '#a83c09', + 'light teal': '#90e4c1', + 'slate': '#516572', + 'goldenrod': '#fac205', + 'dark yellow': '#d5b60a', + 'dark grey': '#363737', + 'army green': '#4b5d16', + 'grey blue': '#6b8ba4', + 'seafoam': '#80f9ad', + 'puce': '#a57e52', + 'spring green': '#a9f971', + 'dark orange': '#c65102', + 'sand': '#e2ca76', + 'pastel green': '#b0ff9d', + 'mint': '#9ffeb0', + 'light orange': '#fdaa48', + 'bright pink': '#fe01b1', + 'chartreuse': '#c1f80a', + 'deep purple': '#36013f', + 'dark brown': '#341c02', + 'taupe': '#b9a281', + 'pea green': '#8eab12', + 'puke green': '#9aae07', + 'kelly green': '#02ab2e', + 'seafoam green': '#7af9ab', + 'blue green': '#137e6d', + 'khaki': '#aaa662', + 'burgundy': '#610023', + 'dark teal': '#014d4e', + 'brick red': '#8f1402', + 'royal purple': '#4b006e', + 'plum': '#580f41', + 'mint green': '#8fff9f', + 'gold': '#dbb40c', + 'baby blue': '#a2cffe', + 'yellow green': '#c0fb2d', + 'bright purple': '#be03fd', + 'dark red': '#840000', + 'pale blue': '#d0fefe', + 'grass green': '#3f9b0b', + 'navy': '#01153e', + 'aquamarine': '#04d8b2', + 'burnt orange': 
'#c04e01', + 'neon green': '#0cff0c', + 'bright blue': '#0165fc', + 'rose': '#cf6275', + 'light pink': '#ffd1df', + 'mustard': '#ceb301', + 'indigo': '#380282', + 'lime': '#aaff32', + 'sea green': '#53fca1', + 'periwinkle': '#8e82fe', + 'dark pink': '#cb416b', + 'olive green': '#677a04', + 'peach': '#ffb07c', + 'pale green': '#c7fdb5', + 'light brown': '#ad8150', + 'hot pink': '#ff028d', + 'black': '#000000', + 'lilac': '#cea2fd', + 'navy blue': '#001146', + 'royal blue': '#0504aa', + 'beige': '#e6daa6', + 'salmon': '#ff796c', + 'olive': '#6e750e', + 'maroon': '#650021', + 'bright green': '#01ff07', + 'dark purple': '#35063e', + 'mauve': '#ae7181', + 'forest green': '#06470c', + 'aqua': '#13eac9', + 'cyan': '#00ffff', + 'tan': '#d1b26f', + 'dark blue': '#00035b', + 'lavender': '#c79fef', + 'turquoise': '#06c2ac', + 'dark green': '#033500', + 'violet': '#9a0eea', + 'light purple': '#bf77f6', + 'lime green': '#89fe05', + 'grey': '#929591', + 'sky blue': '#75bbfd', + 'yellow': '#ffff14', + 'magenta': '#c20078', + 'light green': '#96f97b', + 'orange': '#f97306', + 'teal': '#029386', + 'light blue': '#95d0fc', + 'red': '#e50000', + 'brown': '#653700', + 'pink': '#ff81c0', + 'blue': '#0343df', + 'green': '#15b01a', + 'purple': '#7e1e9c'} + +# Normalize name to "xkcd:" to avoid name collisions. +XKCD_COLORS = {'xkcd:' + name: value for name, value in XKCD_COLORS.items()} + + +# https://drafts.csswg.org/css-color-4/#named-colors +CSS4_COLORS = { + 'aliceblue': '#F0F8FF', + 'antiquewhite': '#FAEBD7', + 'aqua': '#00FFFF', + 'aquamarine': '#7FFFD4', + 'azure': '#F0FFFF', + 'beige': '#F5F5DC', + 'bisque': '#FFE4C4', + 'black': '#000000', + 'blanchedalmond': '#FFEBCD', + 'blue': '#0000FF', + 'blueviolet': '#8A2BE2', + 'brown': '#A52A2A', + 'burlywood': '#DEB887', + 'cadetblue': '#5F9EA0', + 'chartreuse': '#7FFF00', + 'chocolate': '#D2691E', + 'coral': '#FF7F50', + 'cornflowerblue': '#6495ED', + 'cornsilk': '#FFF8DC', + 'crimson': '#DC143C', + 'cyan': '#00FFFF', + 'darkblue': '#00008B', + 'darkcyan': '#008B8B', + 'darkgoldenrod': '#B8860B', + 'darkgray': '#A9A9A9', + 'darkgreen': '#006400', + 'darkgrey': '#A9A9A9', + 'darkkhaki': '#BDB76B', + 'darkmagenta': '#8B008B', + 'darkolivegreen': '#556B2F', + 'darkorange': '#FF8C00', + 'darkorchid': '#9932CC', + 'darkred': '#8B0000', + 'darksalmon': '#E9967A', + 'darkseagreen': '#8FBC8F', + 'darkslateblue': '#483D8B', + 'darkslategray': '#2F4F4F', + 'darkslategrey': '#2F4F4F', + 'darkturquoise': '#00CED1', + 'darkviolet': '#9400D3', + 'deeppink': '#FF1493', + 'deepskyblue': '#00BFFF', + 'dimgray': '#696969', + 'dimgrey': '#696969', + 'dodgerblue': '#1E90FF', + 'firebrick': '#B22222', + 'floralwhite': '#FFFAF0', + 'forestgreen': '#228B22', + 'fuchsia': '#FF00FF', + 'gainsboro': '#DCDCDC', + 'ghostwhite': '#F8F8FF', + 'gold': '#FFD700', + 'goldenrod': '#DAA520', + 'gray': '#808080', + 'green': '#008000', + 'greenyellow': '#ADFF2F', + 'grey': '#808080', + 'honeydew': '#F0FFF0', + 'hotpink': '#FF69B4', + 'indianred': '#CD5C5C', + 'indigo': '#4B0082', + 'ivory': '#FFFFF0', + 'khaki': '#F0E68C', + 'lavender': '#E6E6FA', + 'lavenderblush': '#FFF0F5', + 'lawngreen': '#7CFC00', + 'lemonchiffon': '#FFFACD', + 'lightblue': '#ADD8E6', + 'lightcoral': '#F08080', + 'lightcyan': '#E0FFFF', + 'lightgoldenrodyellow': '#FAFAD2', + 'lightgray': '#D3D3D3', + 'lightgreen': '#90EE90', + 'lightgrey': '#D3D3D3', + 'lightpink': '#FFB6C1', + 'lightsalmon': '#FFA07A', + 'lightseagreen': '#20B2AA', + 'lightskyblue': '#87CEFA', + 'lightslategray': '#778899', + 'lightslategrey': '#778899', + 
'lightsteelblue': '#B0C4DE', + 'lightyellow': '#FFFFE0', + 'lime': '#00FF00', + 'limegreen': '#32CD32', + 'linen': '#FAF0E6', + 'magenta': '#FF00FF', + 'maroon': '#800000', + 'mediumaquamarine': '#66CDAA', + 'mediumblue': '#0000CD', + 'mediumorchid': '#BA55D3', + 'mediumpurple': '#9370DB', + 'mediumseagreen': '#3CB371', + 'mediumslateblue': '#7B68EE', + 'mediumspringgreen': '#00FA9A', + 'mediumturquoise': '#48D1CC', + 'mediumvioletred': '#C71585', + 'midnightblue': '#191970', + 'mintcream': '#F5FFFA', + 'mistyrose': '#FFE4E1', + 'moccasin': '#FFE4B5', + 'navajowhite': '#FFDEAD', + 'navy': '#000080', + 'oldlace': '#FDF5E6', + 'olive': '#808000', + 'olivedrab': '#6B8E23', + 'orange': '#FFA500', + 'orangered': '#FF4500', + 'orchid': '#DA70D6', + 'palegoldenrod': '#EEE8AA', + 'palegreen': '#98FB98', + 'paleturquoise': '#AFEEEE', + 'palevioletred': '#DB7093', + 'papayawhip': '#FFEFD5', + 'peachpuff': '#FFDAB9', + 'peru': '#CD853F', + 'pink': '#FFC0CB', + 'plum': '#DDA0DD', + 'powderblue': '#B0E0E6', + 'purple': '#800080', + 'rebeccapurple': '#663399', + 'red': '#FF0000', + 'rosybrown': '#BC8F8F', + 'royalblue': '#4169E1', + 'saddlebrown': '#8B4513', + 'salmon': '#FA8072', + 'sandybrown': '#F4A460', + 'seagreen': '#2E8B57', + 'seashell': '#FFF5EE', + 'sienna': '#A0522D', + 'silver': '#C0C0C0', + 'skyblue': '#87CEEB', + 'slateblue': '#6A5ACD', + 'slategray': '#708090', + 'slategrey': '#708090', + 'snow': '#FFFAFA', + 'springgreen': '#00FF7F', + 'steelblue': '#4682B4', + 'tan': '#D2B48C', + 'teal': '#008080', + 'thistle': '#D8BFD8', + 'tomato': '#FF6347', + 'turquoise': '#40E0D0', + 'violet': '#EE82EE', + 'wheat': '#F5DEB3', + 'white': '#FFFFFF', + 'whitesmoke': '#F5F5F5', + 'yellow': '#FFFF00', + 'yellowgreen': '#9ACD32'} diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_constrained_layout.py b/minor_project/lib/python3.6/site-packages/matplotlib/_constrained_layout.py new file mode 100644 index 0000000..90faebc --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_constrained_layout.py @@ -0,0 +1,662 @@ +""" +Adjust subplot layouts so that there are no overlapping axes or axes +decorations. All axes decorations are dealt with (labels, ticks, titles, +ticklabels) and some dependent artists are also dealt with (colorbar, suptitle, +legend). + +Layout is done via `~matplotlib.gridspec`, with one constraint per gridspec, +so it is possible to have overlapping axes if the gridspecs overlap (i.e. +using `~matplotlib.gridspec.GridSpecFromSubplotSpec`). Axes placed using +``figure.subplots()`` or ``figure.add_subplots()`` will participate in the +layout. Axes manually placed via ``figure.add_axes()`` will not. + +See Tutorial: :doc:`/tutorials/intermediate/constrainedlayout_guide` +""" + +# Development Notes: + +# What gets a layoutbox: +# - figure +# - gridspec +# - subplotspec +# EITHER: +# - axes + pos for the axes (i.e. the total area taken by axis and +# the actual "position" argument that needs to be sent to +# ax.set_position.) +# - The axes layout box will also encompass the legend, and that is +# how legends get included (axes legends, not figure legends) +# - colorbars are siblings of the axes if they are single-axes +# colorbars +# OR: +# - a gridspec can be inside a subplotspec. +# - subplotspec +# EITHER: +# - axes... +# OR: +# - gridspec... with arbitrary nesting... +# - colorbars are siblings of the subplotspecs if they are multi-axes +# colorbars. +# - suptitle: +# - right now suptitles are just stacked atop everything else in figure. 
+# Could imagine suptitles being gridspec suptitles, but not implemented +# +# Todo: AnchoredOffsetbox connected to gridspecs or axes. This would +# be more general way to add extra-axes annotations. + +import logging + +import numpy as np + +import matplotlib.cbook as cbook +import matplotlib._layoutbox as layoutbox + +_log = logging.getLogger(__name__) + + +def _spans_overlap(span0, span1): + return span0.start in span1 or span1.start in span0 + + +def _axes_all_finite_sized(fig): + """Return whether all axes in the figure have a finite width and height.""" + for ax in fig.axes: + if ax._layoutbox is not None: + newpos = ax._poslayoutbox.get_rect() + if newpos[2] <= 0 or newpos[3] <= 0: + return False + return True + + +###################################################### +def do_constrained_layout(fig, renderer, h_pad, w_pad, + hspace=None, wspace=None): + """ + Do the constrained_layout. Called at draw time in + ``figure.constrained_layout()`` + + Parameters + ---------- + fig : Figure + is the ``figure`` instance to do the layout in. + + renderer : Renderer + the renderer to use. + + h_pad, w_pad : float + are in figure-normalized units, and are a padding around the axes + elements. + + hspace, wspace : float + are in fractions of the subplot sizes. + + """ + + # Steps: + # + # 1. get a list of unique gridspecs in this figure. Each gridspec will be + # constrained separately. + # 2. Check for gaps in the gridspecs. i.e. if not every axes slot in the + # gridspec has been filled. If empty, add a ghost axis that is made so + # that it cannot be seen (though visible=True). This is needed to make + # a blank spot in the layout. + # 3. Compare the tight_bbox of each axes to its `position`, and assume that + # the difference is the space needed by the elements around the edge of + # the axes (decorations) like the title, ticklabels, x-labels, etc. This + # can include legends who overspill the axes boundaries. + # 4. Constrain gridspec elements to line up: + # a) if colnum0 != colnumC, the two subplotspecs are stacked next to + # each other, with the appropriate order. + # b) if colnum0 == colnumC, line up the left or right side of the + # _poslayoutbox (depending if it is the min or max num that is equal). + # c) do the same for rows... + # 5. The above doesn't constrain relative sizes of the _poslayoutboxes + # at all, and indeed zero-size is a solution that the solver often finds + # more convenient than expanding the sizes. Right now the solution is to + # compare subplotspec sizes (i.e. drowsC and drows0) and constrain the + # larger _poslayoutbox to be larger than the ratio of the sizes. i.e. if + # drows0 > drowsC, then ax._poslayoutbox > axc._poslayoutbox*drowsC/drows0. + # This works fine *if* the decorations are similar between the axes. + # If the larger subplotspec has much larger axes decorations, then the + # constraint above is incorrect. + # + # We need the greater than in the above, in general, rather than an equals + # sign. Consider the case of the left column having 2 rows, and the right + # column having 1 row. We want the top and bottom of the _poslayoutboxes + # to line up. So that means if there are decorations on the left column + # axes they will be smaller than half as large as the right hand axis. + # + # This can break down if the decoration size for the right hand axis (the + # margins) is very large. There must be a math way to check for this case. 
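The steps above are easiest to see from the user side. A minimal sketch, assuming only public matplotlib API (``plt.subplots`` with ``constrained_layout=True`` and ``Figure.colorbar``); the data, titles and filename are purely illustrative:

    import numpy as np
    import matplotlib.pyplot as plt

    # constrained_layout=True routes drawing through this module, so tick
    # labels, titles and the colorbar get their own room instead of overlapping.
    fig, axs = plt.subplots(2, 2, constrained_layout=True)
    for ax in axs.flat:
        im = ax.pcolormesh(np.random.rand(16, 16))
        ax.set_title('panel')
        ax.set_ylabel('rows')
    # A colorbar attached to several axes becomes a sibling of their
    # subplotspecs (see the development notes above) and is allotted space.
    fig.colorbar(im, ax=axs[:, 1], shrink=0.6)
    fig.savefig('constrained_demo.png')
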
+ + invTransFig = fig.transFigure.inverted().transform_bbox + + # list of unique gridspecs that contain child axes: + gss = set() + for ax in fig.axes: + if hasattr(ax, 'get_subplotspec'): + gs = ax.get_subplotspec().get_gridspec() + if gs._layoutbox is not None: + gss.add(gs) + if len(gss) == 0: + cbook._warn_external('There are no gridspecs with layoutboxes. ' + 'Possibly did not call parent GridSpec with the' + ' figure= keyword') + + if fig._layoutbox.constrained_layout_called < 1: + for gs in gss: + # fill in any empty gridspec slots w/ ghost axes... + _make_ghost_gridspec_slots(fig, gs) + + for _ in range(2): + # do the algorithm twice. This has to be done because decorators + # change size after the first re-position (i.e. x/yticklabels get + # larger/smaller). This second reposition tends to be much milder, + # so doing twice makes things work OK. + for ax in fig.axes: + _log.debug(ax._layoutbox) + if ax._layoutbox is not None: + # make margins for each layout box based on the size of + # the decorators. + _make_layout_margins(ax, renderer, h_pad, w_pad) + + # do layout for suptitle. + suptitle = fig._suptitle + do_suptitle = (suptitle is not None and + suptitle._layoutbox is not None and + suptitle.get_in_layout()) + if do_suptitle: + bbox = invTransFig( + suptitle.get_window_extent(renderer=renderer)) + height = bbox.height + if np.isfinite(height): + # reserve at top of figure include an h_pad above and below + suptitle._layoutbox.edit_height(height + h_pad * 2) + + # OK, the above lines up ax._poslayoutbox with ax._layoutbox + # now we need to + # 1) arrange the subplotspecs. We do it at this level because + # the subplotspecs are meant to contain other dependent axes + # like colorbars or legends. + # 2) line up the right and left side of the ax._poslayoutbox + # that have the same subplotspec maxes. + + if fig._layoutbox.constrained_layout_called < 1: + # arrange the subplotspecs... This is all done relative to each + # other. Some subplotspecs contain axes, and others contain + # gridspecs the ones that contain gridspecs are a set proportion + # of their parent gridspec. The ones that contain axes are + # not so constrained. + figlb = fig._layoutbox + for child in figlb.children: + if child._is_gridspec_layoutbox(): + # This routine makes all the subplot spec containers + # have the correct arrangement. It just stacks the + # subplot layoutboxes in the correct order... + _arrange_subplotspecs(child, hspace=hspace, wspace=wspace) + + for gs in gss: + _align_spines(fig, gs) + + fig._layoutbox.constrained_layout_called += 1 + fig._layoutbox.update_variables() + + # check if any axes collapsed to zero. If not, don't change positions: + if _axes_all_finite_sized(fig): + # Now set the position of the axes... + for ax in fig.axes: + if ax._layoutbox is not None: + newpos = ax._poslayoutbox.get_rect() + # Now set the new position. + # ax.set_position will zero out the layout for + # this axis, allowing users to hard-code the position, + # so this does the same w/o zeroing layout. + ax._set_position(newpos, which='original') + if do_suptitle: + newpos = suptitle._layoutbox.get_rect() + suptitle.set_y(1.0 - h_pad) + else: + if suptitle is not None and suptitle._layoutbox is not None: + suptitle._layoutbox.edit_height(0) + else: + cbook._warn_external('constrained_layout not applied. At least ' + 'one axes collapsed to zero width or height.') + + +def _make_ghost_gridspec_slots(fig, gs): + """ + Check for unoccupied gridspec slots and make ghost axes for these + slots... 
Do for each gs separately. This is a pretty big kludge + but shouldn't have too much ill effect. The worst is that + someone querying the figure will wonder why there are more + axes than they thought. + """ + nrows, ncols = gs.get_geometry() + hassubplotspec = np.zeros(nrows * ncols, dtype=bool) + axs = [] + for ax in fig.axes: + if (hasattr(ax, 'get_subplotspec') + and ax._layoutbox is not None + and ax.get_subplotspec().get_gridspec() == gs): + axs += [ax] + for ax in axs: + ss0 = ax.get_subplotspec() + hassubplotspec[ss0.num1:(ss0.num2 + 1)] = True + for nn, hss in enumerate(hassubplotspec): + if not hss: + # this gridspec slot doesn't have an axis so we + # make a "ghost". + ax = fig.add_subplot(gs[nn]) + ax.set_visible(False) + + +def _make_layout_margins(ax, renderer, h_pad, w_pad): + """ + For each axes, make a margin between the *pos* layoutbox and the + *axes* layoutbox be a minimum size that can accommodate the + decorations on the axis. + """ + fig = ax.figure + invTransFig = fig.transFigure.inverted().transform_bbox + pos = ax.get_position(original=True) + try: + tightbbox = ax.get_tightbbox(renderer=renderer, for_layout_only=True) + except TypeError: + tightbbox = ax.get_tightbbox(renderer=renderer) + + if tightbbox is None: + bbox = pos + else: + bbox = invTransFig(tightbbox) + + # this can go wrong: + if not (np.isfinite(bbox.width) and np.isfinite(bbox.height)): + # just abort, this is likely a bad set of coordinates that + # is transitory... + return + # use stored h_pad if it exists + h_padt = ax._poslayoutbox.h_pad + if h_padt is None: + h_padt = h_pad + w_padt = ax._poslayoutbox.w_pad + if w_padt is None: + w_padt = w_pad + ax._poslayoutbox.edit_left_margin_min(-bbox.x0 + pos.x0 + w_padt) + ax._poslayoutbox.edit_right_margin_min(bbox.x1 - pos.x1 + w_padt) + ax._poslayoutbox.edit_bottom_margin_min(-bbox.y0 + pos.y0 + h_padt) + ax._poslayoutbox.edit_top_margin_min(bbox.y1-pos.y1+h_padt) + _log.debug('left %f', (-bbox.x0 + pos.x0 + w_pad)) + _log.debug('right %f', (bbox.x1 - pos.x1 + w_pad)) + _log.debug('bottom %f', (-bbox.y0 + pos.y0 + h_padt)) + _log.debug('bbox.y0 %f', bbox.y0) + _log.debug('pos.y0 %f', pos.y0) + # Sometimes its possible for the solver to collapse + # rather than expand axes, so they all have zero height + # or width. This stops that... It *should* have been + # taken into account w/ pref_width... + if fig._layoutbox.constrained_layout_called < 1: + ax._poslayoutbox.constrain_height_min(20, strength='weak') + ax._poslayoutbox.constrain_width_min(20, strength='weak') + ax._layoutbox.constrain_height_min(20, strength='weak') + ax._layoutbox.constrain_width_min(20, strength='weak') + ax._poslayoutbox.constrain_top_margin(0, strength='weak') + ax._poslayoutbox.constrain_bottom_margin(0, strength='weak') + ax._poslayoutbox.constrain_right_margin(0, strength='weak') + ax._poslayoutbox.constrain_left_margin(0, strength='weak') + + +def _align_spines(fig, gs): + """ + - Align right/left and bottom/top spines of appropriate subplots. + - Compare size of subplotspec including height and width ratios + and make sure that the axes spines are at least as large + as they should be. + """ + # for each gridspec... + nrows, ncols = gs.get_geometry() + width_ratios = gs.get_width_ratios() + height_ratios = gs.get_height_ratios() + if width_ratios is None: + width_ratios = np.ones(ncols) + if height_ratios is None: + height_ratios = np.ones(nrows) + + # get axes in this gridspec.... 
+ axs = [ax for ax in fig.axes + if (hasattr(ax, 'get_subplotspec') + and ax._layoutbox is not None + and ax.get_subplotspec().get_gridspec() == gs)] + rowspans = [] + colspans = [] + heights = [] + widths = [] + + for ax in axs: + ss0 = ax.get_subplotspec() + rowspan = ss0.rowspan + colspan = ss0.colspan + rowspans.append(rowspan) + colspans.append(colspan) + heights.append(sum(height_ratios[rowspan.start:rowspan.stop])) + widths.append(sum(width_ratios[colspan.start:colspan.stop])) + + for idx0, ax0 in enumerate(axs): + # Compare ax to all other axs: If the subplotspecs start (/stop) at + # the same column, then line up their left (/right) sides; likewise + # for rows/top/bottom. + rowspan0 = rowspans[idx0] + colspan0 = colspans[idx0] + height0 = heights[idx0] + width0 = widths[idx0] + alignleft = False + alignright = False + alignbot = False + aligntop = False + alignheight = False + alignwidth = False + for idx1 in range(idx0 + 1, len(axs)): + ax1 = axs[idx1] + rowspan1 = rowspans[idx1] + colspan1 = colspans[idx1] + width1 = widths[idx1] + height1 = heights[idx1] + # Horizontally align axes spines if they have the same min or max: + if not alignleft and colspan0.start == colspan1.start: + _log.debug('same start columns; line up layoutbox lefts') + layoutbox.align([ax0._poslayoutbox, ax1._poslayoutbox], + 'left') + alignleft = True + if not alignright and colspan0.stop == colspan1.stop: + _log.debug('same stop columns; line up layoutbox rights') + layoutbox.align([ax0._poslayoutbox, ax1._poslayoutbox], + 'right') + alignright = True + # Vertically align axes spines if they have the same min or max: + if not aligntop and rowspan0.start == rowspan1.start: + _log.debug('same start rows; line up layoutbox tops') + layoutbox.align([ax0._poslayoutbox, ax1._poslayoutbox], + 'top') + aligntop = True + if not alignbot and rowspan0.stop == rowspan1.stop: + _log.debug('same stop rows; line up layoutbox bottoms') + layoutbox.align([ax0._poslayoutbox, ax1._poslayoutbox], + 'bottom') + alignbot = True + + # Now we make the widths and heights of position boxes + # similar. (i.e the spine locations) + # This allows vertically stacked subplots to have different sizes + # if they occupy different amounts of the gridspec, e.g. if + # gs = gridspec.GridSpec(3, 1) + # ax0 = gs[0, :] + # ax1 = gs[1:, :] + # then len(rowspan0) = 1, and len(rowspan1) = 2, + # and ax1 should be at least twice as large as ax0. + # But it can be more than twice as large because + # it needs less room for the labeling. + + # For heights, do it if the subplots share a column. + if not alignheight and len(rowspan0) == len(rowspan1): + ax0._poslayoutbox.constrain_height( + ax1._poslayoutbox.height * height0 / height1) + alignheight = True + elif _spans_overlap(colspan0, colspan1): + if height0 > height1: + ax0._poslayoutbox.constrain_height_min( + ax1._poslayoutbox.height * height0 / height1) + elif height0 < height1: + ax1._poslayoutbox.constrain_height_min( + ax0._poslayoutbox.height * height1 / height0) + # For widths, do it if the subplots share a row. 
+ if not alignwidth and len(colspan0) == len(colspan1): + ax0._poslayoutbox.constrain_width( + ax1._poslayoutbox.width * width0 / width1) + alignwidth = True + elif _spans_overlap(rowspan0, rowspan1): + if width0 > width1: + ax0._poslayoutbox.constrain_width_min( + ax1._poslayoutbox.width * width0 / width1) + elif width0 < width1: + ax1._poslayoutbox.constrain_width_min( + ax0._poslayoutbox.width * width1 / width0) + + +def _arrange_subplotspecs(gs, hspace=0, wspace=0): + """Recursively arrange the subplotspec children of the given gridspec.""" + sschildren = [] + for child in gs.children: + if child._is_subplotspec_layoutbox(): + for child2 in child.children: + # check for gridspec children... + if child2._is_gridspec_layoutbox(): + _arrange_subplotspecs(child2, hspace=hspace, wspace=wspace) + sschildren += [child] + # now arrange the subplots... + for child0 in sschildren: + ss0 = child0.artist + nrows, ncols = ss0.get_gridspec().get_geometry() + rowspan0 = ss0.rowspan + colspan0 = ss0.colspan + sschildren = sschildren[1:] + for child1 in sschildren: + ss1 = child1.artist + rowspan1 = ss1.rowspan + colspan1 = ss1.colspan + # OK, this tells us the relative layout of child0 with child1. + pad = wspace / ncols + if colspan0.stop <= colspan1.start: + layoutbox.hstack([ss0._layoutbox, ss1._layoutbox], padding=pad) + if colspan1.stop <= colspan0.start: + layoutbox.hstack([ss1._layoutbox, ss0._layoutbox], padding=pad) + # vertical alignment + pad = hspace / nrows + if rowspan0.stop <= rowspan1.start: + layoutbox.vstack([ss0._layoutbox, ss1._layoutbox], padding=pad) + if rowspan1.stop <= rowspan0.start: + layoutbox.vstack([ss1._layoutbox, ss0._layoutbox], padding=pad) + + +def layoutcolorbarsingle(ax, cax, shrink, aspect, location, pad=0.05): + """ + Do the layout for a colorbar, to not overly pollute colorbar.py + + *pad* is in fraction of the original axis size. + """ + axlb = ax._layoutbox + axpos = ax._poslayoutbox + axsslb = ax.get_subplotspec()._layoutbox + lb = layoutbox.LayoutBox( + parent=axsslb, + name=axsslb.name + '.cbar', + artist=cax) + + if location in ('left', 'right'): + lbpos = layoutbox.LayoutBox( + parent=lb, + name=lb.name + '.pos', + tightwidth=False, + pos=True, + subplot=False, + artist=cax) + + if location == 'right': + # arrange to right of parent axis + layoutbox.hstack([axlb, lb], padding=pad * axlb.width, + strength='strong') + else: + layoutbox.hstack([lb, axlb], padding=pad * axlb.width) + # constrain the height and center... + layoutbox.match_heights([axpos, lbpos], [1, shrink]) + layoutbox.align([axpos, lbpos], 'v_center') + # set the width of the pos box + lbpos.constrain_width(shrink * axpos.height * (1/aspect), + strength='strong') + elif location in ('bottom', 'top'): + lbpos = layoutbox.LayoutBox( + parent=lb, + name=lb.name + '.pos', + tightheight=True, + pos=True, + subplot=False, + artist=cax) + + if location == 'bottom': + layoutbox.vstack([axlb, lb], padding=pad * axlb.height) + else: + layoutbox.vstack([lb, axlb], padding=pad * axlb.height) + # constrain the height and center... + layoutbox.match_widths([axpos, lbpos], + [1, shrink], strength='strong') + layoutbox.align([axpos, lbpos], 'h_center') + # set the height of the pos box + lbpos.constrain_height(axpos.width * aspect * shrink, + strength='medium') + + return lb, lbpos + + +def _getmaxminrowcolumn(axs): + """ + Find axes covering the first and last rows and columns of a list of axes. 
+ """ + startrow = startcol = np.inf + stoprow = stopcol = -np.inf + startax_row = startax_col = stopax_row = stopax_col = None + for ax in axs: + subspec = ax.get_subplotspec() + if subspec.rowspan.start < startrow: + startrow = subspec.rowspan.start + startax_row = ax + if subspec.rowspan.stop > stoprow: + stoprow = subspec.rowspan.stop + stopax_row = ax + if subspec.colspan.start < startcol: + startcol = subspec.colspan.start + startax_col = ax + if subspec.colspan.stop > stopcol: + stopcol = subspec.colspan.stop + stopax_col = ax + return (startrow, stoprow - 1, startax_row, stopax_row, + startcol, stopcol - 1, startax_col, stopax_col) + + +def layoutcolorbargridspec(parents, cax, shrink, aspect, location, pad=0.05): + """ + Do the layout for a colorbar, to not overly pollute colorbar.py + + *pad* is in fraction of the original axis size. + """ + + gs = parents[0].get_subplotspec().get_gridspec() + # parent layout box.... + gslb = gs._layoutbox + + lb = layoutbox.LayoutBox(parent=gslb.parent, + name=gslb.parent.name + '.cbar', + artist=cax) + # figure out the row and column extent of the parents. + (minrow, maxrow, minax_row, maxax_row, + mincol, maxcol, minax_col, maxax_col) = _getmaxminrowcolumn(parents) + + if location in ('left', 'right'): + lbpos = layoutbox.LayoutBox( + parent=lb, + name=lb.name + '.pos', + tightwidth=False, + pos=True, + subplot=False, + artist=cax) + for ax in parents: + if location == 'right': + order = [ax._layoutbox, lb] + else: + order = [lb, ax._layoutbox] + layoutbox.hstack(order, padding=pad * gslb.width, + strength='strong') + # constrain the height and center... + # This isn't quite right. We'd like the colorbar + # pos to line up w/ the axes poss, not the size of the + # gs. + + # Horizontal Layout: need to check all the axes in this gridspec + for ch in gslb.children: + subspec = ch.artist + if location == 'right': + if subspec.colspan.stop - 1 <= maxcol: + order = [subspec._layoutbox, lb] + # arrange to right of the parents + elif subspec.colspan.start > maxcol: + order = [lb, subspec._layoutbox] + elif location == 'left': + if subspec.colspan.start >= mincol: + order = [lb, subspec._layoutbox] + elif subspec.colspan.stop - 1 < mincol: + order = [subspec._layoutbox, lb] + layoutbox.hstack(order, padding=pad * gslb.width, + strength='strong') + + # Vertical layout: + maxposlb = minax_row._poslayoutbox + minposlb = maxax_row._poslayoutbox + # now we want the height of the colorbar pos to be + # set by the top and bottom of the min/max axes... + # bottom top + # b t + # h = (top-bottom)*shrink + # b = bottom + (top-bottom - h) / 2. 
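# Quick numeric check of the centering above (illustrative numbers): with
# shrink = 0.8 and the spanned parent axes running bottom = 0.1 .. top = 0.9,
#   h = (0.9 - 0.1) * 0.8            = 0.64
#   b = 0.1 + (0.9 - 0.1 - 0.64) / 2 = 0.18
# so the colorbar spans 0.18 .. 0.82 and shares the parents' vertical center
# at 0.5, which is what the two constraints below encode.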
+ lbpos.constrain_height( + (maxposlb.top - minposlb.bottom) * + shrink, strength='strong') + lbpos.constrain_bottom( + (maxposlb.top - minposlb.bottom) * + (1 - shrink)/2 + minposlb.bottom, + strength='strong') + + # set the width of the pos box + lbpos.constrain_width(lbpos.height * (shrink / aspect), + strength='strong') + elif location in ('bottom', 'top'): + lbpos = layoutbox.LayoutBox( + parent=lb, + name=lb.name + '.pos', + tightheight=True, + pos=True, + subplot=False, + artist=cax) + + for ax in parents: + if location == 'bottom': + order = [ax._layoutbox, lb] + else: + order = [lb, ax._layoutbox] + layoutbox.vstack(order, padding=pad * gslb.width, + strength='strong') + + # Vertical Layout: need to check all the axes in this gridspec + for ch in gslb.children: + subspec = ch.artist + if location == 'bottom': + if subspec.rowspan.stop - 1 <= minrow: + order = [subspec._layoutbox, lb] + elif subspec.rowspan.start > maxrow: + order = [lb, subspec._layoutbox] + elif location == 'top': + if subspec.rowspan.stop - 1 < minrow: + order = [subspec._layoutbox, lb] + elif subspec.rowspan.start >= maxrow: + order = [lb, subspec._layoutbox] + layoutbox.vstack(order, padding=pad * gslb.width, + strength='strong') + + # Do horizontal layout... + maxposlb = maxax_col._poslayoutbox + minposlb = minax_col._poslayoutbox + lbpos.constrain_width((maxposlb.right - minposlb.left) * + shrink) + lbpos.constrain_left( + (maxposlb.right - minposlb.left) * + (1-shrink)/2 + minposlb.left) + # set the height of the pos box + lbpos.constrain_height(lbpos.width * shrink * aspect, + strength='medium') + + return lb, lbpos diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_contour.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/matplotlib/_contour.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..f1947be Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/_contour.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_image.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/matplotlib/_image.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..7a3cbad Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/_image.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_internal_utils.py b/minor_project/lib/python3.6/site-packages/matplotlib/_internal_utils.py new file mode 100644 index 0000000..0223aa5 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_internal_utils.py @@ -0,0 +1,64 @@ +""" +Internal debugging utilities, that are not expected to be used in the rest of +the codebase. + +WARNING: Code in this module may change without prior notice! +""" + +from io import StringIO +from pathlib import Path +import subprocess + +from matplotlib.transforms import TransformNode + + +def graphviz_dump_transform(transform, dest, *, highlight=None): + """ + Generate a graphical representation of the transform tree for *transform* + using the :program:`dot` program (which this function depends on). The + output format (png, dot, etc.) is determined from the suffix of *dest*. + + Parameters + ---------- + transform : `~matplotlib.transform.Transform` + The represented transform. + dest : str + Output filename. The extension must be one of the formats supported + by :program:`dot`, e.g. png, svg, dot, ... 
+ (see https://www.graphviz.org/doc/info/output.html). + highlight : list of `~matplotlib.transform.Transform` or None + The transforms in the tree to be drawn in bold. + If *None*, *transform* is highlighted. + """ + + if highlight is None: + highlight = [transform] + seen = set() + + def recurse(root, buf): + if id(root) in seen: + return + seen.add(id(root)) + props = {} + label = type(root).__name__ + if root._invalid: + label = f'[{label}]' + if root in highlight: + props['style'] = 'bold' + props['shape'] = 'box' + props['label'] = '"%s"' % label + props = ' '.join(map('{0[0]}={0[1]}'.format, props.items())) + buf.write(f'{id(root)} [{props}];\n') + for key, val in vars(root).items(): + if isinstance(val, TransformNode) and id(root) in val._parents: + buf.write(f'"{id(root)}" -> "{id(val)}" ' + f'[label="{key}", fontsize=10];\n') + recurse(val, buf) + + buf = StringIO() + buf.write('digraph G {\n') + recurse(transform, buf) + buf.write('}\n') + subprocess.run( + ['dot', '-T', Path(dest).suffix[1:], '-o', dest], + input=buf.getvalue().encode('utf-8'), check=True) diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_layoutbox.py b/minor_project/lib/python3.6/site-packages/matplotlib/_layoutbox.py new file mode 100644 index 0000000..0afa2e4 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_layoutbox.py @@ -0,0 +1,695 @@ +""" + +Conventions: + +"constrain_x" means to constrain the variable with either +another kiwisolver variable, or a float. i.e. `constrain_width(0.2)` +will set a constraint that the width has to be 0.2 and this constraint is +permanent - i.e. it will not be removed if it becomes obsolete. + +"edit_x" means to set x to a value (just a float), and that this value can +change. So `edit_width(0.2)` will set width to be 0.2, but `edit_width(0.3)` +will allow it to change to 0.3 later. Note that these values are still just +"suggestions" in `kiwisolver` parlance, and could be over-ridden by +other constrains. + +""" + +import itertools +import kiwisolver as kiwi +import logging +import numpy as np + + +_log = logging.getLogger(__name__) + + +# renderers can be complicated +def get_renderer(fig): + if fig._cachedRenderer: + renderer = fig._cachedRenderer + else: + canvas = fig.canvas + if canvas and hasattr(canvas, "get_renderer"): + renderer = canvas.get_renderer() + else: + # not sure if this can happen + # seems to with PDF... + _log.info("constrained_layout : falling back to Agg renderer") + from matplotlib.backends.backend_agg import FigureCanvasAgg + canvas = FigureCanvasAgg(fig) + renderer = canvas.get_renderer() + + return renderer + + +class LayoutBox: + """ + Basic rectangle representation using kiwi solver variables + """ + + def __init__(self, parent=None, name='', tightwidth=False, + tightheight=False, artist=None, + lower_left=(0, 0), upper_right=(1, 1), pos=False, + subplot=False, h_pad=None, w_pad=None): + Variable = kiwi.Variable + self.parent = parent + self.name = name + sn = self.name + '_' + if parent is None: + self.solver = kiwi.Solver() + self.constrained_layout_called = 0 + else: + self.solver = parent.solver + self.constrained_layout_called = None + # parent wants to know about this child! + parent.add_child(self) + # keep track of artist associated w/ this layout. Can be none + self.artist = artist + # keep track if this box is supposed to be a pos that is constrained + # by the parent. + self.pos = pos + # keep track of whether we need to match this subplot up with others. 
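The "constrain_x" versus "edit_x" convention described in the module docstring above maps directly onto the kiwisolver calls this class uses. A minimal standalone sketch of that distinction (variable names here are illustrative, not from the patch):

    import kiwisolver as kiwi

    solver = kiwi.Solver()
    width = kiwi.Variable('width')

    # constrain_x style: a permanent relation, added once and never removed.
    solver.addConstraint((width >= 10) | 'required')

    # edit_x style: an editable variable whose value is only a "suggestion"
    # and can be re-suggested later, or overridden by stronger constraints.
    solver.addEditVariable(width, 'strong')
    solver.suggestValue(width, 5)
    solver.updateVariables()
    print(width.value())   # 10.0 -- the required constraint beats the suggestion

    solver.suggestValue(width, 30)
    solver.updateVariables()
    print(width.value())   # 30.0 -- the suggestion can change over time
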
+ self.subplot = subplot + + self.top = Variable(sn + 'top') + self.bottom = Variable(sn + 'bottom') + self.left = Variable(sn + 'left') + self.right = Variable(sn + 'right') + + self.width = Variable(sn + 'width') + self.height = Variable(sn + 'height') + self.h_center = Variable(sn + 'h_center') + self.v_center = Variable(sn + 'v_center') + + self.min_width = Variable(sn + 'min_width') + self.min_height = Variable(sn + 'min_height') + self.pref_width = Variable(sn + 'pref_width') + self.pref_height = Variable(sn + 'pref_height') + # margins are only used for axes-position layout boxes. maybe should + # be a separate subclass: + self.left_margin = Variable(sn + 'left_margin') + self.right_margin = Variable(sn + 'right_margin') + self.bottom_margin = Variable(sn + 'bottom_margin') + self.top_margin = Variable(sn + 'top_margin') + # mins + self.left_margin_min = Variable(sn + 'left_margin_min') + self.right_margin_min = Variable(sn + 'right_margin_min') + self.bottom_margin_min = Variable(sn + 'bottom_margin_min') + self.top_margin_min = Variable(sn + 'top_margin_min') + + right, top = upper_right + left, bottom = lower_left + self.tightheight = tightheight + self.tightwidth = tightwidth + self.add_constraints() + self.children = [] + self.subplotspec = None + if self.pos: + self.constrain_margins() + self.h_pad = h_pad + self.w_pad = w_pad + + def constrain_margins(self): + """ + Only do this for pos. This sets a variable distance + margin between the position of the axes and the outer edge of + the axes. + + Margins are variable because they change with the figure size. + + Margin minimums are set to make room for axes decorations. However, + the margins can be larger if we are mathicng the position size to + other axes. + """ + sol = self.solver + + # left + if not sol.hasEditVariable(self.left_margin_min): + sol.addEditVariable(self.left_margin_min, 'strong') + sol.suggestValue(self.left_margin_min, 0.0001) + c = (self.left_margin == self.left - self.parent.left) + self.solver.addConstraint(c | 'required') + c = (self.left_margin >= self.left_margin_min) + self.solver.addConstraint(c | 'strong') + + # right + if not sol.hasEditVariable(self.right_margin_min): + sol.addEditVariable(self.right_margin_min, 'strong') + sol.suggestValue(self.right_margin_min, 0.0001) + c = (self.right_margin == self.parent.right - self.right) + self.solver.addConstraint(c | 'required') + c = (self.right_margin >= self.right_margin_min) + self.solver.addConstraint(c | 'required') + # bottom + if not sol.hasEditVariable(self.bottom_margin_min): + sol.addEditVariable(self.bottom_margin_min, 'strong') + sol.suggestValue(self.bottom_margin_min, 0.0001) + c = (self.bottom_margin == self.bottom - self.parent.bottom) + self.solver.addConstraint(c | 'required') + c = (self.bottom_margin >= self.bottom_margin_min) + self.solver.addConstraint(c | 'required') + # top + if not sol.hasEditVariable(self.top_margin_min): + sol.addEditVariable(self.top_margin_min, 'strong') + sol.suggestValue(self.top_margin_min, 0.0001) + c = (self.top_margin == self.parent.top - self.top) + self.solver.addConstraint(c | 'required') + c = (self.top_margin >= self.top_margin_min) + self.solver.addConstraint(c | 'required') + + def add_child(self, child): + self.children += [child] + + def remove_child(self, child): + try: + self.children.remove(child) + except ValueError: + _log.info("Tried to remove child that doesn't belong to parent") + + def add_constraints(self): + sol = self.solver + # never let width and height go negative. 
+ for i in [self.min_width, self.min_height]: + sol.addEditVariable(i, 1e9) + sol.suggestValue(i, 0.0) + # define relation ships between things thing width and right and left + self.hard_constraints() + # self.soft_constraints() + if self.parent: + self.parent_constrain() + # sol.updateVariables() + + def parent_constrain(self): + parent = self.parent + hc = [self.left >= parent.left, + self.bottom >= parent.bottom, + self.top <= parent.top, + self.right <= parent.right] + for c in hc: + self.solver.addConstraint(c | 'required') + + def hard_constraints(self): + hc = [self.width == self.right - self.left, + self.height == self.top - self.bottom, + self.h_center == (self.left + self.right) * 0.5, + self.v_center == (self.top + self.bottom) * 0.5, + self.width >= self.min_width, + self.height >= self.min_height] + for c in hc: + self.solver.addConstraint(c | 'required') + + def soft_constraints(self): + sol = self.solver + if self.tightwidth: + suggest = 0. + else: + suggest = 20. + c = (self.pref_width == suggest) + for i in c: + sol.addConstraint(i | 'required') + if self.tightheight: + suggest = 0. + else: + suggest = 20. + c = (self.pref_height == suggest) + for i in c: + sol.addConstraint(i | 'required') + + c = [(self.width >= suggest), + (self.height >= suggest)] + for i in c: + sol.addConstraint(i | 150000) + + def set_parent(self, parent): + """Replace the parent of this with the new parent.""" + self.parent = parent + self.parent_constrain() + + def constrain_geometry(self, left, bottom, right, top, strength='strong'): + hc = [self.left == left, + self.right == right, + self.bottom == bottom, + self.top == top] + for c in hc: + self.solver.addConstraint(c | strength) + # self.solver.updateVariables() + + def constrain_same(self, other, strength='strong'): + """ + Make the layoutbox have same position as other layoutbox + """ + hc = [self.left == other.left, + self.right == other.right, + self.bottom == other.bottom, + self.top == other.top] + for c in hc: + self.solver.addConstraint(c | strength) + + def constrain_left_margin(self, margin, strength='strong'): + c = (self.left == self.parent.left + margin) + self.solver.addConstraint(c | strength) + + def edit_left_margin_min(self, margin): + self.solver.suggestValue(self.left_margin_min, margin) + + def constrain_right_margin(self, margin, strength='strong'): + c = (self.right == self.parent.right - margin) + self.solver.addConstraint(c | strength) + + def edit_right_margin_min(self, margin): + self.solver.suggestValue(self.right_margin_min, margin) + + def constrain_bottom_margin(self, margin, strength='strong'): + c = (self.bottom == self.parent.bottom + margin) + self.solver.addConstraint(c | strength) + + def edit_bottom_margin_min(self, margin): + self.solver.suggestValue(self.bottom_margin_min, margin) + + def constrain_top_margin(self, margin, strength='strong'): + c = (self.top == self.parent.top - margin) + self.solver.addConstraint(c | strength) + + def edit_top_margin_min(self, margin): + self.solver.suggestValue(self.top_margin_min, margin) + + def get_rect(self): + return (self.left.value(), self.bottom.value(), + self.width.value(), self.height.value()) + + def update_variables(self): + """ + Update *all* the variables that are part of the solver this LayoutBox + is created with. + """ + self.solver.updateVariables() + + def edit_height(self, height, strength='strong'): + """ + Set the height of the layout box. + + This is done as an editable variable so that the value can change + due to resizing. 
+ """ + sol = self.solver + for i in [self.height]: + if not sol.hasEditVariable(i): + sol.addEditVariable(i, strength) + sol.suggestValue(self.height, height) + + def constrain_height(self, height, strength='strong'): + """ + Constrain the height of the layout box. height is + either a float or a layoutbox.height. + """ + c = (self.height == height) + self.solver.addConstraint(c | strength) + + def constrain_height_min(self, height, strength='strong'): + c = (self.height >= height) + self.solver.addConstraint(c | strength) + + def edit_width(self, width, strength='strong'): + sol = self.solver + for i in [self.width]: + if not sol.hasEditVariable(i): + sol.addEditVariable(i, strength) + sol.suggestValue(self.width, width) + + def constrain_width(self, width, strength='strong'): + """ + Constrain the width of the layout box. *width* is + either a float or a layoutbox.width. + """ + c = (self.width == width) + self.solver.addConstraint(c | strength) + + def constrain_width_min(self, width, strength='strong'): + c = (self.width >= width) + self.solver.addConstraint(c | strength) + + def constrain_left(self, left, strength='strong'): + c = (self.left == left) + self.solver.addConstraint(c | strength) + + def constrain_bottom(self, bottom, strength='strong'): + c = (self.bottom == bottom) + self.solver.addConstraint(c | strength) + + def constrain_right(self, right, strength='strong'): + c = (self.right == right) + self.solver.addConstraint(c | strength) + + def constrain_top(self, top, strength='strong'): + c = (self.top == top) + self.solver.addConstraint(c | strength) + + def _is_subplotspec_layoutbox(self): + """ + Helper to check if this layoutbox is the layoutbox of a subplotspec. + """ + name = self.name.split('.')[-1] + return name[:2] == 'ss' + + def _is_gridspec_layoutbox(self): + """ + Helper to check if this layoutbox is the layoutbox of a gridspec. + """ + name = self.name.split('.')[-1] + return name[:8] == 'gridspec' + + def find_child_subplots(self): + """ + Find children of this layout box that are subplots. We want to line + poss up, and this is an easy way to find them all. + """ + if self.subplot: + subplots = [self] + else: + subplots = [] + for child in self.children: + subplots += child.find_child_subplots() + return subplots + + def layout_from_subplotspec(self, subspec, + name='', artist=None, pos=False): + """ + Make a layout box from a subplotspec. The layout box is + constrained to be a fraction of the width/height of the parent, + and be a fraction of the parent width/height from the left/bottom + of the parent. Therefore the parent can move around and the + layout for the subplot spec should move with it. + + The parent is *usually* the gridspec that made the subplotspec.?? + """ + lb = LayoutBox(parent=self, name=name, artist=artist, pos=pos) + gs = subspec.get_gridspec() + nrows, ncols = gs.get_geometry() + parent = self.parent + + # OK, now, we want to set the position of this subplotspec + # based on its subplotspec parameters. The new gridspec will inherit + # from gridspec. prob should be new method in gridspec + left = 0.0 + right = 1.0 + bottom = 0.0 + top = 1.0 + totWidth = right-left + totHeight = top-bottom + hspace = 0. + wspace = 0. 
+ + # calculate accumulated heights of columns + cellH = totHeight / (nrows + hspace * (nrows - 1)) + sepH = hspace * cellH + + if gs._row_height_ratios is not None: + netHeight = cellH * nrows + tr = sum(gs._row_height_ratios) + cellHeights = [netHeight * r / tr for r in gs._row_height_ratios] + else: + cellHeights = [cellH] * nrows + + sepHeights = [0] + ([sepH] * (nrows - 1)) + cellHs = np.cumsum(np.column_stack([sepHeights, cellHeights]).flat) + + # calculate accumulated widths of rows + cellW = totWidth / (ncols + wspace * (ncols - 1)) + sepW = wspace * cellW + + if gs._col_width_ratios is not None: + netWidth = cellW * ncols + tr = sum(gs._col_width_ratios) + cellWidths = [netWidth * r / tr for r in gs._col_width_ratios] + else: + cellWidths = [cellW] * ncols + + sepWidths = [0] + ([sepW] * (ncols - 1)) + cellWs = np.cumsum(np.column_stack([sepWidths, cellWidths]).flat) + + figTops = [top - cellHs[2 * rowNum] for rowNum in range(nrows)] + figBottoms = [top - cellHs[2 * rowNum + 1] for rowNum in range(nrows)] + figLefts = [left + cellWs[2 * colNum] for colNum in range(ncols)] + figRights = [left + cellWs[2 * colNum + 1] for colNum in range(ncols)] + + rowNum1, colNum1 = divmod(subspec.num1, ncols) + rowNum2, colNum2 = divmod(subspec.num2, ncols) + figBottom = min(figBottoms[rowNum1], figBottoms[rowNum2]) + figTop = max(figTops[rowNum1], figTops[rowNum2]) + figLeft = min(figLefts[colNum1], figLefts[colNum2]) + figRight = max(figRights[colNum1], figRights[colNum2]) + + # These are numbers relative to (0, 0, 1, 1). Need to constrain + # relative to parent. + + width = figRight - figLeft + height = figTop - figBottom + parent = self.parent + cs = [self.left == parent.left + parent.width * figLeft, + self.bottom == parent.bottom + parent.height * figBottom, + self.width == parent.width * width, + self.height == parent.height * height] + for c in cs: + self.solver.addConstraint(c | 'required') + + return lb + + def __repr__(self): + return (f'LayoutBox: {self.name:25s}, ' + f'(left: {self.left.value():1.3f}) ' + f'(bot: {self.bottom.value():1.3f}) ' + f'(right: {self.right.value():1.3f}) ' + f'(top: {self.top.value():1.3f})') + + +# Utility functions that act on layoutboxes... +def hstack(boxes, padding=0, strength='strong'): + """ + Stack LayoutBox instances from left to right. + *padding* is in figure-relative units. 
+ """ + + for i in range(1, len(boxes)): + c = (boxes[i-1].right + padding <= boxes[i].left) + boxes[i].solver.addConstraint(c | strength) + + +def hpack(boxes, padding=0, strength='strong'): + """Stack LayoutBox instances from left to right.""" + + for i in range(1, len(boxes)): + c = (boxes[i-1].right + padding == boxes[i].left) + boxes[i].solver.addConstraint(c | strength) + + +def vstack(boxes, padding=0, strength='strong'): + """Stack LayoutBox instances from top to bottom.""" + + for i in range(1, len(boxes)): + c = (boxes[i-1].bottom - padding >= boxes[i].top) + boxes[i].solver.addConstraint(c | strength) + + +def vpack(boxes, padding=0, strength='strong'): + """Stack LayoutBox instances from top to bottom.""" + + for i in range(1, len(boxes)): + c = (boxes[i-1].bottom - padding >= boxes[i].top) + boxes[i].solver.addConstraint(c | strength) + + +def match_heights(boxes, height_ratios=None, strength='medium'): + """Stack LayoutBox instances from top to bottom.""" + + if height_ratios is None: + height_ratios = np.ones(len(boxes)) + for i in range(1, len(boxes)): + c = (boxes[i-1].height == + boxes[i].height*height_ratios[i-1]/height_ratios[i]) + boxes[i].solver.addConstraint(c | strength) + + +def match_widths(boxes, width_ratios=None, strength='medium'): + """Stack LayoutBox instances from top to bottom.""" + + if width_ratios is None: + width_ratios = np.ones(len(boxes)) + for i in range(1, len(boxes)): + c = (boxes[i-1].width == + boxes[i].width*width_ratios[i-1]/width_ratios[i]) + boxes[i].solver.addConstraint(c | strength) + + +def vstackeq(boxes, padding=0, height_ratios=None): + vstack(boxes, padding=padding) + match_heights(boxes, height_ratios=height_ratios) + + +def hstackeq(boxes, padding=0, width_ratios=None): + hstack(boxes, padding=padding) + match_widths(boxes, width_ratios=width_ratios) + + +def align(boxes, attr, strength='strong'): + cons = [] + for box in boxes[1:]: + cons = (getattr(boxes[0], attr) == getattr(box, attr)) + boxes[0].solver.addConstraint(cons | strength) + + +def match_top_margins(boxes, levels=1): + box0 = boxes[0] + top0 = box0 + for n in range(levels): + top0 = top0.parent + for box in boxes[1:]: + topb = box + for n in range(levels): + topb = topb.parent + c = (box0.top-top0.top == box.top-topb.top) + box0.solver.addConstraint(c | 'strong') + + +def match_bottom_margins(boxes, levels=1): + box0 = boxes[0] + top0 = box0 + for n in range(levels): + top0 = top0.parent + for box in boxes[1:]: + topb = box + for n in range(levels): + topb = topb.parent + c = (box0.bottom-top0.bottom == box.bottom-topb.bottom) + box0.solver.addConstraint(c | 'strong') + + +def match_left_margins(boxes, levels=1): + box0 = boxes[0] + top0 = box0 + for n in range(levels): + top0 = top0.parent + for box in boxes[1:]: + topb = box + for n in range(levels): + topb = topb.parent + c = (box0.left-top0.left == box.left-topb.left) + box0.solver.addConstraint(c | 'strong') + + +def match_right_margins(boxes, levels=1): + box0 = boxes[0] + top0 = box0 + for n in range(levels): + top0 = top0.parent + for box in boxes[1:]: + topb = box + for n in range(levels): + topb = topb.parent + c = (box0.right-top0.right == box.right-topb.right) + box0.solver.addConstraint(c | 'strong') + + +def match_width_margins(boxes, levels=1): + match_left_margins(boxes, levels=levels) + match_right_margins(boxes, levels=levels) + + +def match_height_margins(boxes, levels=1): + match_top_margins(boxes, levels=levels) + match_bottom_margins(boxes, levels=levels) + + +def match_margins(boxes, levels=1): 
+ match_width_margins(boxes, levels=levels) + match_height_margins(boxes, levels=levels) + + +_layoutboxobjnum = itertools.count() + + +def seq_id(): + """Generate a short sequential id for layoutbox objects.""" + return '%06d' % next(_layoutboxobjnum) + + +def print_children(lb): + """Print the children of the layoutbox.""" + print(lb) + for child in lb.children: + print_children(child) + + +def nonetree(lb): + """ + Make all elements in this tree None, signalling not to do any more layout. + """ + if lb is not None: + if lb.parent is None: + # Clear the solver. Hopefully this garbage collects. + lb.solver.reset() + nonechildren(lb) + else: + nonetree(lb.parent) + + +def nonechildren(lb): + for child in lb.children: + nonechildren(child) + lb.artist._layoutbox = None + lb = None + + +def print_tree(lb): + """Print the tree of layoutboxes.""" + + if lb.parent is None: + print('LayoutBox Tree\n') + print('==============\n') + print_children(lb) + print('\n') + else: + print_tree(lb.parent) + + +def plot_children(fig, box, level=0, printit=True): + """Simple plotting to show where boxes are.""" + import matplotlib + import matplotlib.pyplot as plt + + if isinstance(fig, matplotlib.figure.Figure): + ax = fig.add_axes([0., 0., 1., 1.]) + ax.set_facecolor([1., 1., 1., 0.7]) + ax.set_alpha(0.3) + fig.draw(fig.canvas.get_renderer()) + else: + ax = fig + + import matplotlib.patches as patches + colors = plt.rcParams["axes.prop_cycle"].by_key()["color"] + if printit: + print("Level:", level) + for child in box.children: + if printit: + print(child) + ax.add_patch( + patches.Rectangle( + (child.left.value(), child.bottom.value()), # (x, y) + child.width.value(), # width + child.height.value(), # height + fc='none', + alpha=0.8, + ec=colors[level] + ) + ) + if level > 0: + name = child.name.split('.')[-1] + if level % 2 == 0: + ax.text(child.left.value(), child.bottom.value(), name, + size=12-level, color=colors[level]) + else: + ax.text(child.right.value(), child.top.value(), name, + ha='right', va='top', size=12-level, + color=colors[level]) + + plot_children(ax, child, level=level+1, printit=printit) diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_mathtext_data.py b/minor_project/lib/python3.6/site-packages/matplotlib/_mathtext_data.py new file mode 100644 index 0000000..8c8a2d0 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_mathtext_data.py @@ -0,0 +1,1397 @@ +""" +font data tables for truetype and afm computer modern fonts +""" + +latex_to_bakoma = { + '\\__sqrt__' : ('cmex10', 0x70), + '\\bigcap' : ('cmex10', 0x5c), + '\\bigcup' : ('cmex10', 0x5b), + '\\bigodot' : ('cmex10', 0x4b), + '\\bigoplus' : ('cmex10', 0x4d), + '\\bigotimes' : ('cmex10', 0x4f), + '\\biguplus' : ('cmex10', 0x5d), + '\\bigvee' : ('cmex10', 0x5f), + '\\bigwedge' : ('cmex10', 0x5e), + '\\coprod' : ('cmex10', 0x61), + '\\int' : ('cmex10', 0x5a), + '\\langle' : ('cmex10', 0xad), + '\\leftangle' : ('cmex10', 0xad), + '\\leftbrace' : ('cmex10', 0xa9), + '\\oint' : ('cmex10', 0x49), + '\\prod' : ('cmex10', 0x59), + '\\rangle' : ('cmex10', 0xae), + '\\rightangle' : ('cmex10', 0xae), + '\\rightbrace' : ('cmex10', 0xaa), + '\\sum' : ('cmex10', 0x58), + '\\widehat' : ('cmex10', 0x62), + '\\widetilde' : ('cmex10', 0x65), + '\\{' : ('cmex10', 0xa9), + '\\}' : ('cmex10', 0xaa), + '{' : ('cmex10', 0xa9), + '}' : ('cmex10', 0xaa), + + ',' : ('cmmi10', 0x3b), + '.' 
: ('cmmi10', 0x3a), + '/' : ('cmmi10', 0x3d), + '<' : ('cmmi10', 0x3c), + '>' : ('cmmi10', 0x3e), + '\\alpha' : ('cmmi10', 0xae), + '\\beta' : ('cmmi10', 0xaf), + '\\chi' : ('cmmi10', 0xc2), + '\\combiningrightarrowabove' : ('cmmi10', 0x7e), + '\\delta' : ('cmmi10', 0xb1), + '\\ell' : ('cmmi10', 0x60), + '\\epsilon' : ('cmmi10', 0xb2), + '\\eta' : ('cmmi10', 0xb4), + '\\flat' : ('cmmi10', 0x5b), + '\\frown' : ('cmmi10', 0x5f), + '\\gamma' : ('cmmi10', 0xb0), + '\\imath' : ('cmmi10', 0x7b), + '\\iota' : ('cmmi10', 0xb6), + '\\jmath' : ('cmmi10', 0x7c), + '\\kappa' : ('cmmi10', 0x2219), + '\\lambda' : ('cmmi10', 0xb8), + '\\leftharpoondown' : ('cmmi10', 0x29), + '\\leftharpoonup' : ('cmmi10', 0x28), + '\\mu' : ('cmmi10', 0xb9), + '\\natural' : ('cmmi10', 0x5c), + '\\nu' : ('cmmi10', 0xba), + '\\omega' : ('cmmi10', 0x21), + '\\phi' : ('cmmi10', 0xc1), + '\\pi' : ('cmmi10', 0xbc), + '\\psi' : ('cmmi10', 0xc3), + '\\rho' : ('cmmi10', 0xbd), + '\\rightharpoondown' : ('cmmi10', 0x2b), + '\\rightharpoonup' : ('cmmi10', 0x2a), + '\\sharp' : ('cmmi10', 0x5d), + '\\sigma' : ('cmmi10', 0xbe), + '\\smile' : ('cmmi10', 0x5e), + '\\tau' : ('cmmi10', 0xbf), + '\\theta' : ('cmmi10', 0xb5), + '\\triangleleft' : ('cmmi10', 0x2f), + '\\triangleright' : ('cmmi10', 0x2e), + '\\upsilon' : ('cmmi10', 0xc0), + '\\varepsilon' : ('cmmi10', 0x22), + '\\varphi' : ('cmmi10', 0x27), + '\\varrho' : ('cmmi10', 0x25), + '\\varsigma' : ('cmmi10', 0x26), + '\\vartheta' : ('cmmi10', 0x23), + '\\wp' : ('cmmi10', 0x7d), + '\\xi' : ('cmmi10', 0xbb), + '\\zeta' : ('cmmi10', 0xb3), + + '!' : ('cmr10', 0x21), + '%' : ('cmr10', 0x25), + '&' : ('cmr10', 0x26), + '(' : ('cmr10', 0x28), + ')' : ('cmr10', 0x29), + '+' : ('cmr10', 0x2b), + '0' : ('cmr10', 0x30), + '1' : ('cmr10', 0x31), + '2' : ('cmr10', 0x32), + '3' : ('cmr10', 0x33), + '4' : ('cmr10', 0x34), + '5' : ('cmr10', 0x35), + '6' : ('cmr10', 0x36), + '7' : ('cmr10', 0x37), + '8' : ('cmr10', 0x38), + '9' : ('cmr10', 0x39), + ':' : ('cmr10', 0x3a), + ';' : ('cmr10', 0x3b), + '=' : ('cmr10', 0x3d), + '?' 
: ('cmr10', 0x3f), + '@' : ('cmr10', 0x40), + '[' : ('cmr10', 0x5b), + '\\#' : ('cmr10', 0x23), + '\\$' : ('cmr10', 0x24), + '\\%' : ('cmr10', 0x25), + '\\Delta' : ('cmr10', 0xa2), + '\\Gamma' : ('cmr10', 0xa1), + '\\Lambda' : ('cmr10', 0xa4), + '\\Omega' : ('cmr10', 0xad), + '\\Phi' : ('cmr10', 0xa9), + '\\Pi' : ('cmr10', 0xa6), + '\\Psi' : ('cmr10', 0xaa), + '\\Sigma' : ('cmr10', 0xa7), + '\\Theta' : ('cmr10', 0xa3), + '\\Upsilon' : ('cmr10', 0xa8), + '\\Xi' : ('cmr10', 0xa5), + '\\circumflexaccent' : ('cmr10', 0x5e), + '\\combiningacuteaccent' : ('cmr10', 0xb6), + '\\combiningbreve' : ('cmr10', 0xb8), + '\\combiningdiaeresis' : ('cmr10', 0xc4), + '\\combiningdotabove' : ('cmr10', 0x5f), + '\\combininggraveaccent' : ('cmr10', 0xb5), + '\\combiningoverline' : ('cmr10', 0xb9), + '\\combiningtilde' : ('cmr10', 0x7e), + '\\leftbracket' : ('cmr10', 0x5b), + '\\leftparen' : ('cmr10', 0x28), + '\\rightbracket' : ('cmr10', 0x5d), + '\\rightparen' : ('cmr10', 0x29), + '\\widebar' : ('cmr10', 0xb9), + ']' : ('cmr10', 0x5d), + + '*' : ('cmsy10', 0xa4), + '-' : ('cmsy10', 0xa1), + '\\Downarrow' : ('cmsy10', 0x2b), + '\\Im' : ('cmsy10', 0x3d), + '\\Leftarrow' : ('cmsy10', 0x28), + '\\Leftrightarrow' : ('cmsy10', 0x2c), + '\\P' : ('cmsy10', 0x7b), + '\\Re' : ('cmsy10', 0x3c), + '\\Rightarrow' : ('cmsy10', 0x29), + '\\S' : ('cmsy10', 0x78), + '\\Uparrow' : ('cmsy10', 0x2a), + '\\Updownarrow' : ('cmsy10', 0x6d), + '\\Vert' : ('cmsy10', 0x6b), + '\\aleph' : ('cmsy10', 0x40), + '\\approx' : ('cmsy10', 0xbc), + '\\ast' : ('cmsy10', 0xa4), + '\\asymp' : ('cmsy10', 0xb3), + '\\backslash' : ('cmsy10', 0x6e), + '\\bigcirc' : ('cmsy10', 0xb0), + '\\bigtriangledown' : ('cmsy10', 0x35), + '\\bigtriangleup' : ('cmsy10', 0x34), + '\\bot' : ('cmsy10', 0x3f), + '\\bullet' : ('cmsy10', 0xb2), + '\\cap' : ('cmsy10', 0x5c), + '\\cdot' : ('cmsy10', 0xa2), + '\\circ' : ('cmsy10', 0xb1), + '\\clubsuit' : ('cmsy10', 0x7c), + '\\cup' : ('cmsy10', 0x5b), + '\\dag' : ('cmsy10', 0x79), + '\\dashv' : ('cmsy10', 0x61), + '\\ddag' : ('cmsy10', 0x7a), + '\\diamond' : ('cmsy10', 0xa6), + '\\diamondsuit' : ('cmsy10', 0x7d), + '\\div' : ('cmsy10', 0xa5), + '\\downarrow' : ('cmsy10', 0x23), + '\\emptyset' : ('cmsy10', 0x3b), + '\\equiv' : ('cmsy10', 0xb4), + '\\exists' : ('cmsy10', 0x39), + '\\forall' : ('cmsy10', 0x38), + '\\geq' : ('cmsy10', 0xb8), + '\\gg' : ('cmsy10', 0xc0), + '\\heartsuit' : ('cmsy10', 0x7e), + '\\in' : ('cmsy10', 0x32), + '\\infty' : ('cmsy10', 0x31), + '\\lbrace' : ('cmsy10', 0x66), + '\\lceil' : ('cmsy10', 0x64), + '\\leftarrow' : ('cmsy10', 0xc3), + '\\leftrightarrow' : ('cmsy10', 0x24), + '\\leq' : ('cmsy10', 0x2219), + '\\lfloor' : ('cmsy10', 0x62), + '\\ll' : ('cmsy10', 0xbf), + '\\mid' : ('cmsy10', 0x6a), + '\\mp' : ('cmsy10', 0xa8), + '\\nabla' : ('cmsy10', 0x72), + '\\nearrow' : ('cmsy10', 0x25), + '\\neg' : ('cmsy10', 0x3a), + '\\ni' : ('cmsy10', 0x33), + '\\nwarrow' : ('cmsy10', 0x2d), + '\\odot' : ('cmsy10', 0xaf), + '\\ominus' : ('cmsy10', 0xaa), + '\\oplus' : ('cmsy10', 0xa9), + '\\oslash' : ('cmsy10', 0xae), + '\\otimes' : ('cmsy10', 0xad), + '\\pm' : ('cmsy10', 0xa7), + '\\prec' : ('cmsy10', 0xc1), + '\\preceq' : ('cmsy10', 0xb9), + '\\prime' : ('cmsy10', 0x30), + '\\propto' : ('cmsy10', 0x2f), + '\\rbrace' : ('cmsy10', 0x67), + '\\rceil' : ('cmsy10', 0x65), + '\\rfloor' : ('cmsy10', 0x63), + '\\rightarrow' : ('cmsy10', 0x21), + '\\searrow' : ('cmsy10', 0x26), + '\\sim' : ('cmsy10', 0xbb), + '\\simeq' : ('cmsy10', 0x27), + '\\slash' : ('cmsy10', 0x36), + '\\spadesuit' : ('cmsy10', 0xc4), + 
'\\sqcap' : ('cmsy10', 0x75), + '\\sqcup' : ('cmsy10', 0x74), + '\\sqsubseteq' : ('cmsy10', 0x76), + '\\sqsupseteq' : ('cmsy10', 0x77), + '\\subset' : ('cmsy10', 0xbd), + '\\subseteq' : ('cmsy10', 0xb5), + '\\succ' : ('cmsy10', 0xc2), + '\\succeq' : ('cmsy10', 0xba), + '\\supset' : ('cmsy10', 0xbe), + '\\supseteq' : ('cmsy10', 0xb6), + '\\swarrow' : ('cmsy10', 0x2e), + '\\times' : ('cmsy10', 0xa3), + '\\to' : ('cmsy10', 0x21), + '\\top' : ('cmsy10', 0x3e), + '\\uparrow' : ('cmsy10', 0x22), + '\\updownarrow' : ('cmsy10', 0x6c), + '\\uplus' : ('cmsy10', 0x5d), + '\\vdash' : ('cmsy10', 0x60), + '\\vee' : ('cmsy10', 0x5f), + '\\vert' : ('cmsy10', 0x6a), + '\\wedge' : ('cmsy10', 0x5e), + '\\wr' : ('cmsy10', 0x6f), + '\\|' : ('cmsy10', 0x6b), + '|' : ('cmsy10', 0x6a), + + '\\_' : ('cmtt10', 0x5f) +} + +latex_to_cmex = { + r'\__sqrt__' : 112, + r'\bigcap' : 92, + r'\bigcup' : 91, + r'\bigodot' : 75, + r'\bigoplus' : 77, + r'\bigotimes' : 79, + r'\biguplus' : 93, + r'\bigvee' : 95, + r'\bigwedge' : 94, + r'\coprod' : 97, + r'\int' : 90, + r'\leftangle' : 173, + r'\leftbrace' : 169, + r'\oint' : 73, + r'\prod' : 89, + r'\rightangle' : 174, + r'\rightbrace' : 170, + r'\sum' : 88, + r'\widehat' : 98, + r'\widetilde' : 101, +} + +latex_to_standard = { + r'\cong' : ('psyr', 64), + r'\Delta' : ('psyr', 68), + r'\Phi' : ('psyr', 70), + r'\Gamma' : ('psyr', 89), + r'\alpha' : ('psyr', 97), + r'\beta' : ('psyr', 98), + r'\chi' : ('psyr', 99), + r'\delta' : ('psyr', 100), + r'\varepsilon' : ('psyr', 101), + r'\phi' : ('psyr', 102), + r'\gamma' : ('psyr', 103), + r'\eta' : ('psyr', 104), + r'\iota' : ('psyr', 105), + r'\varpsi' : ('psyr', 106), + r'\kappa' : ('psyr', 108), + r'\nu' : ('psyr', 110), + r'\pi' : ('psyr', 112), + r'\theta' : ('psyr', 113), + r'\rho' : ('psyr', 114), + r'\sigma' : ('psyr', 115), + r'\tau' : ('psyr', 116), + r'\upsilon' : ('psyr', 117), + r'\varpi' : ('psyr', 118), + r'\omega' : ('psyr', 119), + r'\xi' : ('psyr', 120), + r'\psi' : ('psyr', 121), + r'\zeta' : ('psyr', 122), + r'\sim' : ('psyr', 126), + r'\leq' : ('psyr', 163), + r'\infty' : ('psyr', 165), + r'\clubsuit' : ('psyr', 167), + r'\diamondsuit' : ('psyr', 168), + r'\heartsuit' : ('psyr', 169), + r'\spadesuit' : ('psyr', 170), + r'\leftrightarrow' : ('psyr', 171), + r'\leftarrow' : ('psyr', 172), + r'\uparrow' : ('psyr', 173), + r'\rightarrow' : ('psyr', 174), + r'\downarrow' : ('psyr', 175), + r'\pm' : ('psyr', 176), + r'\geq' : ('psyr', 179), + r'\times' : ('psyr', 180), + r'\propto' : ('psyr', 181), + r'\partial' : ('psyr', 182), + r'\bullet' : ('psyr', 183), + r'\div' : ('psyr', 184), + r'\neq' : ('psyr', 185), + r'\equiv' : ('psyr', 186), + r'\approx' : ('psyr', 187), + r'\ldots' : ('psyr', 188), + r'\aleph' : ('psyr', 192), + r'\Im' : ('psyr', 193), + r'\Re' : ('psyr', 194), + r'\wp' : ('psyr', 195), + r'\otimes' : ('psyr', 196), + r'\oplus' : ('psyr', 197), + r'\oslash' : ('psyr', 198), + r'\cap' : ('psyr', 199), + r'\cup' : ('psyr', 200), + r'\supset' : ('psyr', 201), + r'\supseteq' : ('psyr', 202), + r'\subset' : ('psyr', 204), + r'\subseteq' : ('psyr', 205), + r'\in' : ('psyr', 206), + r'\notin' : ('psyr', 207), + r'\angle' : ('psyr', 208), + r'\nabla' : ('psyr', 209), + r'\textregistered' : ('psyr', 210), + r'\copyright' : ('psyr', 211), + r'\texttrademark' : ('psyr', 212), + r'\Pi' : ('psyr', 213), + r'\prod' : ('psyr', 213), + r'\surd' : ('psyr', 214), + r'\__sqrt__' : ('psyr', 214), + r'\cdot' : ('psyr', 215), + r'\urcorner' : ('psyr', 216), + r'\vee' : ('psyr', 217), + r'\wedge' : ('psyr', 218), + 
r'\Leftrightarrow' : ('psyr', 219), + r'\Leftarrow' : ('psyr', 220), + r'\Uparrow' : ('psyr', 221), + r'\Rightarrow' : ('psyr', 222), + r'\Downarrow' : ('psyr', 223), + r'\Diamond' : ('psyr', 224), + r'\Sigma' : ('psyr', 229), + r'\sum' : ('psyr', 229), + r'\forall' : ('psyr', 34), + r'\exists' : ('psyr', 36), + r'\lceil' : ('psyr', 233), + r'\lbrace' : ('psyr', 123), + r'\Psi' : ('psyr', 89), + r'\bot' : ('psyr', 0o136), + r'\Omega' : ('psyr', 0o127), + r'\leftbracket' : ('psyr', 0o133), + r'\rightbracket' : ('psyr', 0o135), + r'\leftbrace' : ('psyr', 123), + r'\leftparen' : ('psyr', 0o50), + r'\prime' : ('psyr', 0o242), + r'\sharp' : ('psyr', 0o43), + r'\slash' : ('psyr', 0o57), + r'\Lamda' : ('psyr', 0o114), + r'\neg' : ('psyr', 0o330), + r'\Upsilon' : ('psyr', 0o241), + r'\rightbrace' : ('psyr', 0o175), + r'\rfloor' : ('psyr', 0o373), + r'\lambda' : ('psyr', 0o154), + r'\to' : ('psyr', 0o256), + r'\Xi' : ('psyr', 0o130), + r'\emptyset' : ('psyr', 0o306), + r'\lfloor' : ('psyr', 0o353), + r'\rightparen' : ('psyr', 0o51), + r'\rceil' : ('psyr', 0o371), + r'\ni' : ('psyr', 0o47), + r'\epsilon' : ('psyr', 0o145), + r'\Theta' : ('psyr', 0o121), + r'\langle' : ('psyr', 0o341), + r'\leftangle' : ('psyr', 0o341), + r'\rangle' : ('psyr', 0o361), + r'\rightangle' : ('psyr', 0o361), + r'\rbrace' : ('psyr', 0o175), + r'\circ' : ('psyr', 0o260), + r'\diamond' : ('psyr', 0o340), + r'\mu' : ('psyr', 0o155), + r'\mid' : ('psyr', 0o352), + r'\imath' : ('pncri8a', 105), + r'\%' : ('pncr8a', 37), + r'\$' : ('pncr8a', 36), + r'\{' : ('pncr8a', 123), + r'\}' : ('pncr8a', 125), + r'\backslash' : ('pncr8a', 92), + r'\ast' : ('pncr8a', 42), + r'\#' : ('pncr8a', 35), + + r'\circumflexaccent' : ('pncri8a', 124), # for \hat + r'\combiningbreve' : ('pncri8a', 81), # for \breve + r'\combininggraveaccent' : ('pncri8a', 114), # for \grave + r'\combiningacuteaccent' : ('pncri8a', 63), # for \accute + r'\combiningdiaeresis' : ('pncri8a', 91), # for \ddot + r'\combiningtilde' : ('pncri8a', 75), # for \tilde + r'\combiningrightarrowabove' : ('pncri8a', 110), # for \vec + r'\combiningdotabove' : ('pncri8a', 26), # for \dot +} + +# Automatically generated. 
+ +type12uni = { + 'aring' : 229, + 'quotedblright' : 8221, + 'V' : 86, + 'dollar' : 36, + 'four' : 52, + 'Yacute' : 221, + 'P' : 80, + 'underscore' : 95, + 'p' : 112, + 'Otilde' : 213, + 'perthousand' : 8240, + 'zero' : 48, + 'dotlessi' : 305, + 'Scaron' : 352, + 'zcaron' : 382, + 'egrave' : 232, + 'section' : 167, + 'Icircumflex' : 206, + 'ntilde' : 241, + 'ampersand' : 38, + 'dotaccent' : 729, + 'degree' : 176, + 'K' : 75, + 'acircumflex' : 226, + 'Aring' : 197, + 'k' : 107, + 'smalltilde' : 732, + 'Agrave' : 192, + 'divide' : 247, + 'ocircumflex' : 244, + 'asciitilde' : 126, + 'two' : 50, + 'E' : 69, + 'scaron' : 353, + 'F' : 70, + 'bracketleft' : 91, + 'asciicircum' : 94, + 'f' : 102, + 'ordmasculine' : 186, + 'mu' : 181, + 'paragraph' : 182, + 'nine' : 57, + 'v' : 118, + 'guilsinglleft' : 8249, + 'backslash' : 92, + 'six' : 54, + 'A' : 65, + 'icircumflex' : 238, + 'a' : 97, + 'ogonek' : 731, + 'q' : 113, + 'oacute' : 243, + 'ograve' : 242, + 'edieresis' : 235, + 'comma' : 44, + 'otilde' : 245, + 'guillemotright' : 187, + 'ecircumflex' : 234, + 'greater' : 62, + 'uacute' : 250, + 'L' : 76, + 'bullet' : 8226, + 'cedilla' : 184, + 'ydieresis' : 255, + 'l' : 108, + 'logicalnot' : 172, + 'exclamdown' : 161, + 'endash' : 8211, + 'agrave' : 224, + 'Adieresis' : 196, + 'germandbls' : 223, + 'Odieresis' : 214, + 'space' : 32, + 'quoteright' : 8217, + 'ucircumflex' : 251, + 'G' : 71, + 'quoteleft' : 8216, + 'W' : 87, + 'Q' : 81, + 'g' : 103, + 'w' : 119, + 'question' : 63, + 'one' : 49, + 'ring' : 730, + 'figuredash' : 8210, + 'B' : 66, + 'iacute' : 237, + 'Ydieresis' : 376, + 'R' : 82, + 'b' : 98, + 'r' : 114, + 'Ccedilla' : 199, + 'minus' : 8722, + 'Lslash' : 321, + 'Uacute' : 218, + 'yacute' : 253, + 'Ucircumflex' : 219, + 'quotedbl' : 34, + 'onehalf' : 189, + 'Thorn' : 222, + 'M' : 77, + 'eight' : 56, + 'multiply' : 215, + 'grave' : 96, + 'Ocircumflex' : 212, + 'm' : 109, + 'Ugrave' : 217, + 'guilsinglright' : 8250, + 'Ntilde' : 209, + 'questiondown' : 191, + 'Atilde' : 195, + 'ccedilla' : 231, + 'Z' : 90, + 'copyright' : 169, + 'yen' : 165, + 'Eacute' : 201, + 'H' : 72, + 'X' : 88, + 'Idieresis' : 207, + 'bar' : 124, + 'h' : 104, + 'x' : 120, + 'udieresis' : 252, + 'ordfeminine' : 170, + 'braceleft' : 123, + 'macron' : 175, + 'atilde' : 227, + 'Acircumflex' : 194, + 'Oslash' : 216, + 'C' : 67, + 'quotedblleft' : 8220, + 'S' : 83, + 'exclam' : 33, + 'Zcaron' : 381, + 'equal' : 61, + 's' : 115, + 'eth' : 240, + 'Egrave' : 200, + 'hyphen' : 45, + 'period' : 46, + 'igrave' : 236, + 'colon' : 58, + 'Ecircumflex' : 202, + 'trademark' : 8482, + 'Aacute' : 193, + 'cent' : 162, + 'lslash' : 322, + 'c' : 99, + 'N' : 78, + 'breve' : 728, + 'Oacute' : 211, + 'guillemotleft' : 171, + 'n' : 110, + 'idieresis' : 239, + 'braceright' : 125, + 'seven' : 55, + 'brokenbar' : 166, + 'ugrave' : 249, + 'periodcentered' : 183, + 'sterling' : 163, + 'I' : 73, + 'Y' : 89, + 'Eth' : 208, + 'emdash' : 8212, + 'i' : 105, + 'daggerdbl' : 8225, + 'y' : 121, + 'plusminus' : 177, + 'less' : 60, + 'Udieresis' : 220, + 'D' : 68, + 'five' : 53, + 'T' : 84, + 'oslash' : 248, + 'acute' : 180, + 'd' : 100, + 'OE' : 338, + 'Igrave' : 204, + 't' : 116, + 'parenright' : 41, + 'adieresis' : 228, + 'quotesingle' : 39, + 'twodotenleader' : 8229, + 'slash' : 47, + 'ellipsis' : 8230, + 'numbersign' : 35, + 'odieresis' : 246, + 'O' : 79, + 'oe' : 339, + 'o' : 111, + 'Edieresis' : 203, + 'plus' : 43, + 'dagger' : 8224, + 'three' : 51, + 'hungarumlaut' : 733, + 'parenleft' : 40, + 'fraction' : 8260, + 'registered' : 174, + 'J' : 74, + 
'dieresis' : 168, + 'Ograve' : 210, + 'j' : 106, + 'z' : 122, + 'ae' : 230, + 'semicolon' : 59, + 'at' : 64, + 'Iacute' : 205, + 'percent' : 37, + 'bracketright' : 93, + 'AE' : 198, + 'asterisk' : 42, + 'aacute' : 225, + 'U' : 85, + 'eacute' : 233, + 'e' : 101, + 'thorn' : 254, + 'u' : 117, +} + +uni2type1 = {v: k for k, v in type12uni.items()} + +tex2uni = { + 'widehat' : 0x0302, + 'widetilde' : 0x0303, + 'widebar' : 0x0305, + 'langle' : 0x27e8, + 'rangle' : 0x27e9, + 'perp' : 0x27c2, + 'neq' : 0x2260, + 'Join' : 0x2a1d, + 'leqslant' : 0x2a7d, + 'geqslant' : 0x2a7e, + 'lessapprox' : 0x2a85, + 'gtrapprox' : 0x2a86, + 'lesseqqgtr' : 0x2a8b, + 'gtreqqless' : 0x2a8c, + 'triangleeq' : 0x225c, + 'eqslantless' : 0x2a95, + 'eqslantgtr' : 0x2a96, + 'backepsilon' : 0x03f6, + 'precapprox' : 0x2ab7, + 'succapprox' : 0x2ab8, + 'fallingdotseq' : 0x2252, + 'subseteqq' : 0x2ac5, + 'supseteqq' : 0x2ac6, + 'varpropto' : 0x221d, + 'precnapprox' : 0x2ab9, + 'succnapprox' : 0x2aba, + 'subsetneqq' : 0x2acb, + 'supsetneqq' : 0x2acc, + 'lnapprox' : 0x2ab9, + 'gnapprox' : 0x2aba, + 'longleftarrow' : 0x27f5, + 'longrightarrow' : 0x27f6, + 'longleftrightarrow' : 0x27f7, + 'Longleftarrow' : 0x27f8, + 'Longrightarrow' : 0x27f9, + 'Longleftrightarrow' : 0x27fa, + 'longmapsto' : 0x27fc, + 'leadsto' : 0x21dd, + 'dashleftarrow' : 0x290e, + 'dashrightarrow' : 0x290f, + 'circlearrowleft' : 0x21ba, + 'circlearrowright' : 0x21bb, + 'leftrightsquigarrow' : 0x21ad, + 'leftsquigarrow' : 0x219c, + 'rightsquigarrow' : 0x219d, + 'Game' : 0x2141, + 'hbar' : 0x0127, + 'hslash' : 0x210f, + 'ldots' : 0x2026, + 'vdots' : 0x22ee, + 'doteqdot' : 0x2251, + 'doteq' : 8784, + 'partial' : 8706, + 'gg' : 8811, + 'asymp' : 8781, + 'blacktriangledown' : 9662, + 'otimes' : 8855, + 'nearrow' : 8599, + 'varpi' : 982, + 'vee' : 8744, + 'vec' : 8407, + 'smile' : 8995, + 'succnsim' : 8937, + 'gimel' : 8503, + 'vert' : 124, + '|' : 124, + 'varrho' : 1009, + 'P' : 182, + 'approxident' : 8779, + 'Swarrow' : 8665, + 'textasciicircum' : 94, + 'imageof' : 8887, + 'ntriangleleft' : 8938, + 'nleq' : 8816, + 'div' : 247, + 'nparallel' : 8742, + 'Leftarrow' : 8656, + 'lll' : 8920, + 'oiint' : 8751, + 'ngeq' : 8817, + 'Theta' : 920, + 'origof' : 8886, + 'blacksquare' : 9632, + 'solbar' : 9023, + 'neg' : 172, + 'sum' : 8721, + 'Vdash' : 8873, + 'coloneq' : 8788, + 'degree' : 176, + 'bowtie' : 8904, + 'blacktriangleright' : 9654, + 'varsigma' : 962, + 'leq' : 8804, + 'ggg' : 8921, + 'lneqq' : 8808, + 'scurel' : 8881, + 'stareq' : 8795, + 'BbbN' : 8469, + 'nLeftarrow' : 8653, + 'nLeftrightarrow' : 8654, + 'k' : 808, + 'bot' : 8869, + 'BbbC' : 8450, + 'Lsh' : 8624, + 'leftleftarrows' : 8647, + 'BbbZ' : 8484, + 'digamma' : 989, + 'BbbR' : 8477, + 'BbbP' : 8473, + 'BbbQ' : 8474, + 'vartriangleright' : 8883, + 'succsim' : 8831, + 'wedge' : 8743, + 'lessgtr' : 8822, + 'veebar' : 8891, + 'mapsdown' : 8615, + 'Rsh' : 8625, + 'chi' : 967, + 'prec' : 8826, + 'nsubseteq' : 8840, + 'therefore' : 8756, + 'eqcirc' : 8790, + 'textexclamdown' : 161, + 'nRightarrow' : 8655, + 'flat' : 9837, + 'notin' : 8713, + 'llcorner' : 8990, + 'varepsilon' : 949, + 'bigtriangleup' : 9651, + 'aleph' : 8501, + 'dotminus' : 8760, + 'upsilon' : 965, + 'Lambda' : 923, + 'cap' : 8745, + 'barleftarrow' : 8676, + 'mu' : 956, + 'boxplus' : 8862, + 'mp' : 8723, + 'circledast' : 8859, + 'tau' : 964, + 'in' : 8712, + 'backslash' : 92, + 'varnothing' : 8709, + 'sharp' : 9839, + 'eqsim' : 8770, + 'gnsim' : 8935, + 'Searrow' : 8664, + 'updownarrows' : 8645, + 'heartsuit' : 9825, + 'trianglelefteq' : 8884, + 
'ddag' : 8225, + 'sqsubseteq' : 8849, + 'mapsfrom' : 8612, + 'boxbar' : 9707, + 'sim' : 8764, + 'Nwarrow' : 8662, + 'nequiv' : 8802, + 'succ' : 8827, + 'vdash' : 8866, + 'Leftrightarrow' : 8660, + 'parallel' : 8741, + 'invnot' : 8976, + 'natural' : 9838, + 'ss' : 223, + 'uparrow' : 8593, + 'nsim' : 8769, + 'hookrightarrow' : 8618, + 'Equiv' : 8803, + 'approx' : 8776, + 'Vvdash' : 8874, + 'nsucc' : 8833, + 'leftrightharpoons' : 8651, + 'Re' : 8476, + 'boxminus' : 8863, + 'equiv' : 8801, + 'Lleftarrow' : 8666, + 'll' : 8810, + 'Cup' : 8915, + 'measeq' : 8798, + 'upharpoonleft' : 8639, + 'lq' : 8216, + 'Upsilon' : 933, + 'subsetneq' : 8842, + 'greater' : 62, + 'supsetneq' : 8843, + 'Cap' : 8914, + 'L' : 321, + 'spadesuit' : 9824, + 'lrcorner' : 8991, + 'not' : 824, + 'bar' : 772, + 'rightharpoonaccent' : 8401, + 'boxdot' : 8865, + 'l' : 322, + 'leftharpoondown' : 8637, + 'bigcup' : 8899, + 'iint' : 8748, + 'bigwedge' : 8896, + 'downharpoonleft' : 8643, + 'textasciitilde' : 126, + 'subset' : 8834, + 'leqq' : 8806, + 'mapsup' : 8613, + 'nvDash' : 8877, + 'looparrowleft' : 8619, + 'nless' : 8814, + 'rightarrowbar' : 8677, + 'Vert' : 8214, + 'downdownarrows' : 8650, + 'uplus' : 8846, + 'simeq' : 8771, + 'napprox' : 8777, + 'ast' : 8727, + 'twoheaduparrow' : 8607, + 'doublebarwedge' : 8966, + 'Sigma' : 931, + 'leftharpoonaccent' : 8400, + 'ntrianglelefteq' : 8940, + 'nexists' : 8708, + 'times' : 215, + 'measuredangle' : 8737, + 'bumpeq' : 8783, + 'carriagereturn' : 8629, + 'adots' : 8944, + 'checkmark' : 10003, + 'lambda' : 955, + 'xi' : 958, + 'rbrace' : 125, + 'rbrack' : 93, + 'Nearrow' : 8663, + 'maltese' : 10016, + 'clubsuit' : 9827, + 'top' : 8868, + 'overarc' : 785, + 'varphi' : 966, + 'Delta' : 916, + 'iota' : 953, + 'nleftarrow' : 8602, + 'candra' : 784, + 'supset' : 8835, + 'triangleleft' : 9665, + 'gtreqless' : 8923, + 'ntrianglerighteq' : 8941, + 'quad' : 8195, + 'Xi' : 926, + 'gtrdot' : 8919, + 'leftthreetimes' : 8907, + 'minus' : 8722, + 'preccurlyeq' : 8828, + 'nleftrightarrow' : 8622, + 'lambdabar' : 411, + 'blacktriangle' : 9652, + 'kernelcontraction' : 8763, + 'Phi' : 934, + 'angle' : 8736, + 'spadesuitopen' : 9828, + 'eqless' : 8924, + 'mid' : 8739, + 'varkappa' : 1008, + 'Ldsh' : 8626, + 'updownarrow' : 8597, + 'beta' : 946, + 'textquotedblleft' : 8220, + 'rho' : 961, + 'alpha' : 945, + 'intercal' : 8890, + 'beth' : 8502, + 'grave' : 768, + 'acwopencirclearrow' : 8634, + 'nmid' : 8740, + 'nsupset' : 8837, + 'sigma' : 963, + 'dot' : 775, + 'Rightarrow' : 8658, + 'turnednot' : 8985, + 'backsimeq' : 8909, + 'leftarrowtail' : 8610, + 'approxeq' : 8778, + 'curlyeqsucc' : 8927, + 'rightarrowtail' : 8611, + 'Psi' : 936, + 'copyright' : 169, + 'yen' : 165, + 'vartriangleleft' : 8882, + 'rasp' : 700, + 'triangleright' : 9655, + 'precsim' : 8830, + 'infty' : 8734, + 'geq' : 8805, + 'updownarrowbar' : 8616, + 'precnsim' : 8936, + 'H' : 779, + 'ulcorner' : 8988, + 'looparrowright' : 8620, + 'ncong' : 8775, + 'downarrow' : 8595, + 'circeq' : 8791, + 'subseteq' : 8838, + 'bigstar' : 9733, + 'prime' : 8242, + 'lceil' : 8968, + 'Rrightarrow' : 8667, + 'oiiint' : 8752, + 'curlywedge' : 8911, + 'vDash' : 8872, + 'lfloor' : 8970, + 'ddots' : 8945, + 'exists' : 8707, + 'underbar' : 817, + 'Pi' : 928, + 'leftrightarrows' : 8646, + 'sphericalangle' : 8738, + 'coprod' : 8720, + 'circledcirc' : 8858, + 'gtrsim' : 8819, + 'gneqq' : 8809, + 'between' : 8812, + 'theta' : 952, + 'complement' : 8705, + 'arceq' : 8792, + 'nVdash' : 8878, + 'S' : 167, + 'wr' : 8768, + 'wp' : 8472, + 'backcong' : 8780, + 
'lasp' : 701, + 'c' : 807, + 'nabla' : 8711, + 'dotplus' : 8724, + 'eta' : 951, + 'forall' : 8704, + 'eth' : 240, + 'colon' : 58, + 'sqcup' : 8852, + 'rightrightarrows' : 8649, + 'sqsupset' : 8848, + 'mapsto' : 8614, + 'bigtriangledown' : 9661, + 'sqsupseteq' : 8850, + 'propto' : 8733, + 'pi' : 960, + 'pm' : 177, + 'dots' : 0x2026, + 'nrightarrow' : 8603, + 'textasciiacute' : 180, + 'Doteq' : 8785, + 'breve' : 774, + 'sqcap' : 8851, + 'twoheadrightarrow' : 8608, + 'kappa' : 954, + 'vartriangle' : 9653, + 'diamondsuit' : 9826, + 'pitchfork' : 8916, + 'blacktriangleleft' : 9664, + 'nprec' : 8832, + 'curvearrowright' : 8631, + 'barwedge' : 8892, + 'multimap' : 8888, + 'textquestiondown' : 191, + 'cong' : 8773, + 'rtimes' : 8906, + 'rightzigzagarrow' : 8669, + 'rightarrow' : 8594, + 'leftarrow' : 8592, + '__sqrt__' : 8730, + 'twoheaddownarrow' : 8609, + 'oint' : 8750, + 'bigvee' : 8897, + 'eqdef' : 8797, + 'sterling' : 163, + 'phi' : 981, + 'Updownarrow' : 8661, + 'backprime' : 8245, + 'emdash' : 8212, + 'Gamma' : 915, + 'i' : 305, + 'rceil' : 8969, + 'leftharpoonup' : 8636, + 'Im' : 8465, + 'curvearrowleft' : 8630, + 'wedgeq' : 8793, + 'curlyeqprec' : 8926, + 'questeq' : 8799, + 'less' : 60, + 'upuparrows' : 8648, + 'tilde' : 771, + 'textasciigrave' : 96, + 'smallsetminus' : 8726, + 'ell' : 8467, + 'cup' : 8746, + 'danger' : 9761, + 'nVDash' : 8879, + 'cdotp' : 183, + 'cdots' : 8943, + 'hat' : 770, + 'eqgtr' : 8925, + 'psi' : 968, + 'frown' : 8994, + 'acute' : 769, + 'downzigzagarrow' : 8623, + 'ntriangleright' : 8939, + 'cupdot' : 8845, + 'circleddash' : 8861, + 'oslash' : 8856, + 'mho' : 8487, + 'd' : 803, + 'sqsubset' : 8847, + 'cdot' : 8901, + 'Omega' : 937, + 'OE' : 338, + 'veeeq' : 8794, + 'Finv' : 8498, + 't' : 865, + 'leftrightarrow' : 8596, + 'swarrow' : 8601, + 'rightthreetimes' : 8908, + 'rightleftharpoons' : 8652, + 'lesssim' : 8818, + 'searrow' : 8600, + 'because' : 8757, + 'gtrless' : 8823, + 'star' : 8902, + 'nsubset' : 8836, + 'zeta' : 950, + 'dddot' : 8411, + 'bigcirc' : 9675, + 'Supset' : 8913, + 'circ' : 8728, + 'slash' : 8725, + 'ocirc' : 778, + 'prod' : 8719, + 'twoheadleftarrow' : 8606, + 'daleth' : 8504, + 'upharpoonright' : 8638, + 'odot' : 8857, + 'Uparrow' : 8657, + 'O' : 216, + 'hookleftarrow' : 8617, + 'trianglerighteq' : 8885, + 'nsime' : 8772, + 'oe' : 339, + 'nwarrow' : 8598, + 'o' : 248, + 'ddddot' : 8412, + 'downharpoonright' : 8642, + 'succcurlyeq' : 8829, + 'gamma' : 947, + 'scrR' : 8475, + 'dag' : 8224, + 'thickspace' : 8197, + 'frakZ' : 8488, + 'lessdot' : 8918, + 'triangledown' : 9663, + 'ltimes' : 8905, + 'scrB' : 8492, + 'endash' : 8211, + 'scrE' : 8496, + 'scrF' : 8497, + 'scrH' : 8459, + 'scrI' : 8464, + 'rightharpoondown' : 8641, + 'scrL' : 8466, + 'scrM' : 8499, + 'frakC' : 8493, + 'nsupseteq' : 8841, + 'circledR' : 174, + 'circledS' : 9416, + 'ngtr' : 8815, + 'bigcap' : 8898, + 'scre' : 8495, + 'Downarrow' : 8659, + 'scrg' : 8458, + 'overleftrightarrow' : 8417, + 'scro' : 8500, + 'lnsim' : 8934, + 'eqcolon' : 8789, + 'curlyvee' : 8910, + 'urcorner' : 8989, + 'lbrace' : 123, + 'Bumpeq' : 8782, + 'delta' : 948, + 'boxtimes' : 8864, + 'overleftarrow' : 8406, + 'prurel' : 8880, + 'clubsuitopen' : 9831, + 'cwopencirclearrow' : 8635, + 'geqq' : 8807, + 'rightleftarrows' : 8644, + 'ac' : 8766, + 'ae' : 230, + 'int' : 8747, + 'rfloor' : 8971, + 'risingdotseq' : 8787, + 'nvdash' : 8876, + 'diamond' : 8900, + 'ddot' : 776, + 'backsim' : 8765, + 'oplus' : 8853, + 'triangleq' : 8796, + 'check' : 780, + 'ni' : 8715, + 'iiint' : 8749, + 'ne' : 8800, + 
'lesseqgtr' : 8922, + 'obar' : 9021, + 'supseteq' : 8839, + 'nu' : 957, + 'AA' : 197, + 'AE' : 198, + 'models' : 8871, + 'ominus' : 8854, + 'dashv' : 8867, + 'omega' : 969, + 'rq' : 8217, + 'Subset' : 8912, + 'rightharpoonup' : 8640, + 'Rdsh' : 8627, + 'bullet' : 8729, + 'divideontimes' : 8903, + 'lbrack' : 91, + 'textquotedblright' : 8221, + 'Colon' : 8759, + '%' : 37, + '$' : 36, + '{' : 123, + '}' : 125, + '_' : 95, + '#' : 35, + 'imath' : 0x131, + 'circumflexaccent' : 770, + 'combiningbreve' : 774, + 'combiningoverline' : 772, + 'combininggraveaccent' : 768, + 'combiningacuteaccent' : 769, + 'combiningdiaeresis' : 776, + 'combiningtilde' : 771, + 'combiningrightarrowabove' : 8407, + 'combiningdotabove' : 775, + 'to' : 8594, + 'succeq' : 8829, + 'emptyset' : 8709, + 'leftparen' : 40, + 'rightparen' : 41, + 'bigoplus' : 10753, + 'leftangle' : 10216, + 'rightangle' : 10217, + 'leftbrace' : 124, + 'rightbrace' : 125, + 'jmath' : 567, + 'bigodot' : 10752, + 'preceq' : 8828, + 'biguplus' : 10756, + 'epsilon' : 949, + 'vartheta' : 977, + 'bigotimes' : 10754, + 'guillemotleft' : 171, + 'ring' : 730, + 'Thorn' : 222, + 'guilsinglright' : 8250, + 'perthousand' : 8240, + 'macron' : 175, + 'cent' : 162, + 'guillemotright' : 187, + 'equal' : 61, + 'asterisk' : 42, + 'guilsinglleft' : 8249, + 'plus' : 43, + 'thorn' : 254, + 'dagger' : 8224 +} + +# Each element is a 4-tuple of the form: +# src_start, src_end, dst_font, dst_start +# +stix_virtual_fonts = { + 'bb': + { + 'rm': + [ + (0x0030, 0x0039, 'rm', 0x1d7d8), # 0-9 + (0x0041, 0x0042, 'rm', 0x1d538), # A-B + (0x0043, 0x0043, 'rm', 0x2102), # C + (0x0044, 0x0047, 'rm', 0x1d53b), # D-G + (0x0048, 0x0048, 'rm', 0x210d), # H + (0x0049, 0x004d, 'rm', 0x1d540), # I-M + (0x004e, 0x004e, 'rm', 0x2115), # N + (0x004f, 0x004f, 'rm', 0x1d546), # O + (0x0050, 0x0051, 'rm', 0x2119), # P-Q + (0x0052, 0x0052, 'rm', 0x211d), # R + (0x0053, 0x0059, 'rm', 0x1d54a), # S-Y + (0x005a, 0x005a, 'rm', 0x2124), # Z + (0x0061, 0x007a, 'rm', 0x1d552), # a-z + (0x0393, 0x0393, 'rm', 0x213e), # \Gamma + (0x03a0, 0x03a0, 'rm', 0x213f), # \Pi + (0x03a3, 0x03a3, 'rm', 0x2140), # \Sigma + (0x03b3, 0x03b3, 'rm', 0x213d), # \gamma + (0x03c0, 0x03c0, 'rm', 0x213c), # \pi + ], + 'it': + [ + (0x0030, 0x0039, 'rm', 0x1d7d8), # 0-9 + (0x0041, 0x0042, 'it', 0xe154), # A-B + (0x0043, 0x0043, 'it', 0x2102), # C + (0x0044, 0x0044, 'it', 0x2145), # D + (0x0045, 0x0047, 'it', 0xe156), # E-G + (0x0048, 0x0048, 'it', 0x210d), # H + (0x0049, 0x004d, 'it', 0xe159), # I-M + (0x004e, 0x004e, 'it', 0x2115), # N + (0x004f, 0x004f, 'it', 0xe15e), # O + (0x0050, 0x0051, 'it', 0x2119), # P-Q + (0x0052, 0x0052, 'it', 0x211d), # R + (0x0053, 0x0059, 'it', 0xe15f), # S-Y + (0x005a, 0x005a, 'it', 0x2124), # Z + (0x0061, 0x0063, 'it', 0xe166), # a-c + (0x0064, 0x0065, 'it', 0x2146), # d-e + (0x0066, 0x0068, 'it', 0xe169), # f-h + (0x0069, 0x006a, 'it', 0x2148), # i-j + (0x006b, 0x007a, 'it', 0xe16c), # k-z + (0x0393, 0x0393, 'it', 0x213e), # \Gamma (not in beta STIX fonts) + (0x03a0, 0x03a0, 'it', 0x213f), # \Pi + (0x03a3, 0x03a3, 'it', 0x2140), # \Sigma (not in beta STIX fonts) + (0x03b3, 0x03b3, 'it', 0x213d), # \gamma (not in beta STIX fonts) + (0x03c0, 0x03c0, 'it', 0x213c), # \pi + ], + 'bf': + [ + (0x0030, 0x0039, 'rm', 0x1d7d8), # 0-9 + (0x0041, 0x0042, 'bf', 0xe38a), # A-B + (0x0043, 0x0043, 'bf', 0x2102), # C + (0x0044, 0x0044, 'bf', 0x2145), # D + (0x0045, 0x0047, 'bf', 0xe38d), # E-G + (0x0048, 0x0048, 'bf', 0x210d), # H + (0x0049, 0x004d, 'bf', 0xe390), # I-M + (0x004e, 0x004e, 'bf', 0x2115), # N 
+ (0x004f, 0x004f, 'bf', 0xe395), # O + (0x0050, 0x0051, 'bf', 0x2119), # P-Q + (0x0052, 0x0052, 'bf', 0x211d), # R + (0x0053, 0x0059, 'bf', 0xe396), # S-Y + (0x005a, 0x005a, 'bf', 0x2124), # Z + (0x0061, 0x0063, 'bf', 0xe39d), # a-c + (0x0064, 0x0065, 'bf', 0x2146), # d-e + (0x0066, 0x0068, 'bf', 0xe3a2), # f-h + (0x0069, 0x006a, 'bf', 0x2148), # i-j + (0x006b, 0x007a, 'bf', 0xe3a7), # k-z + (0x0393, 0x0393, 'bf', 0x213e), # \Gamma + (0x03a0, 0x03a0, 'bf', 0x213f), # \Pi + (0x03a3, 0x03a3, 'bf', 0x2140), # \Sigma + (0x03b3, 0x03b3, 'bf', 0x213d), # \gamma + (0x03c0, 0x03c0, 'bf', 0x213c), # \pi + ], + }, + 'cal': + [ + (0x0041, 0x005a, 'it', 0xe22d), # A-Z + ], + 'frak': + { + 'rm': + [ + (0x0041, 0x0042, 'rm', 0x1d504), # A-B + (0x0043, 0x0043, 'rm', 0x212d), # C + (0x0044, 0x0047, 'rm', 0x1d507), # D-G + (0x0048, 0x0048, 'rm', 0x210c), # H + (0x0049, 0x0049, 'rm', 0x2111), # I + (0x004a, 0x0051, 'rm', 0x1d50d), # J-Q + (0x0052, 0x0052, 'rm', 0x211c), # R + (0x0053, 0x0059, 'rm', 0x1d516), # S-Y + (0x005a, 0x005a, 'rm', 0x2128), # Z + (0x0061, 0x007a, 'rm', 0x1d51e), # a-z + ], + 'it': + [ + (0x0041, 0x0042, 'rm', 0x1d504), # A-B + (0x0043, 0x0043, 'rm', 0x212d), # C + (0x0044, 0x0047, 'rm', 0x1d507), # D-G + (0x0048, 0x0048, 'rm', 0x210c), # H + (0x0049, 0x0049, 'rm', 0x2111), # I + (0x004a, 0x0051, 'rm', 0x1d50d), # J-Q + (0x0052, 0x0052, 'rm', 0x211c), # R + (0x0053, 0x0059, 'rm', 0x1d516), # S-Y + (0x005a, 0x005a, 'rm', 0x2128), # Z + (0x0061, 0x007a, 'rm', 0x1d51e), # a-z + ], + 'bf': + [ + (0x0041, 0x005a, 'bf', 0x1d56c), # A-Z + (0x0061, 0x007a, 'bf', 0x1d586), # a-z + ], + }, + 'scr': + [ + (0x0041, 0x0041, 'it', 0x1d49c), # A + (0x0042, 0x0042, 'it', 0x212c), # B + (0x0043, 0x0044, 'it', 0x1d49e), # C-D + (0x0045, 0x0046, 'it', 0x2130), # E-F + (0x0047, 0x0047, 'it', 0x1d4a2), # G + (0x0048, 0x0048, 'it', 0x210b), # H + (0x0049, 0x0049, 'it', 0x2110), # I + (0x004a, 0x004b, 'it', 0x1d4a5), # J-K + (0x004c, 0x004c, 'it', 0x2112), # L + (0x004d, 0x004d, 'it', 0x2133), # M + (0x004e, 0x0051, 'it', 0x1d4a9), # N-Q + (0x0052, 0x0052, 'it', 0x211b), # R + (0x0053, 0x005a, 'it', 0x1d4ae), # S-Z + (0x0061, 0x0064, 'it', 0x1d4b6), # a-d + (0x0065, 0x0065, 'it', 0x212f), # e + (0x0066, 0x0066, 'it', 0x1d4bb), # f + (0x0067, 0x0067, 'it', 0x210a), # g + (0x0068, 0x006e, 'it', 0x1d4bd), # h-n + (0x006f, 0x006f, 'it', 0x2134), # o + (0x0070, 0x007a, 'it', 0x1d4c5), # p-z + ], + 'sf': + { + 'rm': + [ + (0x0030, 0x0039, 'rm', 0x1d7e2), # 0-9 + (0x0041, 0x005a, 'rm', 0x1d5a0), # A-Z + (0x0061, 0x007a, 'rm', 0x1d5ba), # a-z + (0x0391, 0x03a9, 'rm', 0xe17d), # \Alpha-\Omega + (0x03b1, 0x03c9, 'rm', 0xe196), # \alpha-\omega + (0x03d1, 0x03d1, 'rm', 0xe1b0), # theta variant + (0x03d5, 0x03d5, 'rm', 0xe1b1), # phi variant + (0x03d6, 0x03d6, 'rm', 0xe1b3), # pi variant + (0x03f1, 0x03f1, 'rm', 0xe1b2), # rho variant + (0x03f5, 0x03f5, 'rm', 0xe1af), # lunate epsilon + (0x2202, 0x2202, 'rm', 0xe17c), # partial differential + ], + 'it': + [ + # These numerals are actually upright. We don't actually + # want italic numerals ever. 
+ (0x0030, 0x0039, 'rm', 0x1d7e2), # 0-9 + (0x0041, 0x005a, 'it', 0x1d608), # A-Z + (0x0061, 0x007a, 'it', 0x1d622), # a-z + (0x0391, 0x03a9, 'rm', 0xe17d), # \Alpha-\Omega + (0x03b1, 0x03c9, 'it', 0xe1d8), # \alpha-\omega + (0x03d1, 0x03d1, 'it', 0xe1f2), # theta variant + (0x03d5, 0x03d5, 'it', 0xe1f3), # phi variant + (0x03d6, 0x03d6, 'it', 0xe1f5), # pi variant + (0x03f1, 0x03f1, 'it', 0xe1f4), # rho variant + (0x03f5, 0x03f5, 'it', 0xe1f1), # lunate epsilon + ], + 'bf': + [ + (0x0030, 0x0039, 'bf', 0x1d7ec), # 0-9 + (0x0041, 0x005a, 'bf', 0x1d5d4), # A-Z + (0x0061, 0x007a, 'bf', 0x1d5ee), # a-z + (0x0391, 0x03a9, 'bf', 0x1d756), # \Alpha-\Omega + (0x03b1, 0x03c9, 'bf', 0x1d770), # \alpha-\omega + (0x03d1, 0x03d1, 'bf', 0x1d78b), # theta variant + (0x03d5, 0x03d5, 'bf', 0x1d78d), # phi variant + (0x03d6, 0x03d6, 'bf', 0x1d78f), # pi variant + (0x03f0, 0x03f0, 'bf', 0x1d78c), # kappa variant + (0x03f1, 0x03f1, 'bf', 0x1d78e), # rho variant + (0x03f5, 0x03f5, 'bf', 0x1d78a), # lunate epsilon + (0x2202, 0x2202, 'bf', 0x1d789), # partial differential + (0x2207, 0x2207, 'bf', 0x1d76f), # \Nabla + ], + }, + 'tt': + [ + (0x0030, 0x0039, 'rm', 0x1d7f6), # 0-9 + (0x0041, 0x005a, 'rm', 0x1d670), # A-Z + (0x0061, 0x007a, 'rm', 0x1d68a) # a-z + ], + } diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_path.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/matplotlib/_path.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..8662dc0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/_path.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_pylab_helpers.py b/minor_project/lib/python3.6/site-packages/matplotlib/_pylab_helpers.py new file mode 100644 index 0000000..26ba7a1 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_pylab_helpers.py @@ -0,0 +1,141 @@ +""" +Manage figures for the pyplot interface. +""" + +import atexit +from collections import OrderedDict +import gc + + +class Gcf: + """ + Singleton to maintain the relation between figures and their managers, and + keep track of and "active" figure and manager. + + The canvas of a figure created through pyplot is associated with a figure + manager, which handles the interaction between the figure and the backend. + pyplot keeps track of figure managers using an identifier, the "figure + number" or "manager number" (which can actually be any hashable value); + this number is available as the :attr:`number` attribute of the manager. + + This class is never instantiated; it consists of an `OrderedDict` mapping + figure/manager numbers to managers, and a set of class methods that + manipulate this `OrderedDict`. + + Attributes + ---------- + figs : OrderedDict + `OrderedDict` mapping numbers to managers; the active manager is at the + end. + """ + + figs = OrderedDict() + + @classmethod + def get_fig_manager(cls, num): + """ + If manager number *num* exists, make it the active one and return it; + otherwise return *None*. + """ + manager = cls.figs.get(num, None) + if manager is not None: + cls.set_active(manager) + return manager + + @classmethod + def destroy(cls, num): + """ + Destroy manager *num* -- either a manager instance or a manager number. + + In the interactive backends, this is bound to the window "destroy" and + "delete" events. + + It is recommended to pass a manager instance, to avoid confusion when + two managers share the same number. 
+ """ + if all(hasattr(num, attr) for attr in ["num", "_cidgcf", "destroy"]): + manager = num + if cls.figs.get(manager.num) is manager: + cls.figs.pop(manager.num) + else: + return + else: + try: + manager = cls.figs.pop(num) + except KeyError: + return + manager.canvas.mpl_disconnect(manager._cidgcf) + manager.destroy() + gc.collect(1) + + @classmethod + def destroy_fig(cls, fig): + """Destroy figure *fig*.""" + num = next((manager.num for manager in cls.figs.values() + if manager.canvas.figure == fig), None) + if num is not None: + cls.destroy(num) + + @classmethod + def destroy_all(cls): + """Destroy all figures.""" + # Reimport gc in case the module globals have already been removed + # during interpreter shutdown. + import gc + for manager in list(cls.figs.values()): + manager.canvas.mpl_disconnect(manager._cidgcf) + manager.destroy() + cls.figs.clear() + gc.collect(1) + + @classmethod + def has_fignum(cls, num): + """Return whether figure number *num* exists.""" + return num in cls.figs + + @classmethod + def get_all_fig_managers(cls): + """Return a list of figure managers.""" + return list(cls.figs.values()) + + @classmethod + def get_num_fig_managers(cls): + """Return the number of figures being managed.""" + return len(cls.figs) + + @classmethod + def get_active(cls): + """Return the active manager, or *None* if there is no manager.""" + return next(reversed(cls.figs.values())) if cls.figs else None + + @classmethod + def _set_new_active_manager(cls, manager): + """Adopt *manager* into pyplot and make it the active manager.""" + if not hasattr(manager, "_cidgcf"): + manager._cidgcf = manager.canvas.mpl_connect( + "button_press_event", lambda event: cls.set_active(manager)) + fig = manager.canvas.figure + fig.number = manager.num + label = fig.get_label() + if label: + manager.set_window_title(label) + cls.set_active(manager) + + @classmethod + def set_active(cls, manager): + """Make *manager* the active manager.""" + cls.figs[manager.num] = manager + cls.figs.move_to_end(manager.num) + + @classmethod + def draw_all(cls, force=False): + """ + Redraw all stale managed figures, or, if *force* is True, all managed + figures. + """ + for manager in cls.get_all_fig_managers(): + if force or manager.canvas.figure.stale: + manager.canvas.draw_idle() + + +atexit.register(Gcf.destroy_all) diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_qhull.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/matplotlib/_qhull.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..b9d55a0 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/_qhull.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_text_layout.py b/minor_project/lib/python3.6/site-packages/matplotlib/_text_layout.py new file mode 100644 index 0000000..e9fed13 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_text_layout.py @@ -0,0 +1,38 @@ +""" +Text layouting utilities. +""" + +from .ft2font import KERNING_DEFAULT, LOAD_NO_HINTING + + +def layout(string, font, *, kern_mode=KERNING_DEFAULT): + """ + Render *string* with *font*. For each character in *string*, yield a + (glyph-index, x-position) pair. When such a pair is yielded, the font's + glyph is set to the corresponding character. + + Parameters + ---------- + string : str + The string to be rendered. + font : FT2Font + The font. + kern_mode : int + A FreeType kerning mode. 
+ + Yields + ------ + glyph_index : int + x_position : float + """ + x = 0 + last_glyph_idx = None + for char in string: + glyph_idx = font.get_char_index(ord(char)) + kern = (font.get_kerning(last_glyph_idx, glyph_idx, kern_mode) + if last_glyph_idx is not None else 0) / 64 + x += kern + glyph = font.load_glyph(glyph_idx, flags=LOAD_NO_HINTING) + yield glyph_idx, x + x += glyph.linearHoriAdvance / 65536 + last_glyph_idx = glyph_idx diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_tri.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/matplotlib/_tri.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..4770fe5 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/_tri.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_ttconv.cpython-36m-x86_64-linux-gnu.so b/minor_project/lib/python3.6/site-packages/matplotlib/_ttconv.cpython-36m-x86_64-linux-gnu.so new file mode 100755 index 0000000..238fcf3 Binary files /dev/null and b/minor_project/lib/python3.6/site-packages/matplotlib/_ttconv.cpython-36m-x86_64-linux-gnu.so differ diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/_version.py b/minor_project/lib/python3.6/site-packages/matplotlib/_version.py new file mode 100644 index 0000000..0afd936 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/_version.py @@ -0,0 +1,21 @@ + +# This file was generated by 'versioneer.py' (0.15) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json +import sys + +version_json = ''' +{ + "dirty": false, + "error": null, + "full-revisionid": "5a4f1b675da3d17df2d77d03bceab331afcc21db", + "version": "3.3.3" +} +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/afm.py b/minor_project/lib/python3.6/site-packages/matplotlib/afm.py new file mode 100644 index 0000000..ad3e41c --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/afm.py @@ -0,0 +1,528 @@ +""" +A python interface to Adobe Font Metrics Files. + +Although a number of other python implementations exist, and may be more +complete than this, it was decided not to go with them because they were +either: + +1) copyrighted or used a non-BSD compatible license +2) had too many dependencies and a free standing lib was needed +3) did more than needed and it was easier to write afresh rather than + figure out how to get just what was needed. + +It is pretty easy to use, and has no external dependencies: + +>>> import matplotlib as mpl +>>> from pathlib import Path +>>> afm_path = Path(mpl.get_data_path(), 'fonts', 'afm', 'ptmr8a.afm') +>>> +>>> from matplotlib.afm import AFM +>>> with afm_path.open('rb') as fh: +... afm = AFM(fh) +>>> afm.string_width_height('What the heck?') +(6220.0, 694) +>>> afm.get_fontname() +'Times-Roman' +>>> afm.get_kern_dist('A', 'f') +0 +>>> afm.get_kern_dist('A', 'y') +-92.0 +>>> afm.get_bbox_char('!') +[130, -9, 238, 676] + +As in the Adobe Font Metrics File Format Specification, all dimensions +are given in units of 1/1000 of the scale factor (point size) of the font +being used. 
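
For example, at a hypothetical 12 pt font size the width reported by
string_width_height() above corresponds to about 6220 * 12 / 1000 = 74.64
points; 12 pt is only an illustration, and the same scaling applies to
whatever point size is actually in use.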
+""" + +from collections import namedtuple +import logging +import re + +from ._mathtext_data import uni2type1 + + +_log = logging.getLogger(__name__) + + +def _to_int(x): + # Some AFM files have floats where we are expecting ints -- there is + # probably a better way to handle this (support floats, round rather than + # truncate). But I don't know what the best approach is now and this + # change to _to_int should at least prevent Matplotlib from crashing on + # these. JDH (2009-11-06) + return int(float(x)) + + +def _to_float(x): + # Some AFM files use "," instead of "." as decimal separator -- this + # shouldn't be ambiguous (unless someone is wicked enough to use "," as + # thousands separator...). + if isinstance(x, bytes): + # Encoding doesn't really matter -- if we have codepoints >127 the call + # to float() will error anyways. + x = x.decode('latin-1') + return float(x.replace(',', '.')) + + +def _to_str(x): + return x.decode('utf8') + + +def _to_list_of_ints(s): + s = s.replace(b',', b' ') + return [_to_int(val) for val in s.split()] + + +def _to_list_of_floats(s): + return [_to_float(val) for val in s.split()] + + +def _to_bool(s): + if s.lower().strip() in (b'false', b'0', b'no'): + return False + else: + return True + + +def _parse_header(fh): + """ + Read the font metrics header (up to the char metrics) and returns + a dictionary mapping *key* to *val*. *val* will be converted to the + appropriate python type as necessary; e.g.: + + * 'False'->False + * '0'->0 + * '-168 -218 1000 898'-> [-168, -218, 1000, 898] + + Dictionary keys are + + StartFontMetrics, FontName, FullName, FamilyName, Weight, + ItalicAngle, IsFixedPitch, FontBBox, UnderlinePosition, + UnderlineThickness, Version, Notice, EncodingScheme, CapHeight, + XHeight, Ascender, Descender, StartCharMetrics + """ + header_converters = { + b'StartFontMetrics': _to_float, + b'FontName': _to_str, + b'FullName': _to_str, + b'FamilyName': _to_str, + b'Weight': _to_str, + b'ItalicAngle': _to_float, + b'IsFixedPitch': _to_bool, + b'FontBBox': _to_list_of_ints, + b'UnderlinePosition': _to_float, + b'UnderlineThickness': _to_float, + b'Version': _to_str, + # Some AFM files have non-ASCII characters (which are not allowed by + # the spec). Given that there is actually no public API to even access + # this field, just return it as straight bytes. + b'Notice': lambda x: x, + b'EncodingScheme': _to_str, + b'CapHeight': _to_float, # Is the second version a mistake, or + b'Capheight': _to_float, # do some AFM files contain 'Capheight'? -JKS + b'XHeight': _to_float, + b'Ascender': _to_float, + b'Descender': _to_float, + b'StdHW': _to_float, + b'StdVW': _to_float, + b'StartCharMetrics': _to_int, + b'CharacterSet': _to_str, + b'Characters': _to_int, + } + d = {} + first_line = True + for line in fh: + line = line.rstrip() + if line.startswith(b'Comment'): + continue + lst = line.split(b' ', 1) + key = lst[0] + if first_line: + # AFM spec, Section 4: The StartFontMetrics keyword + # [followed by a version number] must be the first line in + # the file, and the EndFontMetrics keyword must be the + # last non-empty line in the file. We just check the + # first header entry. 
+ if key != b'StartFontMetrics': + raise RuntimeError('Not an AFM file') + first_line = False + if len(lst) == 2: + val = lst[1] + else: + val = b'' + try: + converter = header_converters[key] + except KeyError: + _log.error('Found an unknown keyword in AFM header (was %r)' % key) + continue + try: + d[key] = converter(val) + except ValueError: + _log.error('Value error parsing header in AFM: %s, %s', key, val) + continue + if key == b'StartCharMetrics': + break + else: + raise RuntimeError('Bad parse') + return d + + +CharMetrics = namedtuple('CharMetrics', 'width, name, bbox') +CharMetrics.__doc__ = """ + Represents the character metrics of a single character. + + Notes + ----- + The fields do currently only describe a subset of character metrics + information defined in the AFM standard. + """ +CharMetrics.width.__doc__ = """The character width (WX).""" +CharMetrics.name.__doc__ = """The character name (N).""" +CharMetrics.bbox.__doc__ = """ + The bbox of the character (B) as a tuple (*llx*, *lly*, *urx*, *ury*).""" + + +def _parse_char_metrics(fh): + """ + Parse the given filehandle for character metrics information and return + the information as dicts. + + It is assumed that the file cursor is on the line behind + 'StartCharMetrics'. + + Returns + ------- + ascii_d : dict + A mapping "ASCII num of the character" to `.CharMetrics`. + name_d : dict + A mapping "character name" to `.CharMetrics`. + + Notes + ----- + This function is incomplete per the standard, but thus far parses + all the sample afm files tried. + """ + required_keys = {'C', 'WX', 'N', 'B'} + + ascii_d = {} + name_d = {} + for line in fh: + # We are defensively letting values be utf8. The spec requires + # ascii, but there are non-compliant fonts in circulation + line = _to_str(line.rstrip()) # Convert from byte-literal + if line.startswith('EndCharMetrics'): + return ascii_d, name_d + # Split the metric line into a dictionary, keyed by metric identifiers + vals = dict(s.strip().split(' ', 1) for s in line.split(';') if s) + # There may be other metrics present, but only these are needed + if not required_keys.issubset(vals): + raise RuntimeError('Bad char metrics line: %s' % line) + num = _to_int(vals['C']) + wx = _to_float(vals['WX']) + name = vals['N'] + bbox = _to_list_of_floats(vals['B']) + bbox = list(map(int, bbox)) + metrics = CharMetrics(wx, name, bbox) + # Workaround: If the character name is 'Euro', give it the + # corresponding character code, according to WinAnsiEncoding (see PDF + # Reference). + if name == 'Euro': + num = 128 + elif name == 'minus': + num = ord("\N{MINUS SIGN}") # 0x2212 + if num != -1: + ascii_d[num] = metrics + name_d[name] = metrics + raise RuntimeError('Bad parse') + + +def _parse_kern_pairs(fh): + """ + Return a kern pairs dictionary; keys are (*char1*, *char2*) tuples and + values are the kern pair value. 
For example, a kern pairs line like + ``KPX A y -50`` + + will be represented as:: + + d[ ('A', 'y') ] = -50 + + """ + + line = next(fh) + if not line.startswith(b'StartKernPairs'): + raise RuntimeError('Bad start of kern pairs data: %s' % line) + + d = {} + for line in fh: + line = line.rstrip() + if not line: + continue + if line.startswith(b'EndKernPairs'): + next(fh) # EndKernData + return d + vals = line.split() + if len(vals) != 4 or vals[0] != b'KPX': + raise RuntimeError('Bad kern pairs line: %s' % line) + c1, c2, val = _to_str(vals[1]), _to_str(vals[2]), _to_float(vals[3]) + d[(c1, c2)] = val + raise RuntimeError('Bad kern pairs parse') + + +CompositePart = namedtuple('CompositePart', 'name, dx, dy') +CompositePart.__doc__ = """ + Represents the information on a composite element of a composite char.""" +CompositePart.name.__doc__ = """Name of the part, e.g. 'acute'.""" +CompositePart.dx.__doc__ = """x-displacement of the part from the origin.""" +CompositePart.dy.__doc__ = """y-displacement of the part from the origin.""" + + +def _parse_composites(fh): + """ + Parse the given filehandle for composites information return them as a + dict. + + It is assumed that the file cursor is on the line behind 'StartComposites'. + + Returns + ------- + dict + A dict mapping composite character names to a parts list. The parts + list is a list of `.CompositePart` entries describing the parts of + the composite. + + Examples + -------- + A composite definition line:: + + CC Aacute 2 ; PCC A 0 0 ; PCC acute 160 170 ; + + will be represented as:: + + composites['Aacute'] = [CompositePart(name='A', dx=0, dy=0), + CompositePart(name='acute', dx=160, dy=170)] + + """ + composites = {} + for line in fh: + line = line.rstrip() + if not line: + continue + if line.startswith(b'EndComposites'): + return composites + vals = line.split(b';') + cc = vals[0].split() + name, numParts = cc[1], _to_int(cc[2]) + pccParts = [] + for s in vals[1:-1]: + pcc = s.split() + part = CompositePart(pcc[1], _to_float(pcc[2]), _to_float(pcc[3])) + pccParts.append(part) + composites[name] = pccParts + + raise RuntimeError('Bad composites parse') + + +def _parse_optional(fh): + """ + Parse the optional fields for kern pair data and composites. + + Returns + ------- + kern_data : dict + A dict containing kerning information. May be empty. + See `._parse_kern_pairs`. + composites : dict + A dict containing composite information. May be empty. + See `._parse_composites`. + """ + optional = { + b'StartKernData': _parse_kern_pairs, + b'StartComposites': _parse_composites, + } + + d = {b'StartKernData': {}, + b'StartComposites': {}} + for line in fh: + line = line.rstrip() + if not line: + continue + key = line.split()[0] + + if key in optional: + d[key] = optional[key](fh) + + return d[b'StartKernData'], d[b'StartComposites'] + + +class AFM: + + def __init__(self, fh): + """Parse the AFM file in file object *fh*.""" + self._header = _parse_header(fh) + self._metrics, self._metrics_by_name = _parse_char_metrics(fh) + self._kern, self._composite = _parse_optional(fh) + + def get_bbox_char(self, c, isord=False): + if not isord: + c = ord(c) + return self._metrics[c].bbox + + def string_width_height(self, s): + """ + Return the string width (including kerning) and string height + as a (*w*, *h*) tuple. 
+ """ + if not len(s): + return 0, 0 + total_width = 0 + namelast = None + miny = 1e9 + maxy = 0 + for c in s: + if c == '\n': + continue + wx, name, bbox = self._metrics[ord(c)] + + total_width += wx + self._kern.get((namelast, name), 0) + l, b, w, h = bbox + miny = min(miny, b) + maxy = max(maxy, b + h) + + namelast = name + + return total_width, maxy - miny + + def get_str_bbox_and_descent(self, s): + """Return the string bounding box and the maximal descent.""" + if not len(s): + return 0, 0, 0, 0, 0 + total_width = 0 + namelast = None + miny = 1e9 + maxy = 0 + left = 0 + if not isinstance(s, str): + s = _to_str(s) + for c in s: + if c == '\n': + continue + name = uni2type1.get(ord(c), f"uni{ord(c):04X}") + try: + wx, _, bbox = self._metrics_by_name[name] + except KeyError: + name = 'question' + wx, _, bbox = self._metrics_by_name[name] + total_width += wx + self._kern.get((namelast, name), 0) + l, b, w, h = bbox + left = min(left, l) + miny = min(miny, b) + maxy = max(maxy, b + h) + + namelast = name + + return left, miny, total_width, maxy - miny, -miny + + def get_str_bbox(self, s): + """Return the string bounding box.""" + return self.get_str_bbox_and_descent(s)[:4] + + def get_name_char(self, c, isord=False): + """Get the name of the character, i.e., ';' is 'semicolon'.""" + if not isord: + c = ord(c) + return self._metrics[c].name + + def get_width_char(self, c, isord=False): + """ + Get the width of the character from the character metric WX field. + """ + if not isord: + c = ord(c) + return self._metrics[c].width + + def get_width_from_char_name(self, name): + """Get the width of the character from a type1 character name.""" + return self._metrics_by_name[name].width + + def get_height_char(self, c, isord=False): + """Get the bounding box (ink) height of character *c* (space is 0).""" + if not isord: + c = ord(c) + return self._metrics[c].bbox[-1] + + def get_kern_dist(self, c1, c2): + """ + Return the kerning pair distance (possibly 0) for chars *c1* and *c2*. + """ + name1, name2 = self.get_name_char(c1), self.get_name_char(c2) + return self.get_kern_dist_from_name(name1, name2) + + def get_kern_dist_from_name(self, name1, name2): + """ + Return the kerning pair distance (possibly 0) for chars + *name1* and *name2*. 
+ """ + return self._kern.get((name1, name2), 0) + + def get_fontname(self): + """Return the font name, e.g., 'Times-Roman'.""" + return self._header[b'FontName'] + + def get_fullname(self): + """Return the font full name, e.g., 'Times-Roman'.""" + name = self._header.get(b'FullName') + if name is None: # use FontName as a substitute + name = self._header[b'FontName'] + return name + + def get_familyname(self): + """Return the font family name, e.g., 'Times'.""" + name = self._header.get(b'FamilyName') + if name is not None: + return name + + # FamilyName not specified so we'll make a guess + name = self.get_fullname() + extras = (r'(?i)([ -](regular|plain|italic|oblique|bold|semibold|' + r'light|ultralight|extra|condensed))+$') + return re.sub(extras, '', name) + + @property + def family_name(self): + """The font family name, e.g., 'Times'.""" + return self.get_familyname() + + def get_weight(self): + """Return the font weight, e.g., 'Bold' or 'Roman'.""" + return self._header[b'Weight'] + + def get_angle(self): + """Return the fontangle as float.""" + return self._header[b'ItalicAngle'] + + def get_capheight(self): + """Return the cap height as float.""" + return self._header[b'CapHeight'] + + def get_xheight(self): + """Return the xheight as float.""" + return self._header[b'XHeight'] + + def get_underline_thickness(self): + """Return the underline thickness as float.""" + return self._header[b'UnderlineThickness'] + + def get_horizontal_stem_width(self): + """ + Return the standard horizontal stem width as float, or *None* if + not specified in AFM file. + """ + return self._header.get(b'StdHW', None) + + def get_vertical_stem_width(self): + """ + Return the standard vertical stem width as float, or *None* if + not specified in AFM file. + """ + return self._header.get(b'StdVW', None) diff --git a/minor_project/lib/python3.6/site-packages/matplotlib/animation.py b/minor_project/lib/python3.6/site-packages/matplotlib/animation.py new file mode 100644 index 0000000..2568d45 --- /dev/null +++ b/minor_project/lib/python3.6/site-packages/matplotlib/animation.py @@ -0,0 +1,1735 @@ +# TODO: +# * Documentation -- this will need a new section of the User's Guide. +# Both for Animations and just timers. +# - Also need to update http://www.scipy.org/Cookbook/Matplotlib/Animations +# * Blit +# * Currently broken with Qt4 for widgets that don't start on screen +# * Still a few edge cases that aren't working correctly +# * Can this integrate better with existing matplotlib animation artist flag? +# - If animated removes from default draw(), perhaps we could use this to +# simplify initial draw. +# * Example +# * Frameless animation - pure procedural with no loop +# * Need example that uses something like inotify or subprocess +# * Complex syncing examples +# * Movies +# * Can blit be enabled for movies? +# * Need to consider event sources to allow clicking through multiple figures + +import abc +import base64 +import contextlib +from io import BytesIO, TextIOWrapper +import itertools +import logging +from pathlib import Path +import shutil +import subprocess +import sys +from tempfile import TemporaryDirectory +import uuid + +import numpy as np + +import matplotlib as mpl +from matplotlib._animation_data import ( + DISPLAY_TEMPLATE, INCLUDED_FRAMES, JS_INCLUDE, STYLE_INCLUDE) +from matplotlib import cbook + + +_log = logging.getLogger(__name__) + +# Process creation flag for subprocess to prevent it raising a terminal +# window. 
See for example: +# https://stackoverflow.com/questions/24130623/using-python-subprocess-popen-cant-prevent-exe-stopped-working-prompt +if sys.platform == 'win32': + subprocess_creation_flags = CREATE_NO_WINDOW = 0x08000000 +else: + # Apparently None won't work here + subprocess_creation_flags = 0 + +# Other potential writing methods: +# * http://pymedia.org/ +# * libming (produces swf) python wrappers: https://github.com/libming/libming +# * Wrap x264 API: + +# (http://stackoverflow.com/questions/2940671/ +# how-to-encode-series-of-images-into-h264-using-x264-api-c-c ) + + +def adjusted_figsize(w, h, dpi, n): + """ + Compute figure size so that pixels are a multiple of n. + + Parameters + ---------- + w, h : float + Size in inches. + + dpi : float + The dpi. + + n : int + The target multiple. + + Returns + ------- + wnew, hnew : float + The new figure size in inches. + """ + + # this maybe simplified if / when we adopt consistent rounding for + # pixel size across the whole library + def correct_roundoff(x, dpi, n): + if int(x*dpi) % n != 0: + if int(np.nextafter(x, np.inf)*dpi) % n == 0: + x = np.nextafter(x, np.inf) + elif int(np.nextafter(x, -np.inf)*dpi) % n == 0: + x = np.nextafter(x, -np.inf) + return x + + wnew = int(w * dpi / n) * n / dpi + hnew = int(h * dpi / n) * n / dpi + return correct_roundoff(wnew, dpi, n), correct_roundoff(hnew, dpi, n) + + +class MovieWriterRegistry: + """Registry of available writer classes by human readable name.""" + + def __init__(self): + self._registered = dict() + + @cbook.deprecated("3.2") + def set_dirty(self): + """Sets a flag to re-setup the writers.""" + + def register(self, name): + """ + Decorator for registering a class under a name. + + Example use:: + + @registry.register(name) + class Foo: + pass + """ + def wrapper(writer_cls): + self._registered[name] = writer_cls + return writer_cls + return wrapper + + @cbook.deprecated("3.2") + def ensure_not_dirty(self): + """If dirty, reasks the writers if they are available""" + + @cbook.deprecated("3.2") + def reset_available_writers(self): + """Reset the available state of all registered writers""" + + @cbook.deprecated("3.2") + @property + def avail(self): + return {name: self._registered[name] for name in self.list()} + + def is_available(self, name): + """ + Check if given writer is available by name. + + Parameters + ---------- + name : str + + Returns + ------- + bool + """ + try: + cls = self._registered[name] + except KeyError: + return False + return cls.isAvailable() + + def __iter__(self): + """Iterate over names of available writer class.""" + for name in self._registered: + if self.is_available(name): + yield name + + def list(self): + """Get a list of available MovieWriters.""" + return [*self] + + def __getitem__(self, name): + """Get an available writer class from its name.""" + if self.is_available(name): + return self._registered[name] + raise RuntimeError(f"Requested MovieWriter ({name}) not available") + + +writers = MovieWriterRegistry() + + +class AbstractMovieWriter(abc.ABC): + """ + Abstract base class for writing movies. Fundamentally, what a MovieWriter + does is provide is a way to grab frames by calling grab_frame(). + + setup() is called to start the process and finish() is called afterwards. + + This class is set up to provide for writing movie frame data to a pipe. 
+ saving() is provided as a context manager to facilitate this process as:: + + with moviewriter.saving(fig, outfile='myfile.mp4', dpi=100): + # Iterate over frames + moviewriter.grab_frame(**savefig_kwargs) + + The use of the context manager ensures that setup() and finish() are + performed as necessary. + + An instance of a concrete subclass of this class can be given as the + ``writer`` argument of `Animation.save()`. + """ + + def __init__(self, fps=5, metadata=None, codec=None, bitrate=None): + self.fps = fps + self.metadata = metadata if metadata is not None else {} + self.codec = ( + mpl.rcParams['animation.codec'] if codec is None else codec) + self.bitrate = ( + mpl.rcParams['animation.bitrate'] if bitrate is None else bitrate) + + @abc.abstractmethod + def setup(self, fig, outfile, dpi=None): + """ + Setup for writing the movie file. + + Parameters + ---------- + fig : `~matplotlib.figure.Figure` + The figure object that contains the information for frames. + outfile : str + The filename of the resulting movie file. + dpi : float, default: ``fig.dpi`` + The DPI (or resolution) for the file. This controls the size + in pixels of the resulting movie file. + """ + self.outfile = outfile + self.fig = fig + if dpi is None: + dpi = self.fig.dpi + self.dpi = dpi + + @property + def frame_size(self): + """A tuple ``(width, height)`` in pixels of a movie frame.""" + w, h = self.fig.get_size_inches() + return int(w * self.dpi), int(h * self.dpi) + + @abc.abstractmethod + def grab_frame(self, **savefig_kwargs): + """ + Grab the image information from the figure and save as a movie frame. + + All keyword arguments in *savefig_kwargs* are passed on to the + `~.Figure.savefig` call that saves the figure. + """ + + @abc.abstractmethod + def finish(self): + """Finish any processing for writing the movie.""" + + @contextlib.contextmanager + def saving(self, fig, outfile, dpi, *args, **kwargs): + """ + Context manager to facilitate writing the movie file. + + ``*args, **kw`` are any parameters that should be passed to `setup`. + """ + # This particular sequence is what contextlib.contextmanager wants + self.setup(fig, outfile, dpi, *args, **kwargs) + try: + yield self + finally: + self.finish() + + +class MovieWriter(AbstractMovieWriter): + """ + Base class for writing movies. + + This is a base class for MovieWriter subclasses that write a movie frame + data to a pipe. You cannot instantiate this class directly. + See examples for how to use its subclasses. + + Attributes + ---------- + frame_format : str + The format used in writing frame data, defaults to 'rgba'. + fig : `~matplotlib.figure.Figure` + The figure to capture data from. + This must be provided by the sub-classes. + """ + + # Builtin writer subclasses additionally define the _exec_key and _args_key + # attributes, which indicate the rcParams entries where the path to the + # executable and additional command-line arguments to the executable are + # stored. Third-party writers cannot meaningfully set these as they cannot + # extend rcParams with new keys. + + exec_key = cbook._deprecate_privatize_attribute("3.3") + args_key = cbook._deprecate_privatize_attribute("3.3") + + def __init__(self, fps=5, codec=None, bitrate=None, extra_args=None, + metadata=None): + """ + Parameters + ---------- + fps : int, default: 5 + Movie frame rate (per second). + codec : str or None, default: :rc:`animation.codec` + The codec to use. + bitrate : int, default: :rc:`animation.bitrate` + The bitrate of the movie, in kilobits per second. 
Higher values + means higher quality movies, but increase the file size. A value + of -1 lets the underlying movie encoder select the bitrate. + extra_args : list of str or None, optional + Extra command-line arguments passed to the underlying movie + encoder. The default, None, means to use + :rc:`animation.[name-of-encoder]_args` for the builtin writers. + metadata : Dict[str, str], default: {} + A dictionary of keys and values for metadata to include in the + output file. Some keys that may be of use include: + title, artist, genre, subject, copyright, srcform, comment. + """ + if type(self) is MovieWriter: + # TODO MovieWriter is still an abstract class and needs to be + # extended with a mixin. This should be clearer in naming + # and description. For now, just give a reasonable error + # message to users. + raise TypeError( + 'MovieWriter cannot be instantiated directly. Please use one ' + 'of its subclasses.') + + super().__init__(fps=fps, metadata=metadata, codec=codec, + bitrate=bitrate) + + self.frame_format = 'rgba' + self.extra_args = extra_args + + def _adjust_frame_size(self): + if self.codec == 'h264': + wo, ho = self.fig.get_size_inches() + w, h = adjusted_figsize(wo, ho, self.dpi, 2) + if (wo, ho) != (w, h): + self.fig.set_size_inches(w, h, forward=True) + _log.info('figure size in inches has been adjusted ' + 'from %s x %s to %s x %s', wo, ho, w, h) + else: + w, h = self.fig.get_size_inches() + _log.debug('frame size in pixels is %s x %s', *self.frame_size) + return w, h + + def setup(self, fig, outfile, dpi=None): + # docstring inherited + super().setup(fig, outfile, dpi=dpi) + self._w, self._h = self._adjust_frame_size() + # Run here so that grab_frame() can write the data to a pipe. This + # eliminates the need for temp files. + self._run() + + def _run(self): + # Uses subprocess to call the program for assembling frames into a + # movie file. *args* returns the sequence of command line arguments + # from a few configuration options. + command = self._args() + _log.info('MovieWriter._run: running command: %s', + cbook._pformat_subprocess(command)) + PIPE = subprocess.PIPE + self._proc = subprocess.Popen( + command, stdin=PIPE, stdout=PIPE, stderr=PIPE, + creationflags=subprocess_creation_flags) + + def finish(self): + """Finish any processing for writing the movie.""" + self.cleanup() + + def grab_frame(self, **savefig_kwargs): + # docstring inherited + _log.debug('MovieWriter.grab_frame: Grabbing frame.') + # Readjust the figure size in case it has been changed by the user. + # All frames must have the same size to save the movie correctly. + self.fig.set_size_inches(self._w, self._h) + # Save the figure data to the sink, using the frame format and dpi. + self.fig.savefig(self._frame_sink(), format=self.frame_format, + dpi=self.dpi, **savefig_kwargs) + + def _frame_sink(self): + """Return the place to which frames should be written.""" + return self._proc.stdin + + def _args(self): + """Assemble list of encoder-specific command-line arguments.""" + return NotImplementedError("args needs to be implemented by subclass.") + + def cleanup(self): + """Clean-up and collect the process used to write the movie file.""" + out, err = self._proc.communicate() + self._frame_sink().close() + # Use the encoding/errors that universal_newlines would use. 
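        # Clarifying comment (not in the original source): TextIOWrapper with
        # no explicit encoding decodes via locale.getpreferredencoding(False),
        # the same default that subprocess applies in universal_newlines/text
        # mode.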
+ out = TextIOWrapper(BytesIO(out)).read() + err = TextIOWrapper(BytesIO(err)).read() + if out: + _log.log( + logging.WARNING if self._proc.returncode else logging.DEBUG, + "MovieWriter stdout:\n%s", out) + if err: + _log.log( + logging.WARNING if self._proc.returncode else logging.DEBUG, + "MovieWriter stderr:\n%s", err) + if self._proc.returncode: + raise subprocess.CalledProcessError( + self._proc.returncode, self._proc.args, out, err) + + @classmethod + def bin_path(cls): + """ + Return the binary path to the commandline tool used by a specific + subclass. This is a class method so that the tool can be looked for + before making a particular MovieWriter subclass available. + """ + return str(mpl.rcParams[cls._exec_key]) + + @classmethod + def isAvailable(cls): + """Return whether a MovieWriter subclass is actually available.""" + return shutil.which(cls.bin_path()) is not None + + +class FileMovieWriter(MovieWriter): + """ + `MovieWriter` for writing to individual files and stitching at the end. + + This must be sub-classed to be useful. + """ + def __init__(self, *args, **kwargs): + MovieWriter.__init__(self, *args, **kwargs) + self.frame_format = mpl.rcParams['animation.frame_format'] + + @cbook._delete_parameter("3.3", "clear_temp") + def setup(self, fig, outfile, dpi=None, frame_prefix=None, + clear_temp=True): + """ + Setup for writing the movie file. + + Parameters + ---------- + fig : `~matplotlib.figure.Figure` + The figure to grab the rendered frames from. + outfile : str + The filename of the resulting movie file. + dpi : float, optional + The dpi of the output file. This, with the figure size, + controls the size in pixels of the resulting movie file. + Default is ``fig.dpi``. + frame_prefix : str, optional + The filename prefix to use for temporary files. If None (the + default), files are written to a temporary directory which is + deleted by `cleanup` (regardless of the value of *clear_temp*). + clear_temp : bool, optional + If the temporary files should be deleted after stitching + the final result. Setting this to ``False`` can be useful for + debugging. Defaults to ``True``. + """ + self.fig = fig + self.outfile = outfile + if dpi is None: + dpi = self.fig.dpi + self.dpi = dpi + self._adjust_frame_size() + + if frame_prefix is None: + self._tmpdir = TemporaryDirectory() + self.temp_prefix = str(Path(self._tmpdir.name, 'tmp')) + else: + self._tmpdir = None + self.temp_prefix = frame_prefix + self._clear_temp = clear_temp + self._frame_counter = 0 # used for generating sequential file names + self._temp_paths = list() + self.fname_format_str = '%s%%07d.%s' + + @cbook.deprecated("3.3") + @property + def clear_temp(self): + return self._clear_temp + + @clear_temp.setter + def clear_temp(self, value): + self._clear_temp = value + + @property + def frame_format(self): + """ + Format (png, jpeg, etc.) to use for saving the frames, which can be + decided by the individual subclasses. + """ + return self._frame_format + + @frame_format.setter + def frame_format(self, frame_format): + if frame_format in self.supported_formats: + self._frame_format = frame_format + else: + self._frame_format = self.supported_formats[0] + + def _base_temp_name(self): + # Generates a template name (without number) given the frame format + # for extension and the prefix. + return self.fname_format_str % (self.temp_prefix, self.frame_format) + + def _frame_sink(self): + # Creates a filename for saving using the basename and the current + # counter. 
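        # Worked example with illustrative values (a 'tmp' prefix and a 'png'
        # frame format): fname_format_str expands as
        #     '%s%%07d.%s' % ('tmp', 'png')  ->  'tmp%07d.png'
        # and frame number 3 is then written out as 'tmp0000003.png'.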
+ path = Path(self._base_temp_name() % self._frame_counter) + + # Save the filename so we can delete it later if necessary + self._temp_paths.append(path) + _log.debug('FileMovieWriter.frame_sink: saving frame %d to path=%s', + self._frame_counter, path) + self._frame_counter += 1 # Ensures each created name is 'unique' + + # This file returned here will be closed once it's used by savefig() + # because it will no longer be referenced and will be gc-ed. + return open(path, 'wb') + + def grab_frame(self, **savefig_kwargs): + # docstring inherited + # Overloaded to explicitly close temp file. + _log.debug('MovieWriter.grab_frame: Grabbing frame.') + # Tell the figure to save its data to the sink, using the + # frame format and dpi. + with self._frame_sink() as myframesink: + self.fig.savefig(myframesink, format=self.frame_format, + dpi=self.dpi, **savefig_kwargs) + + def finish(self): + # Call run here now that all frame grabbing is done. All temp files + # are available to be assembled. + self._run() + MovieWriter.finish(self) # Will call clean-up + + def cleanup(self): + MovieWriter.cleanup(self) + if self._tmpdir: + _log.debug('MovieWriter: clearing temporary path=%s', self._tmpdir) + self._tmpdir.cleanup() + else: + if self._clear_temp: + _log.debug('MovieWriter: clearing temporary paths=%s', + self._temp_paths) + for path in self._temp_paths: + path.unlink() + + +@writers.register('pillow') +class PillowWriter(AbstractMovieWriter): + @classmethod + def isAvailable(cls): + return True + + def setup(self, fig, outfile, dpi=None): + super().setup(fig, outfile, dpi=dpi) + self._frames = [] + + def grab_frame(self, **savefig_kwargs): + from PIL import Image + buf = BytesIO() + self.fig.savefig( + buf, **{**savefig_kwargs, "format": "rgba", "dpi": self.dpi}) + renderer = self.fig.canvas.get_renderer() + self._frames.append(Image.frombuffer( + "RGBA", self.frame_size, buf.getbuffer(), "raw", "RGBA", 0, 1)) + + def finish(self): + self._frames[0].save( + self.outfile, save_all=True, append_images=self._frames[1:], + duration=int(1000 / self.fps), loop=0) + + +# Base class of ffmpeg information. Has the config keys and the common set +# of arguments that controls the *output* side of things. +class FFMpegBase: + """ + Mixin class for FFMpeg output. + + To be useful this must be multiply-inherited from with a + `MovieWriterBase` sub-class. + """ + + _exec_key = 'animation.ffmpeg_path' + _args_key = 'animation.ffmpeg_args' + + @property + def output_args(self): + args = [] + if Path(self.outfile).suffix == '.gif': + self.codec = 'gif' + else: + args.extend(['-vcodec', self.codec]) + extra_args = (self.extra_args if self.extra_args is not None + else mpl.rcParams[self._args_key]) + # For h264, the default format is yuv444p, which is not compatible + # with quicktime (and others). Specifying yuv420p fixes playback on + # iOS, as well as HTML5 video in firefox and safari (on both Win and + # OSX). Also fixes internet explorer. This is as of 2015/10/29. + if self.codec == 'h264' and '-pix_fmt' not in extra_args: + args.extend(['-pix_fmt', 'yuv420p']) + # For GIF, we're telling FFMPEG to split the video stream, to generate + # a palette, and then use it for encoding. + elif self.codec == 'gif' and '-filter_complex' not in extra_args: + args.extend(['-filter_complex', + 'split [a][b];[a] palettegen [p];[b][p] paletteuse']) + if self.bitrate > 0: + args.extend(['-b', '%dk' % self.bitrate]) # %dk: bitrate in kbps. 
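# Illustrative sketch, not part of the patch above: PillowWriter keeps every
# grabbed frame as a PIL image in memory and writes the animated file in
# finish(), so no external encoder binary is required.  Assumes Pillow is
# installed; "pulse.gif" is a made-up output name.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import PillowWriter

fig, ax = plt.subplots()
x = np.linspace(0, 2 * np.pi, 200)
line, = ax.plot(x, np.cos(x))

writer = PillowWriter(fps=10)
with writer.saving(fig, "pulse.gif", dpi=80):
    for phase in np.linspace(0, 2 * np.pi, 20):
        line.set_ydata(np.cos(x + phase))
        writer.grab_frame()     # appended to the in-memory frame list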
+ args.extend(extra_args) + for k, v in self.metadata.items(): + args.extend(['-metadata', '%s=%s' % (k, v)]) + + return args + ['-y', self.outfile] + + @classmethod + def isAvailable(cls): + return ( + super().isAvailable() + # Ubuntu 12.04 ships a broken ffmpeg binary which we shouldn't use. + # NOTE: when removed, remove the same method in AVConvBase. + and b'LibAv' not in subprocess.run( + [cls.bin_path()], creationflags=subprocess_creation_flags, + stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, + stderr=subprocess.PIPE).stderr) + + +# Combine FFMpeg options with pipe-based writing +@writers.register('ffmpeg') +class FFMpegWriter(FFMpegBase, MovieWriter): + """ + Pipe-based ffmpeg writer. + + Frames are streamed directly to ffmpeg via a pipe and written in a single + pass. + """ + def _args(self): + # Returns the command line parameters for subprocess to use + # ffmpeg to create a movie using a pipe. + args = [self.bin_path(), '-f', 'rawvideo', '-vcodec', 'rawvideo', + '-s', '%dx%d' % self.frame_size, '-pix_fmt', self.frame_format, + '-r', str(self.fps)] + # Logging is quieted because subprocess.PIPE has limited buffer size. + # If you have a lot of frames in your animation and set logging to + # DEBUG, you will have a buffer overrun. + if _log.getEffectiveLevel() > logging.DEBUG: + args += ['-loglevel', 'error'] + args += ['-i', 'pipe:'] + self.output_args + return args + + +# Combine FFMpeg options with temp file-based writing +@writers.register('ffmpeg_file') +class FFMpegFileWriter(FFMpegBase, FileMovieWriter): + """ + File-based ffmpeg writer. + + Frames are written to temporary files on disk and then stitched + together at the end. + """ + supported_formats = ['png', 'jpeg', 'ppm', 'tiff', 'sgi', 'bmp', + 'pbm', 'raw', 'rgba'] + + def _args(self): + # Returns the command line parameters for subprocess to use + # ffmpeg to create a movie using a collection of temp images + return [self.bin_path(), '-r', str(self.fps), + '-i', self._base_temp_name(), + '-vframes', str(self._frame_counter)] + self.output_args + + +# Base class of avconv information. AVConv has identical arguments to FFMpeg. +@cbook.deprecated('3.3') +class AVConvBase(FFMpegBase): + """ + Mixin class for avconv output. + + To be useful this must be multiply-inherited from with a + `MovieWriterBase` sub-class. + """ + + _exec_key = 'animation.avconv_path' + _args_key = 'animation.avconv_args' + + # NOTE : should be removed when the same method is removed in FFMpegBase. + isAvailable = classmethod(MovieWriter.isAvailable.__func__) + + +# Combine AVConv options with pipe-based writing +@writers.register('avconv') +class AVConvWriter(AVConvBase, FFMpegWriter): + """ + Pipe-based avconv writer. + + Frames are streamed directly to avconv via a pipe and written in a single + pass. + """ + + +# Combine AVConv options with file-based writing +@writers.register('avconv_file') +class AVConvFileWriter(AVConvBase, FFMpegFileWriter): + """ + File-based avconv writer. + + Frames are written to temporary files on disk and then stitched + together at the end. + """ + + +# Base class for animated GIFs with ImageMagick +class ImageMagickBase: + """ + Mixin class for ImageMagick output. + + To be useful this must be multiply-inherited from with a + `MovieWriterBase` sub-class. + """ + + _exec_key = 'animation.convert_path' + _args_key = 'animation.convert_args' + + @property + def delay(self): + return 100. 
/ self.fps + + @property + def output_args(self): + extra_args = (self.extra_args if self.extra_args is not None + else mpl.rcParams[self._args_key]) + return [*extra_args, self.outfile] + + @classmethod + def bin_path(cls): + binpath = super().bin_path() + if binpath == 'convert': + binpath = mpl._get_executable_info('magick').executable + return binpath + + @classmethod + def isAvailable(cls): + try: + return super().isAvailable() + except mpl.ExecutableNotFoundError as _enf: + # May be raised by get_executable_info. + _log.debug('ImageMagick unavailable due to: %s', _enf) + return False + + +# Combine ImageMagick options with pipe-based writing +@writers.register('imagemagick') +class ImageMagickWriter(ImageMagickBase, MovieWriter): + """ + Pipe-based animated gif. + + Frames are streamed directly to ImageMagick via a pipe and written + in a single pass. + + """ + def _args(self): + return ([self.bin_path(), + '-size', '%ix%i' % self.frame_size, '-depth', '8', + '-delay', str(self.delay), '-loop', '0', + '%s:-' % self.frame_format] + + self.output_args) + + +# Combine ImageMagick options with temp file-based writing +@writers.register('imagemagick_file') +class ImageMagickFileWriter(ImageMagickBase, FileMovieWriter): + """ + File-based animated gif writer. + + Frames are written to temporary files on disk and then stitched + together at the end. + + """ + + supported_formats = ['png', 'jpeg', 'ppm', 'tiff', 'sgi', 'bmp', + 'pbm', 'raw', 'rgba'] + + def _args(self): + return ([self.bin_path(), '-delay', str(self.delay), '-loop', '0', + '%s*.%s' % (self.temp_prefix, self.frame_format)] + + self.output_args) + + +# Taken directly from jakevdp's JSAnimation package at +# http://github.com/jakevdp/JSAnimation +def _included_frames(paths, frame_format): + """paths should be a list of Paths""" + return INCLUDED_FRAMES.format(Nframes=len(paths), + frame_dir=paths[0].parent, + frame_format=frame_format) + + +def _embedded_frames(frame_list, frame_format): + """frame_list should be a list of base64-encoded png files""" + template = ' frames[{0}] = "data:image/{1};base64,{2}"\n' + return "\n" + "".join( + template.format(i, frame_format, frame_data.replace('\n', '\\\n')) + for i, frame_data in enumerate(frame_list)) + + +@writers.register('html') +class HTMLWriter(FileMovieWriter): + """Writer for JavaScript-based HTML movies.""" + + supported_formats = ['png', 'jpeg', 'tiff', 'svg'] + + @cbook.deprecated("3.3") + @property + def args_key(self): + return 'animation.html_args' + + @classmethod + def isAvailable(cls): + return True + + def __init__(self, fps=30, codec=None, bitrate=None, extra_args=None, + metadata=None, embed_frames=False, default_mode='loop', + embed_limit=None): + + if extra_args: + _log.warning("HTMLWriter ignores 'extra_args'") + extra_args = () # Don't lookup nonexistent rcParam[args_key]. 
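# Illustrative sketch, not part of the patch above: output_args appends the
# codec, pixel-format and bitrate flags, then whatever the caller passed as
# extra_args, so encoder options go straight through to ffmpeg.  The flags
# below (-pix_fmt, -crf) are ffmpeg's own and are shown only as an example of
# extra_args; the ImageMagick writers are configured the same way but assemble
# a convert/magick command instead.
from matplotlib.animation import FFMpegWriter

writer = FFMpegWriter(
    fps=24,
    codec="h264",
    extra_args=["-pix_fmt", "yuv420p", "-crf", "23"],
    metadata={"artist": "example", "comment": "pipe-based h264 encode"},
)
# An Animation object could then be saved with: anim.save("clip.mp4", writer=writer)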
+ self.embed_frames = embed_frames + self.default_mode = default_mode.lower() + cbook._check_in_list(['loop', 'once', 'reflect'], + default_mode=self.default_mode) + + # Save embed limit, which is given in MB + if embed_limit is None: + self._bytes_limit = mpl.rcParams['animation.embed_limit'] + else: + self._bytes_limit = embed_limit + # Convert from MB to bytes + self._bytes_limit *= 1024 * 1024 + + super().__init__(fps, codec, bitrate, extra_args, metadata) + + def setup(self, fig, outfile, dpi, frame_dir=None): + outfile = Path(outfile) + cbook._check_in_list(['.html', '.htm'], + outfile_extension=outfile.suffix) + + self._saved_frames = [] + self._total_bytes = 0 + self._hit_limit = False + + if not self.embed_frames: + if frame_dir is None: + frame_dir = outfile.with_name(outfile.stem + '_frames') + frame_dir.mkdir(parents=True, exist_ok=True) + frame_prefix = frame_dir / 'frame' + else: + frame_prefix = None + + super().setup(fig, outfile, dpi, frame_prefix) + self._clear_temp = False + + def grab_frame(self, **savefig_kwargs): + if self.embed_frames: + # Just stop processing if we hit the limit + if self._hit_limit: + return + f = BytesIO() + self.fig.savefig(f, format=self.frame_format, + dpi=self.dpi, **savefig_kwargs) + imgdata64 = base64.encodebytes(f.getvalue()).decode('ascii') + self._total_bytes += len(imgdata64) + if self._total_bytes >= self._bytes_limit: + _log.warning( + "Animation size has reached %s bytes, exceeding the limit " + "of %s. If you're sure you want a larger animation " + "embedded, set the animation.embed_limit rc parameter to " + "a larger value (in MB). This and further frames will be " + "dropped.", self._total_bytes, self._bytes_limit) + self._hit_limit = True + else: + self._saved_frames.append(imgdata64) + else: + return super().grab_frame(**savefig_kwargs) + + def finish(self): + # save the frames to an html file + if self.embed_frames: + fill_frames = _embedded_frames(self._saved_frames, + self.frame_format) + Nframes = len(self._saved_frames) + else: + # temp names is filled by FileMovieWriter + fill_frames = _included_frames(self._temp_paths, self.frame_format) + Nframes = len(self._temp_paths) + mode_dict = dict(once_checked='', + loop_checked='', + reflect_checked='') + mode_dict[self.default_mode + '_checked'] = 'checked' + + interval = 1000 // self.fps + + with open(self.outfile, 'w') as of: + of.write(JS_INCLUDE + STYLE_INCLUDE) + of.write(DISPLAY_TEMPLATE.format(id=uuid.uuid4().hex, + Nframes=Nframes, + fill_frames=fill_frames, + interval=interval, + **mode_dict)) + + +class Animation: + """ + A base class for Animations. + + This class is not usable as is, and should be subclassed to provide needed + behavior. + + Parameters + ---------- + fig : `~matplotlib.figure.Figure` + The figure object used to get needed events, such as draw or resize. + + event_source : object, optional + A class that can run a callback when desired events + are generated, as well as be stopped and started. + + Examples include timers (see `TimedAnimation`) and file + system notifications. + + blit : bool, default: False + Whether blitting is used to optimize drawing. + + See Also + -------- + FuncAnimation, ArtistAnimation + """ + + def __init__(self, fig, event_source=None, blit=False): + self._fig = fig + # Disables blitting for backends that don't support it. This + # allows users to request it if available, but still have a + # fallback that works if it is not. + self._blit = blit and fig.canvas.supports_blit + + # These are the basics of the animation. 
The frame sequence represents + # information for each frame of the animation and depends on how the + # drawing is handled by the subclasses. The event source fires events + # that cause the frame sequence to be iterated. + self.frame_seq = self.new_frame_seq() + self.event_source = event_source + + # Instead of starting the event source now, we connect to the figure's + # draw_event, so that we only start once the figure has been drawn. + self._first_draw_id = fig.canvas.mpl_connect('draw_event', self._start) + + # Connect to the figure's close_event so that we don't continue to + # fire events and try to draw to a deleted figure. + self._close_id = self._fig.canvas.mpl_connect('close_event', + self._stop) + if self._blit: + self._setup_blit() + + def _start(self, *args): + """ + Starts interactive animation. Adds the draw frame command to the GUI + handler, calls show to start the event loop. + """ + # Do not start the event source if saving() it. + if self._fig.canvas.is_saving(): + return + # First disconnect our draw event handler + self._fig.canvas.mpl_disconnect(self._first_draw_id) + + # Now do any initial draw + self._init_draw() + + # Add our callback for stepping the animation and + # actually start the event_source. + self.event_source.add_callback(self._step) + self.event_source.start() + + def _stop(self, *args): + # On stop we disconnect all of our events. + if self._blit: + self._fig.canvas.mpl_disconnect(self._resize_id) + self._fig.canvas.mpl_disconnect(self._close_id) + self.event_source.remove_callback(self._step) + self.event_source = None + + def save(self, filename, writer=None, fps=None, dpi=None, codec=None, + bitrate=None, extra_args=None, metadata=None, extra_anim=None, + savefig_kwargs=None, *, progress_callback=None): + """ + Save the animation as a movie file by drawing every frame. + + Parameters + ---------- + filename : str + The output filename, e.g., :file:`mymovie.mp4`. + + writer : `MovieWriter` or str, default: :rc:`animation.writer` + A `MovieWriter` instance to use or a key that identifies a + class to use, such as 'ffmpeg'. + + fps : int, optional + Movie frame rate (per second). If not set, the frame rate from the + animation's frame interval. + + dpi : float, default: :rc:`savefig.dpi` + Controls the dots per inch for the movie frames. Together with + the figure's size in inches, this controls the size of the movie. + + codec : str, default: :rc:`animation.codec`. + The video codec to use. Not all codecs are supported by a given + `MovieWriter`. + + bitrate : int, default: :rc:`animation.bitrate` + The bitrate of the movie, in kilobits per second. Higher values + means higher quality movies, but increase the file size. A value + of -1 lets the underlying movie encoder select the bitrate. + + extra_args : list of str or None, optional + Extra command-line arguments passed to the underlying movie + encoder. The default, None, means to use + :rc:`animation.[name-of-encoder]_args` for the builtin writers. + + metadata : Dict[str, str], default {} + Dictionary of keys and values for metadata to include in + the output file. Some keys that may be of use include: + title, artist, genre, subject, copyright, srcform, comment. + + extra_anim : list, default: [] + Additional `Animation` objects that should be included + in the saved movie file. These need to be from the same + `matplotlib.figure.Figure` instance. Also, animation frames will + just be simply combined, so there should be a 1:1 correspondence + between the frames from the different animations. 
+ + savefig_kwargs : dict, default: {} + Keyword arguments passed to each `~.Figure.savefig` call used to + save the individual frames. + + progress_callback : function, optional + A callback function that will be called for every frame to notify + the saving progress. It must have the signature :: + + def func(current_frame: int, total_frames: int) -> Any + + where *current_frame* is the current frame number and + *total_frames* is the total number of frames to be saved. + *total_frames* is set to None, if the total number of frames can + not be determined. Return values may exist but are ignored. + + Example code to write the progress to stdout:: + + progress_callback =\ + lambda i, n: print(f'Saving frame {i} of {n}') + + Notes + ----- + *fps*, *codec*, *bitrate*, *extra_args* and *metadata* are used to + construct a `.MovieWriter` instance and can only be passed if + *writer* is a string. If they are passed as non-*None* and *writer* + is a `.MovieWriter`, a `RuntimeError` will be raised. + """ + + if writer is None: + writer = mpl.rcParams['animation.writer'] + elif (not isinstance(writer, str) and + any(arg is not None + for arg in (fps, codec, bitrate, extra_args, metadata))): + raise RuntimeError('Passing in values for arguments ' + 'fps, codec, bitrate, extra_args, or metadata ' + 'is not supported when writer is an existing ' + 'MovieWriter instance. These should instead be ' + 'passed as arguments when creating the ' + 'MovieWriter instance.') + + if savefig_kwargs is None: + savefig_kwargs = {} + + if fps is None and hasattr(self, '_interval'): + # Convert interval in ms to frames per second + fps = 1000. / self._interval + + # Re-use the savefig DPI for ours if none is given + if dpi is None: + dpi = mpl.rcParams['savefig.dpi'] + if dpi == 'figure': + dpi = self._fig.dpi + + writer_kwargs = {} + if codec is not None: + writer_kwargs['codec'] = codec + if bitrate is not None: + writer_kwargs['bitrate'] = bitrate + if extra_args is not None: + writer_kwargs['extra_args'] = extra_args + if metadata is not None: + writer_kwargs['metadata'] = metadata + + all_anim = [self] + if extra_anim is not None: + all_anim.extend(anim + for anim + in extra_anim if anim._fig is self._fig) + + # If we have the name of a writer, instantiate an instance of the + # registered class. + if isinstance(writer, str): + try: + writer_cls = writers[writer] + except RuntimeError: # Raised if not available. + writer_cls = PillowWriter # Always available. + _log.warning("MovieWriter %s unavailable; using Pillow " + "instead.", writer) + writer = writer_cls(fps, **writer_kwargs) + _log.info('Animation.save using %s', type(writer)) + + if 'bbox_inches' in savefig_kwargs: + _log.warning("Warning: discarding the 'bbox_inches' argument in " + "'savefig_kwargs' as it may cause frame size " + "to vary, which is inappropriate for animation.") + savefig_kwargs.pop('bbox_inches') + + # Create a new sequence of frames for saved data. This is different + # from new_frame_seq() to give the ability to save 'live' generated + # frame information to be saved later. + # TODO: Right now, after closing the figure, saving a movie won't work + # since GUI widgets are gone. Either need to remove extra code to + # allow for this non-existent use case or find a way to make it work. 
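# Illustrative sketch, not part of the patch above: save() resolves a writer
# name through the registry (falling back to PillowWriter if the requested one
# is unavailable) and invokes progress_callback once per grabbed frame.
# FuncAnimation is the Animation subclass defined later in this module; the
# data and file name below are made up.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation

fig, ax = plt.subplots()
x = np.linspace(0, 2 * np.pi, 200)
line, = ax.plot(x, np.sin(x))

def update(frame):
    line.set_ydata(np.sin(x + frame / 10))
    return [line]

anim = FuncAnimation(fig, update, frames=50, interval=40)
anim.save("wave.gif", writer="pillow", dpi=80,
          progress_callback=lambda i, n: print(f"saving frame {i} of {n}"))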
+ if mpl.rcParams['savefig.bbox'] == 'tight': + _log.info("Disabling savefig.bbox = 'tight', as it may cause " + "frame size to vary, which is inappropriate for " + "animation.") + # canvas._is_saving = True makes the draw_event animation-starting + # callback a no-op; canvas.manager = None prevents resizing the GUI + # widget (both are likewise done in savefig()). + with mpl.rc_context({'savefig.bbox': None}), \ + writer.saving(self._fig, filename, dpi), \ + cbook._setattr_cm(self._fig.canvas, + _is_saving=True, manager=None): + for anim in all_anim: + anim._init_draw() # Clear the initial frame + frame_number = 0 + # TODO: Currently only FuncAnimation has a save_count + # attribute. Can we generalize this to all Animations? + save_count_list = [getattr(a, 'save_count', None) + for a in all_anim] + if None in save_count_list: + total_frames = None + else: + total_frames = sum(save_count_list) + for data in zip(*[a.new_saved_frame_seq() for a in all_anim]): + for anim, d in zip(all_anim, data): + # TODO: See if turning off blit is really necessary + anim._draw_next_frame(d, blit=False) + if progress_callback is not None: + progress_callback(frame_number, total_frames) + frame_number += 1 + writer.grab_frame(**savefig_kwargs) + + def _step(self, *args): + """ + Handler for getting events. By default, gets the next frame in the + sequence and hands the data off to be drawn. + """ + # Returns True to indicate that the event source should continue to + # call _step, until the frame sequence reaches the end of iteration, + # at which point False will be returned. + try: + framedata = next(self.frame_seq) + self._draw_next_frame(framedata, self._blit) + return True + except StopIteration: + return False + + def new_frame_seq(self): + """Return a new sequence of frame information.""" + # Default implementation is just an iterator over self._framedata + return iter(self._framedata) + + def new_saved_frame_seq(self): + """Return a new sequence of saved/cached frame information.""" + # Default is the same as the regular frame sequence + return self.new_frame_seq() + + def _draw_next_frame(self, framedata, blit): + # Breaks down the drawing of the next frame into steps of pre- and + # post- draw, as well as the drawing of the frame itself. + self._pre_draw(framedata, blit) + self._draw_frame(framedata) + self._post_draw(framedata, blit) + + def _init_draw(self): + # Initial draw to clear the frame. Also used by the blitting code + # when a clean base is required. + pass + + def _pre_draw(self, framedata, blit): + # Perform any cleaning or whatnot before the drawing of the frame. + # This default implementation allows blit to clear the frame. + if blit: + self._blit_clear(self._drawn_artists) + + def _draw_frame(self, framedata): + # Performs actual drawing of the frame. + raise NotImplementedError('Needs to be implemented by subclasses to' + ' actually make an animation.') + + def _post_draw(self, framedata, blit): + # After the frame is rendered, this handles the actual flushing of + # the draw, which can be a direct draw_idle() or make use of the + # blitting. + if blit and self._drawn_artists: + self._blit_draw(self._drawn_artists) + else: + self._fig.canvas.draw_idle() + + # The rest of the code in this class is to facilitate easy blitting + def _blit_draw(self, artists): + # Handles blitted drawing, which renders only the artists given instead + # of the entire figure. + updated_ax = {a.axes for a in artists} + # Enumerate artists to cache axes' backgrounds. 
We do not draw + # artists yet to not cache foreground from plots with shared axes + for ax in updated_ax: + # If we haven't cached the background for the current view of this + # axes object, do so now. This might not always be reliable, but + # it's an attempt to automate the process. + cur_view = ax._get_view() + view, bg = self._blit_cache.get(ax, (object(), None)) + if cur_view != view: + self._blit_cache[ax] = ( + cur_view, ax.figure.canvas.copy_from_bbox(ax.bbox)) + # Make a separate pass to draw foreground. + for a in artists: + a.axes.draw_artist(a) + # After rendering all the needed artists, blit each axes individually. + for ax in updated_ax: + ax.figure.canvas.blit(ax.bbox) + + def _blit_clear(self, artists): + # Get a list of the axes that need clearing from the artists that + # have been drawn. Grab the appropriate saved background from the + # cache and restore. + axes = {a.axes for a in artists} + for ax in axes: + try: + view, bg = self._blit_cache[ax] + except KeyError: + continue + if ax._get_view() == view: + ax.figure.canvas.restore_region(bg) + else: + self._blit_cache.pop(ax) + + def _setup_blit(self): + # Setting up the blit requires: a cache of the background for the + # axes + self._blit_cache = dict() + self._drawn_artists = [] + self._resize_id = self._fig.canvas.mpl_connect('resize_event', + self._on_resize) + self._post_draw(None, self._blit) + + def _on_resize(self, event): + # On resize, we need to disable the resize event handling so we don't + # get too many events. Also stop the animation events, so that + # we're paused. Reset the cache and re-init. Set up an event handler + # to catch once the draw has actually taken place. + self._fig.canvas.mpl_disconnect(self._resize_id) + self.event_source.stop() + self._blit_cache.clear() + self._init_draw() + self._resize_id = self._fig.canvas.mpl_connect('draw_event', + self._end_redraw) + + def _end_redraw(self, event): + # Now that the redraw has happened, do the post draw flushing and + # blit handling. Then re-enable all of the original events. + self._post_draw(None, False) + self.event_source.start() + self._fig.canvas.mpl_disconnect(self._resize_id) + self._resize_id = self._fig.canvas.mpl_connect('resize_event', + self._on_resize) + + def to_html5_video(self, embed_limit=None): + """ + Convert the animation to an HTML5 ``