/* NOTE: a pasted file-browser header ("zlib.c 186 KB") and its line-number
   gutter were removed from the top of this listing. */
  1. /*
  2. * zlib amalagamation for SSFN. Contains the deflate stream functions and compress but nothing else.
  3. */
  4. /* adler32.c -- compute the Adler-32 checksum of a data stream
  5. * Copyright (C) 1995-2011, 2016 Mark Adler
  6. * For conditions of distribution and use, see copyright notice in zlib.h
  7. */
  8. /* @(#) $Id$ */
  9. #include "zlib.h"
  10. local uLong adler32_combine_ OF((uLong adler1, uLong adler2, z_off64_t len2));
  11. #define BASE 65521U /* largest prime smaller than 65536 */
  12. #define NMAX 5552
  13. /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
  14. #define DO1(buf,i) {adler += (buf)[i]; sum2 += adler;}
  15. #define DO2(buf,i) DO1(buf,i); DO1(buf,i+1);
  16. #define DO4(buf,i) DO2(buf,i); DO2(buf,i+2);
  17. #define DO8(buf,i) DO4(buf,i); DO4(buf,i+4);
  18. #define DO16(buf) DO8(buf,0); DO8(buf,8);
  19. /* use NO_DIVIDE if your processor does not do division in hardware --
  20. try it both ways to see which is faster */
  21. #ifdef NO_DIVIDE
  22. /* note that this assumes BASE is 65521, where 65536 % 65521 == 15
  23. (thank you to John Reiser for pointing this out) */
  24. # define CHOP(a) \
  25. do { \
  26. unsigned long tmp = a >> 16; \
  27. a &= 0xffffUL; \
  28. a += (tmp << 4) - tmp; \
  29. } while (0)
  30. # define MOD28(a) \
  31. do { \
  32. CHOP(a); \
  33. if (a >= BASE) a -= BASE; \
  34. } while (0)
  35. # define MOD(a) \
  36. do { \
  37. CHOP(a); \
  38. MOD28(a); \
  39. } while (0)
  40. # define MOD63(a) \
  41. do { /* this assumes a is not negative */ \
  42. z_off64_t tmp = a >> 32; \
  43. a &= 0xffffffffL; \
  44. a += (tmp << 8) - (tmp << 5) + tmp; \
  45. tmp = a >> 16; \
  46. a &= 0xffffL; \
  47. a += (tmp << 4) - tmp; \
  48. tmp = a >> 16; \
  49. a &= 0xffffL; \
  50. a += (tmp << 4) - tmp; \
  51. if (a >= BASE) a -= BASE; \
  52. } while (0)
  53. #else
  54. # define MOD(a) a %= BASE
  55. # define MOD28(a) a %= BASE
  56. # define MOD63(a) a %= BASE
  57. #endif
  58. /* ========================================================================= */
  59. uLong ZEXPORT adler32_z(adler, buf, len)
  60. uLong adler;
  61. const Bytef *buf;
  62. z_size_t len;
  63. {
  64. unsigned long sum2;
  65. unsigned n;
  66. /* split Adler-32 into component sums */
  67. sum2 = (adler >> 16) & 0xffff;
  68. adler &= 0xffff;
  69. /* in case user likes doing a byte at a time, keep it fast */
  70. if (len == 1) {
  71. adler += buf[0];
  72. if (adler >= BASE)
  73. adler -= BASE;
  74. sum2 += adler;
  75. if (sum2 >= BASE)
  76. sum2 -= BASE;
  77. return adler | (sum2 << 16);
  78. }
  79. /* initial Adler-32 value (deferred check for len == 1 speed) */
  80. if (buf == Z_NULL)
  81. return 1L;
  82. /* in case short lengths are provided, keep it somewhat fast */
  83. if (len < 16) {
  84. while (len--) {
  85. adler += *buf++;
  86. sum2 += adler;
  87. }
  88. if (adler >= BASE)
  89. adler -= BASE;
  90. MOD28(sum2); /* only added so many BASE's */
  91. return adler | (sum2 << 16);
  92. }
  93. /* do length NMAX blocks -- requires just one modulo operation */
  94. while (len >= NMAX) {
  95. len -= NMAX;
  96. n = NMAX / 16; /* NMAX is divisible by 16 */
  97. do {
  98. DO16(buf); /* 16 sums unrolled */
  99. buf += 16;
  100. } while (--n);
  101. MOD(adler);
  102. MOD(sum2);
  103. }
  104. /* do remaining bytes (less than NMAX, still just one modulo) */
  105. if (len) { /* avoid modulos if none remaining */
  106. while (len >= 16) {
  107. len -= 16;
  108. DO16(buf);
  109. buf += 16;
  110. }
  111. while (len--) {
  112. adler += *buf++;
  113. sum2 += adler;
  114. }
  115. MOD(adler);
  116. MOD(sum2);
  117. }
  118. /* return recombined sums */
  119. return adler | (sum2 << 16);
  120. }
  121. /* ========================================================================= */
  122. uLong ZEXPORT adler32(adler, buf, len)
  123. uLong adler;
  124. const Bytef *buf;
  125. uInt len;
  126. {
  127. return adler32_z(adler, buf, len);
  128. }
  129. /* ========================================================================= */
  130. local uLong adler32_combine_(adler1, adler2, len2)
  131. uLong adler1;
  132. uLong adler2;
  133. z_off64_t len2;
  134. {
  135. unsigned long sum1;
  136. unsigned long sum2;
  137. unsigned rem;
  138. /* for negative len, return invalid adler32 as a clue for debugging */
  139. if (len2 < 0)
  140. return 0xffffffffUL;
  141. /* the derivation of this formula is left as an exercise for the reader */
  142. MOD63(len2); /* assumes len2 >= 0 */
  143. rem = (unsigned)len2;
  144. sum1 = adler1 & 0xffff;
  145. sum2 = rem * sum1;
  146. MOD(sum2);
  147. sum1 += (adler2 & 0xffff) + BASE - 1;
  148. sum2 += ((adler1 >> 16) & 0xffff) + ((adler2 >> 16) & 0xffff) + BASE - rem;
  149. if (sum1 >= BASE) sum1 -= BASE;
  150. if (sum1 >= BASE) sum1 -= BASE;
  151. if (sum2 >= ((unsigned long)BASE << 1)) sum2 -= ((unsigned long)BASE << 1);
  152. if (sum2 >= BASE) sum2 -= BASE;
  153. return sum1 | (sum2 << 16);
  154. }
  155. /* ========================================================================= */
  156. uLong ZEXPORT adler32_combine(adler1, adler2, len2)
  157. uLong adler1;
  158. uLong adler2;
  159. z_off_t len2;
  160. {
  161. return adler32_combine_(adler1, adler2, len2);
  162. }
  163. uLong ZEXPORT adler32_combine64(adler1, adler2, len2)
  164. uLong adler1;
  165. uLong adler2;
  166. z_off64_t len2;
  167. {
  168. return adler32_combine_(adler1, adler2, len2);
  169. }
  170. /* crc32.c -- compute the CRC-32 of a data stream
  171. * Copyright (C) 1995-2006, 2010, 2011, 2012, 2016 Mark Adler
  172. * For conditions of distribution and use, see copyright notice in zlib.h
  173. *
  174. * Thanks to Rodney Brown <rbrown64@csc.com.au> for his contribution of faster
  175. * CRC methods: exclusive-oring 32 bits of data at a time, and pre-computing
  176. * tables for updating the shift register in one step with three exclusive-ors
  177. * instead of four steps with four exclusive-ors. This results in about a
  178. * factor of two increase in speed on a Power PC G4 (PPC7455) using gcc -O3.
  179. */
  180. /* @(#) $Id$ */
  181. /*
  182. Note on the use of DYNAMIC_CRC_TABLE: there is no mutex or semaphore
  183. protection on the static variables used to control the first-use generation
  184. of the crc tables. Therefore, if you #define DYNAMIC_CRC_TABLE, you should
  185. first call get_crc_table() to initialize the tables before allowing more than
  186. one thread to use crc32().
  187. DYNAMIC_CRC_TABLE and MAKECRCH can be #defined to write out crc32.h.
  188. */
  189. #ifdef MAKECRCH
  190. # include <stdio.h>
  191. # ifndef DYNAMIC_CRC_TABLE
  192. # define DYNAMIC_CRC_TABLE
  193. # endif /* !DYNAMIC_CRC_TABLE */
  194. #endif /* MAKECRCH */
  195. /* Definitions for doing the crc four data bytes at a time. */
  196. #if !defined(NOBYFOUR) && defined(Z_U4)
  197. # define BYFOUR
  198. #endif
  199. #ifdef BYFOUR
  200. local unsigned long crc32_little OF((unsigned long,
  201. const unsigned char FAR *, z_size_t));
  202. local unsigned long crc32_big OF((unsigned long,
  203. const unsigned char FAR *, z_size_t));
  204. # define TBLS 8
  205. #else
  206. # define TBLS 1
  207. #endif /* BYFOUR */
  208. /* Local functions for crc concatenation */
  209. local unsigned long gf2_matrix_times OF((unsigned long *mat,
  210. unsigned long vec));
  211. local void gf2_matrix_square OF((unsigned long *square, unsigned long *mat));
  212. local uLong crc32_combine_ OF((uLong crc1, uLong crc2, z_off64_t len2));
  213. #ifdef DYNAMIC_CRC_TABLE
  214. local volatile int crc_table_empty = 1;
  215. local z_crc_t FAR crc_table[TBLS][256];
  216. local void make_crc_table OF((void));
  217. #ifdef MAKECRCH
  218. local void write_table OF((FILE *, const z_crc_t FAR *));
  219. #endif /* MAKECRCH */
  220. /*
  221. Generate tables for a byte-wise 32-bit CRC calculation on the polynomial:
  222. x^32+x^26+x^23+x^22+x^16+x^12+x^11+x^10+x^8+x^7+x^5+x^4+x^2+x+1.
  223. Polynomials over GF(2) are represented in binary, one bit per coefficient,
  224. with the lowest powers in the most significant bit. Then adding polynomials
  225. is just exclusive-or, and multiplying a polynomial by x is a right shift by
  226. one. If we call the above polynomial p, and represent a byte as the
  227. polynomial q, also with the lowest power in the most significant bit (so the
  228. byte 0xb1 is the polynomial x^7+x^3+x+1), then the CRC is (q*x^32) mod p,
  229. where a mod b means the remainder after dividing a by b.
  230. This calculation is done using the shift-register method of multiplying and
  231. taking the remainder. The register is initialized to zero, and for each
  232. incoming bit, x^32 is added mod p to the register if the bit is a one (where
  233. x^32 mod p is p+x^32 = x^26+...+1), and the register is multiplied mod p by
  234. x (which is shifting right by one and adding x^32 mod p if the bit shifted
  235. out is a one). We start with the highest power (least significant bit) of
  236. q and repeat for all eight bits of q.
  237. The first table is simply the CRC of all possible eight bit values. This is
  238. all the information needed to generate CRCs on data a byte at a time for all
  239. combinations of CRC register values and incoming bytes. The remaining tables
  240. allow for word-at-a-time CRC calculation for both big-endian and little-
  241. endian machines, where a word is four bytes.
  242. */
  243. local void make_crc_table()
  244. {
  245. z_crc_t c;
  246. int n, k;
  247. z_crc_t poly; /* polynomial exclusive-or pattern */
  248. /* terms of polynomial defining this crc (except x^32): */
  249. static volatile int first = 1; /* flag to limit concurrent making */
  250. static const unsigned char p[] = {0,1,2,4,5,7,8,10,11,12,16,22,23,26};
  251. /* See if another task is already doing this (not thread-safe, but better
  252. than nothing -- significantly reduces duration of vulnerability in
  253. case the advice about DYNAMIC_CRC_TABLE is ignored) */
  254. if (first) {
  255. first = 0;
  256. /* make exclusive-or pattern from polynomial (0xedb88320UL) */
  257. poly = 0;
  258. for (n = 0; n < (int)(sizeof(p)/sizeof(unsigned char)); n++)
  259. poly |= (z_crc_t)1 << (31 - p[n]);
  260. /* generate a crc for every 8-bit value */
  261. for (n = 0; n < 256; n++) {
  262. c = (z_crc_t)n;
  263. for (k = 0; k < 8; k++)
  264. c = c & 1 ? poly ^ (c >> 1) : c >> 1;
  265. crc_table[0][n] = c;
  266. }
  267. #ifdef BYFOUR
  268. /* generate crc for each value followed by one, two, and three zeros,
  269. and then the byte reversal of those as well as the first table */
  270. for (n = 0; n < 256; n++) {
  271. c = crc_table[0][n];
  272. crc_table[4][n] = ZSWAP32(c);
  273. for (k = 1; k < 4; k++) {
  274. c = crc_table[0][c & 0xff] ^ (c >> 8);
  275. crc_table[k][n] = c;
  276. crc_table[k + 4][n] = ZSWAP32(c);
  277. }
  278. }
  279. #endif /* BYFOUR */
  280. crc_table_empty = 0;
  281. }
  282. else { /* not first */
  283. /* wait for the other guy to finish (not efficient, but rare) */
  284. while (crc_table_empty)
  285. ;
  286. }
  287. #ifdef MAKECRCH
  288. /* write out CRC tables to crc32.h */
  289. {
  290. FILE *out;
  291. out = fopen("crc32.h", "w");
  292. if (out == NULL) return;
  293. fprintf(out, "/* crc32.h -- tables for rapid CRC calculation\n");
  294. fprintf(out, " * Generated automatically by crc32.c\n */\n\n");
  295. fprintf(out, "local const z_crc_t FAR ");
  296. fprintf(out, "crc_table[TBLS][256] =\n{\n {\n");
  297. write_table(out, crc_table[0]);
  298. # ifdef BYFOUR
  299. fprintf(out, "#ifdef BYFOUR\n");
  300. for (k = 1; k < 8; k++) {
  301. fprintf(out, " },\n {\n");
  302. write_table(out, crc_table[k]);
  303. }
  304. fprintf(out, "#endif\n");
  305. # endif /* BYFOUR */
  306. fprintf(out, " }\n};\n");
  307. fclose(out);
  308. }
  309. #endif /* MAKECRCH */
  310. }
  311. #ifdef MAKECRCH
  312. local void write_table(out, table)
  313. FILE *out;
  314. const z_crc_t FAR *table;
  315. {
  316. int n;
  317. for (n = 0; n < 256; n++)
  318. fprintf(out, "%s0x%08lxUL%s", n % 5 ? "" : " ",
  319. (unsigned long)(table[n]),
  320. n == 255 ? "\n" : (n % 5 == 4 ? ",\n" : ", "));
  321. }
  322. #endif /* MAKECRCH */
  323. #else /* !DYNAMIC_CRC_TABLE */
  324. /* ========================================================================
  325. * Tables of CRC-32s of all single-byte values, made by make_crc_table().
  326. */
  327. /* crc32.h -- tables for rapid CRC calculation
  328. * Generated automatically by crc32.c
  329. */
/* Pre-computed CRC tables (output of make_crc_table when DYNAMIC_CRC_TABLE
   is defined).  crc_table[0] is the classic byte-at-a-time table indexed by
   the DO1 macro below; the seven extra tables exist only under BYFOUR and
   feed the word-at-a-time DOLIT4/DOBIG4 code that reads the buffer through
   a 32-bit pointer. */
local const z_crc_t FAR crc_table[TBLS][256] =
{
  {
    0x00000000UL, 0x77073096UL, 0xee0e612cUL, 0x990951baUL, 0x076dc419UL,
    0x706af48fUL, 0xe963a535UL, 0x9e6495a3UL, 0x0edb8832UL, 0x79dcb8a4UL,
    0xe0d5e91eUL, 0x97d2d988UL, 0x09b64c2bUL, 0x7eb17cbdUL, 0xe7b82d07UL,
    0x90bf1d91UL, 0x1db71064UL, 0x6ab020f2UL, 0xf3b97148UL, 0x84be41deUL,
    0x1adad47dUL, 0x6ddde4ebUL, 0xf4d4b551UL, 0x83d385c7UL, 0x136c9856UL,
    0x646ba8c0UL, 0xfd62f97aUL, 0x8a65c9ecUL, 0x14015c4fUL, 0x63066cd9UL,
    0xfa0f3d63UL, 0x8d080df5UL, 0x3b6e20c8UL, 0x4c69105eUL, 0xd56041e4UL,
    0xa2677172UL, 0x3c03e4d1UL, 0x4b04d447UL, 0xd20d85fdUL, 0xa50ab56bUL,
    0x35b5a8faUL, 0x42b2986cUL, 0xdbbbc9d6UL, 0xacbcf940UL, 0x32d86ce3UL,
    0x45df5c75UL, 0xdcd60dcfUL, 0xabd13d59UL, 0x26d930acUL, 0x51de003aUL,
    0xc8d75180UL, 0xbfd06116UL, 0x21b4f4b5UL, 0x56b3c423UL, 0xcfba9599UL,
    0xb8bda50fUL, 0x2802b89eUL, 0x5f058808UL, 0xc60cd9b2UL, 0xb10be924UL,
    0x2f6f7c87UL, 0x58684c11UL, 0xc1611dabUL, 0xb6662d3dUL, 0x76dc4190UL,
    0x01db7106UL, 0x98d220bcUL, 0xefd5102aUL, 0x71b18589UL, 0x06b6b51fUL,
    0x9fbfe4a5UL, 0xe8b8d433UL, 0x7807c9a2UL, 0x0f00f934UL, 0x9609a88eUL,
    0xe10e9818UL, 0x7f6a0dbbUL, 0x086d3d2dUL, 0x91646c97UL, 0xe6635c01UL,
    0x6b6b51f4UL, 0x1c6c6162UL, 0x856530d8UL, 0xf262004eUL, 0x6c0695edUL,
    0x1b01a57bUL, 0x8208f4c1UL, 0xf50fc457UL, 0x65b0d9c6UL, 0x12b7e950UL,
    0x8bbeb8eaUL, 0xfcb9887cUL, 0x62dd1ddfUL, 0x15da2d49UL, 0x8cd37cf3UL,
    0xfbd44c65UL, 0x4db26158UL, 0x3ab551ceUL, 0xa3bc0074UL, 0xd4bb30e2UL,
    0x4adfa541UL, 0x3dd895d7UL, 0xa4d1c46dUL, 0xd3d6f4fbUL, 0x4369e96aUL,
    0x346ed9fcUL, 0xad678846UL, 0xda60b8d0UL, 0x44042d73UL, 0x33031de5UL,
    0xaa0a4c5fUL, 0xdd0d7cc9UL, 0x5005713cUL, 0x270241aaUL, 0xbe0b1010UL,
    0xc90c2086UL, 0x5768b525UL, 0x206f85b3UL, 0xb966d409UL, 0xce61e49fUL,
    0x5edef90eUL, 0x29d9c998UL, 0xb0d09822UL, 0xc7d7a8b4UL, 0x59b33d17UL,
    0x2eb40d81UL, 0xb7bd5c3bUL, 0xc0ba6cadUL, 0xedb88320UL, 0x9abfb3b6UL,
    0x03b6e20cUL, 0x74b1d29aUL, 0xead54739UL, 0x9dd277afUL, 0x04db2615UL,
    0x73dc1683UL, 0xe3630b12UL, 0x94643b84UL, 0x0d6d6a3eUL, 0x7a6a5aa8UL,
    0xe40ecf0bUL, 0x9309ff9dUL, 0x0a00ae27UL, 0x7d079eb1UL, 0xf00f9344UL,
    0x8708a3d2UL, 0x1e01f268UL, 0x6906c2feUL, 0xf762575dUL, 0x806567cbUL,
    0x196c3671UL, 0x6e6b06e7UL, 0xfed41b76UL, 0x89d32be0UL, 0x10da7a5aUL,
    0x67dd4accUL, 0xf9b9df6fUL, 0x8ebeeff9UL, 0x17b7be43UL, 0x60b08ed5UL,
    0xd6d6a3e8UL, 0xa1d1937eUL, 0x38d8c2c4UL, 0x4fdff252UL, 0xd1bb67f1UL,
    0xa6bc5767UL, 0x3fb506ddUL, 0x48b2364bUL, 0xd80d2bdaUL, 0xaf0a1b4cUL,
    0x36034af6UL, 0x41047a60UL, 0xdf60efc3UL, 0xa867df55UL, 0x316e8eefUL,
    0x4669be79UL, 0xcb61b38cUL, 0xbc66831aUL, 0x256fd2a0UL, 0x5268e236UL,
    0xcc0c7795UL, 0xbb0b4703UL, 0x220216b9UL, 0x5505262fUL, 0xc5ba3bbeUL,
    0xb2bd0b28UL, 0x2bb45a92UL, 0x5cb36a04UL, 0xc2d7ffa7UL, 0xb5d0cf31UL,
    0x2cd99e8bUL, 0x5bdeae1dUL, 0x9b64c2b0UL, 0xec63f226UL, 0x756aa39cUL,
    0x026d930aUL, 0x9c0906a9UL, 0xeb0e363fUL, 0x72076785UL, 0x05005713UL,
    0x95bf4a82UL, 0xe2b87a14UL, 0x7bb12baeUL, 0x0cb61b38UL, 0x92d28e9bUL,
    0xe5d5be0dUL, 0x7cdcefb7UL, 0x0bdbdf21UL, 0x86d3d2d4UL, 0xf1d4e242UL,
    0x68ddb3f8UL, 0x1fda836eUL, 0x81be16cdUL, 0xf6b9265bUL, 0x6fb077e1UL,
    0x18b74777UL, 0x88085ae6UL, 0xff0f6a70UL, 0x66063bcaUL, 0x11010b5cUL,
    0x8f659effUL, 0xf862ae69UL, 0x616bffd3UL, 0x166ccf45UL, 0xa00ae278UL,
    0xd70dd2eeUL, 0x4e048354UL, 0x3903b3c2UL, 0xa7672661UL, 0xd06016f7UL,
    0x4969474dUL, 0x3e6e77dbUL, 0xaed16a4aUL, 0xd9d65adcUL, 0x40df0b66UL,
    0x37d83bf0UL, 0xa9bcae53UL, 0xdebb9ec5UL, 0x47b2cf7fUL, 0x30b5ffe9UL,
    0xbdbdf21cUL, 0xcabac28aUL, 0x53b39330UL, 0x24b4a3a6UL, 0xbad03605UL,
    0xcdd70693UL, 0x54de5729UL, 0x23d967bfUL, 0xb3667a2eUL, 0xc4614ab8UL,
    0x5d681b02UL, 0x2a6f2b94UL, 0xb40bbe37UL, 0xc30c8ea1UL, 0x5a05df1bUL,
    0x2d02ef8dUL
#ifdef BYFOUR
  /* Tables 1..7: compiled only for the BYFOUR word-at-a-time code paths. */
  },
  {
    0x00000000UL, 0x191b3141UL, 0x32366282UL, 0x2b2d53c3UL, 0x646cc504UL,
    0x7d77f445UL, 0x565aa786UL, 0x4f4196c7UL, 0xc8d98a08UL, 0xd1c2bb49UL,
    0xfaefe88aUL, 0xe3f4d9cbUL, 0xacb54f0cUL, 0xb5ae7e4dUL, 0x9e832d8eUL,
    0x87981ccfUL, 0x4ac21251UL, 0x53d92310UL, 0x78f470d3UL, 0x61ef4192UL,
    0x2eaed755UL, 0x37b5e614UL, 0x1c98b5d7UL, 0x05838496UL, 0x821b9859UL,
    0x9b00a918UL, 0xb02dfadbUL, 0xa936cb9aUL, 0xe6775d5dUL, 0xff6c6c1cUL,
    0xd4413fdfUL, 0xcd5a0e9eUL, 0x958424a2UL, 0x8c9f15e3UL, 0xa7b24620UL,
    0xbea97761UL, 0xf1e8e1a6UL, 0xe8f3d0e7UL, 0xc3de8324UL, 0xdac5b265UL,
    0x5d5daeaaUL, 0x44469febUL, 0x6f6bcc28UL, 0x7670fd69UL, 0x39316baeUL,
    0x202a5aefUL, 0x0b07092cUL, 0x121c386dUL, 0xdf4636f3UL, 0xc65d07b2UL,
    0xed705471UL, 0xf46b6530UL, 0xbb2af3f7UL, 0xa231c2b6UL, 0x891c9175UL,
    0x9007a034UL, 0x179fbcfbUL, 0x0e848dbaUL, 0x25a9de79UL, 0x3cb2ef38UL,
    0x73f379ffUL, 0x6ae848beUL, 0x41c51b7dUL, 0x58de2a3cUL, 0xf0794f05UL,
    0xe9627e44UL, 0xc24f2d87UL, 0xdb541cc6UL, 0x94158a01UL, 0x8d0ebb40UL,
    0xa623e883UL, 0xbf38d9c2UL, 0x38a0c50dUL, 0x21bbf44cUL, 0x0a96a78fUL,
    0x138d96ceUL, 0x5ccc0009UL, 0x45d73148UL, 0x6efa628bUL, 0x77e153caUL,
    0xbabb5d54UL, 0xa3a06c15UL, 0x888d3fd6UL, 0x91960e97UL, 0xded79850UL,
    0xc7cca911UL, 0xece1fad2UL, 0xf5facb93UL, 0x7262d75cUL, 0x6b79e61dUL,
    0x4054b5deUL, 0x594f849fUL, 0x160e1258UL, 0x0f152319UL, 0x243870daUL,
    0x3d23419bUL, 0x65fd6ba7UL, 0x7ce65ae6UL, 0x57cb0925UL, 0x4ed03864UL,
    0x0191aea3UL, 0x188a9fe2UL, 0x33a7cc21UL, 0x2abcfd60UL, 0xad24e1afUL,
    0xb43fd0eeUL, 0x9f12832dUL, 0x8609b26cUL, 0xc94824abUL, 0xd05315eaUL,
    0xfb7e4629UL, 0xe2657768UL, 0x2f3f79f6UL, 0x362448b7UL, 0x1d091b74UL,
    0x04122a35UL, 0x4b53bcf2UL, 0x52488db3UL, 0x7965de70UL, 0x607eef31UL,
    0xe7e6f3feUL, 0xfefdc2bfUL, 0xd5d0917cUL, 0xcccba03dUL, 0x838a36faUL,
    0x9a9107bbUL, 0xb1bc5478UL, 0xa8a76539UL, 0x3b83984bUL, 0x2298a90aUL,
    0x09b5fac9UL, 0x10aecb88UL, 0x5fef5d4fUL, 0x46f46c0eUL, 0x6dd93fcdUL,
    0x74c20e8cUL, 0xf35a1243UL, 0xea412302UL, 0xc16c70c1UL, 0xd8774180UL,
    0x9736d747UL, 0x8e2de606UL, 0xa500b5c5UL, 0xbc1b8484UL, 0x71418a1aUL,
    0x685abb5bUL, 0x4377e898UL, 0x5a6cd9d9UL, 0x152d4f1eUL, 0x0c367e5fUL,
    0x271b2d9cUL, 0x3e001cddUL, 0xb9980012UL, 0xa0833153UL, 0x8bae6290UL,
    0x92b553d1UL, 0xddf4c516UL, 0xc4eff457UL, 0xefc2a794UL, 0xf6d996d5UL,
    0xae07bce9UL, 0xb71c8da8UL, 0x9c31de6bUL, 0x852aef2aUL, 0xca6b79edUL,
    0xd37048acUL, 0xf85d1b6fUL, 0xe1462a2eUL, 0x66de36e1UL, 0x7fc507a0UL,
    0x54e85463UL, 0x4df36522UL, 0x02b2f3e5UL, 0x1ba9c2a4UL, 0x30849167UL,
    0x299fa026UL, 0xe4c5aeb8UL, 0xfdde9ff9UL, 0xd6f3cc3aUL, 0xcfe8fd7bUL,
    0x80a96bbcUL, 0x99b25afdUL, 0xb29f093eUL, 0xab84387fUL, 0x2c1c24b0UL,
    0x350715f1UL, 0x1e2a4632UL, 0x07317773UL, 0x4870e1b4UL, 0x516bd0f5UL,
    0x7a468336UL, 0x635db277UL, 0xcbfad74eUL, 0xd2e1e60fUL, 0xf9ccb5ccUL,
    0xe0d7848dUL, 0xaf96124aUL, 0xb68d230bUL, 0x9da070c8UL, 0x84bb4189UL,
    0x03235d46UL, 0x1a386c07UL, 0x31153fc4UL, 0x280e0e85UL, 0x674f9842UL,
    0x7e54a903UL, 0x5579fac0UL, 0x4c62cb81UL, 0x8138c51fUL, 0x9823f45eUL,
    0xb30ea79dUL, 0xaa1596dcUL, 0xe554001bUL, 0xfc4f315aUL, 0xd7626299UL,
    0xce7953d8UL, 0x49e14f17UL, 0x50fa7e56UL, 0x7bd72d95UL, 0x62cc1cd4UL,
    0x2d8d8a13UL, 0x3496bb52UL, 0x1fbbe891UL, 0x06a0d9d0UL, 0x5e7ef3ecUL,
    0x4765c2adUL, 0x6c48916eUL, 0x7553a02fUL, 0x3a1236e8UL, 0x230907a9UL,
    0x0824546aUL, 0x113f652bUL, 0x96a779e4UL, 0x8fbc48a5UL, 0xa4911b66UL,
    0xbd8a2a27UL, 0xf2cbbce0UL, 0xebd08da1UL, 0xc0fdde62UL, 0xd9e6ef23UL,
    0x14bce1bdUL, 0x0da7d0fcUL, 0x268a833fUL, 0x3f91b27eUL, 0x70d024b9UL,
    0x69cb15f8UL, 0x42e6463bUL, 0x5bfd777aUL, 0xdc656bb5UL, 0xc57e5af4UL,
    0xee530937UL, 0xf7483876UL, 0xb809aeb1UL, 0xa1129ff0UL, 0x8a3fcc33UL,
    0x9324fd72UL
  },
  {
    0x00000000UL, 0x01c26a37UL, 0x0384d46eUL, 0x0246be59UL, 0x0709a8dcUL,
    0x06cbc2ebUL, 0x048d7cb2UL, 0x054f1685UL, 0x0e1351b8UL, 0x0fd13b8fUL,
    0x0d9785d6UL, 0x0c55efe1UL, 0x091af964UL, 0x08d89353UL, 0x0a9e2d0aUL,
    0x0b5c473dUL, 0x1c26a370UL, 0x1de4c947UL, 0x1fa2771eUL, 0x1e601d29UL,
    0x1b2f0bacUL, 0x1aed619bUL, 0x18abdfc2UL, 0x1969b5f5UL, 0x1235f2c8UL,
    0x13f798ffUL, 0x11b126a6UL, 0x10734c91UL, 0x153c5a14UL, 0x14fe3023UL,
    0x16b88e7aUL, 0x177ae44dUL, 0x384d46e0UL, 0x398f2cd7UL, 0x3bc9928eUL,
    0x3a0bf8b9UL, 0x3f44ee3cUL, 0x3e86840bUL, 0x3cc03a52UL, 0x3d025065UL,
    0x365e1758UL, 0x379c7d6fUL, 0x35dac336UL, 0x3418a901UL, 0x3157bf84UL,
    0x3095d5b3UL, 0x32d36beaUL, 0x331101ddUL, 0x246be590UL, 0x25a98fa7UL,
    0x27ef31feUL, 0x262d5bc9UL, 0x23624d4cUL, 0x22a0277bUL, 0x20e69922UL,
    0x2124f315UL, 0x2a78b428UL, 0x2bbade1fUL, 0x29fc6046UL, 0x283e0a71UL,
    0x2d711cf4UL, 0x2cb376c3UL, 0x2ef5c89aUL, 0x2f37a2adUL, 0x709a8dc0UL,
    0x7158e7f7UL, 0x731e59aeUL, 0x72dc3399UL, 0x7793251cUL, 0x76514f2bUL,
    0x7417f172UL, 0x75d59b45UL, 0x7e89dc78UL, 0x7f4bb64fUL, 0x7d0d0816UL,
    0x7ccf6221UL, 0x798074a4UL, 0x78421e93UL, 0x7a04a0caUL, 0x7bc6cafdUL,
    0x6cbc2eb0UL, 0x6d7e4487UL, 0x6f38fadeUL, 0x6efa90e9UL, 0x6bb5866cUL,
    0x6a77ec5bUL, 0x68315202UL, 0x69f33835UL, 0x62af7f08UL, 0x636d153fUL,
    0x612bab66UL, 0x60e9c151UL, 0x65a6d7d4UL, 0x6464bde3UL, 0x662203baUL,
    0x67e0698dUL, 0x48d7cb20UL, 0x4915a117UL, 0x4b531f4eUL, 0x4a917579UL,
    0x4fde63fcUL, 0x4e1c09cbUL, 0x4c5ab792UL, 0x4d98dda5UL, 0x46c49a98UL,
    0x4706f0afUL, 0x45404ef6UL, 0x448224c1UL, 0x41cd3244UL, 0x400f5873UL,
    0x4249e62aUL, 0x438b8c1dUL, 0x54f16850UL, 0x55330267UL, 0x5775bc3eUL,
    0x56b7d609UL, 0x53f8c08cUL, 0x523aaabbUL, 0x507c14e2UL, 0x51be7ed5UL,
    0x5ae239e8UL, 0x5b2053dfUL, 0x5966ed86UL, 0x58a487b1UL, 0x5deb9134UL,
    0x5c29fb03UL, 0x5e6f455aUL, 0x5fad2f6dUL, 0xe1351b80UL, 0xe0f771b7UL,
    0xe2b1cfeeUL, 0xe373a5d9UL, 0xe63cb35cUL, 0xe7fed96bUL, 0xe5b86732UL,
    0xe47a0d05UL, 0xef264a38UL, 0xeee4200fUL, 0xeca29e56UL, 0xed60f461UL,
    0xe82fe2e4UL, 0xe9ed88d3UL, 0xebab368aUL, 0xea695cbdUL, 0xfd13b8f0UL,
    0xfcd1d2c7UL, 0xfe976c9eUL, 0xff5506a9UL, 0xfa1a102cUL, 0xfbd87a1bUL,
    0xf99ec442UL, 0xf85cae75UL, 0xf300e948UL, 0xf2c2837fUL, 0xf0843d26UL,
    0xf1465711UL, 0xf4094194UL, 0xf5cb2ba3UL, 0xf78d95faUL, 0xf64fffcdUL,
    0xd9785d60UL, 0xd8ba3757UL, 0xdafc890eUL, 0xdb3ee339UL, 0xde71f5bcUL,
    0xdfb39f8bUL, 0xddf521d2UL, 0xdc374be5UL, 0xd76b0cd8UL, 0xd6a966efUL,
    0xd4efd8b6UL, 0xd52db281UL, 0xd062a404UL, 0xd1a0ce33UL, 0xd3e6706aUL,
    0xd2241a5dUL, 0xc55efe10UL, 0xc49c9427UL, 0xc6da2a7eUL, 0xc7184049UL,
    0xc25756ccUL, 0xc3953cfbUL, 0xc1d382a2UL, 0xc011e895UL, 0xcb4dafa8UL,
    0xca8fc59fUL, 0xc8c97bc6UL, 0xc90b11f1UL, 0xcc440774UL, 0xcd866d43UL,
    0xcfc0d31aUL, 0xce02b92dUL, 0x91af9640UL, 0x906dfc77UL, 0x922b422eUL,
    0x93e92819UL, 0x96a63e9cUL, 0x976454abUL, 0x9522eaf2UL, 0x94e080c5UL,
    0x9fbcc7f8UL, 0x9e7eadcfUL, 0x9c381396UL, 0x9dfa79a1UL, 0x98b56f24UL,
    0x99770513UL, 0x9b31bb4aUL, 0x9af3d17dUL, 0x8d893530UL, 0x8c4b5f07UL,
    0x8e0de15eUL, 0x8fcf8b69UL, 0x8a809decUL, 0x8b42f7dbUL, 0x89044982UL,
    0x88c623b5UL, 0x839a6488UL, 0x82580ebfUL, 0x801eb0e6UL, 0x81dcdad1UL,
    0x8493cc54UL, 0x8551a663UL, 0x8717183aUL, 0x86d5720dUL, 0xa9e2d0a0UL,
    0xa820ba97UL, 0xaa6604ceUL, 0xaba46ef9UL, 0xaeeb787cUL, 0xaf29124bUL,
    0xad6fac12UL, 0xacadc625UL, 0xa7f18118UL, 0xa633eb2fUL, 0xa4755576UL,
    0xa5b73f41UL, 0xa0f829c4UL, 0xa13a43f3UL, 0xa37cfdaaUL, 0xa2be979dUL,
    0xb5c473d0UL, 0xb40619e7UL, 0xb640a7beUL, 0xb782cd89UL, 0xb2cddb0cUL,
    0xb30fb13bUL, 0xb1490f62UL, 0xb08b6555UL, 0xbbd72268UL, 0xba15485fUL,
    0xb853f606UL, 0xb9919c31UL, 0xbcde8ab4UL, 0xbd1ce083UL, 0xbf5a5edaUL,
    0xbe9834edUL
  },
  {
    0x00000000UL, 0xb8bc6765UL, 0xaa09c88bUL, 0x12b5afeeUL, 0x8f629757UL,
    0x37def032UL, 0x256b5fdcUL, 0x9dd738b9UL, 0xc5b428efUL, 0x7d084f8aUL,
    0x6fbde064UL, 0xd7018701UL, 0x4ad6bfb8UL, 0xf26ad8ddUL, 0xe0df7733UL,
    0x58631056UL, 0x5019579fUL, 0xe8a530faUL, 0xfa109f14UL, 0x42acf871UL,
    0xdf7bc0c8UL, 0x67c7a7adUL, 0x75720843UL, 0xcdce6f26UL, 0x95ad7f70UL,
    0x2d111815UL, 0x3fa4b7fbUL, 0x8718d09eUL, 0x1acfe827UL, 0xa2738f42UL,
    0xb0c620acUL, 0x087a47c9UL, 0xa032af3eUL, 0x188ec85bUL, 0x0a3b67b5UL,
    0xb28700d0UL, 0x2f503869UL, 0x97ec5f0cUL, 0x8559f0e2UL, 0x3de59787UL,
    0x658687d1UL, 0xdd3ae0b4UL, 0xcf8f4f5aUL, 0x7733283fUL, 0xeae41086UL,
    0x525877e3UL, 0x40edd80dUL, 0xf851bf68UL, 0xf02bf8a1UL, 0x48979fc4UL,
    0x5a22302aUL, 0xe29e574fUL, 0x7f496ff6UL, 0xc7f50893UL, 0xd540a77dUL,
    0x6dfcc018UL, 0x359fd04eUL, 0x8d23b72bUL, 0x9f9618c5UL, 0x272a7fa0UL,
    0xbafd4719UL, 0x0241207cUL, 0x10f48f92UL, 0xa848e8f7UL, 0x9b14583dUL,
    0x23a83f58UL, 0x311d90b6UL, 0x89a1f7d3UL, 0x1476cf6aUL, 0xaccaa80fUL,
    0xbe7f07e1UL, 0x06c36084UL, 0x5ea070d2UL, 0xe61c17b7UL, 0xf4a9b859UL,
    0x4c15df3cUL, 0xd1c2e785UL, 0x697e80e0UL, 0x7bcb2f0eUL, 0xc377486bUL,
    0xcb0d0fa2UL, 0x73b168c7UL, 0x6104c729UL, 0xd9b8a04cUL, 0x446f98f5UL,
    0xfcd3ff90UL, 0xee66507eUL, 0x56da371bUL, 0x0eb9274dUL, 0xb6054028UL,
    0xa4b0efc6UL, 0x1c0c88a3UL, 0x81dbb01aUL, 0x3967d77fUL, 0x2bd27891UL,
    0x936e1ff4UL, 0x3b26f703UL, 0x839a9066UL, 0x912f3f88UL, 0x299358edUL,
    0xb4446054UL, 0x0cf80731UL, 0x1e4da8dfUL, 0xa6f1cfbaUL, 0xfe92dfecUL,
    0x462eb889UL, 0x549b1767UL, 0xec277002UL, 0x71f048bbUL, 0xc94c2fdeUL,
    0xdbf98030UL, 0x6345e755UL, 0x6b3fa09cUL, 0xd383c7f9UL, 0xc1366817UL,
    0x798a0f72UL, 0xe45d37cbUL, 0x5ce150aeUL, 0x4e54ff40UL, 0xf6e89825UL,
    0xae8b8873UL, 0x1637ef16UL, 0x048240f8UL, 0xbc3e279dUL, 0x21e91f24UL,
    0x99557841UL, 0x8be0d7afUL, 0x335cb0caUL, 0xed59b63bUL, 0x55e5d15eUL,
    0x47507eb0UL, 0xffec19d5UL, 0x623b216cUL, 0xda874609UL, 0xc832e9e7UL,
    0x708e8e82UL, 0x28ed9ed4UL, 0x9051f9b1UL, 0x82e4565fUL, 0x3a58313aUL,
    0xa78f0983UL, 0x1f336ee6UL, 0x0d86c108UL, 0xb53aa66dUL, 0xbd40e1a4UL,
    0x05fc86c1UL, 0x1749292fUL, 0xaff54e4aUL, 0x322276f3UL, 0x8a9e1196UL,
    0x982bbe78UL, 0x2097d91dUL, 0x78f4c94bUL, 0xc048ae2eUL, 0xd2fd01c0UL,
    0x6a4166a5UL, 0xf7965e1cUL, 0x4f2a3979UL, 0x5d9f9697UL, 0xe523f1f2UL,
    0x4d6b1905UL, 0xf5d77e60UL, 0xe762d18eUL, 0x5fdeb6ebUL, 0xc2098e52UL,
    0x7ab5e937UL, 0x680046d9UL, 0xd0bc21bcUL, 0x88df31eaUL, 0x3063568fUL,
    0x22d6f961UL, 0x9a6a9e04UL, 0x07bda6bdUL, 0xbf01c1d8UL, 0xadb46e36UL,
    0x15080953UL, 0x1d724e9aUL, 0xa5ce29ffUL, 0xb77b8611UL, 0x0fc7e174UL,
    0x9210d9cdUL, 0x2aacbea8UL, 0x38191146UL, 0x80a57623UL, 0xd8c66675UL,
    0x607a0110UL, 0x72cfaefeUL, 0xca73c99bUL, 0x57a4f122UL, 0xef189647UL,
    0xfdad39a9UL, 0x45115eccUL, 0x764dee06UL, 0xcef18963UL, 0xdc44268dUL,
    0x64f841e8UL, 0xf92f7951UL, 0x41931e34UL, 0x5326b1daUL, 0xeb9ad6bfUL,
    0xb3f9c6e9UL, 0x0b45a18cUL, 0x19f00e62UL, 0xa14c6907UL, 0x3c9b51beUL,
    0x842736dbUL, 0x96929935UL, 0x2e2efe50UL, 0x2654b999UL, 0x9ee8defcUL,
    0x8c5d7112UL, 0x34e11677UL, 0xa9362eceUL, 0x118a49abUL, 0x033fe645UL,
    0xbb838120UL, 0xe3e09176UL, 0x5b5cf613UL, 0x49e959fdUL, 0xf1553e98UL,
    0x6c820621UL, 0xd43e6144UL, 0xc68bceaaUL, 0x7e37a9cfUL, 0xd67f4138UL,
    0x6ec3265dUL, 0x7c7689b3UL, 0xc4caeed6UL, 0x591dd66fUL, 0xe1a1b10aUL,
    0xf3141ee4UL, 0x4ba87981UL, 0x13cb69d7UL, 0xab770eb2UL, 0xb9c2a15cUL,
    0x017ec639UL, 0x9ca9fe80UL, 0x241599e5UL, 0x36a0360bUL, 0x8e1c516eUL,
    0x866616a7UL, 0x3eda71c2UL, 0x2c6fde2cUL, 0x94d3b949UL, 0x090481f0UL,
    0xb1b8e695UL, 0xa30d497bUL, 0x1bb12e1eUL, 0x43d23e48UL, 0xfb6e592dUL,
    0xe9dbf6c3UL, 0x516791a6UL, 0xccb0a91fUL, 0x740cce7aUL, 0x66b96194UL,
    0xde0506f1UL
  },
  {
    0x00000000UL, 0x96300777UL, 0x2c610eeeUL, 0xba510999UL, 0x19c46d07UL,
    0x8ff46a70UL, 0x35a563e9UL, 0xa395649eUL, 0x3288db0eUL, 0xa4b8dc79UL,
    0x1ee9d5e0UL, 0x88d9d297UL, 0x2b4cb609UL, 0xbd7cb17eUL, 0x072db8e7UL,
    0x911dbf90UL, 0x6410b71dUL, 0xf220b06aUL, 0x4871b9f3UL, 0xde41be84UL,
    0x7dd4da1aUL, 0xebe4dd6dUL, 0x51b5d4f4UL, 0xc785d383UL, 0x56986c13UL,
    0xc0a86b64UL, 0x7af962fdUL, 0xecc9658aUL, 0x4f5c0114UL, 0xd96c0663UL,
    0x633d0ffaUL, 0xf50d088dUL, 0xc8206e3bUL, 0x5e10694cUL, 0xe44160d5UL,
    0x727167a2UL, 0xd1e4033cUL, 0x47d4044bUL, 0xfd850dd2UL, 0x6bb50aa5UL,
    0xfaa8b535UL, 0x6c98b242UL, 0xd6c9bbdbUL, 0x40f9bcacUL, 0xe36cd832UL,
    0x755cdf45UL, 0xcf0dd6dcUL, 0x593dd1abUL, 0xac30d926UL, 0x3a00de51UL,
    0x8051d7c8UL, 0x1661d0bfUL, 0xb5f4b421UL, 0x23c4b356UL, 0x9995bacfUL,
    0x0fa5bdb8UL, 0x9eb80228UL, 0x0888055fUL, 0xb2d90cc6UL, 0x24e90bb1UL,
    0x877c6f2fUL, 0x114c6858UL, 0xab1d61c1UL, 0x3d2d66b6UL, 0x9041dc76UL,
    0x0671db01UL, 0xbc20d298UL, 0x2a10d5efUL, 0x8985b171UL, 0x1fb5b606UL,
    0xa5e4bf9fUL, 0x33d4b8e8UL, 0xa2c90778UL, 0x34f9000fUL, 0x8ea80996UL,
    0x18980ee1UL, 0xbb0d6a7fUL, 0x2d3d6d08UL, 0x976c6491UL, 0x015c63e6UL,
    0xf4516b6bUL, 0x62616c1cUL, 0xd8306585UL, 0x4e0062f2UL, 0xed95066cUL,
    0x7ba5011bUL, 0xc1f40882UL, 0x57c40ff5UL, 0xc6d9b065UL, 0x50e9b712UL,
    0xeab8be8bUL, 0x7c88b9fcUL, 0xdf1ddd62UL, 0x492dda15UL, 0xf37cd38cUL,
    0x654cd4fbUL, 0x5861b24dUL, 0xce51b53aUL, 0x7400bca3UL, 0xe230bbd4UL,
    0x41a5df4aUL, 0xd795d83dUL, 0x6dc4d1a4UL, 0xfbf4d6d3UL, 0x6ae96943UL,
    0xfcd96e34UL, 0x468867adUL, 0xd0b860daUL, 0x732d0444UL, 0xe51d0333UL,
    0x5f4c0aaaUL, 0xc97c0dddUL, 0x3c710550UL, 0xaa410227UL, 0x10100bbeUL,
    0x86200cc9UL, 0x25b56857UL, 0xb3856f20UL, 0x09d466b9UL, 0x9fe461ceUL,
    0x0ef9de5eUL, 0x98c9d929UL, 0x2298d0b0UL, 0xb4a8d7c7UL, 0x173db359UL,
    0x810db42eUL, 0x3b5cbdb7UL, 0xad6cbac0UL, 0x2083b8edUL, 0xb6b3bf9aUL,
    0x0ce2b603UL, 0x9ad2b174UL, 0x3947d5eaUL, 0xaf77d29dUL, 0x1526db04UL,
    0x8316dc73UL, 0x120b63e3UL, 0x843b6494UL, 0x3e6a6d0dUL, 0xa85a6a7aUL,
    0x0bcf0ee4UL, 0x9dff0993UL, 0x27ae000aUL, 0xb19e077dUL, 0x44930ff0UL,
    0xd2a30887UL, 0x68f2011eUL, 0xfec20669UL, 0x5d5762f7UL, 0xcb676580UL,
    0x71366c19UL, 0xe7066b6eUL, 0x761bd4feUL, 0xe02bd389UL, 0x5a7ada10UL,
    0xcc4add67UL, 0x6fdfb9f9UL, 0xf9efbe8eUL, 0x43beb717UL, 0xd58eb060UL,
    0xe8a3d6d6UL, 0x7e93d1a1UL, 0xc4c2d838UL, 0x52f2df4fUL, 0xf167bbd1UL,
    0x6757bca6UL, 0xdd06b53fUL, 0x4b36b248UL, 0xda2b0dd8UL, 0x4c1b0aafUL,
    0xf64a0336UL, 0x607a0441UL, 0xc3ef60dfUL, 0x55df67a8UL, 0xef8e6e31UL,
    0x79be6946UL, 0x8cb361cbUL, 0x1a8366bcUL, 0xa0d26f25UL, 0x36e26852UL,
    0x95770cccUL, 0x03470bbbUL, 0xb9160222UL, 0x2f260555UL, 0xbe3bbac5UL,
    0x280bbdb2UL, 0x925ab42bUL, 0x046ab35cUL, 0xa7ffd7c2UL, 0x31cfd0b5UL,
    0x8b9ed92cUL, 0x1daede5bUL, 0xb0c2649bUL, 0x26f263ecUL, 0x9ca36a75UL,
    0x0a936d02UL, 0xa906099cUL, 0x3f360eebUL, 0x85670772UL, 0x13570005UL,
    0x824abf95UL, 0x147ab8e2UL, 0xae2bb17bUL, 0x381bb60cUL, 0x9b8ed292UL,
    0x0dbed5e5UL, 0xb7efdc7cUL, 0x21dfdb0bUL, 0xd4d2d386UL, 0x42e2d4f1UL,
    0xf8b3dd68UL, 0x6e83da1fUL, 0xcd16be81UL, 0x5b26b9f6UL, 0xe177b06fUL,
    0x7747b718UL, 0xe65a0888UL, 0x706a0fffUL, 0xca3b0666UL, 0x5c0b0111UL,
    0xff9e658fUL, 0x69ae62f8UL, 0xd3ff6b61UL, 0x45cf6c16UL, 0x78e20aa0UL,
    0xeed20dd7UL, 0x5483044eUL, 0xc2b30339UL, 0x612667a7UL, 0xf71660d0UL,
    0x4d476949UL, 0xdb776e3eUL, 0x4a6ad1aeUL, 0xdc5ad6d9UL, 0x660bdf40UL,
    0xf03bd837UL, 0x53aebca9UL, 0xc59ebbdeUL, 0x7fcfb247UL, 0xe9ffb530UL,
    0x1cf2bdbdUL, 0x8ac2bacaUL, 0x3093b353UL, 0xa6a3b424UL, 0x0536d0baUL,
    0x9306d7cdUL, 0x2957de54UL, 0xbf67d923UL, 0x2e7a66b3UL, 0xb84a61c4UL,
    0x021b685dUL, 0x942b6f2aUL, 0x37be0bb4UL, 0xa18e0cc3UL, 0x1bdf055aUL,
    0x8def022dUL
  },
  {
    0x00000000UL, 0x41311b19UL, 0x82623632UL, 0xc3532d2bUL, 0x04c56c64UL,
    0x45f4777dUL, 0x86a75a56UL, 0xc796414fUL, 0x088ad9c8UL, 0x49bbc2d1UL,
    0x8ae8effaUL, 0xcbd9f4e3UL, 0x0c4fb5acUL, 0x4d7eaeb5UL, 0x8e2d839eUL,
    0xcf1c9887UL, 0x5112c24aUL, 0x1023d953UL, 0xd370f478UL, 0x9241ef61UL,
    0x55d7ae2eUL, 0x14e6b537UL, 0xd7b5981cUL, 0x96848305UL, 0x59981b82UL,
    0x18a9009bUL, 0xdbfa2db0UL, 0x9acb36a9UL, 0x5d5d77e6UL, 0x1c6c6cffUL,
    0xdf3f41d4UL, 0x9e0e5acdUL, 0xa2248495UL, 0xe3159f8cUL, 0x2046b2a7UL,
    0x6177a9beUL, 0xa6e1e8f1UL, 0xe7d0f3e8UL, 0x2483dec3UL, 0x65b2c5daUL,
    0xaaae5d5dUL, 0xeb9f4644UL, 0x28cc6b6fUL, 0x69fd7076UL, 0xae6b3139UL,
    0xef5a2a20UL, 0x2c09070bUL, 0x6d381c12UL, 0xf33646dfUL, 0xb2075dc6UL,
    0x715470edUL, 0x30656bf4UL, 0xf7f32abbUL, 0xb6c231a2UL, 0x75911c89UL,
    0x34a00790UL, 0xfbbc9f17UL, 0xba8d840eUL, 0x79dea925UL, 0x38efb23cUL,
    0xff79f373UL, 0xbe48e86aUL, 0x7d1bc541UL, 0x3c2ade58UL, 0x054f79f0UL,
    0x447e62e9UL, 0x872d4fc2UL, 0xc61c54dbUL, 0x018a1594UL, 0x40bb0e8dUL,
    0x83e823a6UL, 0xc2d938bfUL, 0x0dc5a038UL, 0x4cf4bb21UL, 0x8fa7960aUL,
    0xce968d13UL, 0x0900cc5cUL, 0x4831d745UL, 0x8b62fa6eUL, 0xca53e177UL,
    0x545dbbbaUL, 0x156ca0a3UL, 0xd63f8d88UL, 0x970e9691UL, 0x5098d7deUL,
    0x11a9ccc7UL, 0xd2fae1ecUL, 0x93cbfaf5UL, 0x5cd76272UL, 0x1de6796bUL,
    0xdeb55440UL, 0x9f844f59UL, 0x58120e16UL, 0x1923150fUL, 0xda703824UL,
    0x9b41233dUL, 0xa76bfd65UL, 0xe65ae67cUL, 0x2509cb57UL, 0x6438d04eUL,
    0xa3ae9101UL, 0xe29f8a18UL, 0x21cca733UL, 0x60fdbc2aUL, 0xafe124adUL,
    0xeed03fb4UL, 0x2d83129fUL, 0x6cb20986UL, 0xab2448c9UL, 0xea1553d0UL,
    0x29467efbUL, 0x687765e2UL, 0xf6793f2fUL, 0xb7482436UL, 0x741b091dUL,
    0x352a1204UL, 0xf2bc534bUL, 0xb38d4852UL, 0x70de6579UL, 0x31ef7e60UL,
    0xfef3e6e7UL, 0xbfc2fdfeUL, 0x7c91d0d5UL, 0x3da0cbccUL, 0xfa368a83UL,
    0xbb07919aUL, 0x7854bcb1UL, 0x3965a7a8UL, 0x4b98833bUL, 0x0aa99822UL,
    0xc9fab509UL, 0x88cbae10UL, 0x4f5def5fUL, 0x0e6cf446UL, 0xcd3fd96dUL,
    0x8c0ec274UL, 0x43125af3UL, 0x022341eaUL, 0xc1706cc1UL, 0x804177d8UL,
    0x47d73697UL, 0x06e62d8eUL, 0xc5b500a5UL, 0x84841bbcUL, 0x1a8a4171UL,
    0x5bbb5a68UL, 0x98e87743UL, 0xd9d96c5aUL, 0x1e4f2d15UL, 0x5f7e360cUL,
    0x9c2d1b27UL, 0xdd1c003eUL, 0x120098b9UL, 0x533183a0UL, 0x9062ae8bUL,
    0xd153b592UL, 0x16c5f4ddUL, 0x57f4efc4UL, 0x94a7c2efUL, 0xd596d9f6UL,
    0xe9bc07aeUL, 0xa88d1cb7UL, 0x6bde319cUL, 0x2aef2a85UL, 0xed796bcaUL,
    0xac4870d3UL, 0x6f1b5df8UL, 0x2e2a46e1UL, 0xe136de66UL, 0xa007c57fUL,
    0x6354e854UL, 0x2265f34dUL, 0xe5f3b202UL, 0xa4c2a91bUL, 0x67918430UL,
    0x26a09f29UL, 0xb8aec5e4UL, 0xf99fdefdUL, 0x3accf3d6UL, 0x7bfde8cfUL,
    0xbc6ba980UL, 0xfd5ab299UL, 0x3e099fb2UL, 0x7f3884abUL, 0xb0241c2cUL,
    0xf1150735UL, 0x32462a1eUL, 0x73773107UL, 0xb4e17048UL, 0xf5d06b51UL,
    0x3683467aUL, 0x77b25d63UL, 0x4ed7facbUL, 0x0fe6e1d2UL, 0xccb5ccf9UL,
    0x8d84d7e0UL, 0x4a1296afUL, 0x0b238db6UL, 0xc870a09dUL, 0x8941bb84UL,
    0x465d2303UL, 0x076c381aUL, 0xc43f1531UL, 0x850e0e28UL, 0x42984f67UL,
    0x03a9547eUL, 0xc0fa7955UL, 0x81cb624cUL, 0x1fc53881UL, 0x5ef42398UL,
    0x9da70eb3UL, 0xdc9615aaUL, 0x1b0054e5UL, 0x5a314ffcUL, 0x996262d7UL,
    0xd85379ceUL, 0x174fe149UL, 0x567efa50UL, 0x952dd77bUL, 0xd41ccc62UL,
    0x138a8d2dUL, 0x52bb9634UL, 0x91e8bb1fUL, 0xd0d9a006UL, 0xecf37e5eUL,
    0xadc26547UL, 0x6e91486cUL, 0x2fa05375UL, 0xe836123aUL, 0xa9070923UL,
    0x6a542408UL, 0x2b653f11UL, 0xe479a796UL, 0xa548bc8fUL, 0x661b91a4UL,
    0x272a8abdUL, 0xe0bccbf2UL, 0xa18dd0ebUL, 0x62defdc0UL, 0x23efe6d9UL,
    0xbde1bc14UL, 0xfcd0a70dUL, 0x3f838a26UL, 0x7eb2913fUL, 0xb924d070UL,
    0xf815cb69UL, 0x3b46e642UL, 0x7a77fd5bUL, 0xb56b65dcUL, 0xf45a7ec5UL,
    0x370953eeUL, 0x763848f7UL, 0xb1ae09b8UL, 0xf09f12a1UL, 0x33cc3f8aUL,
    0x72fd2493UL
  },
  {
    0x00000000UL, 0x376ac201UL, 0x6ed48403UL, 0x59be4602UL, 0xdca80907UL,
    0xebc2cb06UL, 0xb27c8d04UL, 0x85164f05UL, 0xb851130eUL, 0x8f3bd10fUL,
    0xd685970dUL, 0xe1ef550cUL, 0x64f91a09UL, 0x5393d808UL, 0x0a2d9e0aUL,
    0x3d475c0bUL, 0x70a3261cUL, 0x47c9e41dUL, 0x1e77a21fUL, 0x291d601eUL,
    0xac0b2f1bUL, 0x9b61ed1aUL, 0xc2dfab18UL, 0xf5b56919UL, 0xc8f23512UL,
    0xff98f713UL, 0xa626b111UL, 0x914c7310UL, 0x145a3c15UL, 0x2330fe14UL,
    0x7a8eb816UL, 0x4de47a17UL, 0xe0464d38UL, 0xd72c8f39UL, 0x8e92c93bUL,
    0xb9f80b3aUL, 0x3cee443fUL, 0x0b84863eUL, 0x523ac03cUL, 0x6550023dUL,
    0x58175e36UL, 0x6f7d9c37UL, 0x36c3da35UL, 0x01a91834UL, 0x84bf5731UL,
    0xb3d59530UL, 0xea6bd332UL, 0xdd011133UL, 0x90e56b24UL, 0xa78fa925UL,
    0xfe31ef27UL, 0xc95b2d26UL, 0x4c4d6223UL, 0x7b27a022UL, 0x2299e620UL,
    0x15f32421UL, 0x28b4782aUL, 0x1fdeba2bUL, 0x4660fc29UL, 0x710a3e28UL,
    0xf41c712dUL, 0xc376b32cUL, 0x9ac8f52eUL, 0xada2372fUL, 0xc08d9a70UL,
    0xf7e75871UL, 0xae591e73UL, 0x9933dc72UL, 0x1c259377UL, 0x2b4f5176UL,
    0x72f11774UL, 0x459bd575UL, 0x78dc897eUL, 0x4fb64b7fUL, 0x16080d7dUL,
    0x2162cf7cUL, 0xa4748079UL, 0x931e4278UL, 0xcaa0047aUL, 0xfdcac67bUL,
    0xb02ebc6cUL, 0x87447e6dUL, 0xdefa386fUL, 0xe990fa6eUL, 0x6c86b56bUL,
    0x5bec776aUL, 0x02523168UL, 0x3538f369UL, 0x087faf62UL, 0x3f156d63UL,
    0x66ab2b61UL, 0x51c1e960UL, 0xd4d7a665UL, 0xe3bd6464UL, 0xba032266UL,
    0x8d69e067UL, 0x20cbd748UL, 0x17a11549UL, 0x4e1f534bUL, 0x7975914aUL,
    0xfc63de4fUL, 0xcb091c4eUL, 0x92b75a4cUL, 0xa5dd984dUL, 0x989ac446UL,
    0xaff00647UL, 0xf64e4045UL, 0xc1248244UL, 0x4432cd41UL, 0x73580f40UL,
    0x2ae64942UL, 0x1d8c8b43UL, 0x5068f154UL, 0x67023355UL, 0x3ebc7557UL,
    0x09d6b756UL, 0x8cc0f853UL, 0xbbaa3a52UL, 0xe2147c50UL, 0xd57ebe51UL,
    0xe839e25aUL, 0xdf53205bUL, 0x86ed6659UL, 0xb187a458UL, 0x3491eb5dUL,
    0x03fb295cUL, 0x5a456f5eUL, 0x6d2fad5fUL, 0x801b35e1UL, 0xb771f7e0UL,
    0xeecfb1e2UL, 0xd9a573e3UL, 0x5cb33ce6UL, 0x6bd9fee7UL, 0x3267b8e5UL,
    0x050d7ae4UL, 0x384a26efUL, 0x0f20e4eeUL, 0x569ea2ecUL, 0x61f460edUL,
    0xe4e22fe8UL, 0xd388ede9UL, 0x8a36abebUL, 0xbd5c69eaUL, 0xf0b813fdUL,
    0xc7d2d1fcUL, 0x9e6c97feUL, 0xa90655ffUL, 0x2c101afaUL, 0x1b7ad8fbUL,
    0x42c49ef9UL, 0x75ae5cf8UL, 0x48e900f3UL, 0x7f83c2f2UL, 0x263d84f0UL,
    0x115746f1UL, 0x944109f4UL, 0xa32bcbf5UL, 0xfa958df7UL, 0xcdff4ff6UL,
    0x605d78d9UL, 0x5737bad8UL, 0x0e89fcdaUL, 0x39e33edbUL, 0xbcf571deUL,
    0x8b9fb3dfUL, 0xd221f5ddUL, 0xe54b37dcUL, 0xd80c6bd7UL, 0xef66a9d6UL,
    0xb6d8efd4UL, 0x81b22dd5UL, 0x04a462d0UL, 0x33cea0d1UL, 0x6a70e6d3UL,
    0x5d1a24d2UL, 0x10fe5ec5UL, 0x27949cc4UL, 0x7e2adac6UL, 0x494018c7UL,
    0xcc5657c2UL, 0xfb3c95c3UL, 0xa282d3c1UL, 0x95e811c0UL, 0xa8af4dcbUL,
    0x9fc58fcaUL, 0xc67bc9c8UL, 0xf1110bc9UL, 0x740744ccUL, 0x436d86cdUL,
    0x1ad3c0cfUL, 0x2db902ceUL, 0x4096af91UL, 0x77fc6d90UL, 0x2e422b92UL,
    0x1928e993UL, 0x9c3ea696UL, 0xab546497UL, 0xf2ea2295UL, 0xc580e094UL,
    0xf8c7bc9fUL, 0xcfad7e9eUL, 0x9613389cUL, 0xa179fa9dUL, 0x246fb598UL,
    0x13057799UL, 0x4abb319bUL, 0x7dd1f39aUL, 0x3035898dUL, 0x075f4b8cUL,
    0x5ee10d8eUL, 0x698bcf8fUL, 0xec9d808aUL, 0xdbf7428bUL, 0x82490489UL,
    0xb523c688UL, 0x88649a83UL, 0xbf0e5882UL, 0xe6b01e80UL, 0xd1dadc81UL,
    0x54cc9384UL, 0x63a65185UL, 0x3a181787UL, 0x0d72d586UL, 0xa0d0e2a9UL,
    0x97ba20a8UL, 0xce0466aaUL, 0xf96ea4abUL, 0x7c78ebaeUL, 0x4b1229afUL,
    0x12ac6fadUL, 0x25c6adacUL, 0x1881f1a7UL, 0x2feb33a6UL, 0x765575a4UL,
    0x413fb7a5UL, 0xc429f8a0UL, 0xf3433aa1UL, 0xaafd7ca3UL, 0x9d97bea2UL,
    0xd073c4b5UL, 0xe71906b4UL, 0xbea740b6UL, 0x89cd82b7UL, 0x0cdbcdb2UL,
    0x3bb10fb3UL, 0x620f49b1UL, 0x55658bb0UL, 0x6822d7bbUL, 0x5f4815baUL,
    0x06f653b8UL, 0x319c91b9UL, 0xb48adebcUL, 0x83e01cbdUL, 0xda5e5abfUL,
    0xed3498beUL
  },
  {
    0x00000000UL, 0x6567bcb8UL, 0x8bc809aaUL, 0xeeafb512UL, 0x5797628fUL,
    0x32f0de37UL, 0xdc5f6b25UL, 0xb938d79dUL, 0xef28b4c5UL, 0x8a4f087dUL,
    0x64e0bd6fUL, 0x018701d7UL, 0xb8bfd64aUL, 0xddd86af2UL, 0x3377dfe0UL,
    0x56106358UL, 0x9f571950UL, 0xfa30a5e8UL, 0x149f10faUL, 0x71f8ac42UL,
    0xc8c07bdfUL, 0xada7c767UL, 0x43087275UL, 0x266fcecdUL, 0x707fad95UL,
    0x1518112dUL, 0xfbb7a43fUL, 0x9ed01887UL, 0x27e8cf1aUL, 0x428f73a2UL,
    0xac20c6b0UL, 0xc9477a08UL, 0x3eaf32a0UL, 0x5bc88e18UL, 0xb5673b0aUL,
    0xd00087b2UL, 0x6938502fUL, 0x0c5fec97UL, 0xe2f05985UL, 0x8797e53dUL,
    0xd1878665UL, 0xb4e03addUL, 0x5a4f8fcfUL, 0x3f283377UL, 0x8610e4eaUL,
    0xe3775852UL, 0x0dd8ed40UL, 0x68bf51f8UL, 0xa1f82bf0UL, 0xc49f9748UL,
    0x2a30225aUL, 0x4f579ee2UL, 0xf66f497fUL, 0x9308f5c7UL, 0x7da740d5UL,
    0x18c0fc6dUL, 0x4ed09f35UL, 0x2bb7238dUL, 0xc518969fUL, 0xa07f2a27UL,
    0x1947fdbaUL, 0x7c204102UL, 0x928ff410UL, 0xf7e848a8UL, 0x3d58149bUL,
    0x583fa823UL, 0xb6901d31UL, 0xd3f7a189UL, 0x6acf7614UL, 0x0fa8caacUL,
    0xe1077fbeUL, 0x8460c306UL, 0xd270a05eUL, 0xb7171ce6UL, 0x59b8a9f4UL,
    0x3cdf154cUL, 0x85e7c2d1UL, 0xe0807e69UL, 0x0e2fcb7bUL, 0x6b4877c3UL,
    0xa20f0dcbUL, 0xc768b173UL, 0x29c70461UL, 0x4ca0b8d9UL, 0xf5986f44UL,
    0x90ffd3fcUL, 0x7e5066eeUL, 0x1b37da56UL, 0x4d27b90eUL, 0x284005b6UL,
    0xc6efb0a4UL, 0xa3880c1cUL, 0x1ab0db81UL, 0x7fd76739UL, 0x9178d22bUL,
    0xf41f6e93UL, 0x03f7263bUL, 0x66909a83UL, 0x883f2f91UL, 0xed589329UL,
    0x546044b4UL, 0x3107f80cUL, 0xdfa84d1eUL, 0xbacff1a6UL, 0xecdf92feUL,
    0x89b82e46UL, 0x67179b54UL, 0x027027ecUL, 0xbb48f071UL, 0xde2f4cc9UL,
    0x3080f9dbUL, 0x55e74563UL, 0x9ca03f6bUL, 0xf9c783d3UL, 0x176836c1UL,
    0x720f8a79UL, 0xcb375de4UL, 0xae50e15cUL, 0x40ff544eUL, 0x2598e8f6UL,
    0x73888baeUL, 0x16ef3716UL, 0xf8408204UL, 0x9d273ebcUL, 0x241fe921UL,
    0x41785599UL, 0xafd7e08bUL, 0xcab05c33UL, 0x3bb659edUL, 0x5ed1e555UL,
    0xb07e5047UL, 0xd519ecffUL, 0x6c213b62UL, 0x094687daUL, 0xe7e932c8UL,
    0x828e8e70UL, 0xd49eed28UL, 0xb1f95190UL, 0x5f56e482UL, 0x3a31583aUL,
    0x83098fa7UL, 0xe66e331fUL, 0x08c1860dUL, 0x6da63ab5UL, 0xa4e140bdUL,
    0xc186fc05UL, 0x2f294917UL, 0x4a4ef5afUL, 0xf3762232UL, 0x96119e8aUL,
    0x78be2b98UL, 0x1dd99720UL, 0x4bc9f478UL, 0x2eae48c0UL, 0xc001fdd2UL,
    0xa566416aUL, 0x1c5e96f7UL, 0x79392a4fUL, 0x97969f5dUL, 0xf2f123e5UL,
    0x05196b4dUL, 0x607ed7f5UL, 0x8ed162e7UL, 0xebb6de5fUL, 0x528e09c2UL,
    0x37e9b57aUL, 0xd9460068UL, 0xbc21bcd0UL, 0xea31df88UL, 0x8f566330UL,
    0x61f9d622UL, 0x049e6a9aUL, 0xbda6bd07UL, 0xd8c101bfUL, 0x366eb4adUL,
    0x53090815UL, 0x9a4e721dUL, 0xff29cea5UL, 0x11867bb7UL, 0x74e1c70fUL,
    0xcdd91092UL, 0xa8beac2aUL, 0x46111938UL, 0x2376a580UL, 0x7566c6d8UL,
    0x10017a60UL, 0xfeaecf72UL, 0x9bc973caUL, 0x22f1a457UL, 0x479618efUL,
    0xa939adfdUL, 0xcc5e1145UL, 0x06ee4d76UL, 0x6389f1ceUL, 0x8d2644dcUL,
    0xe841f864UL, 0x51792ff9UL, 0x341e9341UL, 0xdab12653UL, 0xbfd69aebUL,
    0xe9c6f9b3UL, 0x8ca1450bUL, 0x620ef019UL, 0x07694ca1UL, 0xbe519b3cUL,
    0xdb362784UL, 0x35999296UL, 0x50fe2e2eUL, 0x99b95426UL, 0xfcdee89eUL,
    0x12715d8cUL, 0x7716e134UL, 0xce2e36a9UL, 0xab498a11UL, 0x45e63f03UL,
    0x208183bbUL, 0x7691e0e3UL, 0x13f65c5bUL, 0xfd59e949UL, 0x983e55f1UL,
    0x2106826cUL, 0x44613ed4UL, 0xaace8bc6UL, 0xcfa9377eUL, 0x38417fd6UL,
    0x5d26c36eUL, 0xb389767cUL, 0xd6eecac4UL, 0x6fd61d59UL, 0x0ab1a1e1UL,
    0xe41e14f3UL, 0x8179a84bUL, 0xd769cb13UL, 0xb20e77abUL, 0x5ca1c2b9UL,
    0x39c67e01UL, 0x80fea99cUL, 0xe5991524UL, 0x0b36a036UL, 0x6e511c8eUL,
    0xa7166686UL, 0xc271da3eUL, 0x2cde6f2cUL, 0x49b9d394UL, 0xf0810409UL,
    0x95e6b8b1UL, 0x7b490da3UL, 0x1e2eb11bUL, 0x483ed243UL, 0x2d596efbUL,
    0xc3f6dbe9UL, 0xa6916751UL, 0x1fa9b0ccUL, 0x7ace0c74UL, 0x9461b966UL,
    0xf10605deUL
#endif
  }
};
  767. #endif /* DYNAMIC_CRC_TABLE */
  768. /* =========================================================================
  769. * This function can be used by asm versions of crc32()
  770. */
  771. const z_crc_t FAR * ZEXPORT get_crc_table()
  772. {
  773. #ifdef DYNAMIC_CRC_TABLE
  774. if (crc_table_empty)
  775. make_crc_table();
  776. #endif /* DYNAMIC_CRC_TABLE */
  777. return (const z_crc_t FAR *)crc_table;
  778. }
  779. /* ========================================================================= */
  780. #undef DO1
  781. #define DO1 crc = crc_table[0][((int)crc ^ (*buf++)) & 0xff] ^ (crc >> 8)
  782. #undef DO8
  783. #define DO8 DO1; DO1; DO1; DO1; DO1; DO1; DO1; DO1
  784. /* ========================================================================= */
/* Compute the CRC-32 of buf[0..len-1], continuing from crc.
 * A Z_NULL buf returns the required initial value (0). When BYFOUR is
 * available and a pointer fits in ptrdiff_t, dispatch to the word-at-a-time
 * little/big-endian variants; otherwise run the byte-at-a-time table loop.
 */
unsigned long ZEXPORT crc32_z(crc, buf, len)
unsigned long crc;
const unsigned char FAR *buf;
z_size_t len;
{
    if (buf == Z_NULL) return 0UL;

#ifdef DYNAMIC_CRC_TABLE
    if (crc_table_empty)
        make_crc_table();
#endif /* DYNAMIC_CRC_TABLE */

#ifdef BYFOUR
    if (sizeof(void *) == sizeof(ptrdiff_t)) {
        z_crc_t endian;

        /* Detect byte order at run time: look at the first byte of a 1. */
        endian = 1;
        if (*((unsigned char *)(&endian)))
            return crc32_little(crc, buf, len);
        else
            return crc32_big(crc, buf, len);
    }
#endif /* BYFOUR */
    crc = crc ^ 0xffffffffUL;           /* pre-condition the CRC */
    while (len >= 8) {
        DO8;
        len -= 8;
    }
    if (len) do {                       /* remaining 0..7 tail bytes */
        DO1;
    } while (--len);
    return crc ^ 0xffffffffUL;          /* post-condition the CRC */
}
  815. /* ========================================================================= */
  816. unsigned long ZEXPORT crc32(crc, buf, len)
  817. unsigned long crc;
  818. const unsigned char FAR *buf;
  819. uInt len;
  820. {
  821. return crc32_z(crc, buf, len);
  822. }
  823. #ifdef BYFOUR
  824. /*
  825. This BYFOUR code accesses the passed unsigned char * buffer with a 32-bit
  826. integer pointer type. This violates the strict aliasing rule, where a
  827. compiler can assume, for optimization purposes, that two pointers to
  828. fundamentally different types won't ever point to the same memory. This can
  829. manifest as a problem only if one of the pointers is written to. This code
  830. only reads from those pointers. So long as this code remains isolated in
  831. this compilation unit, there won't be a problem. For this reason, this code
  832. should not be copied and pasted into a compilation unit in which other code
  833. writes to the buffer that is passed to these routines.
  834. */
/* ========================================================================= */
/* Fold one aligned little-endian 32-bit word into the CRC with four table
 * lookups ("slicing-by-4"): tables 3..0 cover the word's four byte lanes. */
#define DOLIT4 c ^= *buf4++; \
        c = crc_table[3][c & 0xff] ^ crc_table[2][(c >> 8) & 0xff] ^ \
            crc_table[1][(c >> 16) & 0xff] ^ crc_table[0][c >> 24]
/* Unrolled: fold eight words (32 bytes) per loop iteration. */
#define DOLIT32 DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4
/* ========================================================================= */
/* Word-at-a-time CRC for little-endian machines. First consume bytes one at
 * a time until buf is 4-byte aligned, then consume aligned 32-bit words via
 * the DOLIT macros (see the strict-aliasing note above), and finish any tail
 * bytes individually. */
local unsigned long crc32_little(crc, buf, len)
unsigned long crc;
const unsigned char FAR *buf;
z_size_t len;
{
    register z_crc_t c;
    register const z_crc_t FAR *buf4;

    c = (z_crc_t)crc;
    c = ~c;                             /* pre-condition */
    while (len && ((ptrdiff_t)buf & 3)) {   /* align to a word boundary */
        c = crc_table[0][(c ^ *buf++) & 0xff] ^ (c >> 8);
        len--;
    }

    buf4 = (const z_crc_t FAR *)(const void FAR *)buf;
    while (len >= 32) {
        DOLIT32;
        len -= 32;
    }
    while (len >= 4) {
        DOLIT4;
        len -= 4;
    }
    buf = (const unsigned char FAR *)buf4;

    if (len) do {                       /* remaining 0..3 tail bytes */
        c = crc_table[0][(c ^ *buf++) & 0xff] ^ (c >> 8);
    } while (--len);
    c = ~c;                             /* post-condition */
    return (unsigned long)c;
}
/* ========================================================================= */
/* Big-endian counterpart of DOLIT4: uses tables 4..7, which hold the
 * byte-swapped CRC entries, so the CRC can be kept byte-swapped in c. */
#define DOBIG4 c ^= *buf4++; \
        c = crc_table[4][c & 0xff] ^ crc_table[5][(c >> 8) & 0xff] ^ \
            crc_table[6][(c >> 16) & 0xff] ^ crc_table[7][c >> 24]
/* Unrolled: fold eight words (32 bytes) per loop iteration. */
#define DOBIG32 DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4
/* ========================================================================= */
/* Word-at-a-time CRC for big-endian machines: mirror of crc32_little(), but
 * the running CRC is kept byte-swapped (via ZSWAP32) so the aligned-word
 * loop can use the byte-swapped tables 4..7. */
local unsigned long crc32_big(crc, buf, len)
unsigned long crc;
const unsigned char FAR *buf;
z_size_t len;
{
    register z_crc_t c;
    register const z_crc_t FAR *buf4;

    c = ZSWAP32((z_crc_t)crc);
    c = ~c;                             /* pre-condition */
    while (len && ((ptrdiff_t)buf & 3)) {   /* align to a word boundary */
        c = crc_table[4][(c >> 24) ^ *buf++] ^ (c << 8);
        len--;
    }

    buf4 = (const z_crc_t FAR *)(const void FAR *)buf;
    while (len >= 32) {
        DOBIG32;
        len -= 32;
    }
    while (len >= 4) {
        DOBIG4;
        len -= 4;
    }
    buf = (const unsigned char FAR *)buf4;

    if (len) do {                       /* remaining 0..3 tail bytes */
        c = crc_table[4][(c >> 24) ^ *buf++] ^ (c << 8);
    } while (--len);
    c = ~c;                             /* post-condition */
    return (unsigned long)(ZSWAP32(c)); /* swap back to natural order */
}
  905. #endif /* BYFOUR */
  906. #define GF2_DIM 32 /* dimension of GF(2) vectors (length of CRC) */
  907. /* ========================================================================= */
  908. local unsigned long gf2_matrix_times(mat, vec)
  909. unsigned long *mat;
  910. unsigned long vec;
  911. {
  912. unsigned long sum;
  913. sum = 0;
  914. while (vec) {
  915. if (vec & 1)
  916. sum ^= *mat;
  917. vec >>= 1;
  918. mat++;
  919. }
  920. return sum;
  921. }
  922. /* ========================================================================= */
  923. local void gf2_matrix_square(square, mat)
  924. unsigned long *square;
  925. unsigned long *mat;
  926. {
  927. int n;
  928. for (n = 0; n < GF2_DIM; n++)
  929. square[n] = gf2_matrix_times(mat, mat[n]);
  930. }
/* ========================================================================= */
/* Return the CRC of the concatenation of two byte streams given only their
 * individual CRCs (crc1, crc2) and the length len2 of the second stream.
 * The "append len2 zero bytes" operator is applied to crc1, built up by
 * repeated GF(2) matrix squaring, then the result is XORed with crc2. */
local uLong crc32_combine_(crc1, crc2, len2)
uLong crc1;
uLong crc2;
z_off64_t len2;
{
    int n;
    unsigned long row;
    unsigned long even[GF2_DIM];    /* even-power-of-two zeros operator */
    unsigned long odd[GF2_DIM];     /* odd-power-of-two zeros operator */

    /* degenerate case (also disallow negative lengths) */
    if (len2 <= 0)
        return crc1;

    /* put operator for one zero bit in odd */
    odd[0] = 0xedb88320UL;          /* CRC-32 polynomial */
    row = 1;
    for (n = 1; n < GF2_DIM; n++) {
        odd[n] = row;               /* remaining rows: shift-down identity */
        row <<= 1;
    }

    /* put operator for two zero bits in even */
    gf2_matrix_square(even, odd);

    /* put operator for four zero bits in odd */
    gf2_matrix_square(odd, even);

    /* apply len2 zeros to crc1 (first square will put the operator for one
       zero byte, eight zero bits, in even) */
    do {
        /* apply zeros operator for this bit of len2 */
        gf2_matrix_square(even, odd);
        if (len2 & 1)
            crc1 = gf2_matrix_times(even, crc1);
        len2 >>= 1;

        /* if no more bits set, then done */
        if (len2 == 0)
            break;

        /* another iteration of the loop with odd and even swapped */
        gf2_matrix_square(odd, even);
        if (len2 & 1)
            crc1 = gf2_matrix_times(odd, crc1);
        len2 >>= 1;

        /* if no more bits set, then done */
    } while (len2 != 0);

    /* return combined crc */
    crc1 ^= crc2;
    return crc1;
}
  977. /* ========================================================================= */
  978. uLong ZEXPORT crc32_combine(crc1, crc2, len2)
  979. uLong crc1;
  980. uLong crc2;
  981. z_off_t len2;
  982. {
  983. return crc32_combine_(crc1, crc2, len2);
  984. }
  985. uLong ZEXPORT crc32_combine64(crc1, crc2, len2)
  986. uLong crc1;
  987. uLong crc2;
  988. z_off64_t len2;
  989. {
  990. return crc32_combine_(crc1, crc2, len2);
  991. }
  992. /* deflate.c -- compress data using the deflation algorithm
  993. * Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
  994. * For conditions of distribution and use, see copyright notice in zlib.h
  995. */
  996. /*
  997. * ALGORITHM
  998. *
  999. * The "deflation" process depends on being able to identify portions
  1000. * of the input text which are identical to earlier input (within a
  1001. * sliding window trailing behind the input currently being processed).
  1002. *
  1003. * The most straightforward technique turns out to be the fastest for
  1004. * most input files: try all possible matches and select the longest.
  1005. * The key feature of this algorithm is that insertions into the string
  1006. * dictionary are very simple and thus fast, and deletions are avoided
  1007. * completely. Insertions are performed at each input character, whereas
  1008. * string matches are performed only when the previous match ends. So it
  1009. * is preferable to spend more time in matches to allow very fast string
  1010. * insertions and avoid deletions. The matching algorithm for small
  1011. * strings is inspired from that of Rabin & Karp. A brute force approach
  1012. * is used to find longer strings when a small match has been found.
  1013. * A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
  1014. * (by Leonid Broukhis).
  1015. * A previous version of this file used a more sophisticated algorithm
  1016. * (by Fiala and Greene) which is guaranteed to run in linear amortized
  1017. * time, but has a larger average cost, uses more memory and is patented.
  1018. * However the F&G algorithm may be faster for some highly redundant
  1019. * files if the parameter max_chain_length (described below) is too large.
  1020. *
  1021. * ACKNOWLEDGEMENTS
  1022. *
  1023. * The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
  1024. * I found it in 'freeze' written by Leonid Broukhis.
  1025. * Thanks to many people for bug reports and testing.
  1026. *
  1027. * REFERENCES
  1028. *
  1029. * Deutsch, L.P.,"DEFLATE Compressed Data Format Specification".
  1030. * Available in http://tools.ietf.org/html/rfc1951
  1031. *
  1032. * A description of the Rabin and Karp algorithm is given in the book
  1033. * "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
  1034. *
  1035. * Fiala,E.R., and Greene,D.H.
 * Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-505
  1037. *
  1038. */
  1039. /* @(#) $Id$ */
/* Version/copyright string embedded in any binary linking deflate; kept per
   the author's request below. */
const char deflate_copyright[] =
   " deflate 1.2.11 Copyright 1995-2017 Jean-loup Gailly and Mark Adler ";
/*
  If you use the zlib library in a product, an acknowledgment is welcome
  in the documentation of your product. If for some reason you cannot
  include such an acknowledgment, I would appreciate that you keep this
  copyright string in the executable of your product.
*/
/* ===========================================================================
 * Function prototypes.
 */

/* Result of a block-compression function: tells deflate() whether the
 * current block is complete and whether more input or output is needed. */
typedef enum {
    need_more,      /* block not completed, need more input or more output */
    block_done,     /* block flush performed */
    finish_started, /* finish started, need only more output at next deflate */
    finish_done     /* finish done, accept no more input or output */
} block_state;

typedef block_state (*compress_func) OF((deflate_state *s, int flush));
/* Compression function. Returns the block state after the call. */

local int deflateStateCheck      OF((z_streamp strm));
local void slide_hash     OF((deflate_state *s));
local void fill_window    OF((deflate_state *s));
local block_state deflate_stored OF((deflate_state *s, int flush));
local block_state deflate_fast   OF((deflate_state *s, int flush));
#ifndef FASTEST
local block_state deflate_slow   OF((deflate_state *s, int flush));
#endif
local block_state deflate_rle    OF((deflate_state *s, int flush));
local block_state deflate_huff   OF((deflate_state *s, int flush));
local void lm_init        OF((deflate_state *s));
local void putShortMSB    OF((deflate_state *s, uInt b));
local void flush_pending  OF((z_streamp strm));
local unsigned read_buf   OF((z_streamp strm, Bytef *buf, unsigned size));
#ifdef ASMV
# pragma message("Assembler code may have bugs -- use at your own risk")
      void match_init OF((void)); /* asm code initialization */
      uInt longest_match  OF((deflate_state *s, IPos cur_match));
#else
local uInt longest_match  OF((deflate_state *s, IPos cur_match));
#endif

#ifdef ZLIB_DEBUG
local void check_match OF((deflate_state *s, IPos start, IPos match,
                           int length));
#endif
/* ===========================================================================
 * Local data
 */

#define NIL 0
/* Tail of hash chains */

#ifndef TOO_FAR
# define TOO_FAR 4096
#endif
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */

/* Values for max_lazy_match, good_match and max_chain_length, depending on
 * the desired pack level (0..9). The values given below have been tuned to
 * exclude worst case performance for pathological files. Better values may be
 * found for specific files.
 */
typedef struct config_s {
   ush good_length; /* reduce lazy search above this match length */
   ush max_lazy;    /* do not perform lazy search above this match length */
   ush nice_length; /* quit search above this match length */
   ush max_chain;   /* maximum hash chain length to search */
   compress_func func; /* block-compression function for this level */
} config;

#ifdef FASTEST
local const config configuration_table[2] = {
/*      good lazy nice chain */
/* 0 */ {0,    0,  0,    0, deflate_stored},  /* store only */
/* 1 */ {4,    4,  8,    4, deflate_fast}};   /* max speed, no lazy matches */
#else
local const config configuration_table[10] = {
/*      good lazy nice chain */
/* 0 */ {0,    0,  0,    0, deflate_stored},  /* store only */
/* 1 */ {4,    4,  8,    4, deflate_fast},    /* max speed, no lazy matches */
/* 2 */ {4,    5, 16,    8, deflate_fast},
/* 3 */ {4,    6, 32,   32, deflate_fast},
/* 4 */ {4,    4, 16,   16, deflate_slow},    /* lazy matches */
/* 5 */ {8,   16, 32,   32, deflate_slow},
/* 6 */ {8,   16, 128, 128, deflate_slow},
/* 7 */ {8,   32, 128, 256, deflate_slow},
/* 8 */ {32, 128, 258, 1024, deflate_slow},
/* 9 */ {32, 258, 258, 4096, deflate_slow}};  /* max compression */
#endif
/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
 * meaning.
 */

/* rank Z_BLOCK between Z_NO_FLUSH and Z_PARTIAL_FLUSH */
#define RANK(f) (((f) * 2) - ((f) > 4 ? 9 : 0))
/* ===========================================================================
 * Update a hash value with the given input byte
 * IN  assertion: all calls to UPDATE_HASH are made with consecutive input
 *    characters, so that a running hash key can be computed from the previous
 *    key instead of complete recalculation each time.
 */
#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)

/* ===========================================================================
 * Insert string str in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * If this file is compiled with -DFASTEST, the compression level is forced
 * to 1, and no hash chains are maintained.
 * IN  assertion: all calls to INSERT_STRING are made with consecutive input
 *    characters and the first MIN_MATCH bytes of str are valid (except for
 *    the last MIN_MATCH-1 bytes of the input file).
 */
#ifdef FASTEST
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    match_head = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))
#else
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    match_head = s->prev[(str) & s->w_mask] = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))
#endif

/* ===========================================================================
 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
 * prev[] will be initialized on the fly.
 * NOTE: expands to two statements, so it must not be used as the unbraced
 * body of an if/else or loop.
 */
#define CLEAR_HASH(s) \
    s->head[s->hash_size-1] = NIL; \
    zmemzero((Bytef *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));
  1165. /* ===========================================================================
  1166. * Slide the hash table when sliding the window down (could be avoided with 32
  1167. * bit values at the expense of memory usage). We slide even when level == 0 to
  1168. * keep the hash table consistent if we switch back to level > 0 later.
  1169. */
  1170. local void slide_hash(s)
  1171. deflate_state *s;
  1172. {
  1173. unsigned n, m;
  1174. Posf *p;
  1175. uInt wsize = s->w_size;
  1176. n = s->hash_size;
  1177. p = &s->head[n];
  1178. do {
  1179. m = *--p;
  1180. *p = (Pos)(m >= wsize ? m - wsize : NIL);
  1181. } while (--n);
  1182. n = wsize;
  1183. #ifndef FASTEST
  1184. p = &s->prev[n];
  1185. do {
  1186. m = *--p;
  1187. *p = (Pos)(m >= wsize ? m - wsize : NIL);
  1188. /* If n is not on any hash chain, prev[n] is garbage but
  1189. * its value will never be used.
  1190. */
  1191. } while (--n);
  1192. #endif
  1193. }
  1194. /* ========================================================================= */
  1195. int ZEXPORT deflateInit_(strm, level, version, stream_size)
  1196. z_streamp strm;
  1197. int level;
  1198. const char *version;
  1199. int stream_size;
  1200. {
  1201. return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
  1202. Z_DEFAULT_STRATEGY, version, stream_size);
  1203. /* To do: ignore strm->next_in if we use it as window */
  1204. }
/* ========================================================================= */
/* Initialize a deflate stream with full parameter control. windowBits
 * encodes the wrapper: negative => raw deflate, > 15 => gzip wrapper (+16),
 * otherwise zlib wrapper. On success the stream is reset and Z_OK returned;
 * on allocation failure the partial state is freed and Z_MEM_ERROR returned.
 */
int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
                  version, stream_size)
z_streamp strm;
int level;
int method;
int windowBits;
int memLevel;
int strategy;
const char *version;
int stream_size;
{
    deflate_state *s;
    int wrap = 1;
    static const char my_version[] = ZLIB_VERSION;

    ushf *overlay;
    /* We overlay pending_buf and d_buf+l_buf. This works since the average
     * output size for (length,distance) codes is <= 24 bits.
     */

    /* Reject a mismatched zlib.h: only the major version must agree. */
    if (version == Z_NULL || version[0] != my_version[0] ||
        stream_size != sizeof(z_stream)) {
        return Z_VERSION_ERROR;
    }
    if (strm == Z_NULL) return Z_STREAM_ERROR;

    strm->msg = Z_NULL;
    if (strm->zalloc == (alloc_func)0) {
#ifdef Z_SOLO
        return Z_STREAM_ERROR;
#else
        strm->zalloc = zcalloc;
        strm->opaque = (voidpf)0;
#endif
    }
    if (strm->zfree == (free_func)0)
#ifdef Z_SOLO
        return Z_STREAM_ERROR;
#else
        strm->zfree = zcfree;
#endif

#ifdef FASTEST
    if (level != 0) level = 1;
#else
    if (level == Z_DEFAULT_COMPRESSION) level = 6;
#endif

    if (windowBits < 0) { /* suppress zlib wrapper */
        wrap = 0;
        windowBits = -windowBits;
    }
#ifdef GZIP
    else if (windowBits > 15) {
        wrap = 2;       /* write gzip wrapper instead */
        windowBits -= 16;
    }
#endif
    if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
        windowBits < 8 || windowBits > 15 || level < 0 || level > 9 ||
        strategy < 0 || strategy > Z_FIXED || (windowBits == 8 && wrap != 1)) {
        return Z_STREAM_ERROR;
    }
    if (windowBits == 8) windowBits = 9;  /* until 256-byte window bug fixed */
    s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
    if (s == Z_NULL) return Z_MEM_ERROR;
    strm->state = (struct internal_state FAR *)s;
    s->strm = strm;
    s->status = INIT_STATE;     /* to pass state test in deflateReset() */

    s->wrap = wrap;
    s->gzhead = Z_NULL;
    s->w_bits = (uInt)windowBits;
    s->w_size = 1 << s->w_bits;
    s->w_mask = s->w_size - 1;

    s->hash_bits = (uInt)memLevel + 7;
    s->hash_size = 1 << s->hash_bits;
    s->hash_mask = s->hash_size - 1;
    s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);

    s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
    s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
    s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));

    s->high_water = 0;      /* nothing written to s->window yet */

    s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
    s->pending_buf = (uchf *) overlay;
    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);

    if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
        s->pending_buf == Z_NULL) {
        s->status = FINISH_STATE;
        strm->msg = ERR_MSG(Z_MEM_ERROR);
        deflateEnd (strm);
        return Z_MEM_ERROR;
    }
    /* d_buf and l_buf live inside the pending_buf overlay (see above). */
    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;

    s->level = level;
    s->strategy = strategy;
    s->method = (Byte)method;

    return deflateReset(strm);
}
  1301. /* =========================================================================
  1302. * Check for a valid deflate stream state. Return 0 if ok, 1 if not.
  1303. */
  1304. local int deflateStateCheck (strm)
  1305. z_streamp strm;
  1306. {
  1307. deflate_state *s;
  1308. if (strm == Z_NULL ||
  1309. strm->zalloc == (alloc_func)0 || strm->zfree == (free_func)0)
  1310. return 1;
  1311. s = strm->state;
  1312. if (s == Z_NULL || s->strm != strm || (s->status != INIT_STATE &&
  1313. #ifdef GZIP
  1314. s->status != GZIP_STATE &&
  1315. #endif
  1316. s->status != EXTRA_STATE &&
  1317. s->status != NAME_STATE &&
  1318. s->status != COMMENT_STATE &&
  1319. s->status != HCRC_STATE &&
  1320. s->status != BUSY_STATE &&
  1321. s->status != FINISH_STATE))
  1322. return 1;
  1323. return 0;
  1324. }
/* ========================================================================= */
/* Preload the compression dictionary. Only allowed before any input has been
 * consumed (or at any time for raw deflate); rejected for gzip streams. If
 * the dictionary exceeds the window, only its tail is used. The bytes are
 * fed through fill_window() with wrap temporarily zeroed so read_buf does
 * not checksum them; the caller's input pointers are saved and restored. */
int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength)
z_streamp strm;
const Bytef *dictionary;
uInt dictLength;
{
    deflate_state *s;
    uInt str, n;
    int wrap;
    unsigned avail;
    z_const unsigned char *next;

    if (deflateStateCheck(strm) || dictionary == Z_NULL)
        return Z_STREAM_ERROR;
    s = strm->state;
    wrap = s->wrap;
    if (wrap == 2 || (wrap == 1 && s->status != INIT_STATE) || s->lookahead)
        return Z_STREAM_ERROR;

    /* when using zlib wrappers, compute Adler-32 for provided dictionary */
    if (wrap == 1)
        strm->adler = adler32(strm->adler, dictionary, dictLength);
    s->wrap = 0;                    /* avoid computing Adler-32 in read_buf */

    /* if dictionary would fill window, just replace the history */
    if (dictLength >= s->w_size) {
        if (wrap == 0) {            /* already empty otherwise */
            CLEAR_HASH(s);
            s->strstart = 0;
            s->block_start = 0L;
            s->insert = 0;
        }
        dictionary += dictLength - s->w_size;  /* use the tail */
        dictLength = s->w_size;
    }

    /* insert dictionary into window and hash */
    avail = strm->avail_in;
    next = strm->next_in;
    strm->avail_in = dictLength;
    strm->next_in = (z_const Bytef *)dictionary;
    fill_window(s);
    while (s->lookahead >= MIN_MATCH) {
        str = s->strstart;
        n = s->lookahead - (MIN_MATCH-1);
        /* hash every position that has MIN_MATCH valid bytes after it */
        do {
            UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]);
#ifndef FASTEST
            s->prev[str & s->w_mask] = s->head[s->ins_h];
#endif
            s->head[s->ins_h] = (Pos)str;
            str++;
        } while (--n);
        s->strstart = str;
        s->lookahead = MIN_MATCH-1;
        fill_window(s);
    }
    s->strstart += s->lookahead;
    s->block_start = (long)s->strstart;
    s->insert = s->lookahead;
    s->lookahead = 0;
    s->match_length = s->prev_length = MIN_MATCH-1;
    s->match_available = 0;

    /* restore the caller's input and the wrapper mode */
    strm->next_in = next;
    strm->avail_in = avail;
    s->wrap = wrap;
    return Z_OK;
}
  1389. /* ========================================================================= */
  1390. int ZEXPORT deflateGetDictionary (strm, dictionary, dictLength)
  1391. z_streamp strm;
  1392. Bytef *dictionary;
  1393. uInt *dictLength;
  1394. {
  1395. deflate_state *s;
  1396. uInt len;
  1397. if (deflateStateCheck(strm))
  1398. return Z_STREAM_ERROR;
  1399. s = strm->state;
  1400. len = s->strstart + s->lookahead;
  1401. if (len > s->w_size)
  1402. len = s->w_size;
  1403. if (dictionary != Z_NULL && len)
  1404. zmemcpy(dictionary, s->window + s->strstart + s->lookahead - len, len);
  1405. if (dictLength != Z_NULL)
  1406. *dictLength = len;
  1407. return Z_OK;
  1408. }
/* ========================================================================= */
/* Reset the stream without touching the compression parameters or the
 * window contents: clears the totals, pending output, status and checksum.
 * deflateReset() calls this and then also re-initializes the match state. */
int ZEXPORT deflateResetKeep (strm)
z_streamp strm;
{
    deflate_state *s;

    if (deflateStateCheck(strm)) {
        return Z_STREAM_ERROR;
    }

    strm->total_in = strm->total_out = 0;
    strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
    strm->data_type = Z_UNKNOWN;

    s = (deflate_state *)strm->state;
    s->pending = 0;
    s->pending_out = s->pending_buf;

    if (s->wrap < 0) {
        s->wrap = -s->wrap; /* was made negative by deflate(..., Z_FINISH); */
    }
    /* initial status and checksum depend on the wrapper mode */
    s->status =
#ifdef GZIP
        s->wrap == 2 ? GZIP_STATE :
#endif
        s->wrap ? INIT_STATE : BUSY_STATE;
    strm->adler =
#ifdef GZIP
        s->wrap == 2 ? crc32(0L, Z_NULL, 0) :
#endif
        adler32(0L, Z_NULL, 0);
    s->last_flush = Z_NO_FLUSH;
    _tr_init(s);
    return Z_OK;
}
  1440. /* ========================================================================= */
  1441. int ZEXPORT deflateReset (strm)
  1442. z_streamp strm;
  1443. {
  1444. int ret;
  1445. ret = deflateResetKeep(strm);
  1446. if (ret == Z_OK)
  1447. lm_init(strm->state);
  1448. return ret;
  1449. }
  1450. /* ========================================================================= */
  1451. int ZEXPORT deflateSetHeader (strm, head)
  1452. z_streamp strm;
  1453. gz_headerp head;
  1454. {
  1455. if (deflateStateCheck(strm) || strm->state->wrap != 2)
  1456. return Z_STREAM_ERROR;
  1457. strm->state->gzhead = head;
  1458. return Z_OK;
  1459. }
  1460. /* ========================================================================= */
  1461. int ZEXPORT deflatePending (strm, pending, bits)
  1462. unsigned *pending;
  1463. int *bits;
  1464. z_streamp strm;
  1465. {
  1466. if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
  1467. if (pending != Z_NULL)
  1468. *pending = strm->state->pending;
  1469. if (bits != Z_NULL)
  1470. *bits = strm->state->bi_valid;
  1471. return Z_OK;
  1472. }
/* ========================================================================= */
/* Insert bits into the deflate output stream ahead of any compressed data:
 * the low 'bits' bits of value are emitted LSB first, up to Buf_size bits
 * per pass through the loop. Fails with Z_BUF_ERROR if flushing the bit
 * buffer could collide with the symbol buffer overlaid on pending_buf. */
int ZEXPORT deflatePrime (strm, bits, value)
z_streamp strm;
int bits;
int value;
{
    deflate_state *s;
    int put;

    if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
    s = strm->state;
    /* d_buf overlays pending_buf; ensure room for the flushed bytes */
    if ((Bytef *)(s->d_buf) < s->pending_out + ((Buf_size + 7) >> 3))
        return Z_BUF_ERROR;
    do {
        put = Buf_size - s->bi_valid;   /* room left in the bit buffer */
        if (put > bits)
            put = bits;
        s->bi_buf |= (ush)((value & ((1 << put) - 1)) << s->bi_valid);
        s->bi_valid += put;
        _tr_flush_bits(s);
        value >>= put;
        bits -= put;
    } while (bits);
    return Z_OK;
}
/* ========================================================================= */
/* Change the compression level and/or strategy mid-stream. If the change
 * would switch the block-compression function, any buffered input is first
 * compressed with the old parameters via deflate(strm, Z_BLOCK); Z_BUF_ERROR
 * is returned if there was no output space for that flush. */
int ZEXPORT deflateParams(strm, level, strategy)
z_streamp strm;
int level;
int strategy;
{
    deflate_state *s;
    compress_func func;

    if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
    s = strm->state;

#ifdef FASTEST
    if (level != 0) level = 1;
#else
    if (level == Z_DEFAULT_COMPRESSION) level = 6;
#endif
    if (level < 0 || level > 9 || strategy < 0 || strategy > Z_FIXED) {
        return Z_STREAM_ERROR;
    }
    func = configuration_table[s->level].func;

    if ((strategy != s->strategy || func != configuration_table[level].func) &&
        s->high_water) {
        /* Flush the last buffer: */
        int err = deflate(strm, Z_BLOCK);
        if (err == Z_STREAM_ERROR)
            return err;
        if (strm->avail_out == 0)
            return Z_BUF_ERROR;
    }
    if (s->level != level) {
        if (s->level == 0 && s->matches != 0) {
            /* NOTE(review): at level 0, matches appears to count window
             * slides (1 => slide hash once, otherwise rebuild) -- confirm
             * against deflate_stored(). */
            if (s->matches == 1)
                slide_hash(s);
            else
                CLEAR_HASH(s);
            s->matches = 0;
        }
        /* adopt the tuning parameters for the new level */
        s->level = level;
        s->max_lazy_match   = configuration_table[level].max_lazy;
        s->good_match       = configuration_table[level].good_length;
        s->nice_match       = configuration_table[level].nice_length;
        s->max_chain_length = configuration_table[level].max_chain;
    }
    s->strategy = strategy;
    return Z_OK;
}
  1542. /* ========================================================================= */
  1543. int ZEXPORT deflateTune(strm, good_length, max_lazy, nice_length, max_chain)
  1544. z_streamp strm;
  1545. int good_length;
  1546. int max_lazy;
  1547. int nice_length;
  1548. int max_chain;
  1549. {
  1550. deflate_state *s;
  1551. if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
  1552. s = strm->state;
  1553. s->good_match = (uInt)good_length;
  1554. s->max_lazy_match = (uInt)max_lazy;
  1555. s->nice_match = nice_length;
  1556. s->max_chain_length = (uInt)max_chain;
  1557. return Z_OK;
  1558. }
/* =========================================================================
 * For the default windowBits of 15 and memLevel of 8, this function returns
 * a close to exact, as well as small, upper bound on the compressed size.
 * They are coded as constants here for a reason--if the #define's are
 * changed, then this function needs to be changed as well. The return
 * value for 15 and 8 only works for those exact settings.
 *
 * For any setting other than those defaults for windowBits and memLevel,
 * the value returned is a conservative worst case for the maximum expansion
 * resulting from using fixed blocks instead of stored blocks, which deflate
 * can emit on compressed data for some combinations of the parameters.
 *
 * This function could be more sophisticated to provide closer upper bounds for
 * every combination of windowBits and memLevel. But even the conservative
 * upper bound of about 14% expansion does not seem onerous for output buffer
 * allocation.
 */
uLong ZEXPORT deflateBound(strm, sourceLen)
z_streamp strm;
uLong sourceLen;
{
    deflate_state *s;
    uLong complen, wraplen;

    /* conservative upper bound for compressed data */
    complen = sourceLen +
              ((sourceLen + 7) >> 3) + ((sourceLen + 63) >> 6) + 5;

    /* if can't get parameters, return conservative bound plus zlib wrapper */
    if (deflateStateCheck(strm))
        return complen + 6;

    /* compute wrapper length */
    s = strm->state;
    switch (s->wrap) {
    case 0:                                 /* raw deflate */
        wraplen = 0;
        break;
    case 1:                                 /* zlib wrapper */
        wraplen = 6 + (s->strstart ? 4 : 0);    /* +4 if a dictionary is set */
        break;
#ifdef GZIP
    case 2:                                 /* gzip wrapper */
        wraplen = 18;
        if (s->gzhead != Z_NULL) {          /* user-supplied gzip header */
            Bytef *str;
            if (s->gzhead->extra != Z_NULL)
                wraplen += 2 + s->gzhead->extra_len;
            str = s->gzhead->name;
            if (str != Z_NULL)
                do {
                    wraplen++;              /* name incl. NUL terminator */
                } while (*str++);
            str = s->gzhead->comment;
            if (str != Z_NULL)
                do {
                    wraplen++;              /* comment incl. NUL terminator */
                } while (*str++);
            if (s->gzhead->hcrc)
                wraplen += 2;
        }
        break;
#endif
    default:                                /* for compiler happiness */
        wraplen = 6;
    }

    /* if not default parameters, return conservative bound */
    if (s->w_bits != 15 || s->hash_bits != 8 + 7)
        return complen + wraplen;

    /* default settings: return tight bound for that case */
    return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
           (sourceLen >> 25) + 13 - 6 + wraplen;
}
  1629. /* =========================================================================
  1630. * Put a short in the pending buffer. The 16-bit value is put in MSB order.
  1631. * IN assertion: the stream state is correct and there is enough room in
  1632. * pending_buf.
  1633. */
  1634. local void putShortMSB (s, b)
  1635. deflate_state *s;
  1636. uInt b;
  1637. {
  1638. put_byte(s, (Byte)(b >> 8));
  1639. put_byte(s, (Byte)(b & 0xff));
  1640. }
  1641. /* =========================================================================
  1642. * Flush as much pending output as possible. All deflate() output, except for
  1643. * some deflate_stored() output, goes through this function so some
  1644. * applications may wish to modify it to avoid allocating a large
  1645. * strm->next_out buffer and copying into it. (See also read_buf()).
  1646. */
  1647. local void flush_pending(strm)
  1648. z_streamp strm;
  1649. {
  1650. unsigned len;
  1651. deflate_state *s = strm->state;
  1652. _tr_flush_bits(s);
  1653. len = s->pending;
  1654. if (len > strm->avail_out) len = strm->avail_out;
  1655. if (len == 0) return;
  1656. zmemcpy(strm->next_out, s->pending_out, len);
  1657. strm->next_out += len;
  1658. s->pending_out += len;
  1659. strm->total_out += len;
  1660. strm->avail_out -= len;
  1661. s->pending -= len;
  1662. if (s->pending == 0) {
  1663. s->pending_out = s->pending_buf;
  1664. }
  1665. }
  1666. /* ===========================================================================
  1667. * Update the header CRC with the bytes s->pending_buf[beg..s->pending - 1].
  1668. */
  1669. #define HCRC_UPDATE(beg) \
  1670. do { \
  1671. if (s->gzhead->hcrc && s->pending > (beg)) \
  1672. strm->adler = crc32(strm->adler, s->pending_buf + (beg), \
  1673. s->pending - (beg)); \
  1674. } while (0)
  1675. /* ========================================================================= */
  1676. int ZEXPORT deflate (strm, flush)
  1677. z_streamp strm;
  1678. int flush;
  1679. {
  1680. int old_flush; /* value of flush param for previous deflate call */
  1681. deflate_state *s;
  1682. if (deflateStateCheck(strm) || flush > Z_BLOCK || flush < 0) {
  1683. return Z_STREAM_ERROR;
  1684. }
  1685. s = strm->state;
  1686. if (strm->next_out == Z_NULL ||
  1687. (strm->avail_in != 0 && strm->next_in == Z_NULL) ||
  1688. (s->status == FINISH_STATE && flush != Z_FINISH)) {
  1689. ERR_RETURN(strm, Z_STREAM_ERROR);
  1690. }
  1691. if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);
  1692. old_flush = s->last_flush;
  1693. s->last_flush = flush;
  1694. /* Flush as much pending output as possible */
  1695. if (s->pending != 0) {
  1696. flush_pending(strm);
  1697. if (strm->avail_out == 0) {
  1698. /* Since avail_out is 0, deflate will be called again with
  1699. * more output space, but possibly with both pending and
  1700. * avail_in equal to zero. There won't be anything to do,
  1701. * but this is not an error situation so make sure we
  1702. * return OK instead of BUF_ERROR at next call of deflate:
  1703. */
  1704. s->last_flush = -1;
  1705. return Z_OK;
  1706. }
  1707. /* Make sure there is something to do and avoid duplicate consecutive
  1708. * flushes. For repeated and useless calls with Z_FINISH, we keep
  1709. * returning Z_STREAM_END instead of Z_BUF_ERROR.
  1710. */
  1711. } else if (strm->avail_in == 0 && RANK(flush) <= RANK(old_flush) &&
  1712. flush != Z_FINISH) {
  1713. ERR_RETURN(strm, Z_BUF_ERROR);
  1714. }
  1715. /* User must not provide more input after the first FINISH: */
  1716. if (s->status == FINISH_STATE && strm->avail_in != 0) {
  1717. ERR_RETURN(strm, Z_BUF_ERROR);
  1718. }
  1719. /* Write the header */
  1720. if (s->status == INIT_STATE) {
  1721. /* zlib header */
  1722. uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
  1723. uInt level_flags;
  1724. if (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2)
  1725. level_flags = 0;
  1726. else if (s->level < 6)
  1727. level_flags = 1;
  1728. else if (s->level == 6)
  1729. level_flags = 2;
  1730. else
  1731. level_flags = 3;
  1732. header |= (level_flags << 6);
  1733. if (s->strstart != 0) header |= PRESET_DICT;
  1734. header += 31 - (header % 31);
  1735. putShortMSB(s, header);
  1736. /* Save the adler32 of the preset dictionary: */
  1737. if (s->strstart != 0) {
  1738. putShortMSB(s, (uInt)(strm->adler >> 16));
  1739. putShortMSB(s, (uInt)(strm->adler & 0xffff));
  1740. }
  1741. strm->adler = adler32(0L, Z_NULL, 0);
  1742. s->status = BUSY_STATE;
  1743. /* Compression must start with an empty pending buffer */
  1744. flush_pending(strm);
  1745. if (s->pending != 0) {
  1746. s->last_flush = -1;
  1747. return Z_OK;
  1748. }
  1749. }
  1750. #ifdef GZIP
  1751. if (s->status == GZIP_STATE) {
  1752. /* gzip header */
  1753. strm->adler = crc32(0L, Z_NULL, 0);
  1754. put_byte(s, 31);
  1755. put_byte(s, 139);
  1756. put_byte(s, 8);
  1757. if (s->gzhead == Z_NULL) {
  1758. put_byte(s, 0);
  1759. put_byte(s, 0);
  1760. put_byte(s, 0);
  1761. put_byte(s, 0);
  1762. put_byte(s, 0);
  1763. put_byte(s, s->level == 9 ? 2 :
  1764. (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2 ?
  1765. 4 : 0));
  1766. put_byte(s, OS_CODE);
  1767. s->status = BUSY_STATE;
  1768. /* Compression must start with an empty pending buffer */
  1769. flush_pending(strm);
  1770. if (s->pending != 0) {
  1771. s->last_flush = -1;
  1772. return Z_OK;
  1773. }
  1774. }
  1775. else {
  1776. put_byte(s, (s->gzhead->text ? 1 : 0) +
  1777. (s->gzhead->hcrc ? 2 : 0) +
  1778. (s->gzhead->extra == Z_NULL ? 0 : 4) +
  1779. (s->gzhead->name == Z_NULL ? 0 : 8) +
  1780. (s->gzhead->comment == Z_NULL ? 0 : 16)
  1781. );
  1782. put_byte(s, (Byte)(s->gzhead->time & 0xff));
  1783. put_byte(s, (Byte)((s->gzhead->time >> 8) & 0xff));
  1784. put_byte(s, (Byte)((s->gzhead->time >> 16) & 0xff));
  1785. put_byte(s, (Byte)((s->gzhead->time >> 24) & 0xff));
  1786. put_byte(s, s->level == 9 ? 2 :
  1787. (s->strategy >= Z_HUFFMAN_ONLY || s->level < 2 ?
  1788. 4 : 0));
  1789. put_byte(s, s->gzhead->os & 0xff);
  1790. if (s->gzhead->extra != Z_NULL) {
  1791. put_byte(s, s->gzhead->extra_len & 0xff);
  1792. put_byte(s, (s->gzhead->extra_len >> 8) & 0xff);
  1793. }
  1794. if (s->gzhead->hcrc)
  1795. strm->adler = crc32(strm->adler, s->pending_buf,
  1796. s->pending);
  1797. s->gzindex = 0;
  1798. s->status = EXTRA_STATE;
  1799. }
  1800. }
  1801. if (s->status == EXTRA_STATE) {
  1802. if (s->gzhead->extra != Z_NULL) {
  1803. ulg beg = s->pending; /* start of bytes to update crc */
  1804. uInt left = (s->gzhead->extra_len & 0xffff) - s->gzindex;
  1805. while (s->pending + left > s->pending_buf_size) {
  1806. uInt copy = s->pending_buf_size - s->pending;
  1807. zmemcpy(s->pending_buf + s->pending,
  1808. s->gzhead->extra + s->gzindex, copy);
  1809. s->pending = s->pending_buf_size;
  1810. HCRC_UPDATE(beg);
  1811. s->gzindex += copy;
  1812. flush_pending(strm);
  1813. if (s->pending != 0) {
  1814. s->last_flush = -1;
  1815. return Z_OK;
  1816. }
  1817. beg = 0;
  1818. left -= copy;
  1819. }
  1820. zmemcpy(s->pending_buf + s->pending,
  1821. s->gzhead->extra + s->gzindex, left);
  1822. s->pending += left;
  1823. HCRC_UPDATE(beg);
  1824. s->gzindex = 0;
  1825. }
  1826. s->status = NAME_STATE;
  1827. }
  1828. if (s->status == NAME_STATE) {
  1829. if (s->gzhead->name != Z_NULL) {
  1830. ulg beg = s->pending; /* start of bytes to update crc */
  1831. int val;
  1832. do {
  1833. if (s->pending == s->pending_buf_size) {
  1834. HCRC_UPDATE(beg);
  1835. flush_pending(strm);
  1836. if (s->pending != 0) {
  1837. s->last_flush = -1;
  1838. return Z_OK;
  1839. }
  1840. beg = 0;
  1841. }
  1842. val = s->gzhead->name[s->gzindex++];
  1843. put_byte(s, val);
  1844. } while (val != 0);
  1845. HCRC_UPDATE(beg);
  1846. s->gzindex = 0;
  1847. }
  1848. s->status = COMMENT_STATE;
  1849. }
  1850. if (s->status == COMMENT_STATE) {
  1851. if (s->gzhead->comment != Z_NULL) {
  1852. ulg beg = s->pending; /* start of bytes to update crc */
  1853. int val;
  1854. do {
  1855. if (s->pending == s->pending_buf_size) {
  1856. HCRC_UPDATE(beg);
  1857. flush_pending(strm);
  1858. if (s->pending != 0) {
  1859. s->last_flush = -1;
  1860. return Z_OK;
  1861. }
  1862. beg = 0;
  1863. }
  1864. val = s->gzhead->comment[s->gzindex++];
  1865. put_byte(s, val);
  1866. } while (val != 0);
  1867. HCRC_UPDATE(beg);
  1868. }
  1869. s->status = HCRC_STATE;
  1870. }
  1871. if (s->status == HCRC_STATE) {
  1872. if (s->gzhead->hcrc) {
  1873. if (s->pending + 2 > s->pending_buf_size) {
  1874. flush_pending(strm);
  1875. if (s->pending != 0) {
  1876. s->last_flush = -1;
  1877. return Z_OK;
  1878. }
  1879. }
  1880. put_byte(s, (Byte)(strm->adler & 0xff));
  1881. put_byte(s, (Byte)((strm->adler >> 8) & 0xff));
  1882. strm->adler = crc32(0L, Z_NULL, 0);
  1883. }
  1884. s->status = BUSY_STATE;
  1885. /* Compression must start with an empty pending buffer */
  1886. flush_pending(strm);
  1887. if (s->pending != 0) {
  1888. s->last_flush = -1;
  1889. return Z_OK;
  1890. }
  1891. }
  1892. #endif
  1893. /* Start a new block or continue the current one.
  1894. */
  1895. if (strm->avail_in != 0 || s->lookahead != 0 ||
  1896. (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
  1897. block_state bstate;
  1898. bstate = s->level == 0 ? deflate_stored(s, flush) :
  1899. s->strategy == Z_HUFFMAN_ONLY ? deflate_huff(s, flush) :
  1900. s->strategy == Z_RLE ? deflate_rle(s, flush) :
  1901. (*(configuration_table[s->level].func))(s, flush);
  1902. if (bstate == finish_started || bstate == finish_done) {
  1903. s->status = FINISH_STATE;
  1904. }
  1905. if (bstate == need_more || bstate == finish_started) {
  1906. if (strm->avail_out == 0) {
  1907. s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
  1908. }
  1909. return Z_OK;
  1910. /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
  1911. * of deflate should use the same flush parameter to make sure
  1912. * that the flush is complete. So we don't have to output an
  1913. * empty block here, this will be done at next call. This also
  1914. * ensures that for a very small output buffer, we emit at most
  1915. * one empty block.
  1916. */
  1917. }
  1918. if (bstate == block_done) {
  1919. if (flush == Z_PARTIAL_FLUSH) {
  1920. _tr_align(s);
  1921. } else if (flush != Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */
  1922. _tr_stored_block(s, (char*)0, 0L, 0);
  1923. /* For a full flush, this empty block will be recognized
  1924. * as a special marker by inflate_sync().
  1925. */
  1926. if (flush == Z_FULL_FLUSH) {
  1927. CLEAR_HASH(s); /* forget history */
  1928. if (s->lookahead == 0) {
  1929. s->strstart = 0;
  1930. s->block_start = 0L;
  1931. s->insert = 0;
  1932. }
  1933. }
  1934. }
  1935. flush_pending(strm);
  1936. if (strm->avail_out == 0) {
  1937. s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
  1938. return Z_OK;
  1939. }
  1940. }
  1941. }
  1942. if (flush != Z_FINISH) return Z_OK;
  1943. if (s->wrap <= 0) return Z_STREAM_END;
  1944. /* Write the trailer */
  1945. #ifdef GZIP
  1946. if (s->wrap == 2) {
  1947. put_byte(s, (Byte)(strm->adler & 0xff));
  1948. put_byte(s, (Byte)((strm->adler >> 8) & 0xff));
  1949. put_byte(s, (Byte)((strm->adler >> 16) & 0xff));
  1950. put_byte(s, (Byte)((strm->adler >> 24) & 0xff));
  1951. put_byte(s, (Byte)(strm->total_in & 0xff));
  1952. put_byte(s, (Byte)((strm->total_in >> 8) & 0xff));
  1953. put_byte(s, (Byte)((strm->total_in >> 16) & 0xff));
  1954. put_byte(s, (Byte)((strm->total_in >> 24) & 0xff));
  1955. }
  1956. else
  1957. #endif
  1958. {
  1959. putShortMSB(s, (uInt)(strm->adler >> 16));
  1960. putShortMSB(s, (uInt)(strm->adler & 0xffff));
  1961. }
  1962. flush_pending(strm);
  1963. /* If avail_out is zero, the application will call deflate again
  1964. * to flush the rest.
  1965. */
  1966. if (s->wrap > 0) s->wrap = -s->wrap; /* write the trailer only once! */
  1967. return s->pending != 0 ? Z_OK : Z_STREAM_END;
  1968. }
  1969. /* ========================================================================= */
  1970. int ZEXPORT deflateEnd (strm)
  1971. z_streamp strm;
  1972. {
  1973. int status;
  1974. if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
  1975. status = strm->state->status;
  1976. /* Deallocate in reverse order of allocations: */
  1977. TRY_FREE(strm, strm->state->pending_buf);
  1978. TRY_FREE(strm, strm->state->head);
  1979. TRY_FREE(strm, strm->state->prev);
  1980. TRY_FREE(strm, strm->state->window);
  1981. ZFREE(strm, strm->state);
  1982. strm->state = Z_NULL;
  1983. return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
  1984. }
  1985. /* =========================================================================
  1986. * Copy the source state to the destination state.
  1987. * To simplify the source, this is not supported for 16-bit MSDOS (which
  1988. * doesn't have enough memory anyway to duplicate compression states).
  1989. */
  1990. int ZEXPORT deflateCopy (dest, source)
  1991. z_streamp dest;
  1992. z_streamp source;
  1993. {
  1994. #ifdef MAXSEG_64K
  1995. return Z_STREAM_ERROR;
  1996. #else
  1997. deflate_state *ds;
  1998. deflate_state *ss;
  1999. ushf *overlay;
  2000. if (deflateStateCheck(source) || dest == Z_NULL) {
  2001. return Z_STREAM_ERROR;
  2002. }
  2003. ss = source->state;
  2004. zmemcpy((voidpf)dest, (voidpf)source, sizeof(z_stream));
  2005. ds = (deflate_state *) ZALLOC(dest, 1, sizeof(deflate_state));
  2006. if (ds == Z_NULL) return Z_MEM_ERROR;
  2007. dest->state = (struct internal_state FAR *) ds;
  2008. zmemcpy((voidpf)ds, (voidpf)ss, sizeof(deflate_state));
  2009. ds->strm = dest;
  2010. ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
  2011. ds->prev = (Posf *) ZALLOC(dest, ds->w_size, sizeof(Pos));
  2012. ds->head = (Posf *) ZALLOC(dest, ds->hash_size, sizeof(Pos));
  2013. overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
  2014. ds->pending_buf = (uchf *) overlay;
  2015. if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
  2016. ds->pending_buf == Z_NULL) {
  2017. deflateEnd (dest);
  2018. return Z_MEM_ERROR;
  2019. }
  2020. /* following zmemcpy do not work for 16-bit MSDOS */
  2021. zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
  2022. zmemcpy((voidpf)ds->prev, (voidpf)ss->prev, ds->w_size * sizeof(Pos));
  2023. zmemcpy((voidpf)ds->head, (voidpf)ss->head, ds->hash_size * sizeof(Pos));
  2024. zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);
  2025. ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
  2026. ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
  2027. ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;
  2028. ds->l_desc.dyn_tree = ds->dyn_ltree;
  2029. ds->d_desc.dyn_tree = ds->dyn_dtree;
  2030. ds->bl_desc.dyn_tree = ds->bl_tree;
  2031. return Z_OK;
  2032. #endif /* MAXSEG_64K */
  2033. }
  2034. /* ===========================================================================
  2035. * Read a new buffer from the current input stream, update the adler32
  2036. * and total number of bytes read. All deflate() input goes through
  2037. * this function so some applications may wish to modify it to avoid
  2038. * allocating a large strm->next_in buffer and copying from it.
  2039. * (See also flush_pending()).
  2040. */
  2041. local unsigned read_buf(strm, buf, size)
  2042. z_streamp strm;
  2043. Bytef *buf;
  2044. unsigned size;
  2045. {
  2046. unsigned len = strm->avail_in;
  2047. if (len > size) len = size;
  2048. if (len == 0) return 0;
  2049. strm->avail_in -= len;
  2050. zmemcpy(buf, strm->next_in, len);
  2051. if (strm->state->wrap == 1) {
  2052. strm->adler = adler32(strm->adler, buf, len);
  2053. }
  2054. #ifdef GZIP
  2055. else if (strm->state->wrap == 2) {
  2056. strm->adler = crc32(strm->adler, buf, len);
  2057. }
  2058. #endif
  2059. strm->next_in += len;
  2060. strm->total_in += len;
  2061. return len;
  2062. }
  2063. /* ===========================================================================
  2064. * Initialize the "longest match" routines for a new zlib stream
  2065. */
  2066. local void lm_init (s)
  2067. deflate_state *s;
  2068. {
  2069. s->window_size = (ulg)2L*s->w_size;
  2070. CLEAR_HASH(s);
  2071. /* Set the default configuration parameters:
  2072. */
  2073. s->max_lazy_match = configuration_table[s->level].max_lazy;
  2074. s->good_match = configuration_table[s->level].good_length;
  2075. s->nice_match = configuration_table[s->level].nice_length;
  2076. s->max_chain_length = configuration_table[s->level].max_chain;
  2077. s->strstart = 0;
  2078. s->block_start = 0L;
  2079. s->lookahead = 0;
  2080. s->insert = 0;
  2081. s->match_length = s->prev_length = MIN_MATCH-1;
  2082. s->match_available = 0;
  2083. s->ins_h = 0;
  2084. #ifndef FASTEST
  2085. #ifdef ASMV
  2086. match_init(); /* initialize the asm code */
  2087. #endif
  2088. #endif
  2089. }
#ifndef FASTEST
/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 * OUT assertion: the match length is not greater than s->lookahead.
 */
#ifndef ASMV
/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
 * match.S. The code will be functionally equivalent.
 */
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    unsigned chain_length = s->max_chain_length;/* max hash chain length */
    register Bytef *scan = s->window + s->strstart; /* current string */
    register Bytef *match;                      /* matched string */
    register int len;                           /* length of current match */
    int best_len = (int)s->prev_length;         /* best match length so far */
    int nice_match = s->nice_match;             /* stop if match long enough */
    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
        s->strstart - (IPos)MAX_DIST(s) : NIL;
    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    Posf *prev = s->prev;
    uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
    /* Compare two bytes at a time. Note: this is not always beneficial.
     * Try with and without -DUNALIGNED_OK to check.
     */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    register ush scan_start = *(ushf*)scan;
    register ush scan_end   = *(ushf*)(scan+best_len-1);
#else
    /* byte-at-a-time variant: track the last byte (and the one before it)
     * of the best match so far to reject chains quickly */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;
    register Byte scan_end1  = scan[best_len-1];
    register Byte scan_end   = scan[best_len];
#endif

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    /* Do not waste too much time if we already have a good match: */
    if (s->prev_length >= s->good_match) {
        chain_length >>= 2;
    }
    /* Do not look for matches beyond the end of the input. This is necessary
     * to make deflate deterministic.
     */
    if ((uInt)nice_match > s->lookahead) nice_match = (int)s->lookahead;

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    do {
        Assert(cur_match < s->strstart, "no future");
        match = s->window + cur_match;

        /* Skip to next match if the match length cannot increase
         * or if the match length is less than 2.  Note that the checks below
         * for insufficient lookahead only occur occasionally for performance
         * reasons.  Therefore uninitialized memory will be accessed, and
         * conditional jumps will be made that depend on those values.
         * However the length of the match is limited to the lookahead, so
         * the output of deflate is not affected by the uninitialized values.
         */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
        /* This code assumes sizeof(unsigned short) == 2. Do not use
         * UNALIGNED_OK if your compiler uses a different size.
         */
        if (*(ushf*)(match+best_len-1) != scan_end ||
            *(ushf*)match != scan_start) continue;

        /* It is not necessary to compare scan[2] and match[2] since they are
         * always equal when the other bytes match, given that the hash keys
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
         * strstart+3, +5, ... up to strstart+257. We check for insufficient
         * lookahead only every 4th comparison; the 128th check will be made
         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
         * necessary to put more guard bytes at the end of the window, or
         * to check more often for insufficient lookahead.
         */
        Assert(scan[2] == match[2], "scan[2]?");
        scan++, match++;
        do {
        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 scan < strend);
        /* The funny "do {}" generates better code on most compilers */

        /* Here, scan <= window+strstart+257 */
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
        if (*scan == *match) scan++;

        len = (MAX_MATCH - 1) - (int)(strend-scan);
        scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */
        /* quick rejection: the match can only beat best_len if its bytes
         * at best_len and best_len-1 (and the first two bytes) agree */
        if (match[best_len]   != scan_end  ||
            match[best_len-1] != scan_end1 ||
            *match            != *scan     ||
            *++match          != scan[1])      continue;

        /* The check at best_len-1 can be removed because it will be made
         * again later. (This heuristic is not always a win.)
         * It is not necessary to compare scan[2] and match[2] since they
         * are always equal when the other bytes match, given that
         * the hash keys are equal and that HASH_BITS >= 8.
         */
        scan += 2, match++;
        Assert(*scan == *match, "match[2]?");

        /* We check for insufficient lookahead only every 8th comparison;
         * the 256th check will be made at strstart+258.
         */
        do {
        } while (*++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 scan < strend);

        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

        len = MAX_MATCH - (int)(strend - scan);
        scan = strend - MAX_MATCH;

#endif /* UNALIGNED_OK */

        if (len > best_len) {
            s->match_start = cur_match;
            best_len = len;
            if (len >= nice_match) break;
#ifdef UNALIGNED_OK
            scan_end = *(ushf*)(scan+best_len-1);
#else
            scan_end1  = scan[best_len-1];
            scan_end   = scan[best_len];
#endif
        }
    } while ((cur_match = prev[cur_match & wmask]) > limit
             && --chain_length != 0);

    /* clamp the result to the available lookahead (OUT assertion above) */
    if ((uInt)best_len <= s->lookahead) return (uInt)best_len;
    return s->lookahead;
}
#endif /* ASMV */
#else /* FASTEST */

/* ---------------------------------------------------------------------------
 * Optimized version for FASTEST only: examines only the single head of the
 * hash chain (no chain walk, no best_len tracking) and returns either a
 * match length >= MIN_MATCH with s->match_start set, or MIN_MATCH-1.
 */
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    register Bytef *scan = s->window + s->strstart; /* current string */
    register Bytef *match;                       /* matched string */
    register int len;                            /* length of current match */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    Assert(cur_match < s->strstart, "no future");

    match = s->window + cur_match;

    /* Return failure if the match length is less than 2:
     */
    if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1;

    /* The check at best_len-1 can be removed because it will be made
     * again later. (This heuristic is not always a win.)
     * It is not necessary to compare scan[2] and match[2] since they
     * are always equal when the other bytes match, given that
     * the hash keys are equal and that HASH_BITS >= 8.
     */
    scan += 2, match += 2;
    Assert(*scan == *match, "match[2]?");

    /* We check for insufficient lookahead only every 8th comparison;
     * the 256th check will be made at strstart+258.
     */
    do {
    } while (*++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             scan < strend);

    Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

    len = MAX_MATCH - (int)(strend - scan);

    if (len < MIN_MATCH) return MIN_MATCH - 1;

    s->match_start = cur_match;
    /* never report a match longer than the available lookahead */
    return (uInt)len <= s->lookahead ? (uInt)len : s->lookahead;
}

#endif /* FASTEST */
#ifdef ZLIB_DEBUG

#define EQUAL 0
/* result of memcmp for equal strings */

/* ===========================================================================
 * Check that the match at match_start is indeed a match.
 * Debug-only sanity check: aborts via z_error() on an invalid match and,
 * at verbosity > 1, dumps the matched bytes to stderr.  Compiled out to a
 * no-op macro when ZLIB_DEBUG is not defined.
 */
local void check_match(s, start, match, length)
    deflate_state *s;
    IPos start, match;
    int length;
{
    /* check that the match is indeed a match */
    if (zmemcmp(s->window + match,
                s->window + start, length) != EQUAL) {
        fprintf(stderr, " start %u, match %u, length %d\n",
                start, match, length);
        do {
            fprintf(stderr, "%c%c", s->window[match++], s->window[start++]);
        } while (--length != 0);
        z_error("invalid match");
    }
    if (z_verbose > 1) {
        fprintf(stderr,"\\[%d,%d]", start-match, length);
        do { putc(s->window[start++], stderr); } while (--length != 0);
    }
}
#else
#  define check_match(s, start, match, length)
#endif /* ZLIB_DEBUG */
  2304. /* ===========================================================================
  2305. * Fill the window when the lookahead becomes insufficient.
  2306. * Updates strstart and lookahead.
  2307. *
  2308. * IN assertion: lookahead < MIN_LOOKAHEAD
  2309. * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
  2310. * At least one byte has been read, or avail_in == 0; reads are
  2311. * performed for at least two bytes (required for the zip translate_eol
  2312. * option -- not supported here).
  2313. */
  2314. local void fill_window(s)
  2315. deflate_state *s;
  2316. {
  2317. unsigned n;
  2318. unsigned more; /* Amount of free space at the end of the window. */
  2319. uInt wsize = s->w_size;
  2320. Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead");
  2321. do {
  2322. more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
  2323. /* Deal with !@#$% 64K limit: */
  2324. if (sizeof(int) <= 2) {
  2325. if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
  2326. more = wsize;
  2327. } else if (more == (unsigned)(-1)) {
  2328. /* Very unlikely, but possible on 16 bit machine if
  2329. * strstart == 0 && lookahead == 1 (input done a byte at time)
  2330. */
  2331. more--;
  2332. }
  2333. }
  2334. /* If the window is almost full and there is insufficient lookahead,
  2335. * move the upper half to the lower one to make room in the upper half.
  2336. */
  2337. if (s->strstart >= wsize+MAX_DIST(s)) {
  2338. zmemcpy(s->window, s->window+wsize, (unsigned)wsize - more);
  2339. s->match_start -= wsize;
  2340. s->strstart -= wsize; /* we now have strstart >= MAX_DIST */
  2341. s->block_start -= (long) wsize;
  2342. slide_hash(s);
  2343. more += wsize;
  2344. }
  2345. if (s->strm->avail_in == 0) break;
  2346. /* If there was no sliding:
  2347. * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
  2348. * more == window_size - lookahead - strstart
  2349. * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
  2350. * => more >= window_size - 2*WSIZE + 2
  2351. * In the BIG_MEM or MMAP case (not yet supported),
  2352. * window_size == input_size + MIN_LOOKAHEAD &&
  2353. * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
  2354. * Otherwise, window_size == 2*WSIZE so more >= 2.
  2355. * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
  2356. */
  2357. Assert(more >= 2, "more < 2");
  2358. n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more);
  2359. s->lookahead += n;
  2360. /* Initialize the hash value now that we have some input: */
  2361. if (s->lookahead + s->insert >= MIN_MATCH) {
  2362. uInt str = s->strstart - s->insert;
  2363. s->ins_h = s->window[str];
  2364. UPDATE_HASH(s, s->ins_h, s->window[str + 1]);
  2365. #if MIN_MATCH != 3
  2366. Call UPDATE_HASH() MIN_MATCH-3 more times
  2367. #endif
  2368. while (s->insert) {
  2369. UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]);
  2370. #ifndef FASTEST
  2371. s->prev[str & s->w_mask] = s->head[s->ins_h];
  2372. #endif
  2373. s->head[s->ins_h] = (Pos)str;
  2374. str++;
  2375. s->insert--;
  2376. if (s->lookahead + s->insert < MIN_MATCH)
  2377. break;
  2378. }
  2379. }
  2380. /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
  2381. * but this is not important since only literal bytes will be emitted.
  2382. */
  2383. } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
  2384. /* If the WIN_INIT bytes after the end of the current data have never been
  2385. * written, then zero those bytes in order to avoid memory check reports of
  2386. * the use of uninitialized (or uninitialised as Julian writes) bytes by
  2387. * the longest match routines. Update the high water mark for the next
  2388. * time through here. WIN_INIT is set to MAX_MATCH since the longest match
  2389. * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead.
  2390. */
  2391. if (s->high_water < s->window_size) {
  2392. ulg curr = s->strstart + (ulg)(s->lookahead);
  2393. ulg init;
  2394. if (s->high_water < curr) {
  2395. /* Previous high water mark below current data -- zero WIN_INIT
  2396. * bytes or up to end of window, whichever is less.
  2397. */
  2398. init = s->window_size - curr;
  2399. if (init > WIN_INIT)
  2400. init = WIN_INIT;
  2401. zmemzero(s->window + curr, (unsigned)init);
  2402. s->high_water = curr + init;
  2403. }
  2404. else if (s->high_water < (ulg)curr + WIN_INIT) {
  2405. /* High water mark at or above current data, but below current data
  2406. * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up
  2407. * to end of window, whichever is less.
  2408. */
  2409. init = (ulg)curr + WIN_INIT - s->high_water;
  2410. if (init > s->window_size - s->high_water)
  2411. init = s->window_size - s->high_water;
  2412. zmemzero(s->window + s->high_water, (unsigned)init);
  2413. s->high_water += init;
  2414. }
  2415. }
  2416. Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
  2417. "not enough room for search");
  2418. }
  2419. /* ===========================================================================
  2420. * Flush the current block, with given end-of-file flag.
  2421. * IN assertion: strstart is set to the end of the current match.
  2422. */
  2423. #define FLUSH_BLOCK_ONLY(s, last) { \
  2424. _tr_flush_block(s, (s->block_start >= 0L ? \
  2425. (charf *)&s->window[(unsigned)s->block_start] : \
  2426. (charf *)Z_NULL), \
  2427. (ulg)((long)s->strstart - s->block_start), \
  2428. (last)); \
  2429. s->block_start = s->strstart; \
  2430. flush_pending(s->strm); \
  2431. Tracev((stderr,"[FLUSH]")); \
  2432. }
  2433. /* Same but force premature exit if necessary. */
  2434. #define FLUSH_BLOCK(s, last) { \
  2435. FLUSH_BLOCK_ONLY(s, last); \
  2436. if (s->strm->avail_out == 0) return (last) ? finish_started : need_more; \
  2437. }
/* Maximum stored block length in deflate format (not including header). */
#define MAX_STORED 65535

/* Minimum of a and b.  NOTE: evaluates one of its arguments twice -- do not
 * pass expressions with side effects.
 */
#define MIN(a, b) ((a) > (b) ? (b) : (a))
/* ===========================================================================
 * Copy without compression as much as possible from the input stream, return
 * the current block state.
 *
 * In case deflateParams() is used to later switch to a non-zero compression
 * level, s->matches (otherwise unused when storing) keeps track of the number
 * of hash table slides to perform. If s->matches is 1, then one hash table
 * slide will be done when switching. If s->matches is 2, the maximum value
 * allowed here, then the hash table will be cleared, since two or more slides
 * is the same as a clear.
 *
 * deflate_stored() is written to minimize the number of times an input byte is
 * copied. It is most efficient with large input and output buffers, which
 * maximizes the opportunites to have a single copy from next_in to next_out.
 */
local block_state deflate_stored(s, flush)
    deflate_state *s;
    int flush;
{
    /* Smallest worthy block size when not flushing or finishing. By default
     * this is 32K. This can be as small as 507 bytes for memLevel == 1. For
     * large input and output buffers, the stored block size will be larger.
     */
    unsigned min_block = MIN(s->pending_buf_size - 5, s->w_size);

    /* Copy as many min_block or larger stored blocks directly to next_out as
     * possible. If flushing, copy the remaining available input to next_out as
     * stored blocks, if there is enough space.
     */
    unsigned len, left, have, last = 0;
    unsigned used = s->strm->avail_in;
    do {
        /* Set len to the maximum size block that we can copy directly with the
         * available input data and output space. Set left to how much of that
         * would be copied from what's left in the window.
         */
        len = MAX_STORED;       /* maximum deflate stored block length */
        /* 42 = 3 block-type bits + up to 7 padding bits + 32 bits of LEN and
         * NLEN, on top of any bits already pending -- rounded up to bytes.
         */
        have = (s->bi_valid + 42) >> 3;         /* number of header bytes */
        if (s->strm->avail_out < have)          /* need room for header */
            break;
            /* maximum stored block length that will fit in avail_out: */
        have = s->strm->avail_out - have;
        left = s->strstart - s->block_start;    /* bytes left in window */
        if (len > (ulg)left + s->strm->avail_in)
            len = left + s->strm->avail_in;     /* limit len to the input */
        if (len > have)
            len = have;                         /* limit len to the output */

        /* If the stored block would be less than min_block in length, or if
         * unable to copy all of the available input when flushing, then try
         * copying to the window and the pending buffer instead. Also don't
         * write an empty block when flushing -- deflate() does that.
         */
        if (len < min_block && ((len == 0 && flush != Z_FINISH) ||
                                flush == Z_NO_FLUSH ||
                                len != left + s->strm->avail_in))
            break;

        /* Make a dummy stored block in pending to get the header bytes,
         * including any pending bits. This also updates the debugging counts.
         */
        last = flush == Z_FINISH && len == left + s->strm->avail_in ? 1 : 0;
        _tr_stored_block(s, (char *)0, 0L, last);

        /* Replace the lengths in the dummy stored block with len.  The stored
         * block format is LEN (little-endian) followed by NLEN, its one's
         * complement, as an integrity check.
         */
        s->pending_buf[s->pending - 4] = len;
        s->pending_buf[s->pending - 3] = len >> 8;
        s->pending_buf[s->pending - 2] = ~len;
        s->pending_buf[s->pending - 1] = ~len >> 8;

        /* Write the stored block header bytes. */
        flush_pending(s->strm);

#ifdef ZLIB_DEBUG
        /* Update debugging counts for the data about to be copied. */
        s->compressed_len += len << 3;
        s->bits_sent += len << 3;
#endif

        /* Copy uncompressed bytes from the window to next_out. */
        if (left) {
            if (left > len)
                left = len;
            zmemcpy(s->strm->next_out, s->window + s->block_start, left);
            s->strm->next_out += left;
            s->strm->avail_out -= left;
            s->strm->total_out += left;
            s->block_start += left;
            len -= left;
        }

        /* Copy uncompressed bytes directly from next_in to next_out, updating
         * the check value.
         */
        if (len) {
            read_buf(s->strm, s->strm->next_out, len);
            s->strm->next_out += len;
            s->strm->avail_out -= len;
            s->strm->total_out += len;
        }
    } while (last == 0);

    /* Update the sliding window with the last s->w_size bytes of the copied
     * data, or append all of the copied data to the existing window if less
     * than s->w_size bytes were copied. Also update the number of bytes to
     * insert in the hash tables, in the event that deflateParams() switches to
     * a non-zero compression level.
     */
    used -= s->strm->avail_in;      /* number of input bytes directly copied */
    if (used) {
        /* If any input was used, then no unused input remains in the window,
         * therefore s->block_start == s->strstart.
         */
        if (used >= s->w_size) {    /* supplant the previous history */
            s->matches = 2;         /* clear hash */
            zmemcpy(s->window, s->strm->next_in - s->w_size, s->w_size);
            s->strstart = s->w_size;
        }
        else {
            if (s->window_size - s->strstart <= used) {
                /* Slide the window down. */
                s->strstart -= s->w_size;
                zmemcpy(s->window, s->window + s->w_size, s->strstart);
                if (s->matches < 2)
                    s->matches++;   /* add a pending slide_hash() */
            }
            zmemcpy(s->window + s->strstart, s->strm->next_in - used, used);
            s->strstart += used;
        }
        s->block_start = s->strstart;
        s->insert += MIN(used, s->w_size - s->insert);
    }
    if (s->high_water < s->strstart)
        s->high_water = s->strstart;

    /* If the last block was written to next_out, then done. */
    if (last)
        return finish_done;

    /* If flushing and all input has been consumed, then done. */
    if (flush != Z_NO_FLUSH && flush != Z_FINISH &&
        s->strm->avail_in == 0 && (long)s->strstart == s->block_start)
        return block_done;

    /* Fill the window with any remaining input. */
    have = s->window_size - s->strstart - 1;
    if (s->strm->avail_in > have && s->block_start >= (long)s->w_size) {
        /* Slide the window down. */
        s->block_start -= s->w_size;
        s->strstart -= s->w_size;
        zmemcpy(s->window, s->window + s->w_size, s->strstart);
        if (s->matches < 2)
            s->matches++;           /* add a pending slide_hash() */
        have += s->w_size;          /* more space now */
    }
    if (have > s->strm->avail_in)
        have = s->strm->avail_in;
    if (have) {
        read_buf(s->strm, s->window + s->strstart, have);
        s->strstart += have;
    }
    if (s->high_water < s->strstart)
        s->high_water = s->strstart;

    /* There was not enough avail_out to write a complete worthy or flushed
     * stored block to next_out. Write a stored block to pending instead, if we
     * have enough input for a worthy block, or if flushing and there is enough
     * room for the remaining input as a stored block in the pending buffer.
     */
    have = (s->bi_valid + 42) >> 3;         /* number of header bytes */
        /* maximum stored block length that will fit in pending: */
    have = MIN(s->pending_buf_size - have, MAX_STORED);
    min_block = MIN(have, s->w_size);
    left = s->strstart - s->block_start;
    if (left >= min_block ||
        ((left || flush == Z_FINISH) && flush != Z_NO_FLUSH &&
         s->strm->avail_in == 0 && left <= have)) {
        len = MIN(left, have);
        last = flush == Z_FINISH && s->strm->avail_in == 0 &&
               len == left ? 1 : 0;
        _tr_stored_block(s, (charf *)s->window + s->block_start, len, last);
        s->block_start += len;
        flush_pending(s->strm);
    }

    /* We've done all we can with the available input and output. */
    return last ? finish_started : need_more;
}
/* ===========================================================================
 * Compress as much as possible from the input stream, return the current
 * block state.
 * This function does not perform lazy evaluation of matches and inserts
 * new strings in the dictionary only for unmatched strings or for short
 * matches. It is used only for the fast compression options.
 */
local block_state deflate_fast(s, flush)
    deflate_state *s;
    int flush;
{
    IPos hash_head;       /* head of the hash chain */
    int bflush;           /* set if current block must be flushed */

    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the next match, plus MIN_MATCH bytes to insert the
         * string following the next match.
         */
        if (s->lookahead < MIN_LOOKAHEAD) {
            fill_window(s);
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
                return need_more;
            }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        hash_head = NIL;
        if (s->lookahead >= MIN_MATCH) {
            INSERT_STRING(s, s->strstart, hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         * At this point we have always match_length < MIN_MATCH.
         * hash_head == NIL means no earlier occurrence of this string.
         */
        if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            s->match_length = longest_match (s, hash_head);
            /* longest_match() sets match_start */
        }
        if (s->match_length >= MIN_MATCH) {
            check_match(s, s->strstart, s->match_start, s->match_length);

            _tr_tally_dist(s, s->strstart - s->match_start,
                           s->match_length - MIN_MATCH, bflush);

            s->lookahead -= s->match_length;

            /* Insert new strings in the hash table only if the match length
             * is not too large. This saves time but degrades compression.
             */
#ifndef FASTEST
            if (s->match_length <= s->max_insert_length &&
                s->lookahead >= MIN_MATCH) {
                s->match_length--; /* string at strstart already in table */
                do {
                    s->strstart++;
                    INSERT_STRING(s, s->strstart, hash_head);
                    /* strstart never exceeds WSIZE-MAX_MATCH, so there are
                     * always MIN_MATCH bytes ahead.
                     */
                } while (--s->match_length != 0);
                s->strstart++;
            } else
#endif
            {
                s->strstart += s->match_length;
                s->match_length = 0;
                s->ins_h = s->window[s->strstart];
                UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
                Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
                /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
                 * matter since it will be recomputed at next deflate call.
                 */
            }
        } else {
            /* No match, output a literal byte */
            Tracevv((stderr,"%c", s->window[s->strstart]));
            _tr_tally_lit (s, s->window[s->strstart], bflush);
            s->lookahead--;
            s->strstart++;
        }
        if (bflush) FLUSH_BLOCK(s, 0);
    }
    /* insert: bytes at the end that must be re-inserted in the hash table
     * on the next call (at most MIN_MATCH-1).
     */
    s->insert = s->strstart < MIN_MATCH-1 ? s->strstart : MIN_MATCH-1;
    if (flush == Z_FINISH) {
        FLUSH_BLOCK(s, 1);
        return finish_done;
    }
    if (s->last_lit)
        FLUSH_BLOCK(s, 0);
    return block_done;
}
#ifndef FASTEST
/* ===========================================================================
 * Same as above, but achieves better compression. We use a lazy
 * evaluation for matches: a match is finally adopted only if there is
 * no better match at the next window position.
 */
local block_state deflate_slow(s, flush)
    deflate_state *s;
    int flush;
{
    IPos hash_head;          /* head of hash chain */
    int bflush;              /* set if current block must be flushed */

    /* Process the input block. */
    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the next match, plus MIN_MATCH bytes to insert the
         * string following the next match.
         */
        if (s->lookahead < MIN_LOOKAHEAD) {
            fill_window(s);
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
                return need_more;
            }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        hash_head = NIL;
        if (s->lookahead >= MIN_MATCH) {
            INSERT_STRING(s, s->strstart, hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         */
        s->prev_length = s->match_length, s->prev_match = s->match_start;
        s->match_length = MIN_MATCH-1;

        if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
            s->strstart - hash_head <= MAX_DIST(s)) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            s->match_length = longest_match (s, hash_head);
            /* longest_match() sets match_start */

            if (s->match_length <= 5 && (s->strategy == Z_FILTERED
#if TOO_FAR <= 32767
                || (s->match_length == MIN_MATCH &&
                    s->strstart - s->match_start > TOO_FAR)
#endif
                )) {

                /* If prev_match is also MIN_MATCH, match_start is garbage
                 * but we will ignore the current match anyway.
                 */
                s->match_length = MIN_MATCH-1;
            }
        }
        /* If there was a match at the previous step and the current
         * match is not better, output the previous match:
         */
        if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
            uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
            /* Do not insert strings in hash table beyond this. */

            check_match(s, s->strstart-1, s->prev_match, s->prev_length);

            _tr_tally_dist(s, s->strstart -1 - s->prev_match,
                           s->prev_length - MIN_MATCH, bflush);

            /* Insert in hash table all strings up to the end of the match.
             * strstart-1 and strstart are already inserted. If there is not
             * enough lookahead, the last two strings are not inserted in
             * the hash table.
             */
            s->lookahead -= s->prev_length-1;
            s->prev_length -= 2;
            do {
                if (++s->strstart <= max_insert) {
                    INSERT_STRING(s, s->strstart, hash_head);
                }
            } while (--s->prev_length != 0);
            s->match_available = 0;
            s->match_length = MIN_MATCH-1;
            s->strstart++;

            if (bflush) FLUSH_BLOCK(s, 0);

        } else if (s->match_available) {
            /* If there was no match at the previous position, output a
             * single literal. If there was a match but the current match
             * is longer, truncate the previous match to a single literal.
             */
            Tracevv((stderr,"%c", s->window[s->strstart-1]));
            _tr_tally_lit(s, s->window[s->strstart-1], bflush);
            if (bflush) {
                FLUSH_BLOCK_ONLY(s, 0);
            }
            s->strstart++;
            s->lookahead--;
            if (s->strm->avail_out == 0) return need_more;
        } else {
            /* There is no previous match to compare with, wait for
             * the next step to decide.  match_available == 1 means the
             * byte at strstart-1 is still pending as a possible literal.
             */
            s->match_available = 1;
            s->strstart++;
            s->lookahead--;
        }
    }
    Assert (flush != Z_NO_FLUSH, "no flush?");
    /* Emit the deferred literal, if any, before closing out the block. */
    if (s->match_available) {
        Tracevv((stderr,"%c", s->window[s->strstart-1]));
        _tr_tally_lit(s, s->window[s->strstart-1], bflush);
        s->match_available = 0;
    }
    s->insert = s->strstart < MIN_MATCH-1 ? s->strstart : MIN_MATCH-1;
    if (flush == Z_FINISH) {
        FLUSH_BLOCK(s, 1);
        return finish_done;
    }
    if (s->last_lit)
        FLUSH_BLOCK(s, 0);
    return block_done;
}
#endif /* FASTEST */
/* ===========================================================================
 * For Z_RLE, simply look for runs of bytes, generate matches only of distance
 * one. Do not maintain a hash table. (It will be regenerated if this run of
 * deflate switches away from Z_RLE.)
 */
local block_state deflate_rle(s, flush)
    deflate_state *s;
    int flush;
{
    int bflush;             /* set if current block must be flushed */
    uInt prev;              /* byte at distance one to match */
    Bytef *scan, *strend;   /* scan goes up to strend for length of run */

    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the longest run, plus one for the unrolled loop.
         */
        if (s->lookahead <= MAX_MATCH) {
            fill_window(s);
            if (s->lookahead <= MAX_MATCH && flush == Z_NO_FLUSH) {
                return need_more;
            }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* See how many times the previous byte repeats */
        s->match_length = 0;
        if (s->lookahead >= MIN_MATCH && s->strstart > 0) {
            scan = s->window + s->strstart - 1;
            prev = *scan;
            if (prev == *++scan && prev == *++scan && prev == *++scan) {
                strend = s->window + s->strstart + MAX_MATCH;
                /* Deliberately empty loop body: all the work happens in the
                 * unrolled comparisons of the while condition (8 bytes per
                 * iteration).
                 */
                do {
                } while (prev == *++scan && prev == *++scan &&
                         prev == *++scan && prev == *++scan &&
                         prev == *++scan && prev == *++scan &&
                         prev == *++scan && prev == *++scan &&
                         scan < strend);
                s->match_length = MAX_MATCH - (uInt)(strend - scan);
                if (s->match_length > s->lookahead)
                    s->match_length = s->lookahead;
            }
            Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan");
        }

        /* Emit match if have run of MIN_MATCH or longer, else emit literal */
        if (s->match_length >= MIN_MATCH) {
            check_match(s, s->strstart, s->strstart - 1, s->match_length);

            _tr_tally_dist(s, 1, s->match_length - MIN_MATCH, bflush);

            s->lookahead -= s->match_length;
            s->strstart += s->match_length;
            s->match_length = 0;
        } else {
            /* No match, output a literal byte */
            Tracevv((stderr,"%c", s->window[s->strstart]));
            _tr_tally_lit (s, s->window[s->strstart], bflush);
            s->lookahead--;
            s->strstart++;
        }
        if (bflush) FLUSH_BLOCK(s, 0);
    }
    s->insert = 0;
    if (flush == Z_FINISH) {
        FLUSH_BLOCK(s, 1);
        return finish_done;
    }
    if (s->last_lit)
        FLUSH_BLOCK(s, 0);
    return block_done;
}
  2900. /* ===========================================================================
  2901. * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table.
  2902. * (It will be regenerated if this run of deflate switches away from Huffman.)
  2903. */
  2904. local block_state deflate_huff(s, flush)
  2905. deflate_state *s;
  2906. int flush;
  2907. {
  2908. int bflush; /* set if current block must be flushed */
  2909. for (;;) {
  2910. /* Make sure that we have a literal to write. */
  2911. if (s->lookahead == 0) {
  2912. fill_window(s);
  2913. if (s->lookahead == 0) {
  2914. if (flush == Z_NO_FLUSH)
  2915. return need_more;
  2916. break; /* flush the current block */
  2917. }
  2918. }
  2919. /* Output a literal byte */
  2920. s->match_length = 0;
  2921. Tracevv((stderr,"%c", s->window[s->strstart]));
  2922. _tr_tally_lit (s, s->window[s->strstart], bflush);
  2923. s->lookahead--;
  2924. s->strstart++;
  2925. if (bflush) FLUSH_BLOCK(s, 0);
  2926. }
  2927. s->insert = 0;
  2928. if (flush == Z_FINISH) {
  2929. FLUSH_BLOCK(s, 1);
  2930. return finish_done;
  2931. }
  2932. if (s->last_lit)
  2933. FLUSH_BLOCK(s, 0);
  2934. return block_done;
  2935. }
  2936. /* trees.c -- output deflated data using Huffman coding
  2937. * Copyright (C) 1995-2017 Jean-loup Gailly
  2938. * detect_data_type() function provided freely by Cosmin Truta, 2006
  2939. * For conditions of distribution and use, see copyright notice in zlib.h
  2940. */
  2941. /*
  2942. * ALGORITHM
  2943. *
  2944. * The "deflation" process uses several Huffman trees. The more
  2945. * common source values are represented by shorter bit sequences.
  2946. *
  2947. * Each code tree is stored in a compressed form which is itself
  2948. * a Huffman encoding of the lengths of all the code strings (in
  2949. * ascending order by source values). The actual code strings are
  2950. * reconstructed from the lengths in the inflate process, as described
  2951. * in the deflate specification.
  2952. *
  2953. * REFERENCES
  2954. *
  2955. * Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
  2956. * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
  2957. *
  2958. * Storer, James A.
  2959. * Data Compression: Methods and Theory, pp. 49-50.
  2960. * Computer Science Press, 1988. ISBN 0-7167-8156-5.
  2961. *
  2962. * Sedgewick, R.
  2963. * Algorithms, p290.
  2964. * Addison-Wesley, 1983. ISBN 0-201-06672-6.
  2965. */
  2966. /* @(#) $Id$ */
  2967. /* #define GEN_TREES_H */
  2968. #ifdef ZLIB_DEBUG
  2969. # include <ctype.h>
  2970. #endif
/* ===========================================================================
 * Constants
 */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6      16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10    17
/* repeat a zero length 3-10 times  (3 bits of repeat count) */

#define REPZ_11_138  18
/* repeat a zero length 11-138 times  (7 bits of repeat count) */

/* The following tables are fixed by the deflate format (RFC 1951). */

local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

local const int extra_dbits[D_CODES] /* extra bits for each distance code */
   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

local const uch bl_order[BL_CODES]
   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */
/* ===========================================================================
 * Local data. These are initialized only once.
 */

#define DIST_CODE_LEN  512 /* see definition of array dist_code below */

#if defined(GEN_TREES_H) || !defined(STDC)
/* non ANSI compilers may not accept trees.h; the tables below are then
 * generated at run time instead of being taken from the pre-built header.
 */

local ct_data static_ltree[L_CODES+2];
/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However
 * The codes 286 and 287 are needed to build a canonical tree (see _tr_init
 * below).
 */

local ct_data static_dtree[D_CODES];
/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

uch _dist_code[DIST_CODE_LEN];
/* Distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

uch _length_code[MAX_MATCH-MIN_MATCH+1];
/* length code for each normalized match length (0 == MIN_MATCH) */

local int base_length[LENGTH_CODES];
/* First normalized length for each code (0 = MIN_MATCH) */

local int base_dist[D_CODES];
/* First normalized distance for each code (0 = distance of 1) */
  3022. #else
/* header created automatically with -DGEN_TREES_H */

/* Static literal/length tree (RFC 1951 fixed Huffman codes): each entry is
 * {code (bit-reversed), length}.  Codes 0-143 use 8 bits, 144-255 use 9,
 * 256-279 use 7, and 280-287 use 8.
 */
local const ct_data static_ltree[L_CODES+2] = {
{{ 12},{  8}}, {{140},{  8}}, {{ 76},{  8}}, {{204},{  8}}, {{ 44},{  8}},
{{172},{  8}}, {{108},{  8}}, {{236},{  8}}, {{ 28},{  8}}, {{156},{  8}},
{{ 92},{  8}}, {{220},{  8}}, {{ 60},{  8}}, {{188},{  8}}, {{124},{  8}},
{{252},{  8}}, {{  2},{  8}}, {{130},{  8}}, {{ 66},{  8}}, {{194},{  8}},
{{ 34},{  8}}, {{162},{  8}}, {{ 98},{  8}}, {{226},{  8}}, {{ 18},{  8}},
{{146},{  8}}, {{ 82},{  8}}, {{210},{  8}}, {{ 50},{  8}}, {{178},{  8}},
{{114},{  8}}, {{242},{  8}}, {{ 10},{  8}}, {{138},{  8}}, {{ 74},{  8}},
{{202},{  8}}, {{ 42},{  8}}, {{170},{  8}}, {{106},{  8}}, {{234},{  8}},
{{ 26},{  8}}, {{154},{  8}}, {{ 90},{  8}}, {{218},{  8}}, {{ 58},{  8}},
{{186},{  8}}, {{122},{  8}}, {{250},{  8}}, {{  6},{  8}}, {{134},{  8}},
{{ 70},{  8}}, {{198},{  8}}, {{ 38},{  8}}, {{166},{  8}}, {{102},{  8}},
{{230},{  8}}, {{ 22},{  8}}, {{150},{  8}}, {{ 86},{  8}}, {{214},{  8}},
{{ 54},{  8}}, {{182},{  8}}, {{118},{  8}}, {{246},{  8}}, {{ 14},{  8}},
{{142},{  8}}, {{ 78},{  8}}, {{206},{  8}}, {{ 46},{  8}}, {{174},{  8}},
{{110},{  8}}, {{238},{  8}}, {{ 30},{  8}}, {{158},{  8}}, {{ 94},{  8}},
{{222},{  8}}, {{ 62},{  8}}, {{190},{  8}}, {{126},{  8}}, {{254},{  8}},
{{  1},{  8}}, {{129},{  8}}, {{ 65},{  8}}, {{193},{  8}}, {{ 33},{  8}},
{{161},{  8}}, {{ 97},{  8}}, {{225},{  8}}, {{ 17},{  8}}, {{145},{  8}},
{{ 81},{  8}}, {{209},{  8}}, {{ 49},{  8}}, {{177},{  8}}, {{113},{  8}},
{{241},{  8}}, {{  9},{  8}}, {{137},{  8}}, {{ 73},{  8}}, {{201},{  8}},
{{ 41},{  8}}, {{169},{  8}}, {{105},{  8}}, {{233},{  8}}, {{ 25},{  8}},
{{153},{  8}}, {{ 89},{  8}}, {{217},{  8}}, {{ 57},{  8}}, {{185},{  8}},
{{121},{  8}}, {{249},{  8}}, {{  5},{  8}}, {{133},{  8}}, {{ 69},{  8}},
{{197},{  8}}, {{ 37},{  8}}, {{165},{  8}}, {{101},{  8}}, {{229},{  8}},
{{ 21},{  8}}, {{149},{  8}}, {{ 85},{  8}}, {{213},{  8}}, {{ 53},{  8}},
{{181},{  8}}, {{117},{  8}}, {{245},{  8}}, {{ 13},{  8}}, {{141},{  8}},
{{ 77},{  8}}, {{205},{  8}}, {{ 45},{  8}}, {{173},{  8}}, {{109},{  8}},
{{237},{  8}}, {{ 29},{  8}}, {{157},{  8}}, {{ 93},{  8}}, {{221},{  8}},
{{ 61},{  8}}, {{189},{  8}}, {{125},{  8}}, {{253},{  8}}, {{ 19},{  9}},
{{275},{  9}}, {{147},{  9}}, {{403},{  9}}, {{ 83},{  9}}, {{339},{  9}},
{{211},{  9}}, {{467},{  9}}, {{ 51},{  9}}, {{307},{  9}}, {{179},{  9}},
{{435},{  9}}, {{115},{  9}}, {{371},{  9}}, {{243},{  9}}, {{499},{  9}},
{{ 11},{  9}}, {{267},{  9}}, {{139},{  9}}, {{395},{  9}}, {{ 75},{  9}},
{{331},{  9}}, {{203},{  9}}, {{459},{  9}}, {{ 43},{  9}}, {{299},{  9}},
{{171},{  9}}, {{427},{  9}}, {{107},{  9}}, {{363},{  9}}, {{235},{  9}},
{{491},{  9}}, {{ 27},{  9}}, {{283},{  9}}, {{155},{  9}}, {{411},{  9}},
{{ 91},{  9}}, {{347},{  9}}, {{219},{  9}}, {{475},{  9}}, {{ 59},{  9}},
{{315},{  9}}, {{187},{  9}}, {{443},{  9}}, {{123},{  9}}, {{379},{  9}},
{{251},{  9}}, {{507},{  9}}, {{  7},{  9}}, {{263},{  9}}, {{135},{  9}},
{{391},{  9}}, {{ 71},{  9}}, {{327},{  9}}, {{199},{  9}}, {{455},{  9}},
{{ 39},{  9}}, {{295},{  9}}, {{167},{  9}}, {{423},{  9}}, {{103},{  9}},
{{359},{  9}}, {{231},{  9}}, {{487},{  9}}, {{ 23},{  9}}, {{279},{  9}},
{{151},{  9}}, {{407},{  9}}, {{ 87},{  9}}, {{343},{  9}}, {{215},{  9}},
{{471},{  9}}, {{ 55},{  9}}, {{311},{  9}}, {{183},{  9}}, {{439},{  9}},
{{119},{  9}}, {{375},{  9}}, {{247},{  9}}, {{503},{  9}}, {{ 15},{  9}},
{{271},{  9}}, {{143},{  9}}, {{399},{  9}}, {{ 79},{  9}}, {{335},{  9}},
{{207},{  9}}, {{463},{  9}}, {{ 47},{  9}}, {{303},{  9}}, {{175},{  9}},
{{431},{  9}}, {{111},{  9}}, {{367},{  9}}, {{239},{  9}}, {{495},{  9}},
{{ 31},{  9}}, {{287},{  9}}, {{159},{  9}}, {{415},{  9}}, {{ 95},{  9}},
{{351},{  9}}, {{223},{  9}}, {{479},{  9}}, {{ 63},{  9}}, {{319},{  9}},
{{191},{  9}}, {{447},{  9}}, {{127},{  9}}, {{383},{  9}}, {{255},{  9}},
{{511},{  9}}, {{  0},{  7}}, {{ 64},{  7}}, {{ 32},{  7}}, {{ 96},{  7}},
{{ 16},{  7}}, {{ 80},{  7}}, {{ 48},{  7}}, {{112},{  7}}, {{  8},{  7}},
{{ 72},{  7}}, {{ 40},{  7}}, {{104},{  7}}, {{ 24},{  7}}, {{ 88},{  7}},
{{ 56},{  7}}, {{120},{  7}}, {{  4},{  7}}, {{ 68},{  7}}, {{ 36},{  7}},
{{100},{  7}}, {{ 20},{  7}}, {{ 84},{  7}}, {{ 52},{  7}}, {{116},{  7}},
{{  3},{  8}}, {{131},{  8}}, {{ 67},{  8}}, {{195},{  8}}, {{ 35},{  8}},
{{163},{  8}}, {{ 99},{  8}}, {{227},{  8}}
};
/* The static distance tree: all D_CODES codes are 5 bits long, stored with
 * bit-reversed code values (see tr_static_init: Code = bi_reverse(n, 5)).
 */
local const ct_data static_dtree[D_CODES] = {
{{ 0},{ 5}}, {{16},{ 5}}, {{ 8},{ 5}}, {{24},{ 5}}, {{ 4},{ 5}},
{{20},{ 5}}, {{12},{ 5}}, {{28},{ 5}}, {{ 2},{ 5}}, {{18},{ 5}},
{{10},{ 5}}, {{26},{ 5}}, {{ 6},{ 5}}, {{22},{ 5}}, {{14},{ 5}},
{{30},{ 5}}, {{ 1},{ 5}}, {{17},{ 5}}, {{ 9},{ 5}}, {{25},{ 5}},
{{ 5},{ 5}}, {{21},{ 5}}, {{13},{ 5}}, {{29},{ 5}}, {{ 3},{ 5}},
{{19},{ 5}}, {{11},{ 5}}, {{27},{ 5}}, {{ 7},{ 5}}, {{23},{ 5}}
};
/* Distance-to-code lookup table. Per tr_static_init: entries 0..255 map a
 * small (distance-1) directly to its distance code; entries 256..511 map
 * (distance-1) >> 7 for the larger distances.
 */
const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {
 0,  1,  2,  3,  4,  4,  5,  5,  6,  6,  6,  6,  7,  7,  7,  7,  8,  8,  8,  8,
 8,  8,  8,  8,  9,  9,  9,  9,  9,  9,  9,  9, 10, 10, 10, 10, 10, 10, 10, 10,
10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,  0,  0, 16, 17,
18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22,
23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
};
/* Length code for each normalized match length (real length - MIN_MATCH);
 * built by tr_static_init from extra_lbits.
 */
const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {
 0,  1,  2,  3,  4,  5,  6,  7,  8,  8,  9,  9, 10, 10, 11, 11, 12, 12, 12, 12,
13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16,
17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19,
19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22,
22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28
};
/* First normalized length for each length code (normalized = length - MIN_MATCH). */
local const int base_length[LENGTH_CODES] = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
64, 80, 96, 112, 128, 160, 192, 224, 0
};
/* First normalized distance for each distance code (normalized = distance - 1). */
local const int base_dist[D_CODES] = {
    0,     1,     2,     3,     4,     6,     8,    12,    16,    24,
   32,    48,    64,    96,   128,   192,   256,   384,   512,   768,
 1024,  1536,  2048,  3072,  4096,  6144,  8192, 12288, 16384, 24576
};

#endif /* GEN_TREES_H */
/* Descriptor for a static (fixed) Huffman tree; shared, read-only data
 * referenced by each per-stream tree_desc.
 */
struct static_tree_desc_s {
    const ct_data *static_tree;  /* static tree or NULL */
    const intf *extra_bits;      /* extra bits for each code or NULL */
    int     extra_base;          /* base index for extra_bits */
    int     elems;               /* max number of elements in the tree */
    int     max_length;          /* max bit length for the codes */
};
/* Static descriptors for the literal/length, distance and bit-length trees. */
local const static_tree_desc  static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};

local const static_tree_desc  static_d_desc =
{static_dtree, extra_dbits, 0,          D_CODES, MAX_BITS};

local const static_tree_desc  static_bl_desc =
{(const ct_data *)0, extra_blbits, 0,   BL_CODES, MAX_BL_BITS};
/* ===========================================================================
 * Local (static) routines in this file.
 */

/* one-time table setup and per-block initialization */
local void tr_static_init OF((void));
local void init_block     OF((deflate_state *s));
/* Huffman tree construction */
local void pqdownheap     OF((deflate_state *s, ct_data *tree, int k));
local void gen_bitlen     OF((deflate_state *s, tree_desc *desc));
local void gen_codes      OF((ct_data *tree, int max_code, ushf *bl_count));
local void build_tree     OF((deflate_state *s, tree_desc *desc));
/* bit-length tree (tree-of-trees) handling and block emission */
local void scan_tree      OF((deflate_state *s, ct_data *tree, int max_code));
local void send_tree      OF((deflate_state *s, ct_data *tree, int max_code));
local int  build_bl_tree  OF((deflate_state *s));
local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
                              int blcodes));
local void compress_block OF((deflate_state *s, const ct_data *ltree,
                              const ct_data *dtree));
local int  detect_data_type OF((deflate_state *s));
/* low-level bit output */
local unsigned bi_reverse OF((unsigned value, int length));
local void bi_windup      OF((deflate_state *s));
local void bi_flush       OF((deflate_state *s));

#ifdef GEN_TREES_H
local void gen_trees_header OF((void));
#endif
#ifndef ZLIB_DEBUG
/* Send a code of the given tree. c and tree must not have side effects
 * (each is expanded more than once).
 */
#  define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)

#else /* !ZLIB_DEBUG */
/* Debug variant: additionally traces the code value when z_verbose > 2. */
#  define send_code(s, c, tree) \
     { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
       send_bits(s, tree[c].Code, tree[c].Len); }
#endif
/* ===========================================================================
 * Output a short LSB first on the stream.
 * IN assertion: there is enough room in pendingBuf.
 */
#define put_short(s, w) { \
    put_byte(s, (uch)((w) & 0xff)); \
    put_byte(s, (uch)((ush)(w) >> 8)); \
}
/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 */
#ifdef ZLIB_DEBUG
local void send_bits      OF((deflate_state *s, int value, int length));

local void send_bits(s, value, length)
    deflate_state *s;
    int value;  /* value to send */
    int length; /* number of bits */
{
    Tracevv((stderr," l %2d v %4x ", length, value));
    Assert(length > 0 && length <= 15, "invalid length");
    s->bits_sent += (ulg)length;

    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
     * unused bits in value.
     */
    if (s->bi_valid > (int)Buf_size - length) {
        s->bi_buf |= (ush)value << s->bi_valid;
        put_short(s, s->bi_buf);
        /* keep the bits of value that did not fit in the flushed buffer */
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
        s->bi_valid += length - Buf_size;
    } else {
        s->bi_buf |= (ush)value << s->bi_valid;
        s->bi_valid += length;
    }
}
#else /* !ZLIB_DEBUG */

/* Release build: same logic as the function above, as a macro. */
#define send_bits(s, value, length) \
{ int len = length;\
  if (s->bi_valid > (int)Buf_size - len) {\
    int val = (int)value;\
    s->bi_buf |= (ush)val << s->bi_valid;\
    put_short(s, s->bi_buf);\
    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
    s->bi_valid += len - Buf_size;\
  } else {\
    s->bi_buf |= (ush)(value) << s->bi_valid;\
    s->bi_valid += len;\
  }\
}
#endif /* ZLIB_DEBUG */
/* the arguments must not have side effects */
/* ===========================================================================
 * Initialize the various 'constant' tables.
 * With an ANSI compiler and without GEN_TREES_H, the tables above are
 * compile-time constants and this whole body is preprocessed away.
 */
local void tr_static_init()
{
#if defined(GEN_TREES_H) || !defined(STDC)
    static int static_init_done = 0;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    if (static_init_done) return;

    /* For some embedded targets, global variables are not initialized: */
#ifdef NO_INIT_GLOBAL_POINTERS
    static_l_desc.static_tree = static_ltree;
    static_l_desc.extra_bits = extra_lbits;
    static_d_desc.static_tree = static_dtree;
    static_d_desc.extra_bits = extra_dbits;
    static_bl_desc.extra_bits = extra_blbits;
#endif

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            _length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    _length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            _dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            _dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
    }
    static_init_done = 1;

#  ifdef GEN_TREES_H
    gen_trees_header();
#  endif
#endif /* defined(GEN_TREES_H) || !defined(STDC) */
}
/* ===========================================================================
 * Generate the file trees.h describing the static trees.
 */
  3321. #ifdef GEN_TREES_H
  3322. # ifndef ZLIB_DEBUG
  3323. # include <stdio.h>
  3324. # endif
  3325. # define SEPARATOR(i, last, width) \
  3326. ((i) == (last)? "\n};\n\n" : \
  3327. ((i) % (width) == (width)-1 ? ",\n" : ", "))
  3328. void gen_trees_header()
  3329. {
  3330. FILE *header = fopen("trees.h", "w");
  3331. int i;
  3332. Assert (header != NULL, "Can't open trees.h");
  3333. fprintf(header,
  3334. "/* header created automatically with -DGEN_TREES_H */\n\n");
  3335. fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n");
  3336. for (i = 0; i < L_CODES+2; i++) {
  3337. fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code,
  3338. static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5));
  3339. }
  3340. fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n");
  3341. for (i = 0; i < D_CODES; i++) {
  3342. fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code,
  3343. static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5));
  3344. }
  3345. fprintf(header, "const uch ZLIB_INTERNAL _dist_code[DIST_CODE_LEN] = {\n");
  3346. for (i = 0; i < DIST_CODE_LEN; i++) {
  3347. fprintf(header, "%2u%s", _dist_code[i],
  3348. SEPARATOR(i, DIST_CODE_LEN-1, 20));
  3349. }
  3350. fprintf(header,
  3351. "const uch ZLIB_INTERNAL _length_code[MAX_MATCH-MIN_MATCH+1]= {\n");
  3352. for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) {
  3353. fprintf(header, "%2u%s", _length_code[i],
  3354. SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20));
  3355. }
  3356. fprintf(header, "local const int base_length[LENGTH_CODES] = {\n");
  3357. for (i = 0; i < LENGTH_CODES; i++) {
  3358. fprintf(header, "%1u%s", base_length[i],
  3359. SEPARATOR(i, LENGTH_CODES-1, 20));
  3360. }
  3361. fprintf(header, "local const int base_dist[D_CODES] = {\n");
  3362. for (i = 0; i < D_CODES; i++) {
  3363. fprintf(header, "%5u%s", base_dist[i],
  3364. SEPARATOR(i, D_CODES-1, 10));
  3365. }
  3366. fclose(header);
  3367. }
  3368. #endif /* GEN_TREES_H */
  3369. /* ===========================================================================
  3370. * Initialize the tree data structures for a new zlib stream.
  3371. */
  3372. void ZLIB_INTERNAL _tr_init(s)
  3373. deflate_state *s;
  3374. {
  3375. tr_static_init();
  3376. s->l_desc.dyn_tree = s->dyn_ltree;
  3377. s->l_desc.stat_desc = &static_l_desc;
  3378. s->d_desc.dyn_tree = s->dyn_dtree;
  3379. s->d_desc.stat_desc = &static_d_desc;
  3380. s->bl_desc.dyn_tree = s->bl_tree;
  3381. s->bl_desc.stat_desc = &static_bl_desc;
  3382. s->bi_buf = 0;
  3383. s->bi_valid = 0;
  3384. #ifdef ZLIB_DEBUG
  3385. s->compressed_len = 0L;
  3386. s->bits_sent = 0L;
  3387. #endif
  3388. /* Initialize the first block of the first file: */
  3389. init_block(s);
  3390. }
  3391. /* ===========================================================================
  3392. * Initialize a new block.
  3393. */
  3394. local void init_block(s)
  3395. deflate_state *s;
  3396. {
  3397. int n; /* iterates over tree elements */
  3398. /* Initialize the trees. */
  3399. for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
  3400. for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
  3401. for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
  3402. s->dyn_ltree[END_BLOCK].Freq = 1;
  3403. s->opt_len = s->static_len = 0L;
  3404. s->last_lit = s->matches = 0;
  3405. }
#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */

/* ===========================================================================
 * Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len.
 */
#define pqremove(s, tree, top) \
{\
    top = s->heap[SMALLEST]; \
    s->heap[SMALLEST] = s->heap[s->heap_len--]; \
    pqdownheap(s, tree, SMALLEST); \
}
/* ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length.
 */
#define smaller(tree, n, m, depth) \
   (tree[n].Freq < tree[m].Freq || \
   (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
  3425. /* ===========================================================================
  3426. * Restore the heap property by moving down the tree starting at node k,
  3427. * exchanging a node with the smallest of its two sons if necessary, stopping
  3428. * when the heap property is re-established (each father smaller than its
  3429. * two sons).
  3430. */
  3431. local void pqdownheap(s, tree, k)
  3432. deflate_state *s;
  3433. ct_data *tree; /* the tree to restore */
  3434. int k; /* node to move down */
  3435. {
  3436. int v = s->heap[k];
  3437. int j = k << 1; /* left son of k */
  3438. while (j <= s->heap_len) {
  3439. /* Set j to the smallest of the two sons: */
  3440. if (j < s->heap_len &&
  3441. smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
  3442. j++;
  3443. }
  3444. /* Exit if v is smaller than both sons */
  3445. if (smaller(tree, v, s->heap[j], s->depth)) break;
  3446. /* Exchange v with the smallest son */
  3447. s->heap[k] = s->heap[j]; k = j;
  3448. /* And continue down the tree, setting j to the left son of k */
  3449. j <<= 1;
  3450. }
  3451. s->heap[k] = v;
  3452. }
/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 *    above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 *     array bl_count contains the frequencies for each bit length.
 *     The length opt_len is updated; static_len is also updated if stree is
 *     not null.
 */
local void gen_bitlen(s, desc)
    deflate_state *s;
    tree_desc *desc;    /* the tree descriptor */
{
    ct_data *tree        = desc->dyn_tree;
    int max_code         = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const intf *extra    = desc->stat_desc->extra_bits;
    int base             = desc->stat_desc->extra_base;
    int max_length       = desc->stat_desc->max_length;
    int h;              /* heap index */
    int n, m;           /* iterate over the tree elements */
    int bits;           /* bit length */
    int xbits;          /* extra bits */
    ush f;              /* frequency */
    int overflow = 0;   /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        /* depth of a node is one more than its father's */
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (unsigned)(bits + xbits);
        if (stree) s->static_len += (ulg)f * (unsigned)(stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Tracev((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--;        /* move one leaf down the tree */
        s->bl_count[bits+1] += 2;   /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits];
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue;
            if ((unsigned) tree[m].Len != (unsigned) bits) {
                Tracev((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                s->opt_len += ((ulg)bits - tree[m].Len) * tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}
/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 *     zero code length.
 */
local void gen_codes (tree, max_code, bl_count)
    ct_data *tree;             /* the tree to decorate */
    int max_code;              /* largest code with non zero frequency */
    ushf *bl_count;            /* number of codes at each bit length */
{
    ush next_code[MAX_BITS+1]; /* next code value for each bit length */
    unsigned code = 0;         /* running code value */
    int bits;                  /* bit index */
    int n;                     /* code index */

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= MAX_BITS; bits++) {
        code = (code + bl_count[bits-1]) << 1;
        next_code[bits] = (ush)code;
    }
    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
            "inconsistent bit counts");
    Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0;  n <= max_code; n++) {
        int len = tree[n].Len;
        if (len == 0) continue;
        /* Now reverse the bits: deflate emits codes MSB-first */
        tree[n].Code = (ush)bi_reverse(next_code[len]++, len);

        Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
             n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
    }
}
/* ===========================================================================
 * Construct one Huffman tree and assigns the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 *     and corresponding code. The length opt_len is updated; static_len is
 *     also updated if stree is not null. The field max_code is set.
 */
local void build_tree(s, desc)
    deflate_state *s;
    tree_desc *desc; /* the tree descriptor */
{
    ct_data *tree         = desc->dyn_tree;
    const ct_data *stree  = desc->stat_desc->static_tree;
    int elems             = desc->stat_desc->elems;
    int n, m;          /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node;          /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n].Freq != 0) {
            s->heap[++(s->heap_len)] = max_code = n;
            s->depth[n] = 0;
        } else {
            tree[n].Len = 0;
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (s->heap_len < 2) {
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
        tree[node].Freq = 1;
        s->depth[node] = 0;
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
        /* node is 0 or 1 so it does not have extra bits */
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems;              /* next internal node of the tree */
    do {
        pqremove(s, tree, n);  /* n = node of least frequency */
        m = s->heap[SMALLEST]; /* m = node of next least frequency */

        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
        s->heap[--(s->heap_max)] = m;

        /* Create a new node father of n and m */
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
        s->depth[node] = (uch)((s->depth[n] >= s->depth[m] ?
                                s->depth[n] : s->depth[m]) + 1);
        tree[n].Dad = tree[m].Dad = (ush)node;
#ifdef DUMP_BL_TREE
        if (tree == s->bl_tree) {
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
        }
#endif
        /* and insert the new node in the heap */
        s->heap[SMALLEST] = node++;
        pqdownheap(s, tree, SMALLEST);

    } while (s->heap_len >= 2);

    s->heap[--(s->heap_max)] = s->heap[SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(s, (tree_desc *)desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
}
/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree.
 */
local void scan_tree (s, tree, max_code)
    deflate_state *s;
    ct_data *tree;   /* the tree to be scanned */
    int max_code;    /* and its largest code of non zero frequency */
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
    tree[max_code+1].Len = (ush)0xffff; /* guard: terminates the last run */

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            /* run too short for a repeat code: count each length itself */
            s->bl_tree[curlen].Freq += count;
        } else if (curlen != 0) {
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
            s->bl_tree[REP_3_6].Freq++;     /* repeat previous length 3-6 times */
        } else if (count <= 10) {
            s->bl_tree[REPZ_3_10].Freq++;   /* repeat zero length 3-10 times */
        } else {
            s->bl_tree[REPZ_11_138].Freq++; /* repeat zero length 11-138 times */
        }
        count = 0; prevlen = curlen;
        /* pick the repeat limits for the next run */
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree. Mirrors the run detection of scan_tree, emitting codes instead
 * of counting frequencies.
 */
local void send_tree (s, tree, max_code)
    deflate_state *s;
    ct_data *tree;   /* the tree to be scanned */
    int max_code;    /* and its largest code of non zero frequency */
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    /* tree[max_code+1].Len = -1; */ /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            /* run too short for a repeat code: send each length itself */
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);

        } else if (curlen != 0) {
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);

        } else if (count <= 10) {
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);

        } else {
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        /* pick the repeat limits for the next run */
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
  3738. /* ===========================================================================
  3739. * Construct the Huffman tree for the bit lengths and return the index in
  3740. * bl_order of the last bit length code to send.
  3741. */
  3742. local int build_bl_tree(s)
  3743. deflate_state *s;
  3744. {
  3745. int max_blindex; /* index of last bit length code of non zero freq */
  3746. /* Determine the bit length frequencies for literal and distance trees */
  3747. scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
  3748. scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
  3749. /* Build the bit length tree: */
  3750. build_tree(s, (tree_desc *)(&(s->bl_desc)));
  3751. /* opt_len now includes the length of the tree representations, except
  3752. * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
  3753. */
  3754. /* Determine the number of bit length codes to send. The pkzip format
  3755. * requires that at least 4 bit length codes be sent. (appnote.txt says
  3756. * 3 but the actual value used is 4.)
  3757. */
  3758. for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
  3759. if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
  3760. }
  3761. /* Update opt_len to include the bit length tree and counts */
  3762. s->opt_len += 3*((ulg)max_blindex+1) + 5+5+4;
  3763. Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
  3764. s->opt_len, s->static_len));
  3765. return max_blindex;
  3766. }
  3767. /* ===========================================================================
  3768. * Send the header for a block using dynamic Huffman trees: the counts, the
  3769. * lengths of the bit length codes, the literal tree and the distance tree.
  3770. * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
  3771. */
  3772. local void send_all_trees(s, lcodes, dcodes, blcodes)
  3773. deflate_state *s;
  3774. int lcodes, dcodes, blcodes; /* number of codes for each tree */
  3775. {
  3776. int rank; /* index in bl_order */
  3777. Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
  3778. Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
  3779. "too many codes");
  3780. Tracev((stderr, "\nbl counts: "));
  3781. send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
  3782. send_bits(s, dcodes-1, 5);
  3783. send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */
  3784. for (rank = 0; rank < blcodes; rank++) {
  3785. Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
  3786. send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
  3787. }
  3788. Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
  3789. send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
  3790. Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
  3791. send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
  3792. Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
  3793. }
  3794. /* ===========================================================================
  3795. * Send a stored block
  3796. */
  3797. void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
  3798. deflate_state *s;
  3799. charf *buf; /* input block */
  3800. ulg stored_len; /* length of input block */
  3801. int last; /* one if this is the last block for a file */
  3802. {
  3803. send_bits(s, (STORED_BLOCK<<1)+last, 3); /* send block type */
  3804. bi_windup(s); /* align on byte boundary */
  3805. put_short(s, (ush)stored_len);
  3806. put_short(s, (ush)~stored_len);
  3807. zmemcpy(s->pending_buf + s->pending, (Bytef *)buf, stored_len);
  3808. s->pending += stored_len;
  3809. #ifdef ZLIB_DEBUG
  3810. s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
  3811. s->compressed_len += (stored_len + 4) << 3;
  3812. s->bits_sent += 2*16;
  3813. s->bits_sent += stored_len<<3;
  3814. #endif
  3815. }
  3816. /* ===========================================================================
  3817. * Flush the bits in the bit buffer to pending output (leaves at most 7 bits)
  3818. */
  3819. void ZLIB_INTERNAL _tr_flush_bits(s)
  3820. deflate_state *s;
  3821. {
  3822. bi_flush(s);
  3823. }
  3824. /* ===========================================================================
  3825. * Send one empty static block to give enough lookahead for inflate.
  3826. * This takes 10 bits, of which 7 may remain in the bit buffer.
  3827. */
  3828. void ZLIB_INTERNAL _tr_align(s)
  3829. deflate_state *s;
  3830. {
  3831. send_bits(s, STATIC_TREES<<1, 3);
  3832. send_code(s, END_BLOCK, static_ltree);
  3833. #ifdef ZLIB_DEBUG
  3834. s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
  3835. #endif
  3836. bi_flush(s);
  3837. }
/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and write out the encoded block.
 */
void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
    deflate_state *s;
    charf *buf;       /* input block, or NULL if too old */
    ulg stored_len;   /* length of input block */
    int last;         /* one if this is the last block for a file */
{
    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
    int max_blindex = 0;  /* index of last bit length code of non zero freq */

    /* Build the Huffman trees unless a stored block is forced */
    if (s->level > 0) {

        /* Check if the file is binary or text */
        if (s->strm->data_type == Z_UNKNOWN)
            s->strm->data_type = detect_data_type(s);

        /* Construct the literal and distance trees */
        build_tree(s, (tree_desc *)(&(s->l_desc)));
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));

        build_tree(s, (tree_desc *)(&(s->d_desc)));
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the index
         * in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree(s);

        /* Determine the best encoding. Compute the block lengths in bytes.
         * +3 is the 3-bit block header; +7 rounds up to a whole byte.
         */
        opt_lenb = (s->opt_len+3+7)>>3;
        static_lenb = (s->static_len+3+7)>>3;

        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
                s->last_lit));

        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

    } else {
        Assert(buf != (char*)0, "lost buf");
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
    }

#ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
#else
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
                       /* 4: two words for the lengths */
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        _tr_stored_block(s, buf, stored_len, last);

#ifdef FORCE_STATIC
    } else if (static_lenb >= 0) { /* force static trees */
#else
    } else if (s->strategy == Z_FIXED || static_lenb == opt_lenb) {
#endif
        /* Static trees: no tree description needs to be sent. */
        send_bits(s, (STATIC_TREES<<1)+last, 3);
        compress_block(s, (const ct_data *)static_ltree,
                       (const ct_data *)static_dtree);
#ifdef ZLIB_DEBUG
        s->compressed_len += 3 + s->static_len;
#endif
    } else {
        /* Dynamic trees: send the three tree descriptions first. */
        send_bits(s, (DYN_TREES<<1)+last, 3);
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
                       max_blindex+1);
        compress_block(s, (const ct_data *)s->dyn_ltree,
                       (const ct_data *)s->dyn_dtree);
#ifdef ZLIB_DEBUG
        s->compressed_len += 3 + s->opt_len;
#endif
    }
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
    /* The above check is made mod 2^32, for files larger than 512 MB
     * and uLong implemented on 32 bits.
     */
    init_block(s);

    if (last) {
        bi_windup(s);
#ifdef ZLIB_DEBUG
        s->compressed_len += 7;  /* align on byte boundary */
#endif
    }
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
           s->compressed_len-7*last));
}
/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
int ZLIB_INTERNAL _tr_tally (s, dist, lc)
    deflate_state *s;
    unsigned dist;  /* distance of matched string */
    unsigned lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
{
    /* Record the (distance, length/literal) pair for compress_block(). */
    s->d_buf[s->last_lit] = (ush)dist;
    s->l_buf[s->last_lit++] = (uch)lc;
    if (dist == 0) {
        /* lc is the unmatched char */
        s->dyn_ltree[lc].Freq++;
    } else {
        s->matches++;
        /* Here, lc is the match length - MIN_MATCH */
        dist--;             /* dist = match distance - 1 */
        Assert((ush)dist < (ush)MAX_DIST(s) &&
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
               (ush)d_code(dist) < (ush)D_CODES,  "_tr_tally: bad match");

        /* Length codes follow the literals and the EOB code in dyn_ltree. */
        s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }

#ifdef TRUNCATE_BLOCK
    /* Try to guess if it is profitable to stop the current block here */
    if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
        /* Compute an upper bound for the compressed length */
        ulg out_length = (ulg)s->last_lit*8L;
        ulg in_length = (ulg)((long)s->strstart - s->block_start);
        int dcode;
        for (dcode = 0; dcode < D_CODES; dcode++) {
            out_length += (ulg)s->dyn_dtree[dcode].Freq *
                (5L+extra_dbits[dcode]);
        }
        out_length >>= 3;
        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
               s->last_lit, in_length, out_length,
               100L - out_length*100L/in_length));
        /* Flush early when matches are rare and expansion looks likely. */
        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
    }
#endif
    return (s->last_lit == s->lit_bufsize-1);
    /* We avoid equality with lit_bufsize because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
}
/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
local void compress_block(s, ltree, dtree)
    deflate_state *s;
    const ct_data *ltree;  /* literal tree */
    const ct_data *dtree;  /* distance tree */
{
    unsigned dist;    /* distance of matched string */
    int lc;           /* match length or unmatched char (if dist == 0) */
    unsigned lx = 0;  /* running index in l_buf */
    unsigned code;    /* the code to send */
    int extra;        /* number of extra bits to send */

    /* Replay every pair recorded by _tr_tally() in order. */
    if (s->last_lit != 0) do {
        dist = s->d_buf[lx];
        lc = s->l_buf[lx++];
        if (dist == 0) {
            send_code(s, lc, ltree);  /* send a literal byte */
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = _length_code[lc];
            send_code(s, code+LITERALS+1, ltree);  /* send the length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
                send_bits(s, lc, extra);  /* send the extra length bits */
            }
            dist--;  /* dist is now the match distance - 1 */
            code = d_code(dist);
            Assert (code < D_CODES, "bad d_code");

            send_code(s, code, dtree);  /* send the distance code */
            extra = extra_dbits[code];
            if (extra != 0) {
                dist -= (unsigned)base_dist[code];
                send_bits(s, dist, extra);  /* send the extra distance bits */
            }
        }  /* literal or match pair ? */

        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
        Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx,
               "pendingBuf overflow");

    } while (lx < s->last_lit);

    send_code(s, END_BLOCK, ltree);
}
  4020. /* ===========================================================================
  4021. * Check if the data type is TEXT or BINARY, using the following algorithm:
  4022. * - TEXT if the two conditions below are satisfied:
  4023. * a) There are no non-portable control characters belonging to the
  4024. * "black list" (0..6, 14..25, 28..31).
  4025. * b) There is at least one printable character belonging to the
  4026. * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
  4027. * - BINARY otherwise.
  4028. * - The following partially-portable control characters form a
  4029. * "gray list" that is ignored in this detection algorithm:
  4030. * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
  4031. * IN assertion: the fields Freq of dyn_ltree are set.
  4032. */
  4033. local int detect_data_type(s)
  4034. deflate_state *s;
  4035. {
  4036. /* black_mask is the bit mask of black-listed bytes
  4037. * set bits 0..6, 14..25, and 28..31
  4038. * 0xf3ffc07f = binary 11110011111111111100000001111111
  4039. */
  4040. unsigned long black_mask = 0xf3ffc07fUL;
  4041. int n;
  4042. /* Check for non-textual ("black-listed") bytes. */
  4043. for (n = 0; n <= 31; n++, black_mask >>= 1)
  4044. if ((black_mask & 1) && (s->dyn_ltree[n].Freq != 0))
  4045. return Z_BINARY;
  4046. /* Check for textual ("white-listed") bytes. */
  4047. if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
  4048. || s->dyn_ltree[13].Freq != 0)
  4049. return Z_TEXT;
  4050. for (n = 32; n < LITERALS; n++)
  4051. if (s->dyn_ltree[n].Freq != 0)
  4052. return Z_TEXT;
  4053. /* There are no "black-listed" or "white-listed" bytes:
  4054. * this stream either is empty or has tolerated ("gray-listed") bytes only.
  4055. */
  4056. return Z_BINARY;
  4057. }
  4058. /* ===========================================================================
  4059. * Reverse the first len bits of a code, using straightforward code (a faster
  4060. * method would use a table)
  4061. * IN assertion: 1 <= len <= 15
  4062. */
  4063. local unsigned bi_reverse(code, len)
  4064. unsigned code; /* the value to invert */
  4065. int len; /* its bit length */
  4066. {
  4067. register unsigned res = 0;
  4068. do {
  4069. res |= code & 1;
  4070. code >>= 1, res <<= 1;
  4071. } while (--len > 0);
  4072. return res >> 1;
  4073. }
  4074. /* ===========================================================================
  4075. * Flush the bit buffer, keeping at most 7 bits in it.
  4076. */
  4077. local void bi_flush(s)
  4078. deflate_state *s;
  4079. {
  4080. if (s->bi_valid == 16) {
  4081. put_short(s, s->bi_buf);
  4082. s->bi_buf = 0;
  4083. s->bi_valid = 0;
  4084. } else if (s->bi_valid >= 8) {
  4085. put_byte(s, (Byte)s->bi_buf);
  4086. s->bi_buf >>= 8;
  4087. s->bi_valid -= 8;
  4088. }
  4089. }
  4090. /* ===========================================================================
  4091. * Flush the bit buffer and align the output on a byte boundary
  4092. */
  4093. local void bi_windup(s)
  4094. deflate_state *s;
  4095. {
  4096. if (s->bi_valid > 8) {
  4097. put_short(s, s->bi_buf);
  4098. } else if (s->bi_valid > 0) {
  4099. put_byte(s, (Byte)s->bi_buf);
  4100. }
  4101. s->bi_buf = 0;
  4102. s->bi_valid = 0;
  4103. #ifdef ZLIB_DEBUG
  4104. s->bits_sent = (s->bits_sent+7) & ~7;
  4105. #endif
  4106. }
  4107. /* zutil.c -- target dependent utility functions for the compression library
  4108. * Copyright (C) 1995-2017 Jean-loup Gailly
  4109. * For conditions of distribution and use, see copyright notice in zlib.h
  4110. */
  4111. /* @(#) $Id$ */
/* Error message table for zlib return codes 2 (Z_NEED_DICT) down to -6
 * (Z_VERSION_ERROR); presumably indexed via the ERR_MSG() macro as
 * z_errmsg[Z_NEED_DICT - err] -- confirm against zutil.h. The trailing
 * empty string serves as a catch-all entry.
 */
z_const char * const z_errmsg[10] = {
    (z_const char *)"need dictionary",      /* Z_NEED_DICT       2  */
    (z_const char *)"stream end",           /* Z_STREAM_END      1  */
    (z_const char *)"",                     /* Z_OK              0  */
    (z_const char *)"file error",           /* Z_ERRNO         (-1) */
    (z_const char *)"stream error",         /* Z_STREAM_ERROR  (-2) */
    (z_const char *)"data error",           /* Z_DATA_ERROR    (-3) */
    (z_const char *)"insufficient memory",  /* Z_MEM_ERROR     (-4) */
    (z_const char *)"buffer error",         /* Z_BUF_ERROR     (-5) */
    (z_const char *)"incompatible version", /* Z_VERSION_ERROR (-6) */
    (z_const char *)""                      /* catch-all */
};
  4124. const char * ZEXPORT zlibVersion()
  4125. {
  4126. return ZLIB_VERSION;
  4127. }
/* Return a bit field describing compile-time type sizes and options.
 * Bits 1..0: sizeof(uInt), 3..2: sizeof(uLong), 5..4: sizeof(voidpf),
 * 7..6: sizeof(z_off_t) -- each encoded as 0 for 2 bytes, 1 for 4,
 * 2 for 8, 3 for anything else. Higher bits flag individual compile
 * options; see zlib.h for the full documented layout.
 */
uLong ZEXPORT zlibCompileFlags()
{
    uLong flags;

    flags = 0;
    switch ((int)(sizeof(uInt))) {
    case 2:     break;
    case 4:     flags += 1;     break;
    case 8:     flags += 2;     break;
    default:    flags += 3;
    }
    switch ((int)(sizeof(uLong))) {
    case 2:     break;
    case 4:     flags += 1 << 2;        break;
    case 8:     flags += 2 << 2;        break;
    default:    flags += 3 << 2;
    }
    switch ((int)(sizeof(voidpf))) {
    case 2:     break;
    case 4:     flags += 1 << 4;        break;
    case 8:     flags += 2 << 4;        break;
    default:    flags += 3 << 4;
    }
    switch ((int)(sizeof(z_off_t))) {
    case 2:     break;
    case 4:     flags += 1 << 6;        break;
    case 8:     flags += 2 << 6;        break;
    default:    flags += 3 << 6;
    }
    /* One bit per build option that changes library behavior. */
#ifdef ZLIB_DEBUG
    flags += 1 << 8;
#endif
#if defined(ASMV) || defined(ASMINF)
    flags += 1 << 9;
#endif
#ifdef ZLIB_WINAPI
    flags += 1 << 10;
#endif
#ifdef BUILDFIXED
    flags += 1 << 12;
#endif
#ifdef DYNAMIC_CRC_TABLE
    flags += 1 << 13;
#endif
#ifdef NO_GZCOMPRESS
    flags += 1L << 16;
#endif
#ifdef NO_GZIP
    flags += 1L << 17;
#endif
#ifdef PKZIP_BUG_WORKAROUND
    flags += 1L << 20;
#endif
#ifdef FASTEST
    flags += 1L << 21;
#endif
    /* Bits 26..24 describe which (v)snprintf variant is available. */
#if defined(STDC) || defined(Z_HAVE_STDARG_H)
#  ifdef NO_vsnprintf
    flags += 1L << 25;
#    ifdef HAS_vsprintf_void
    flags += 1L << 26;
#    endif
#  else
#    ifdef HAS_vsnprintf_void
    flags += 1L << 26;
#    endif
#  endif
#else
    flags += 1L << 24;
#  ifdef NO_snprintf
    flags += 1L << 25;
#    ifdef HAS_sprintf_void
    flags += 1L << 26;
#    endif
#  else
#    ifdef HAS_snprintf_void
    flags += 1L << 26;
#    endif
#  endif
#endif
    return flags;
}
#ifdef ZLIB_DEBUG
#include <stdlib.h>
#  ifndef verbose
#    define verbose 0
#  endif
/* Debug-build trace level; presumably read by the Trace* macros -- confirm
 * in zutil.h. Exposed so applications can change it at run time.
 */
int ZLIB_INTERNAL z_verbose = verbose;

/* Print the message m on stderr and terminate the process; compiled only
 * in ZLIB_DEBUG builds.
 */
void ZLIB_INTERNAL z_error (m)
    char *m;
{
    fprintf(stderr, "%s\n", m);
    exit(1);
}
#endif
  4222. /* exported to allow conversion of error code to string for compress() and
  4223. * uncompress()
  4224. */
  4225. const char * ZEXPORT zError(err)
  4226. int err;
  4227. {
  4228. return ERR_MSG(err);
  4229. }
  4230. #if defined(_WIN32_WCE)
  4231. /* The Microsoft C Run-Time Library for Windows CE doesn't have
  4232. * errno. We define it as a global variable to simplify porting.
  4233. * Its value is always 0 and should not be used.
  4234. */
  4235. int errno = 0;
  4236. #endif
  4237. #ifndef HAVE_MEMCPY
  4238. void ZLIB_INTERNAL zmemcpy(dest, source, len)
  4239. Bytef* dest;
  4240. const Bytef* source;
  4241. uInt len;
  4242. {
  4243. if (len == 0) return;
  4244. do {
  4245. *dest++ = *source++; /* ??? to be unrolled */
  4246. } while (--len != 0);
  4247. }
  4248. int ZLIB_INTERNAL zmemcmp(s1, s2, len)
  4249. const Bytef* s1;
  4250. const Bytef* s2;
  4251. uInt len;
  4252. {
  4253. uInt j;
  4254. for (j = 0; j < len; j++) {
  4255. if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;
  4256. }
  4257. return 0;
  4258. }
  4259. void ZLIB_INTERNAL zmemzero(dest, len)
  4260. Bytef* dest;
  4261. uInt len;
  4262. {
  4263. if (len == 0) return;
  4264. do {
  4265. *dest++ = 0; /* ??? to be unrolled */
  4266. } while (--len != 0);
  4267. }
  4268. #endif
  4269. #ifndef Z_SOLO
  4270. #ifdef SYS16BIT
  4271. #ifdef __TURBOC__
  4272. /* Turbo C in 16-bit mode */
  4273. # define MY_ZCALLOC
  4274. /* Turbo C malloc() does not allow dynamic allocation of 64K bytes
  4275. * and farmalloc(64K) returns a pointer with an offset of 8, so we
  4276. * must fix the pointer. Warning: the pointer must be put back to its
  4277. * original form in order to free it, use zcfree().
  4278. */
#define MAX_PTR 10
/* 10*64K = 640K */

local int next_ptr = 0;  /* number of table[] entries currently in use */

typedef struct ptr_table_s {
    voidpf org_ptr;  /* pointer exactly as returned by farmalloc() */
    voidpf new_ptr;  /* normalized (seg:0) pointer handed to the caller */
} ptr_table;

local ptr_table table[MAX_PTR];
/* This table is used to remember the original form of pointers
 * to large buffers (64K). Such pointers are normalized with a zero offset.
 * Since MSDOS is not a preemptive multitasking OS, this table is not
 * protected from concurrent access. This hack doesn't work anyway on
 * a protected system like OS/2. Use Microsoft C instead.
 */
/* Allocate items*size bytes with farmalloc(), normalizing large-buffer
 * pointers to seg:0 form (Turbo C 16-bit only; see table[] comment above).
 */
voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, unsigned items, unsigned size)
{
    voidpf buf;
    ulg bsize = (ulg)items*size;

    (void)opaque;
    /* If we allocate less than 65520 bytes, we assume that farmalloc
     * will return a usable pointer which doesn't have to be normalized.
     */
    if (bsize < 65520L) {
        buf = farmalloc(bsize);
        /* A non-zero offset word means the pointer is usable as-is. */
        if (*(ush*)&buf != 0) return buf;
    } else {
        buf = farmalloc(bsize + 16L);  /* extra room for normalization */
    }
    if (buf == NULL || next_ptr >= MAX_PTR) return NULL;
    /* Remember the original pointer so zcfree() can release it later. */
    table[next_ptr].org_ptr = buf;

    /* Normalize the pointer to seg:0 */
    *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4;
    *(ush*)&buf = 0;
    table[next_ptr++].new_ptr = buf;
    return buf;
}
/* Release a pointer obtained from the Turbo C zcalloc() above, undoing
 * the seg:0 normalization for large buffers via the lookup table.
 */
void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
{
    int n;

    (void)opaque;
    /* A non-zero offset word means the object was < 64K and was never
     * normalized, so it can be freed directly.
     */
    if (*(ush*)&ptr != 0) { /* object < 64K */
        farfree(ptr);
        return;
    }
    /* Find the original pointer */
    for (n = 0; n < next_ptr; n++) {
        if (ptr != table[n].new_ptr) continue;

        farfree(table[n].org_ptr);
        /* Compact the table over the released entry. */
        while (++n < next_ptr) {
            table[n-1] = table[n];
        }
        next_ptr--;
        return;
    }
    Assert(0, "zcfree: ptr not found");
}
  4335. #endif /* __TURBOC__ */
  4336. #ifdef M_I86
  4337. /* Microsoft C in 16-bit mode */
  4338. # define MY_ZCALLOC
  4339. #if (!defined(_MSC_VER) || (_MSC_VER <= 600))
  4340. # define _halloc halloc
  4341. # define _hfree hfree
  4342. #endif
/* Allocate items*size bytes via _halloc (Microsoft C, 16-bit mode). */
voidpf ZLIB_INTERNAL zcalloc (voidpf opaque, uInt items, uInt size)
{
    (void)opaque;
    return _halloc((long)items, size);
}
/* Release a buffer obtained from _halloc via the zcalloc() above. */
void ZLIB_INTERNAL zcfree (voidpf opaque, voidpf ptr)
{
    (void)opaque;
    _hfree(ptr);
}
  4353. #endif /* M_I86 */
  4354. #endif /* SYS16BIT */
  4355. #ifndef MY_ZCALLOC /* Any system without a special alloc function */
  4356. #ifndef STDC
  4357. extern voidp malloc OF((uInt size));
  4358. extern voidp calloc OF((uInt items, uInt size));
  4359. extern void free OF((voidpf ptr));
  4360. #endif
  4361. voidpf ZLIB_INTERNAL zcalloc (opaque, items, size)
  4362. voidpf opaque;
  4363. unsigned items;
  4364. unsigned size;
  4365. {
  4366. (void)opaque;
  4367. return sizeof(uInt) > 2 ? (voidpf)malloc(items * size) :
  4368. (voidpf)calloc(items, size);
  4369. }
  4370. void ZLIB_INTERNAL zcfree (opaque, ptr)
  4371. voidpf opaque;
  4372. voidpf ptr;
  4373. {
  4374. (void)opaque;
  4375. free(ptr);
  4376. }
  4377. #endif /* MY_ZCALLOC */
  4378. #endif /* !Z_SOLO */
  4379. /* compress.c -- compress a memory buffer
  4380. * Copyright (C) 1995-2005, 2014, 2016 Jean-loup Gailly, Mark Adler
  4381. * For conditions of distribution and use, see copyright notice in zlib.h
  4382. */
  4383. /* @(#) $Id$ */
  4384. #define ZLIB_INTERNAL
  4385. #include "zlib.h"
  4386. /* ===========================================================================
  4387. Compresses the source buffer into the destination buffer. The level
  4388. parameter has the same meaning as in deflateInit. sourceLen is the byte
length of the source buffer. Upon entry, destLen is the total size of the
destination buffer; it must be large enough to hold the entire compressed
output (compressBound(sourceLen) bytes are always sufficient). Upon exit,
destLen is the actual size of the compressed buffer.
  4392. compress2 returns Z_OK if success, Z_MEM_ERROR if there was not enough
  4393. memory, Z_BUF_ERROR if there was not enough room in the output buffer,
  4394. Z_STREAM_ERROR if the level parameter is invalid.
  4395. */
int ZEXPORT compress2 (dest, destLen, source, sourceLen, level)
    Bytef *dest;
    uLongf *destLen;
    const Bytef *source;
    uLong sourceLen;
    int level;
{
    z_stream stream;
    int err;
    const uInt max = (uInt)-1;  /* largest chunk a uInt counter can hold */
    uLong left;

    left = *destLen;
    *destLen = 0;  /* report zero output until success */

    stream.zalloc = (alloc_func)0;
    stream.zfree = (free_func)0;
    stream.opaque = (voidpf)0;

    err = deflateInit(&stream, level);
    if (err != Z_OK) return err;

    stream.next_out = dest;
    stream.avail_out = 0;
    stream.next_in = (z_const Bytef *)source;
    stream.avail_in = 0;

    /* Feed input and output in chunks of at most max bytes so that
     * buffers longer than a uInt can represent are still handled.
     * Z_FINISH is requested once all input has been handed over.
     */
    do {
        if (stream.avail_out == 0) {
            stream.avail_out = left > (uLong)max ? max : (uInt)left;
            left -= stream.avail_out;
        }
        if (stream.avail_in == 0) {
            stream.avail_in = sourceLen > (uLong)max ? max : (uInt)sourceLen;
            sourceLen -= stream.avail_in;
        }
        err = deflate(&stream, sourceLen ? Z_NO_FLUSH : Z_FINISH);
    } while (err == Z_OK);

    *destLen = stream.total_out;
    deflateEnd(&stream);
    /* Z_STREAM_END means all input was consumed and flushed: success. */
    return err == Z_STREAM_END ? Z_OK : err;
}
  4433. /* ===========================================================================
  4434. */
  4435. int ZEXPORT compress (dest, destLen, source, sourceLen)
  4436. Bytef *dest;
  4437. uLongf *destLen;
  4438. const Bytef *source;
  4439. uLong sourceLen;
  4440. {
  4441. return compress2(dest, destLen, source, sourceLen, Z_DEFAULT_COMPRESSION);
  4442. }
  4443. /* ===========================================================================
  4444. If the default memLevel or windowBits for deflateInit() is changed, then
  4445. this function needs to be updated.
  4446. */
  4447. uLong ZEXPORT compressBound (sourceLen)
  4448. uLong sourceLen;
  4449. {
  4450. return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
  4451. (sourceLen >> 25) + 13;
  4452. }