onyx_if.c

/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "vpx_config.h"
#include "./vpx_scale_rtcd.h"
#include "./vpx_dsp_rtcd.h"
#include "./vp8_rtcd.h"
#include "vp8/common/onyxc_int.h"
#include "vp8/common/blockd.h"
#include "onyx_int.h"
#include "vp8/common/systemdependent.h"
#include "quantize.h"
#include "vp8/common/alloccommon.h"
#include "mcomp.h"
#include "firstpass.h"
#include "vpx/internal/vpx_psnr.h"
#include "vpx_scale/vpx_scale.h"
#include "vp8/common/extend.h"
#include "ratectrl.h"
#include "vp8/common/quant_common.h"
#include "segmentation.h"
#if CONFIG_POSTPROC
#include "vp8/common/postproc.h"
#endif
#include "vpx_mem/vpx_mem.h"
#include "vp8/common/swapyv12buffer.h"
#include "vp8/common/threading.h"
#include "vpx_ports/vpx_timer.h"
#if ARCH_ARM
#include "vpx_ports/arm.h"
#endif
#if CONFIG_MULTI_RES_ENCODING
#include "mr_dissim.h"
#endif
#include "encodeframe.h"

#include <math.h>
#include <stdio.h>
#include <limits.h>

#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
extern int vp8_update_coef_context(VP8_COMP *cpi);
extern void vp8_update_coef_probs(VP8_COMP *cpi);
#endif

extern void vp8cx_pick_filter_level_fast(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
extern void vp8cx_set_alt_lf_level(VP8_COMP *cpi, int filt_val);
extern void vp8cx_pick_filter_level(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
extern void vp8_deblock_frame(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *post, int filt_lvl, int low_var_thresh, int flag);
extern void print_parms(VP8_CONFIG *ocf, char *filenam);
extern unsigned int vp8_get_processor_freq();
extern void print_tree_update_probs();
extern int vp8cx_create_encoder_threads(VP8_COMP *cpi);
extern void vp8cx_remove_encoder_threads(VP8_COMP *cpi);

int vp8_estimate_entropy_savings(VP8_COMP *cpi);
int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest);

extern void vp8_temporal_filter_prepare_c(VP8_COMP *cpi, int distance);
static void set_default_lf_deltas(VP8_COMP *cpi);
extern const int vp8_gf_interval_table[101];

#if CONFIG_INTERNAL_STATS
#include "math.h"

extern double vp8_calc_ssim
(
    YV12_BUFFER_CONFIG *source,
    YV12_BUFFER_CONFIG *dest,
    int lumamask,
    double *weight
);

extern double vp8_calc_ssimg
(
    YV12_BUFFER_CONFIG *source,
    YV12_BUFFER_CONFIG *dest,
    double *ssim_y,
    double *ssim_u,
    double *ssim_v
);
#endif

#ifdef OUTPUT_YUV_SRC
FILE *yuv_file;
#endif
#ifdef OUTPUT_YUV_DENOISED
FILE *yuv_denoised_file;
#endif

#if 0
FILE *framepsnr;
FILE *kf_list;
FILE *keyfile;
#endif

#if 0
extern int skip_true_count;
extern int skip_false_count;
#endif

#ifdef VP8_ENTROPY_STATS
extern int intra_mode_stats[10][10][10];
#endif

#ifdef SPEEDSTATS
unsigned int frames_at_speed[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
unsigned int tot_pm = 0;
unsigned int cnt_pm = 0;
unsigned int tot_ef = 0;
unsigned int cnt_ef = 0;
#endif

#ifdef MODE_STATS
extern unsigned __int64 Sectionbits[50];
extern int y_modes[5];
extern int uv_modes[4];
extern int b_modes[10];
extern int inter_y_modes[10];
extern int inter_uv_modes[4];
extern unsigned int inter_b_modes[15];
#endif

extern const int vp8_bits_per_mb[2][QINDEX_RANGE];

extern const int qrounding_factors[129];
extern const int qzbin_factors[129];
extern void vp8cx_init_quantizer(VP8_COMP *cpi);
extern const int vp8cx_base_skip_false_prob[128];

/* Tables relating active max Q to active min Q */
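/* (These tables appear to be indexed by the frame's active worst-case Q and
 * to give the lowest Q that rate control should then allow for key frames,
 * golden/alt-ref frames and inter frames respectively; that reading is
 * inferred from how they are used elsewhere in the encoder, not shown here.)
 */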
static const unsigned char kf_low_motion_minq[QINDEX_RANGE] =
{
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
    3,3,3,3,3,3,4,4,4,5,5,5,5,5,6,6,
    6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
    11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
    16,16,17,17,18,18,18,18,19,20,20,21,21,22,23,23
};

static const unsigned char kf_high_motion_minq[QINDEX_RANGE] =
{
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    1,1,1,1,1,1,1,1,2,2,2,2,3,3,3,3,
    3,3,3,3,4,4,4,4,5,5,5,5,5,5,6,6,
    6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
    11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
    16,16,17,17,18,18,18,18,19,19,20,20,20,20,21,21,
    21,21,22,22,23,23,24,25,25,26,26,27,28,28,29,30
};

static const unsigned char gf_low_motion_minq[QINDEX_RANGE] =
{
    0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
    3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,
    7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,
    11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,
    19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,
    27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,
    35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,
    43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58
};

static const unsigned char gf_mid_motion_minq[QINDEX_RANGE] =
{
    0,0,0,0,1,1,1,1,1,1,2,2,3,3,3,4,
    4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
    9,10,10,10,10,11,11,11,12,12,12,12,13,13,13,14,
    14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,
    22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,
    30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,
    38,39,39,40,40,41,41,42,42,43,43,44,45,46,47,48,
    49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64
};

static const unsigned char gf_high_motion_minq[QINDEX_RANGE] =
{
    0,0,0,0,1,1,1,1,1,2,2,2,3,3,3,4,
    4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
    9,10,10,10,11,11,12,12,13,13,14,14,15,15,16,16,
    17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,
    25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,
    33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,
    41,41,42,42,43,44,45,46,47,48,49,50,51,52,53,54,
    55,56,57,58,59,60,62,64,66,68,70,72,74,76,78,80
};

static const unsigned char inter_minq[QINDEX_RANGE] =
{
    0,0,1,1,2,3,3,4,4,5,6,6,7,8,8,9,
    9,10,11,11,12,13,13,14,15,15,16,17,17,18,19,20,
    20,21,22,22,23,24,24,25,26,27,27,28,29,30,30,31,
    32,33,33,34,35,36,36,37,38,39,39,40,41,42,42,43,
    44,45,46,46,47,48,49,50,50,51,52,53,54,55,55,56,
    57,58,59,60,60,61,62,63,64,65,66,67,67,68,69,70,
    71,72,73,74,75,75,76,77,78,79,80,81,82,83,84,85,
    86,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100
};

#ifdef PACKET_TESTING
extern FILE *vpxlogc;
#endif

static void save_layer_context(VP8_COMP *cpi)
{
    LAYER_CONTEXT *lc = &cpi->layer_context[cpi->current_layer];

    /* Save layer dependent coding state */
    lc->target_bandwidth = cpi->target_bandwidth;
    lc->starting_buffer_level = cpi->oxcf.starting_buffer_level;
    lc->optimal_buffer_level = cpi->oxcf.optimal_buffer_level;
    lc->maximum_buffer_size = cpi->oxcf.maximum_buffer_size;
    lc->starting_buffer_level_in_ms = cpi->oxcf.starting_buffer_level_in_ms;
    lc->optimal_buffer_level_in_ms = cpi->oxcf.optimal_buffer_level_in_ms;
    lc->maximum_buffer_size_in_ms = cpi->oxcf.maximum_buffer_size_in_ms;
    lc->buffer_level = cpi->buffer_level;
    lc->bits_off_target = cpi->bits_off_target;
    lc->total_actual_bits = cpi->total_actual_bits;
    lc->worst_quality = cpi->worst_quality;
    lc->active_worst_quality = cpi->active_worst_quality;
    lc->best_quality = cpi->best_quality;
    lc->active_best_quality = cpi->active_best_quality;
    lc->ni_av_qi = cpi->ni_av_qi;
    lc->ni_tot_qi = cpi->ni_tot_qi;
    lc->ni_frames = cpi->ni_frames;
    lc->avg_frame_qindex = cpi->avg_frame_qindex;
    lc->rate_correction_factor = cpi->rate_correction_factor;
    lc->key_frame_rate_correction_factor = cpi->key_frame_rate_correction_factor;
    lc->gf_rate_correction_factor = cpi->gf_rate_correction_factor;
    lc->zbin_over_quant = cpi->mb.zbin_over_quant;
    lc->inter_frame_target = cpi->inter_frame_target;
    lc->total_byte_count = cpi->total_byte_count;
    lc->filter_level = cpi->common.filter_level;
    lc->last_frame_percent_intra = cpi->last_frame_percent_intra;

    memcpy(lc->count_mb_ref_frame_usage,
           cpi->mb.count_mb_ref_frame_usage,
           sizeof(cpi->mb.count_mb_ref_frame_usage));
}

static void restore_layer_context(VP8_COMP *cpi, const int layer)
{
    LAYER_CONTEXT *lc = &cpi->layer_context[layer];

    /* Restore layer dependent coding state */
    cpi->current_layer = layer;
    cpi->target_bandwidth = lc->target_bandwidth;
    cpi->oxcf.target_bandwidth = lc->target_bandwidth;
    cpi->oxcf.starting_buffer_level = lc->starting_buffer_level;
    cpi->oxcf.optimal_buffer_level = lc->optimal_buffer_level;
    cpi->oxcf.maximum_buffer_size = lc->maximum_buffer_size;
    cpi->oxcf.starting_buffer_level_in_ms = lc->starting_buffer_level_in_ms;
    cpi->oxcf.optimal_buffer_level_in_ms = lc->optimal_buffer_level_in_ms;
    cpi->oxcf.maximum_buffer_size_in_ms = lc->maximum_buffer_size_in_ms;
    cpi->buffer_level = lc->buffer_level;
    cpi->bits_off_target = lc->bits_off_target;
    cpi->total_actual_bits = lc->total_actual_bits;
    cpi->active_worst_quality = lc->active_worst_quality;
    cpi->active_best_quality = lc->active_best_quality;
    cpi->ni_av_qi = lc->ni_av_qi;
    cpi->ni_tot_qi = lc->ni_tot_qi;
    cpi->ni_frames = lc->ni_frames;
    cpi->avg_frame_qindex = lc->avg_frame_qindex;
    cpi->rate_correction_factor = lc->rate_correction_factor;
    cpi->key_frame_rate_correction_factor = lc->key_frame_rate_correction_factor;
    cpi->gf_rate_correction_factor = lc->gf_rate_correction_factor;
    cpi->mb.zbin_over_quant = lc->zbin_over_quant;
    cpi->inter_frame_target = lc->inter_frame_target;
    cpi->total_byte_count = lc->total_byte_count;
    cpi->common.filter_level = lc->filter_level;
    cpi->last_frame_percent_intra = lc->last_frame_percent_intra;

    memcpy(cpi->mb.count_mb_ref_frame_usage,
           lc->count_mb_ref_frame_usage,
           sizeof(cpi->mb.count_mb_ref_frame_usage));
}
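
/* Scale val by the ratio num/denom, using 64-bit intermediates so the
 * multiplication cannot overflow. Used below to convert buffer levels given
 * in milliseconds into bits at a given bandwidth.
 */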
static int rescale(int val, int num, int denom)
{
    int64_t llnum = num;
    int64_t llden = denom;
    int64_t llval = val;

    return (int)(llval * llnum / llden);
}

static void init_temporal_layer_context(VP8_COMP *cpi,
                                        VP8_CONFIG *oxcf,
                                        const int layer,
                                        double prev_layer_framerate)
{
    LAYER_CONTEXT *lc = &cpi->layer_context[layer];

    lc->framerate = cpi->output_framerate / cpi->oxcf.rate_decimator[layer];
    lc->target_bandwidth = cpi->oxcf.target_bitrate[layer] * 1000;

    lc->starting_buffer_level_in_ms = oxcf->starting_buffer_level;
    lc->optimal_buffer_level_in_ms = oxcf->optimal_buffer_level;
    lc->maximum_buffer_size_in_ms = oxcf->maximum_buffer_size;
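
    /* The oxcf buffer levels are specified in milliseconds; convert them to
     * bits for this layer's bandwidth (ms * bits/s / 1000 = bits). A zero
     * optimal/maximum level falls back to one eighth of a second's worth of
     * data at the layer bandwidth.
     */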
    lc->starting_buffer_level =
        rescale((int)(oxcf->starting_buffer_level),
                lc->target_bandwidth, 1000);

    if (oxcf->optimal_buffer_level == 0)
        lc->optimal_buffer_level = lc->target_bandwidth / 8;
    else
        lc->optimal_buffer_level =
            rescale((int)(oxcf->optimal_buffer_level),
                    lc->target_bandwidth, 1000);

    if (oxcf->maximum_buffer_size == 0)
        lc->maximum_buffer_size = lc->target_bandwidth / 8;
    else
        lc->maximum_buffer_size =
            rescale((int)(oxcf->maximum_buffer_size),
                    lc->target_bandwidth, 1000);

    /* Work out the average size of a frame within this layer */
    if (layer > 0)
        lc->avg_frame_size_for_layer =
            (int)((cpi->oxcf.target_bitrate[layer] -
                   cpi->oxcf.target_bitrate[layer - 1]) * 1000 /
                  (lc->framerate - prev_layer_framerate));

    lc->active_worst_quality = cpi->oxcf.worst_allowed_q;
    lc->active_best_quality = cpi->oxcf.best_allowed_q;
    lc->avg_frame_qindex = cpi->oxcf.worst_allowed_q;

    lc->buffer_level = lc->starting_buffer_level;
    lc->bits_off_target = lc->starting_buffer_level;

    lc->total_actual_bits = 0;
    lc->ni_av_qi = 0;
    lc->ni_tot_qi = 0;
    lc->ni_frames = 0;
    lc->rate_correction_factor = 1.0;
    lc->key_frame_rate_correction_factor = 1.0;
    lc->gf_rate_correction_factor = 1.0;
    lc->inter_frame_target = 0;
}

// Upon a run-time change in temporal layers, reset the layer context parameters
// for any "new" layers. For "existing" layers, let them inherit the parameters
// from the previous layer state (at the same layer #). In future we may want
// to better map the previous layer state(s) to the "new" ones.
static void reset_temporal_layer_change(VP8_COMP *cpi,
                                        VP8_CONFIG *oxcf,
                                        const int prev_num_layers)
{
    int i;
    double prev_layer_framerate = 0;
    const int curr_num_layers = cpi->oxcf.number_of_layers;

    // If the previous state was 1 layer, get current layer context from cpi.
    // We need this to set the layer context for the new layers below.
    if (prev_num_layers == 1)
    {
        cpi->current_layer = 0;
        save_layer_context(cpi);
    }

    for (i = 0; i < curr_num_layers; i++)
    {
        LAYER_CONTEXT *lc = &cpi->layer_context[i];

        if (i >= prev_num_layers)
        {
            init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
        }

        // The initial buffer levels are set based on their starting levels.
        // We could set the buffer levels based on the previous state (normalized
        // properly by the layer bandwidths) but we would need to keep track of
        // the previous set of layer bandwidths (i.e., target_bitrate[i])
        // before the layer change. For now, reset to the starting levels.
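        // Note on units: starting_buffer_level_in_ms is in milliseconds and
        // target_bitrate[] is in kilobits per second (see the * 1000 scaling
        // in init_temporal_layer_context), so the product is already in bits.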
        lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
                           cpi->oxcf.target_bitrate[i];
        lc->bits_off_target = lc->buffer_level;
        // TODO(marpan): Should we set the rate_correction_factor and
        // active_worst/best_quality to values derived from the previous layer
        // state (to smooth out quality dips/rate fluctuations at the transition)?

        // We need to treat the 1 layer case separately: oxcf.target_bitrate[i]
        // is not set for 1 layer, and restore_layer_context()/save_layer_context()
        // are not called in the encoding loop, so we need to call them here to
        // pass the layer context state to |cpi|.
        if (curr_num_layers == 1)
        {
            lc->target_bandwidth = cpi->oxcf.target_bandwidth;
            lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
                               lc->target_bandwidth / 1000;
            lc->bits_off_target = lc->buffer_level;
            restore_layer_context(cpi, 0);
        }

        prev_layer_framerate = cpi->output_framerate /
                               cpi->oxcf.rate_decimator[i];
    }
}
static void setup_features(VP8_COMP *cpi)
{
    // If segmentation enabled set the update flags
    if (cpi->mb.e_mbd.segmentation_enabled)
    {
        cpi->mb.e_mbd.update_mb_segmentation_map = 1;
        cpi->mb.e_mbd.update_mb_segmentation_data = 1;
    }
    else
    {
        cpi->mb.e_mbd.update_mb_segmentation_map = 0;
        cpi->mb.e_mbd.update_mb_segmentation_data = 0;
    }

    cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 0;
    cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
    memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
    memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
    memset(cpi->mb.e_mbd.last_ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
    memset(cpi->mb.e_mbd.last_mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));

    set_default_lf_deltas(cpi);
}

static void dealloc_raw_frame_buffers(VP8_COMP *cpi);

static void dealloc_compressor_data(VP8_COMP *cpi)
{
    vpx_free(cpi->tplist);
    cpi->tplist = NULL;

    /* Delete last frame MV storage buffers */
    vpx_free(cpi->lfmv);
    cpi->lfmv = 0;

    vpx_free(cpi->lf_ref_frame_sign_bias);
    cpi->lf_ref_frame_sign_bias = 0;

    vpx_free(cpi->lf_ref_frame);
    cpi->lf_ref_frame = 0;
    /* Delete segmentation map */
    vpx_free(cpi->segmentation_map);
    cpi->segmentation_map = 0;

    vpx_free(cpi->active_map);
    cpi->active_map = 0;

    vp8_de_alloc_frame_buffers(&cpi->common);

    vp8_yv12_de_alloc_frame_buffer(&cpi->pick_lf_lvl_frame);
    vp8_yv12_de_alloc_frame_buffer(&cpi->scaled_source);
    dealloc_raw_frame_buffers(cpi);

    vpx_free(cpi->tok);
    cpi->tok = 0;

    /* Structure used to monitor GF usage */
    vpx_free(cpi->gf_active_flags);
    cpi->gf_active_flags = 0;

    /* Activity mask based per mb zbin adjustments */
    vpx_free(cpi->mb_activity_map);
    cpi->mb_activity_map = 0;

    vpx_free(cpi->mb.pip);
    cpi->mb.pip = 0;

#if CONFIG_MULTITHREAD
    vpx_free(cpi->mt_current_mb_col);
    cpi->mt_current_mb_col = NULL;
#endif
}

static void enable_segmentation(VP8_COMP *cpi)
{
    /* Set the appropriate feature bit */
    cpi->mb.e_mbd.segmentation_enabled = 1;
    cpi->mb.e_mbd.update_mb_segmentation_map = 1;
    cpi->mb.e_mbd.update_mb_segmentation_data = 1;
}

static void disable_segmentation(VP8_COMP *cpi)
{
    /* Clear the appropriate feature bit */
    cpi->mb.e_mbd.segmentation_enabled = 0;
}
/* Valid values for a segment are 0 to 3.
 * The segmentation map is arranged as [Rows][Columns].
 */
static void set_segmentation_map(VP8_COMP *cpi, unsigned char *segmentation_map)
{
    /* Copy in the new segmentation map */
    memcpy(cpi->segmentation_map, segmentation_map, (cpi->common.mb_rows * cpi->common.mb_cols));

    /* Signal that the map should be updated. */
    cpi->mb.e_mbd.update_mb_segmentation_map = 1;
    cpi->mb.e_mbd.update_mb_segmentation_data = 1;
}
/* The values given for each segment can be either deltas (from the default
 * value chosen for the frame) or absolute values.
 *
 * Valid range for abs values is:
 *     (0-127 for MB_LVL_ALT_Q), (0-63 for SEGMENT_ALT_LF)
 * Valid range for delta values is:
 *     (+/-127 for MB_LVL_ALT_Q), (+/-63 for SEGMENT_ALT_LF)
 *
 * abs_delta = SEGMENT_DELTADATA (deltas)
 * abs_delta = SEGMENT_ABSDATA (use the absolute values given).
 */
static void set_segment_data(VP8_COMP *cpi, signed char *feature_data, unsigned char abs_delta)
{
    cpi->mb.e_mbd.mb_segement_abs_delta = abs_delta;
    memcpy(cpi->segment_feature_data, feature_data, sizeof(cpi->segment_feature_data));
}

static void segmentation_test_function(VP8_COMP *cpi)
{
    unsigned char *seg_map;
    signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];

    // Create a temporary map for segmentation data.
    CHECK_MEM_ERROR(seg_map, vpx_calloc(cpi->common.mb_rows * cpi->common.mb_cols, 1));

    // Set the segmentation map
    set_segmentation_map(cpi, seg_map);

    // Activate segmentation.
    enable_segmentation(cpi);

    // Set up the quant segment data
    feature_data[MB_LVL_ALT_Q][0] = 0;
    feature_data[MB_LVL_ALT_Q][1] = 4;
    feature_data[MB_LVL_ALT_Q][2] = 0;
    feature_data[MB_LVL_ALT_Q][3] = 0;

    // Set up the loop segment data
    feature_data[MB_LVL_ALT_LF][0] = 0;
    feature_data[MB_LVL_ALT_LF][1] = 0;
    feature_data[MB_LVL_ALT_LF][2] = 0;
    feature_data[MB_LVL_ALT_LF][3] = 0;

    // Initialise the feature data structure
    // SEGMENT_DELTADATA 0, SEGMENT_ABSDATA 1
    set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
    // Delete segmentation map
    vpx_free(seg_map);
    seg_map = 0;
}

/* A simple function to cyclically refresh the background at a lower Q */
static void cyclic_background_refresh(VP8_COMP *cpi, int Q, int lf_adjustment)
{
    unsigned char *seg_map = cpi->segmentation_map;
    signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
    int i;
    int block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
    int mbs_in_frame = cpi->common.mb_rows * cpi->common.mb_cols;

    cpi->cyclic_refresh_q = Q / 2;

    if (cpi->oxcf.screen_content_mode) {
        // Modify quality ramp-up based on Q. Above some Q level, increase the
        // number of blocks to be refreshed, and reduce it below the threshold.
        // Turn off under certain conditions (i.e., away from a key frame, when
        // we are at good quality (low Q) and most of the blocks were
        // skip-encoded in the previous frame).
        int qp_thresh = (cpi->oxcf.screen_content_mode == 2) ? 80 : 100;
        if (Q >= qp_thresh) {
            cpi->cyclic_refresh_mode_max_mbs_perframe =
                (cpi->common.mb_rows * cpi->common.mb_cols) / 10;
        } else if (cpi->frames_since_key > 250 &&
                   Q < 20 &&
                   cpi->mb.skip_true_count > (int)(0.95 * mbs_in_frame)) {
            cpi->cyclic_refresh_mode_max_mbs_perframe = 0;
        } else {
            cpi->cyclic_refresh_mode_max_mbs_perframe =
                (cpi->common.mb_rows * cpi->common.mb_cols) / 20;
        }
        block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
    }

    // Set every macroblock to be eligible for update.
    // For key frame this will reset seg map to 0.
    memset(cpi->segmentation_map, 0, mbs_in_frame);

    if (cpi->common.frame_type != KEY_FRAME && block_count > 0)
    {
        /* Cycle through the macro_block rows */
        /* MB loop to set local segmentation map */
        i = cpi->cyclic_refresh_mode_index;
        assert(i < mbs_in_frame);

        do
        {
            /* If the MB is a candidate for clean-up then mark it for
             * possible boost/refresh (segment 1). The segment id may get
             * reset to 0 later if the MB gets coded as anything other than
             * last frame 0,0, as only (last frame 0,0) MBs are eligible for
             * refresh: that is to say, MBs likely to be background blocks.
             */
            if (cpi->cyclic_refresh_map[i] == 0)
            {
                seg_map[i] = 1;
                block_count--;
            }
            else if (cpi->cyclic_refresh_map[i] < 0)
                cpi->cyclic_refresh_map[i]++;

            i++;
            if (i == mbs_in_frame)
                i = 0;
        }
        while (block_count && i != cpi->cyclic_refresh_mode_index);

        cpi->cyclic_refresh_mode_index = i;

#if CONFIG_TEMPORAL_DENOISING
        if (cpi->oxcf.noise_sensitivity > 0) {
            if (cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive &&
                Q < (int)cpi->denoiser.denoise_pars.qp_thresh &&
                (cpi->frames_since_key >
                 2 * cpi->denoiser.denoise_pars.consec_zerolast)) {
                // Under aggressive denoising, use segmentation to turn off loop
                // filter below some qp thresh. The filter is reduced for all
                // blocks that have been encoded as ZEROMV LAST x frames in a row,
                // where x is set by cpi->denoiser.denoise_pars.consec_zerolast.
                // This is to avoid "dot" artifacts that can occur from repeated
                // loop filtering on noisy input source.
                cpi->cyclic_refresh_q = Q;
                // lf_adjustment = -MAX_LOOP_FILTER;
                lf_adjustment = -40;
                for (i = 0; i < mbs_in_frame; ++i) {
                    seg_map[i] = (cpi->consec_zero_last[i] >
                                  cpi->denoiser.denoise_pars.consec_zerolast) ? 1 : 0;
                }
            }
        }
#endif
    }

    /* Activate segmentation. */
    cpi->mb.e_mbd.update_mb_segmentation_map = 1;
    cpi->mb.e_mbd.update_mb_segmentation_data = 1;
    enable_segmentation(cpi);

    /* Set up the quant segment data */
    feature_data[MB_LVL_ALT_Q][0] = 0;
    feature_data[MB_LVL_ALT_Q][1] = (cpi->cyclic_refresh_q - Q);
    feature_data[MB_LVL_ALT_Q][2] = 0;
    feature_data[MB_LVL_ALT_Q][3] = 0;

    /* Set up the loop segment data */
    feature_data[MB_LVL_ALT_LF][0] = 0;
    feature_data[MB_LVL_ALT_LF][1] = lf_adjustment;
    feature_data[MB_LVL_ALT_LF][2] = 0;
    feature_data[MB_LVL_ALT_LF][3] = 0;

    /* Initialise the feature data structure */
    set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
}

static void set_default_lf_deltas(VP8_COMP *cpi)
{
    cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 1;
    cpi->mb.e_mbd.mode_ref_lf_delta_update = 1;

    memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
    memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));

    /* Test of ref frame deltas */
    cpi->mb.e_mbd.ref_lf_deltas[INTRA_FRAME] = 2;
    cpi->mb.e_mbd.ref_lf_deltas[LAST_FRAME] = 0;
    cpi->mb.e_mbd.ref_lf_deltas[GOLDEN_FRAME] = -2;
    cpi->mb.e_mbd.ref_lf_deltas[ALTREF_FRAME] = -2;

    cpi->mb.e_mbd.mode_lf_deltas[0] = 4;        /* BPRED */

    if (cpi->oxcf.Mode == MODE_REALTIME)
        cpi->mb.e_mbd.mode_lf_deltas[1] = -12;  /* Zero */
    else
        cpi->mb.e_mbd.mode_lf_deltas[1] = -2;   /* Zero */

    cpi->mb.e_mbd.mode_lf_deltas[2] = 2;        /* New mv */
    cpi->mb.e_mbd.mode_lf_deltas[3] = 4;        /* Split mv */
}

/* Convenience macros for mapping speed and mode into a continuous
 * range
 */
#define GOOD(x) (x+1)
#define RT(x)   (x+7)

static int speed_map(int speed, const int *map)
{
    int res;

    do
    {
        res = *map++;
    } while (speed >= *map++);

    return res;
}
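
/* The *_map tables below are laid out as
 *     { value_0, breakpoint_1, value_1, breakpoint_2, value_2, ..., INT_MAX }
 * where the breakpoints are speeds on the continuous GOOD()/RT() axis defined
 * by the macros above (vp8_set_speed_features() maps good-quality speeds
 * through GOOD() and realtime speeds through RT() before the lookups).
 * speed_map() returns the last value whose preceding breakpoint is <= the
 * requested speed, with value_0 as the default. For example,
 * thresh_mult_map_znn gives 0 below GOOD(2), 1500 from GOOD(2), 2000 from
 * GOOD(3), 1000 from RT(0), and 2000 from RT(2) upward.
 */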
static const int thresh_mult_map_znn[] = {
    /* map common to zero, nearest, and near */
    0, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(2), 2000, INT_MAX
};

static const int thresh_mult_map_vhpred[] = {
    1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(1), 2000,
    RT(7), INT_MAX, INT_MAX
};

static const int thresh_mult_map_bpred[] = {
    2000, GOOD(0), 2500, GOOD(2), 5000, GOOD(3), 7500, RT(0), 2500, RT(1), 5000,
    RT(6), INT_MAX, INT_MAX
};

static const int thresh_mult_map_tm[] = {
    1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 0, RT(1), 1000, RT(2), 2000,
    RT(7), INT_MAX, INT_MAX
};

static const int thresh_mult_map_new1[] = {
    1000, GOOD(2), 2000, RT(0), 2000, INT_MAX
};

static const int thresh_mult_map_new2[] = {
    1000, GOOD(2), 2000, GOOD(3), 2500, GOOD(5), 4000, RT(0), 2000, RT(2), 2500,
    RT(5), 4000, INT_MAX
};

static const int thresh_mult_map_split1[] = {
    2500, GOOD(0), 1700, GOOD(2), 10000, GOOD(3), 25000, GOOD(4), INT_MAX,
    RT(0), 5000, RT(1), 10000, RT(2), 25000, RT(3), INT_MAX, INT_MAX
};

static const int thresh_mult_map_split2[] = {
    5000, GOOD(0), 4500, GOOD(2), 20000, GOOD(3), 50000, GOOD(4), INT_MAX,
    RT(0), 10000, RT(1), 20000, RT(2), 50000, RT(3), INT_MAX, INT_MAX
};

static const int mode_check_freq_map_zn2[] = {
    /* {zero,nearest}{2,3} */
    0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
};

static const int mode_check_freq_map_vhbpred[] = {
    0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(5), 4, INT_MAX
};

static const int mode_check_freq_map_near2[] = {
    0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(10), 1<<2, RT(11), 1<<3, RT(12), 1<<4,
    INT_MAX
};

static const int mode_check_freq_map_new1[] = {
    0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
};

static const int mode_check_freq_map_new2[] = {
    0, GOOD(5), 4, RT(0), 0, RT(3), 4, RT(10), 1<<3, RT(11), 1<<4, RT(12), 1<<5,
    INT_MAX
};

static const int mode_check_freq_map_split1[] = {
    0, GOOD(2), 2, GOOD(3), 7, RT(1), 2, RT(2), 7, INT_MAX
};

static const int mode_check_freq_map_split2[] = {
    0, GOOD(1), 2, GOOD(2), 4, GOOD(3), 15, RT(1), 4, RT(2), 15, INT_MAX
};

void vp8_set_speed_features(VP8_COMP *cpi)
{
    SPEED_FEATURES *sf = &cpi->sf;
    int Mode = cpi->compressor_speed;
    int Speed = cpi->Speed;
    int i;
    VP8_COMMON *cm = &cpi->common;
    int last_improved_quant = sf->improved_quant;
    int ref_frames;

    /* Initialise default mode frequency sampling variables */
    for (i = 0; i < MAX_MODES; i++)
    {
        cpi->mode_check_freq[i] = 0;
    }

    cpi->mb.mbs_tested_so_far = 0;
    cpi->mb.mbs_zero_last_dot_suppress = 0;

    /* best quality defaults */
    sf->RD = 1;
    sf->search_method = NSTEP;
    sf->improved_quant = 1;
    sf->improved_dct = 1;
    sf->auto_filter = 1;
    sf->recode_loop = 1;
    sf->quarter_pixel_search = 1;
    sf->half_pixel_search = 1;
    sf->iterative_sub_pixel = 1;
    sf->optimize_coefficients = 1;
    sf->use_fastquant_for_pick = 0;
    sf->no_skip_block4x4_search = 1;
    sf->first_step = 0;
    sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
    sf->improved_mv_pred = 1;

    /* default thresholds to 0 */
    for (i = 0; i < MAX_MODES; i++)
        sf->thresh_mult[i] = 0;

    /* Count enabled references */
    ref_frames = 1;
    if (cpi->ref_frame_flags & VP8_LAST_FRAME)
        ref_frames++;
    if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
        ref_frames++;
    if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
        ref_frames++;

    /* Convert speed to continuous range, with clamping */
    if (Mode == 0)
        Speed = 0;
    else if (Mode == 2)
        Speed = RT(Speed);
    else
    {
        if (Speed > 5)
            Speed = 5;
        Speed = GOOD(Speed);
    }

    sf->thresh_mult[THR_ZERO1] =
    sf->thresh_mult[THR_NEAREST1] =
    sf->thresh_mult[THR_NEAR1] =
    sf->thresh_mult[THR_DC] = 0; /* always */

    sf->thresh_mult[THR_ZERO2] =
    sf->thresh_mult[THR_ZERO3] =
    sf->thresh_mult[THR_NEAREST2] =
    sf->thresh_mult[THR_NEAREST3] =
    sf->thresh_mult[THR_NEAR2] =
    sf->thresh_mult[THR_NEAR3] = speed_map(Speed, thresh_mult_map_znn);

    sf->thresh_mult[THR_V_PRED] =
    sf->thresh_mult[THR_H_PRED] = speed_map(Speed, thresh_mult_map_vhpred);
    sf->thresh_mult[THR_B_PRED] = speed_map(Speed, thresh_mult_map_bpred);
    sf->thresh_mult[THR_TM] = speed_map(Speed, thresh_mult_map_tm);
    sf->thresh_mult[THR_NEW1] = speed_map(Speed, thresh_mult_map_new1);
    sf->thresh_mult[THR_NEW2] =
    sf->thresh_mult[THR_NEW3] = speed_map(Speed, thresh_mult_map_new2);
    sf->thresh_mult[THR_SPLIT1] = speed_map(Speed, thresh_mult_map_split1);
    sf->thresh_mult[THR_SPLIT2] =
    sf->thresh_mult[THR_SPLIT3] = speed_map(Speed, thresh_mult_map_split2);

    // Special case for temporal layers.
    // Reduce the thresholds for zero/nearest/near for GOLDEN, if GOLDEN is
    // used as second reference. We don't modify thresholds for ALTREF case
    // since ALTREF is usually used as long-term reference in temporal layers.
    if ((cpi->Speed <= 6) &&
        (cpi->oxcf.number_of_layers > 1) &&
        (cpi->ref_frame_flags & VP8_LAST_FRAME) &&
        (cpi->ref_frame_flags & VP8_GOLD_FRAME)) {
        if (cpi->closest_reference_frame == GOLDEN_FRAME) {
            sf->thresh_mult[THR_ZERO2] = sf->thresh_mult[THR_ZERO2] >> 3;
            sf->thresh_mult[THR_NEAREST2] = sf->thresh_mult[THR_NEAREST2] >> 3;
            sf->thresh_mult[THR_NEAR2] = sf->thresh_mult[THR_NEAR2] >> 3;
        } else {
            sf->thresh_mult[THR_ZERO2] = sf->thresh_mult[THR_ZERO2] >> 1;
            sf->thresh_mult[THR_NEAREST2] = sf->thresh_mult[THR_NEAREST2] >> 1;
            sf->thresh_mult[THR_NEAR2] = sf->thresh_mult[THR_NEAR2] >> 1;
        }
    }

    cpi->mode_check_freq[THR_ZERO1] =
    cpi->mode_check_freq[THR_NEAREST1] =
    cpi->mode_check_freq[THR_NEAR1] =
    cpi->mode_check_freq[THR_TM] =
    cpi->mode_check_freq[THR_DC] = 0; /* always */

    cpi->mode_check_freq[THR_ZERO2] =
    cpi->mode_check_freq[THR_ZERO3] =
    cpi->mode_check_freq[THR_NEAREST2] =
    cpi->mode_check_freq[THR_NEAREST3] = speed_map(Speed,
                                                   mode_check_freq_map_zn2);

    cpi->mode_check_freq[THR_NEAR2] =
    cpi->mode_check_freq[THR_NEAR3] = speed_map(Speed,
                                                mode_check_freq_map_near2);

    cpi->mode_check_freq[THR_V_PRED] =
    cpi->mode_check_freq[THR_H_PRED] =
    cpi->mode_check_freq[THR_B_PRED] = speed_map(Speed,
                                                 mode_check_freq_map_vhbpred);

    cpi->mode_check_freq[THR_NEW1] = speed_map(Speed,
                                               mode_check_freq_map_new1);

    cpi->mode_check_freq[THR_NEW2] =
    cpi->mode_check_freq[THR_NEW3] = speed_map(Speed,
                                               mode_check_freq_map_new2);

    cpi->mode_check_freq[THR_SPLIT1] = speed_map(Speed,
                                                 mode_check_freq_map_split1);

    cpi->mode_check_freq[THR_SPLIT2] =
    cpi->mode_check_freq[THR_SPLIT3] = speed_map(Speed,
                                                 mode_check_freq_map_split2);

    Speed = cpi->Speed;

    switch (Mode)
    {
#if !(CONFIG_REALTIME_ONLY)
    case 0: /* best quality mode */
        sf->first_step = 0;
        sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
        break;
    case 1:
    case 3:
        if (Speed > 0)
        {
            /* Disable coefficient optimization above speed 0 */
            sf->optimize_coefficients = 0;
            sf->use_fastquant_for_pick = 1;
            sf->no_skip_block4x4_search = 0;
            sf->first_step = 1;
        }

        if (Speed > 2)
        {
            sf->improved_quant = 0;
            sf->improved_dct = 0;

            /* Only do recode loop on key frames, golden frames and
             * alt ref frames
             */
            sf->recode_loop = 2;
        }

        if (Speed > 3)
        {
            sf->auto_filter = 1;
            sf->recode_loop = 0; /* recode loop off */
            sf->RD = 0;          /* Turn rd off */
        }

        if (Speed > 4)
        {
            sf->auto_filter = 0; /* Faster selection of loop filter */
        }

        break;
#endif
    case 2:
        sf->optimize_coefficients = 0;
        sf->recode_loop = 0;
        sf->auto_filter = 1;
        sf->iterative_sub_pixel = 1;
        sf->search_method = NSTEP;

        if (Speed > 0)
        {
            sf->improved_quant = 0;
            sf->improved_dct = 0;
            sf->use_fastquant_for_pick = 1;
            sf->no_skip_block4x4_search = 0;
            sf->first_step = 1;
        }

        if (Speed > 2)
            sf->auto_filter = 0; /* Faster selection of loop filter */

        if (Speed > 3)
        {
            sf->RD = 0;
            sf->auto_filter = 1;
        }

        if (Speed > 4)
        {
            sf->auto_filter = 0; /* Faster selection of loop filter */
            sf->search_method = HEX;
            sf->iterative_sub_pixel = 0;
        }
  859. if (Speed > 6)
  860. {
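/* Descriptive note: the loop below derives a motion-search threshold from the
 * error_bins histogram. Bins up to the encode_breakout floor are treated as
 * skipped macroblocks; the threshold is then placed at the bin where the
 * cumulative count of the remaining macroblocks reaches (Speed - 6) / 10 of
 * their total. The bin width of 128 is inferred from the i << 7 scaling used
 * for thresh below. */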
  861. unsigned int sum = 0;
  862. unsigned int total_mbs = cm->MBs;
  863. int thresh;
  864. unsigned int total_skip;
  865. int min = 2000;
  866. if (cpi->oxcf.encode_breakout > 2000)
  867. min = cpi->oxcf.encode_breakout;
  868. min >>= 7;
  869. for (i = 0; i < min; i++)
  870. {
  871. sum += cpi->mb.error_bins[i];
  872. }
  873. total_skip = sum;
  874. sum = 0;
875. /* Continue from the skip-bin cutoff; the thresh derived below is clamped to at least 2000. */
  876. for (; i < 1024; i++)
  877. {
  878. sum += cpi->mb.error_bins[i];
  879. if (10 * sum >= (unsigned int)(cpi->Speed - 6)*(total_mbs - total_skip))
  880. break;
  881. }
  882. i--;
  883. thresh = (i << 7);
  884. if (thresh < 2000)
  885. thresh = 2000;
  886. if (ref_frames > 1)
  887. {
  888. sf->thresh_mult[THR_NEW1 ] = thresh;
  889. sf->thresh_mult[THR_NEAREST1 ] = thresh >> 1;
  890. sf->thresh_mult[THR_NEAR1 ] = thresh >> 1;
  891. }
  892. if (ref_frames > 2)
  893. {
  894. sf->thresh_mult[THR_NEW2] = thresh << 1;
  895. sf->thresh_mult[THR_NEAREST2 ] = thresh;
  896. sf->thresh_mult[THR_NEAR2 ] = thresh;
  897. }
  898. if (ref_frames > 3)
  899. {
  900. sf->thresh_mult[THR_NEW3] = thresh << 1;
  901. sf->thresh_mult[THR_NEAREST3 ] = thresh;
  902. sf->thresh_mult[THR_NEAR3 ] = thresh;
  903. }
  904. sf->improved_mv_pred = 0;
  905. }
  906. if (Speed > 8)
  907. sf->quarter_pixel_search = 0;
  908. if(cm->version == 0)
  909. {
  910. cm->filter_type = NORMAL_LOOPFILTER;
  911. if (Speed >= 14)
  912. cm->filter_type = SIMPLE_LOOPFILTER;
  913. }
  914. else
  915. {
  916. cm->filter_type = SIMPLE_LOOPFILTER;
  917. }
  918. /* This has a big hit on quality. Last resort */
  919. if (Speed >= 15)
  920. sf->half_pixel_search = 0;
  921. memset(cpi->mb.error_bins, 0, sizeof(cpi->mb.error_bins));
922. } /* switch */
  923. /* Slow quant, dct and trellis not worthwhile for first pass
  924. * so make sure they are always turned off.
  925. */
  926. if ( cpi->pass == 1 )
  927. {
  928. sf->improved_quant = 0;
  929. sf->optimize_coefficients = 0;
  930. sf->improved_dct = 0;
  931. }
  932. if (cpi->sf.search_method == NSTEP)
  933. {
  934. vp8_init3smotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
  935. }
  936. else if (cpi->sf.search_method == DIAMOND)
  937. {
  938. vp8_init_dsmotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
  939. }
  940. if (cpi->sf.improved_dct)
  941. {
  942. cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
  943. cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
  944. }
  945. else
  946. {
  947. /* No fast FDCT defined for any platform at this time. */
  948. cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
  949. cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
  950. }
  951. cpi->mb.short_walsh4x4 = vp8_short_walsh4x4;
  952. if (cpi->sf.improved_quant)
  953. {
  954. cpi->mb.quantize_b = vp8_regular_quantize_b;
  955. }
  956. else
  957. {
  958. cpi->mb.quantize_b = vp8_fast_quantize_b;
  959. }
  960. if (cpi->sf.improved_quant != last_improved_quant)
  961. vp8cx_init_quantizer(cpi);
  962. if (cpi->sf.iterative_sub_pixel == 1)
  963. {
  964. cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step_iteratively;
  965. }
  966. else if (cpi->sf.quarter_pixel_search)
  967. {
  968. cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step;
  969. }
  970. else if (cpi->sf.half_pixel_search)
  971. {
  972. cpi->find_fractional_mv_step = vp8_find_best_half_pixel_step;
  973. }
  974. else
  975. {
  976. cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
  977. }
  978. if (cpi->sf.optimize_coefficients == 1 && cpi->pass!=1)
  979. cpi->mb.optimize = 1;
  980. else
  981. cpi->mb.optimize = 0;
  982. if (cpi->common.full_pixel)
  983. cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
  984. #ifdef SPEEDSTATS
  985. frames_at_speed[cpi->Speed]++;
  986. #endif
  987. }
  988. #undef GOOD
  989. #undef RT
  990. static void alloc_raw_frame_buffers(VP8_COMP *cpi)
  991. {
  992. #if VP8_TEMPORAL_ALT_REF
  993. int width = (cpi->oxcf.Width + 15) & ~15;
  994. int height = (cpi->oxcf.Height + 15) & ~15;
  995. #endif
  996. cpi->lookahead = vp8_lookahead_init(cpi->oxcf.Width, cpi->oxcf.Height,
  997. cpi->oxcf.lag_in_frames);
  998. if(!cpi->lookahead)
  999. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1000. "Failed to allocate lag buffers");
  1001. #if VP8_TEMPORAL_ALT_REF
  1002. if (vp8_yv12_alloc_frame_buffer(&cpi->alt_ref_buffer,
  1003. width, height, VP8BORDERINPIXELS))
  1004. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1005. "Failed to allocate altref buffer");
  1006. #endif
  1007. }
  1008. static void dealloc_raw_frame_buffers(VP8_COMP *cpi)
  1009. {
  1010. #if VP8_TEMPORAL_ALT_REF
  1011. vp8_yv12_de_alloc_frame_buffer(&cpi->alt_ref_buffer);
  1012. #endif
  1013. vp8_lookahead_destroy(cpi->lookahead);
  1014. }
  1015. static int vp8_alloc_partition_data(VP8_COMP *cpi)
  1016. {
  1017. vpx_free(cpi->mb.pip);
  1018. cpi->mb.pip = vpx_calloc((cpi->common.mb_cols + 1) *
  1019. (cpi->common.mb_rows + 1),
  1020. sizeof(PARTITION_INFO));
  1021. if(!cpi->mb.pip)
  1022. return 1;
  1023. cpi->mb.pi = cpi->mb.pip + cpi->common.mode_info_stride + 1;
  1024. return 0;
  1025. }
  1026. void vp8_alloc_compressor_data(VP8_COMP *cpi)
  1027. {
  1028. VP8_COMMON *cm = & cpi->common;
  1029. int width = cm->Width;
  1030. int height = cm->Height;
  1031. if (vp8_alloc_frame_buffers(cm, width, height))
  1032. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1033. "Failed to allocate frame buffers");
  1034. if (vp8_alloc_partition_data(cpi))
  1035. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1036. "Failed to allocate partition data");
  1037. if ((width & 0xf) != 0)
  1038. width += 16 - (width & 0xf);
  1039. if ((height & 0xf) != 0)
  1040. height += 16 - (height & 0xf);
  1041. if (vp8_yv12_alloc_frame_buffer(&cpi->pick_lf_lvl_frame,
  1042. width, height, VP8BORDERINPIXELS))
  1043. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1044. "Failed to allocate last frame buffer");
  1045. if (vp8_yv12_alloc_frame_buffer(&cpi->scaled_source,
  1046. width, height, VP8BORDERINPIXELS))
  1047. vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
  1048. "Failed to allocate scaled source buffer");
  1049. vpx_free(cpi->tok);
  1050. {
  1051. #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
  1052. unsigned int tokens = 8 * 24 * 16; /* one MB for each thread */
  1053. #else
  1054. unsigned int tokens = cm->mb_rows * cm->mb_cols * 24 * 16;
  1055. #endif
  1056. CHECK_MEM_ERROR(cpi->tok, vpx_calloc(tokens, sizeof(*cpi->tok)));
  1057. }
  1058. /* Data used for real time vc mode to see if gf needs refreshing */
  1059. cpi->zeromv_count = 0;
  1060. /* Structures used to monitor GF usage */
  1061. vpx_free(cpi->gf_active_flags);
  1062. CHECK_MEM_ERROR(cpi->gf_active_flags,
  1063. vpx_calloc(sizeof(*cpi->gf_active_flags),
  1064. cm->mb_rows * cm->mb_cols));
  1065. cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
  1066. vpx_free(cpi->mb_activity_map);
  1067. CHECK_MEM_ERROR(cpi->mb_activity_map,
  1068. vpx_calloc(sizeof(*cpi->mb_activity_map),
  1069. cm->mb_rows * cm->mb_cols));
  1070. /* allocate memory for storing last frame's MVs for MV prediction. */
  1071. vpx_free(cpi->lfmv);
  1072. CHECK_MEM_ERROR(cpi->lfmv, vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
  1073. sizeof(*cpi->lfmv)));
  1074. vpx_free(cpi->lf_ref_frame_sign_bias);
  1075. CHECK_MEM_ERROR(cpi->lf_ref_frame_sign_bias,
  1076. vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
  1077. sizeof(*cpi->lf_ref_frame_sign_bias)));
  1078. vpx_free(cpi->lf_ref_frame);
  1079. CHECK_MEM_ERROR(cpi->lf_ref_frame,
  1080. vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
  1081. sizeof(*cpi->lf_ref_frame)));
  1082. /* Create the encoder segmentation map and set all entries to 0 */
  1083. vpx_free(cpi->segmentation_map);
  1084. CHECK_MEM_ERROR(cpi->segmentation_map,
  1085. vpx_calloc(cm->mb_rows * cm->mb_cols,
  1086. sizeof(*cpi->segmentation_map)));
  1087. cpi->cyclic_refresh_mode_index = 0;
  1088. vpx_free(cpi->active_map);
  1089. CHECK_MEM_ERROR(cpi->active_map,
  1090. vpx_calloc(cm->mb_rows * cm->mb_cols,
  1091. sizeof(*cpi->active_map)));
  1092. memset(cpi->active_map , 1, (cm->mb_rows * cm->mb_cols));
  1093. #if CONFIG_MULTITHREAD
  1094. if (width < 640)
  1095. cpi->mt_sync_range = 1;
  1096. else if (width <= 1280)
  1097. cpi->mt_sync_range = 4;
  1098. else if (width <= 2560)
  1099. cpi->mt_sync_range = 8;
  1100. else
  1101. cpi->mt_sync_range = 16;
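/* Descriptive note (an assumption based on how the row-based multithreaded
 * encoder consumes this value): mt_sync_range is the column lead, in
 * macroblocks, required of the row above before the next row's thread
 * proceeds; wider frames use a coarser interval to cut synchronization
 * overhead. */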
  1102. if (cpi->oxcf.multi_threaded > 1)
  1103. {
  1104. vpx_free(cpi->mt_current_mb_col);
  1105. CHECK_MEM_ERROR(cpi->mt_current_mb_col,
  1106. vpx_malloc(sizeof(*cpi->mt_current_mb_col) * cm->mb_rows));
  1107. }
  1108. #endif
  1109. vpx_free(cpi->tplist);
  1110. CHECK_MEM_ERROR(cpi->tplist, vpx_malloc(sizeof(TOKENLIST) * cm->mb_rows));
  1111. #if CONFIG_TEMPORAL_DENOISING
  1112. if (cpi->oxcf.noise_sensitivity > 0) {
  1113. vp8_denoiser_free(&cpi->denoiser);
  1114. vp8_denoiser_allocate(&cpi->denoiser, width, height,
  1115. cm->mb_rows, cm->mb_cols,
  1116. cpi->oxcf.noise_sensitivity);
  1117. }
  1118. #endif
  1119. }
  1120. /* Quant MOD */
  1121. static const int q_trans[] =
  1122. {
  1123. 0, 1, 2, 3, 4, 5, 7, 8,
  1124. 9, 10, 12, 13, 15, 17, 18, 19,
  1125. 20, 21, 23, 24, 25, 26, 27, 28,
  1126. 29, 30, 31, 33, 35, 37, 39, 41,
  1127. 43, 45, 47, 49, 51, 53, 55, 57,
  1128. 59, 61, 64, 67, 70, 73, 76, 79,
  1129. 82, 85, 88, 91, 94, 97, 100, 103,
  1130. 106, 109, 112, 115, 118, 121, 124, 127,
  1131. };
  1132. int vp8_reverse_trans(int x)
  1133. {
  1134. int i;
  1135. for (i = 0; i < 64; i++)
  1136. if (q_trans[i] >= x)
  1137. return i;
  1138. return 63;
  1139. }
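/* Hedged usage sketch (illustrative only; the helper below is hypothetical and
 * is not referenced anywhere in the encoder): round-trip an external 0..63
 * quantizer value through q_trans[] and vp8_reverse_trans(). */
#if 0
static void example_q_trans_round_trip(void)
{
    int internal_q = q_trans[10];                    /* 12, from the table above */
    int external_q = vp8_reverse_trans(internal_q);  /* smallest i with q_trans[i] >= 12, i.e. 10 */
    (void)internal_q;
    (void)external_q;
}
#endif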
  1140. void vp8_new_framerate(VP8_COMP *cpi, double framerate)
  1141. {
  1142. if(framerate < .1)
  1143. framerate = 30;
  1144. cpi->framerate = framerate;
  1145. cpi->output_framerate = framerate;
  1146. cpi->per_frame_bandwidth = (int)(cpi->oxcf.target_bandwidth /
  1147. cpi->output_framerate);
  1148. cpi->av_per_frame_bandwidth = cpi->per_frame_bandwidth;
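/* Worked example (illustrative numbers only): a 500000 bit/s target at an
 * output framerate of 25 fps gives per_frame_bandwidth = 20000 bits. */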
  1149. cpi->min_frame_bandwidth = (int)(cpi->av_per_frame_bandwidth *
  1150. cpi->oxcf.two_pass_vbrmin_section / 100);
  1151. /* Set Maximum gf/arf interval */
  1152. cpi->max_gf_interval = ((int)(cpi->output_framerate / 2.0) + 2);
  1153. if(cpi->max_gf_interval < 12)
  1154. cpi->max_gf_interval = 12;
  1155. /* Extended interval for genuinely static scenes */
  1156. cpi->twopass.static_scene_max_gf_interval = cpi->key_frame_frequency >> 1;
1157. /* Special conditions when alt ref frame is enabled in lagged compress mode */
  1158. if (cpi->oxcf.play_alternate && cpi->oxcf.lag_in_frames)
  1159. {
  1160. if (cpi->max_gf_interval > cpi->oxcf.lag_in_frames - 1)
  1161. cpi->max_gf_interval = cpi->oxcf.lag_in_frames - 1;
  1162. if (cpi->twopass.static_scene_max_gf_interval > cpi->oxcf.lag_in_frames - 1)
  1163. cpi->twopass.static_scene_max_gf_interval = cpi->oxcf.lag_in_frames - 1;
  1164. }
  1165. if ( cpi->max_gf_interval > cpi->twopass.static_scene_max_gf_interval )
  1166. cpi->max_gf_interval = cpi->twopass.static_scene_max_gf_interval;
  1167. }
  1168. static void init_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
  1169. {
  1170. VP8_COMMON *cm = &cpi->common;
  1171. cpi->oxcf = *oxcf;
  1172. cpi->auto_gold = 1;
  1173. cpi->auto_adjust_gold_quantizer = 1;
  1174. cm->version = oxcf->Version;
  1175. vp8_setup_version(cm);
  1176. /* Frame rate is not available on the first frame, as it's derived from
  1177. * the observed timestamps. The actual value used here doesn't matter
  1178. * too much, as it will adapt quickly.
  1179. */
  1180. if (oxcf->timebase.num > 0) {
  1181. cpi->framerate = (double)(oxcf->timebase.den) /
  1182. (double)(oxcf->timebase.num);
  1183. } else {
  1184. cpi->framerate = 30;
  1185. }
  1186. /* If the reciprocal of the timebase seems like a reasonable framerate,
  1187. * then use that as a guess, otherwise use 30.
  1188. */
  1189. if (cpi->framerate > 180)
  1190. cpi->framerate = 30;
  1191. cpi->ref_framerate = cpi->framerate;
  1192. cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
  1193. cm->refresh_golden_frame = 0;
  1194. cm->refresh_last_frame = 1;
  1195. cm->refresh_entropy_probs = 1;
  1196. /* change includes all joint functionality */
  1197. vp8_change_config(cpi, oxcf);
  1198. /* Initialize active best and worst q and average q values. */
  1199. cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
  1200. cpi->active_best_quality = cpi->oxcf.best_allowed_q;
  1201. cpi->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
  1202. /* Initialise the starting buffer levels */
  1203. cpi->buffer_level = cpi->oxcf.starting_buffer_level;
  1204. cpi->bits_off_target = cpi->oxcf.starting_buffer_level;
  1205. cpi->rolling_target_bits = cpi->av_per_frame_bandwidth;
  1206. cpi->rolling_actual_bits = cpi->av_per_frame_bandwidth;
  1207. cpi->long_rolling_target_bits = cpi->av_per_frame_bandwidth;
  1208. cpi->long_rolling_actual_bits = cpi->av_per_frame_bandwidth;
  1209. cpi->total_actual_bits = 0;
  1210. cpi->total_target_vs_actual = 0;
1211. /* Temporal scalability */
  1212. if (cpi->oxcf.number_of_layers > 1)
  1213. {
  1214. unsigned int i;
  1215. double prev_layer_framerate=0;
  1216. for (i=0; i<cpi->oxcf.number_of_layers; i++)
  1217. {
  1218. init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
  1219. prev_layer_framerate = cpi->output_framerate /
  1220. cpi->oxcf.rate_decimator[i];
  1221. }
  1222. }
  1223. #if VP8_TEMPORAL_ALT_REF
  1224. {
  1225. int i;
  1226. cpi->fixed_divide[0] = 0;
  1227. for (i = 1; i < 512; i++)
  1228. cpi->fixed_divide[i] = 0x80000 / i;
  1229. }
  1230. #endif
  1231. }
  1232. static void update_layer_contexts (VP8_COMP *cpi)
  1233. {
  1234. VP8_CONFIG *oxcf = &cpi->oxcf;
  1235. /* Update snapshots of the layer contexts to reflect new parameters */
  1236. if (oxcf->number_of_layers > 1)
  1237. {
  1238. unsigned int i;
  1239. double prev_layer_framerate=0;
  1240. assert(oxcf->number_of_layers <= VPX_TS_MAX_LAYERS);
  1241. for (i = 0; i < oxcf->number_of_layers && i < VPX_TS_MAX_LAYERS; ++i)
  1242. {
  1243. LAYER_CONTEXT *lc = &cpi->layer_context[i];
  1244. lc->framerate =
  1245. cpi->ref_framerate / oxcf->rate_decimator[i];
  1246. lc->target_bandwidth = oxcf->target_bitrate[i] * 1000;
  1247. lc->starting_buffer_level = rescale(
  1248. (int)oxcf->starting_buffer_level_in_ms,
  1249. lc->target_bandwidth, 1000);
  1250. if (oxcf->optimal_buffer_level == 0)
  1251. lc->optimal_buffer_level = lc->target_bandwidth / 8;
  1252. else
  1253. lc->optimal_buffer_level = rescale(
  1254. (int)oxcf->optimal_buffer_level_in_ms,
  1255. lc->target_bandwidth, 1000);
  1256. if (oxcf->maximum_buffer_size == 0)
  1257. lc->maximum_buffer_size = lc->target_bandwidth / 8;
  1258. else
  1259. lc->maximum_buffer_size = rescale(
  1260. (int)oxcf->maximum_buffer_size_in_ms,
  1261. lc->target_bandwidth, 1000);
  1262. /* Work out the average size of a frame within this layer */
  1263. if (i > 0)
  1264. lc->avg_frame_size_for_layer =
  1265. (int)((oxcf->target_bitrate[i] -
  1266. oxcf->target_bitrate[i-1]) * 1000 /
  1267. (lc->framerate - prev_layer_framerate));
  1268. prev_layer_framerate = lc->framerate;
  1269. }
  1270. }
  1271. }
  1272. void vp8_change_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
  1273. {
  1274. VP8_COMMON *cm = &cpi->common;
  1275. int last_w, last_h, prev_number_of_layers;
  1276. if (!cpi)
  1277. return;
  1278. if (!oxcf)
  1279. return;
  1280. #if CONFIG_MULTITHREAD
1281. /* Wait for the previous frame's loopfilter thread to finish */
  1282. if (cpi->b_lpf_running)
  1283. {
  1284. sem_wait(&cpi->h_event_end_lpf);
  1285. cpi->b_lpf_running = 0;
  1286. }
  1287. #endif
  1288. if (cm->version != oxcf->Version)
  1289. {
  1290. cm->version = oxcf->Version;
  1291. vp8_setup_version(cm);
  1292. }
  1293. last_w = cpi->oxcf.Width;
  1294. last_h = cpi->oxcf.Height;
  1295. prev_number_of_layers = cpi->oxcf.number_of_layers;
  1296. cpi->oxcf = *oxcf;
  1297. switch (cpi->oxcf.Mode)
  1298. {
  1299. case MODE_REALTIME:
  1300. cpi->pass = 0;
  1301. cpi->compressor_speed = 2;
  1302. if (cpi->oxcf.cpu_used < -16)
  1303. {
  1304. cpi->oxcf.cpu_used = -16;
  1305. }
  1306. if (cpi->oxcf.cpu_used > 16)
  1307. cpi->oxcf.cpu_used = 16;
  1308. break;
  1309. case MODE_GOODQUALITY:
  1310. cpi->pass = 0;
  1311. cpi->compressor_speed = 1;
  1312. if (cpi->oxcf.cpu_used < -5)
  1313. {
  1314. cpi->oxcf.cpu_used = -5;
  1315. }
  1316. if (cpi->oxcf.cpu_used > 5)
  1317. cpi->oxcf.cpu_used = 5;
  1318. break;
  1319. case MODE_BESTQUALITY:
  1320. cpi->pass = 0;
  1321. cpi->compressor_speed = 0;
  1322. break;
  1323. case MODE_FIRSTPASS:
  1324. cpi->pass = 1;
  1325. cpi->compressor_speed = 1;
  1326. break;
  1327. case MODE_SECONDPASS:
  1328. cpi->pass = 2;
  1329. cpi->compressor_speed = 1;
  1330. if (cpi->oxcf.cpu_used < -5)
  1331. {
  1332. cpi->oxcf.cpu_used = -5;
  1333. }
  1334. if (cpi->oxcf.cpu_used > 5)
  1335. cpi->oxcf.cpu_used = 5;
  1336. break;
  1337. case MODE_SECONDPASS_BEST:
  1338. cpi->pass = 2;
  1339. cpi->compressor_speed = 0;
  1340. break;
  1341. }
  1342. if (cpi->pass == 0)
  1343. cpi->auto_worst_q = 1;
  1344. cpi->oxcf.worst_allowed_q = q_trans[oxcf->worst_allowed_q];
  1345. cpi->oxcf.best_allowed_q = q_trans[oxcf->best_allowed_q];
  1346. cpi->oxcf.cq_level = q_trans[cpi->oxcf.cq_level];
  1347. if (oxcf->fixed_q >= 0)
  1348. {
  1349. if (oxcf->worst_allowed_q < 0)
  1350. cpi->oxcf.fixed_q = q_trans[0];
  1351. else
  1352. cpi->oxcf.fixed_q = q_trans[oxcf->worst_allowed_q];
  1353. if (oxcf->alt_q < 0)
  1354. cpi->oxcf.alt_q = q_trans[0];
  1355. else
  1356. cpi->oxcf.alt_q = q_trans[oxcf->alt_q];
  1357. if (oxcf->key_q < 0)
  1358. cpi->oxcf.key_q = q_trans[0];
  1359. else
  1360. cpi->oxcf.key_q = q_trans[oxcf->key_q];
  1361. if (oxcf->gold_q < 0)
  1362. cpi->oxcf.gold_q = q_trans[0];
  1363. else
  1364. cpi->oxcf.gold_q = q_trans[oxcf->gold_q];
  1365. }
  1366. cpi->baseline_gf_interval =
  1367. cpi->oxcf.alt_freq ? cpi->oxcf.alt_freq : DEFAULT_GF_INTERVAL;
  1368. #if (CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
  1369. cpi->oxcf.token_partitions = 3;
  1370. #endif
  1371. if (cpi->oxcf.token_partitions >= 0 && cpi->oxcf.token_partitions <= 3)
  1372. cm->multi_token_partition =
  1373. (TOKEN_PARTITION) cpi->oxcf.token_partitions;
  1374. setup_features(cpi);
  1375. {
  1376. int i;
  1377. for (i = 0; i < MAX_MB_SEGMENTS; i++)
  1378. cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
  1379. }
  1380. /* At the moment the first order values may not be > MAXQ */
  1381. if (cpi->oxcf.fixed_q > MAXQ)
  1382. cpi->oxcf.fixed_q = MAXQ;
  1383. /* local file playback mode == really big buffer */
  1384. if (cpi->oxcf.end_usage == USAGE_LOCAL_FILE_PLAYBACK)
  1385. {
  1386. cpi->oxcf.starting_buffer_level = 60000;
  1387. cpi->oxcf.optimal_buffer_level = 60000;
  1388. cpi->oxcf.maximum_buffer_size = 240000;
  1389. cpi->oxcf.starting_buffer_level_in_ms = 60000;
  1390. cpi->oxcf.optimal_buffer_level_in_ms = 60000;
  1391. cpi->oxcf.maximum_buffer_size_in_ms = 240000;
  1392. }
  1393. /* Convert target bandwidth from Kbit/s to Bit/s */
  1394. cpi->oxcf.target_bandwidth *= 1000;
  1395. cpi->oxcf.starting_buffer_level =
  1396. rescale((int)cpi->oxcf.starting_buffer_level,
  1397. cpi->oxcf.target_bandwidth, 1000);
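/* Illustrative note, assuming rescale(val, num, den) scales val by num / den:
 * a starting buffer level given as 4000 ms with an 800000 bit/s target
 * bandwidth converts to 4000 * 800000 / 1000 = 3200000 bits. */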
  1398. /* Set or reset optimal and maximum buffer levels. */
  1399. if (cpi->oxcf.optimal_buffer_level == 0)
  1400. cpi->oxcf.optimal_buffer_level = cpi->oxcf.target_bandwidth / 8;
  1401. else
  1402. cpi->oxcf.optimal_buffer_level =
  1403. rescale((int)cpi->oxcf.optimal_buffer_level,
  1404. cpi->oxcf.target_bandwidth, 1000);
  1405. if (cpi->oxcf.maximum_buffer_size == 0)
  1406. cpi->oxcf.maximum_buffer_size = cpi->oxcf.target_bandwidth / 8;
  1407. else
  1408. cpi->oxcf.maximum_buffer_size =
  1409. rescale((int)cpi->oxcf.maximum_buffer_size,
  1410. cpi->oxcf.target_bandwidth, 1000);
  1411. // Under a configuration change, where maximum_buffer_size may change,
  1412. // keep buffer level clipped to the maximum allowed buffer size.
  1413. if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size) {
  1414. cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
  1415. cpi->buffer_level = cpi->bits_off_target;
  1416. }
  1417. /* Set up frame rate and related parameters rate control values. */
  1418. vp8_new_framerate(cpi, cpi->framerate);
  1419. /* Set absolute upper and lower quality limits */
  1420. cpi->worst_quality = cpi->oxcf.worst_allowed_q;
  1421. cpi->best_quality = cpi->oxcf.best_allowed_q;
  1422. /* active values should only be modified if out of new range */
  1423. if (cpi->active_worst_quality > cpi->oxcf.worst_allowed_q)
  1424. {
  1425. cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
  1426. }
  1427. /* less likely */
  1428. else if (cpi->active_worst_quality < cpi->oxcf.best_allowed_q)
  1429. {
  1430. cpi->active_worst_quality = cpi->oxcf.best_allowed_q;
  1431. }
  1432. if (cpi->active_best_quality < cpi->oxcf.best_allowed_q)
  1433. {
  1434. cpi->active_best_quality = cpi->oxcf.best_allowed_q;
  1435. }
  1436. /* less likely */
  1437. else if (cpi->active_best_quality > cpi->oxcf.worst_allowed_q)
  1438. {
  1439. cpi->active_best_quality = cpi->oxcf.worst_allowed_q;
  1440. }
  1441. cpi->buffered_mode = cpi->oxcf.optimal_buffer_level > 0;
  1442. cpi->cq_target_quality = cpi->oxcf.cq_level;
  1443. /* Only allow dropped frames in buffered mode */
  1444. cpi->drop_frames_allowed = cpi->oxcf.allow_df && cpi->buffered_mode;
  1445. cpi->target_bandwidth = cpi->oxcf.target_bandwidth;
  1446. // Check if the number of temporal layers has changed, and if so reset the
  1447. // pattern counter and set/initialize the temporal layer context for the
  1448. // new layer configuration.
  1449. if (cpi->oxcf.number_of_layers != prev_number_of_layers)
  1450. {
1451. // If the number of temporal layers is changed we must start at the
  1452. // base of the pattern cycle, so set the layer id to 0 and reset
  1453. // the temporal pattern counter.
  1454. if (cpi->temporal_layer_id > 0) {
  1455. cpi->temporal_layer_id = 0;
  1456. }
  1457. cpi->temporal_pattern_counter = 0;
  1458. reset_temporal_layer_change(cpi, oxcf, prev_number_of_layers);
  1459. }
  1460. if (!cpi->initial_width)
  1461. {
  1462. cpi->initial_width = cpi->oxcf.Width;
  1463. cpi->initial_height = cpi->oxcf.Height;
  1464. }
  1465. cm->Width = cpi->oxcf.Width;
  1466. cm->Height = cpi->oxcf.Height;
  1467. assert(cm->Width <= cpi->initial_width);
  1468. assert(cm->Height <= cpi->initial_height);
  1469. /* TODO(jkoleszar): if an internal spatial resampling is active,
  1470. * and we downsize the input image, maybe we should clear the
  1471. * internal scale immediately rather than waiting for it to
  1472. * correct.
  1473. */
  1474. /* VP8 sharpness level mapping 0-7 (vs 0-10 in general VPx dialogs) */
  1475. if (cpi->oxcf.Sharpness > 7)
  1476. cpi->oxcf.Sharpness = 7;
  1477. cm->sharpness_level = cpi->oxcf.Sharpness;
  1478. if (cm->horiz_scale != NORMAL || cm->vert_scale != NORMAL)
  1479. {
  1480. int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
  1481. int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
  1482. Scale2Ratio(cm->horiz_scale, &hr, &hs);
  1483. Scale2Ratio(cm->vert_scale, &vr, &vs);
  1484. /* always go to the next whole number */
  1485. cm->Width = (hs - 1 + cpi->oxcf.Width * hr) / hs;
  1486. cm->Height = (vs - 1 + cpi->oxcf.Height * vr) / vs;
  1487. }
  1488. if (last_w != cpi->oxcf.Width || last_h != cpi->oxcf.Height)
  1489. cpi->force_next_frame_intra = 1;
  1490. if (((cm->Width + 15) & 0xfffffff0) !=
  1491. cm->yv12_fb[cm->lst_fb_idx].y_width ||
  1492. ((cm->Height + 15) & 0xfffffff0) !=
  1493. cm->yv12_fb[cm->lst_fb_idx].y_height ||
  1494. cm->yv12_fb[cm->lst_fb_idx].y_width == 0)
  1495. {
  1496. dealloc_raw_frame_buffers(cpi);
  1497. alloc_raw_frame_buffers(cpi);
  1498. vp8_alloc_compressor_data(cpi);
  1499. }
  1500. if (cpi->oxcf.fixed_q >= 0)
  1501. {
  1502. cpi->last_q[0] = cpi->oxcf.fixed_q;
  1503. cpi->last_q[1] = cpi->oxcf.fixed_q;
  1504. }
  1505. cpi->Speed = cpi->oxcf.cpu_used;
1506. /* Force allow_lag to 0 if lag_in_frames is 0. */
  1507. if (cpi->oxcf.lag_in_frames == 0)
  1508. {
  1509. cpi->oxcf.allow_lag = 0;
  1510. }
  1511. /* Limit on lag buffers as these are not currently dynamically allocated */
  1512. else if (cpi->oxcf.lag_in_frames > MAX_LAG_BUFFERS)
  1513. cpi->oxcf.lag_in_frames = MAX_LAG_BUFFERS;
  1514. /* YX Temp */
  1515. cpi->alt_ref_source = NULL;
  1516. cpi->is_src_frame_alt_ref = 0;
  1517. #if CONFIG_TEMPORAL_DENOISING
  1518. if (cpi->oxcf.noise_sensitivity)
  1519. {
  1520. if (!cpi->denoiser.yv12_mc_running_avg.buffer_alloc)
  1521. {
  1522. int width = (cpi->oxcf.Width + 15) & ~15;
  1523. int height = (cpi->oxcf.Height + 15) & ~15;
  1524. vp8_denoiser_allocate(&cpi->denoiser, width, height,
  1525. cm->mb_rows, cm->mb_cols,
  1526. cpi->oxcf.noise_sensitivity);
  1527. }
  1528. }
  1529. #endif
  1530. #if 0
  1531. /* Experimental RD Code */
  1532. cpi->frame_distortion = 0;
  1533. cpi->last_frame_distortion = 0;
  1534. #endif
  1535. }
  1536. #ifndef M_LOG2_E
  1537. #define M_LOG2_E 0.693147180559945309417
  1538. #endif
  1539. #define log2f(x) (log (x) / (float) M_LOG2_E)
  1540. static void cal_mvsadcosts(int *mvsadcost[2])
  1541. {
  1542. int i = 1;
  1543. mvsadcost [0] [0] = 300;
  1544. mvsadcost [1] [0] = 300;
  1545. do
  1546. {
  1547. double z = 256 * (2 * (log2f(8 * i) + .6));
  1548. mvsadcost [0][i] = (int) z;
  1549. mvsadcost [1][i] = (int) z;
  1550. mvsadcost [0][-i] = (int) z;
  1551. mvsadcost [1][-i] = (int) z;
  1552. }
  1553. while (++i <= mvfp_max);
  1554. }
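/* Illustrative values for the table built above: i = 1 gives
 * z = 256 * 2 * (log2(8) + 0.6) = 1843.2, stored as 1843; i = 2 gives 2355.
 * The cost is symmetric in the sign of the motion component. */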
  1555. struct VP8_COMP* vp8_create_compressor(VP8_CONFIG *oxcf)
  1556. {
  1557. int i;
  1558. VP8_COMP *cpi;
  1559. VP8_COMMON *cm;
  1560. cpi = vpx_memalign(32, sizeof(VP8_COMP));
  1561. /* Check that the CPI instance is valid */
  1562. if (!cpi)
  1563. return 0;
  1564. cm = &cpi->common;
  1565. memset(cpi, 0, sizeof(VP8_COMP));
  1566. if (setjmp(cm->error.jmp))
  1567. {
  1568. cpi->common.error.setjmp = 0;
  1569. vp8_remove_compressor(&cpi);
  1570. return 0;
  1571. }
  1572. cpi->common.error.setjmp = 1;
  1573. CHECK_MEM_ERROR(cpi->mb.ss, vpx_calloc(sizeof(search_site), (MAX_MVSEARCH_STEPS * 8) + 1));
  1574. vp8_create_common(&cpi->common);
  1575. init_config(cpi, oxcf);
  1576. memcpy(cpi->base_skip_false_prob, vp8cx_base_skip_false_prob, sizeof(vp8cx_base_skip_false_prob));
  1577. cpi->common.current_video_frame = 0;
  1578. cpi->temporal_pattern_counter = 0;
  1579. cpi->temporal_layer_id = -1;
  1580. cpi->kf_overspend_bits = 0;
  1581. cpi->kf_bitrate_adjustment = 0;
  1582. cpi->frames_till_gf_update_due = 0;
  1583. cpi->gf_overspend_bits = 0;
  1584. cpi->non_gf_bitrate_adjustment = 0;
  1585. cpi->prob_last_coded = 128;
  1586. cpi->prob_gf_coded = 128;
  1587. cpi->prob_intra_coded = 63;
  1588. /* Prime the recent reference frame usage counters.
  1589. * Hereafter they will be maintained as a sort of moving average
  1590. */
  1591. cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
  1592. cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
  1593. cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
  1594. cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
  1595. /* Set reference frame sign bias for ALTREF frame to 1 (for now) */
  1596. cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
  1597. cpi->twopass.gf_decay_rate = 0;
  1598. cpi->baseline_gf_interval = DEFAULT_GF_INTERVAL;
  1599. cpi->gold_is_last = 0 ;
  1600. cpi->alt_is_last = 0 ;
  1601. cpi->gold_is_alt = 0 ;
  1602. cpi->active_map_enabled = 0;
  1603. #if 0
  1604. /* Experimental code for lagged and one pass */
  1605. /* Initialise one_pass GF frames stats */
  1606. /* Update stats used for GF selection */
  1607. if (cpi->pass == 0)
  1608. {
  1609. cpi->one_pass_frame_index = 0;
  1610. for (i = 0; i < MAX_LAG_BUFFERS; i++)
  1611. {
  1612. cpi->one_pass_frame_stats[i].frames_so_far = 0;
  1613. cpi->one_pass_frame_stats[i].frame_intra_error = 0.0;
  1614. cpi->one_pass_frame_stats[i].frame_coded_error = 0.0;
  1615. cpi->one_pass_frame_stats[i].frame_pcnt_inter = 0.0;
  1616. cpi->one_pass_frame_stats[i].frame_pcnt_motion = 0.0;
  1617. cpi->one_pass_frame_stats[i].frame_mvr = 0.0;
  1618. cpi->one_pass_frame_stats[i].frame_mvr_abs = 0.0;
  1619. cpi->one_pass_frame_stats[i].frame_mvc = 0.0;
  1620. cpi->one_pass_frame_stats[i].frame_mvc_abs = 0.0;
  1621. }
  1622. }
  1623. #endif
  1624. cpi->mse_source_denoised = 0;
1625. /* Should we use the cyclic refresh method?
1626. * Currently this is tied to error resilient mode.
  1627. */
  1628. cpi->cyclic_refresh_mode_enabled = cpi->oxcf.error_resilient_mode;
  1629. cpi->cyclic_refresh_mode_max_mbs_perframe = (cpi->common.mb_rows * cpi->common.mb_cols) / 5;
  1630. if (cpi->oxcf.number_of_layers == 1) {
  1631. cpi->cyclic_refresh_mode_max_mbs_perframe =
  1632. (cpi->common.mb_rows * cpi->common.mb_cols) / 20;
  1633. } else if (cpi->oxcf.number_of_layers == 2) {
  1634. cpi->cyclic_refresh_mode_max_mbs_perframe =
  1635. (cpi->common.mb_rows * cpi->common.mb_cols) / 10;
  1636. }
  1637. cpi->cyclic_refresh_mode_index = 0;
  1638. cpi->cyclic_refresh_q = 32;
  1639. if (cpi->cyclic_refresh_mode_enabled)
  1640. {
  1641. CHECK_MEM_ERROR(cpi->cyclic_refresh_map, vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
  1642. }
  1643. else
  1644. cpi->cyclic_refresh_map = (signed char *) NULL;
  1645. CHECK_MEM_ERROR(cpi->consec_zero_last,
  1646. vpx_calloc(cm->mb_rows * cm->mb_cols, 1));
  1647. CHECK_MEM_ERROR(cpi->consec_zero_last_mvbias,
  1648. vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
  1649. #ifdef VP8_ENTROPY_STATS
  1650. init_context_counters();
  1651. #endif
1652. /* Initialize the feed-forward activity masking. */
  1653. cpi->activity_avg = 90<<12;
  1654. /* Give a sensible default for the first frame. */
  1655. cpi->frames_since_key = 8;
  1656. cpi->key_frame_frequency = cpi->oxcf.key_freq;
  1657. cpi->this_key_frame_forced = 0;
  1658. cpi->next_key_frame_forced = 0;
  1659. cpi->source_alt_ref_pending = 0;
  1660. cpi->source_alt_ref_active = 0;
  1661. cpi->common.refresh_alt_ref_frame = 0;
  1662. cpi->force_maxqp = 0;
  1663. cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
  1664. #if CONFIG_INTERNAL_STATS
  1665. cpi->b_calculate_ssimg = 0;
  1666. cpi->count = 0;
  1667. cpi->bytes = 0;
  1668. if (cpi->b_calculate_psnr)
  1669. {
  1670. cpi->total_sq_error = 0.0;
  1671. cpi->total_sq_error2 = 0.0;
  1672. cpi->total_y = 0.0;
  1673. cpi->total_u = 0.0;
  1674. cpi->total_v = 0.0;
  1675. cpi->total = 0.0;
  1676. cpi->totalp_y = 0.0;
  1677. cpi->totalp_u = 0.0;
  1678. cpi->totalp_v = 0.0;
  1679. cpi->totalp = 0.0;
  1680. cpi->tot_recode_hits = 0;
  1681. cpi->summed_quality = 0;
  1682. cpi->summed_weights = 0;
  1683. }
  1684. if (cpi->b_calculate_ssimg)
  1685. {
  1686. cpi->total_ssimg_y = 0;
  1687. cpi->total_ssimg_u = 0;
  1688. cpi->total_ssimg_v = 0;
  1689. cpi->total_ssimg_all = 0;
  1690. }
  1691. #endif
  1692. cpi->first_time_stamp_ever = 0x7FFFFFFF;
  1693. cpi->frames_till_gf_update_due = 0;
  1694. cpi->key_frame_count = 1;
  1695. cpi->ni_av_qi = cpi->oxcf.worst_allowed_q;
  1696. cpi->ni_tot_qi = 0;
  1697. cpi->ni_frames = 0;
  1698. cpi->total_byte_count = 0;
  1699. cpi->drop_frame = 0;
  1700. cpi->rate_correction_factor = 1.0;
  1701. cpi->key_frame_rate_correction_factor = 1.0;
  1702. cpi->gf_rate_correction_factor = 1.0;
  1703. cpi->twopass.est_max_qcorrection_factor = 1.0;
  1704. for (i = 0; i < KEY_FRAME_CONTEXT; i++)
  1705. {
  1706. cpi->prior_key_frame_distance[i] = (int)cpi->output_framerate;
  1707. }
  1708. #ifdef OUTPUT_YUV_SRC
  1709. yuv_file = fopen("bd.yuv", "ab");
  1710. #endif
  1711. #ifdef OUTPUT_YUV_DENOISED
  1712. yuv_denoised_file = fopen("denoised.yuv", "ab");
  1713. #endif
  1714. #if 0
  1715. framepsnr = fopen("framepsnr.stt", "a");
  1716. kf_list = fopen("kf_list.stt", "w");
  1717. #endif
  1718. cpi->output_pkt_list = oxcf->output_pkt_list;
  1719. #if !(CONFIG_REALTIME_ONLY)
  1720. if (cpi->pass == 1)
  1721. {
  1722. vp8_init_first_pass(cpi);
  1723. }
  1724. else if (cpi->pass == 2)
  1725. {
  1726. size_t packet_sz = sizeof(FIRSTPASS_STATS);
  1727. int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
  1728. cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
  1729. cpi->twopass.stats_in = cpi->twopass.stats_in_start;
  1730. cpi->twopass.stats_in_end = (void*)((char *)cpi->twopass.stats_in
  1731. + (packets - 1) * packet_sz);
  1732. vp8_init_second_pass(cpi);
  1733. }
  1734. #endif
  1735. if (cpi->compressor_speed == 2)
  1736. {
  1737. cpi->avg_encode_time = 0;
  1738. cpi->avg_pick_mode_time = 0;
  1739. }
  1740. vp8_set_speed_features(cpi);
  1741. /* Set starting values of RD threshold multipliers (128 = *1) */
  1742. for (i = 0; i < MAX_MODES; i++)
  1743. {
  1744. cpi->mb.rd_thresh_mult[i] = 128;
  1745. }
  1746. #ifdef VP8_ENTROPY_STATS
  1747. init_mv_ref_counts();
  1748. #endif
  1749. #if CONFIG_MULTITHREAD
  1750. if(vp8cx_create_encoder_threads(cpi))
  1751. {
  1752. vp8_remove_compressor(&cpi);
  1753. return 0;
  1754. }
  1755. #endif
  1756. cpi->fn_ptr[BLOCK_16X16].sdf = vpx_sad16x16;
  1757. cpi->fn_ptr[BLOCK_16X16].vf = vpx_variance16x16;
  1758. cpi->fn_ptr[BLOCK_16X16].svf = vp8_sub_pixel_variance16x16;
  1759. cpi->fn_ptr[BLOCK_16X16].svf_halfpix_h = vp8_variance_halfpixvar16x16_h;
  1760. cpi->fn_ptr[BLOCK_16X16].svf_halfpix_v = vp8_variance_halfpixvar16x16_v;
  1761. cpi->fn_ptr[BLOCK_16X16].svf_halfpix_hv = vp8_variance_halfpixvar16x16_hv;
  1762. cpi->fn_ptr[BLOCK_16X16].sdx3f = vpx_sad16x16x3;
  1763. cpi->fn_ptr[BLOCK_16X16].sdx8f = vpx_sad16x16x8;
  1764. cpi->fn_ptr[BLOCK_16X16].sdx4df = vpx_sad16x16x4d;
  1765. cpi->fn_ptr[BLOCK_16X8].sdf = vpx_sad16x8;
  1766. cpi->fn_ptr[BLOCK_16X8].vf = vpx_variance16x8;
  1767. cpi->fn_ptr[BLOCK_16X8].svf = vp8_sub_pixel_variance16x8;
  1768. cpi->fn_ptr[BLOCK_16X8].svf_halfpix_h = NULL;
  1769. cpi->fn_ptr[BLOCK_16X8].svf_halfpix_v = NULL;
  1770. cpi->fn_ptr[BLOCK_16X8].svf_halfpix_hv = NULL;
  1771. cpi->fn_ptr[BLOCK_16X8].sdx3f = vpx_sad16x8x3;
  1772. cpi->fn_ptr[BLOCK_16X8].sdx8f = vpx_sad16x8x8;
  1773. cpi->fn_ptr[BLOCK_16X8].sdx4df = vpx_sad16x8x4d;
  1774. cpi->fn_ptr[BLOCK_8X16].sdf = vpx_sad8x16;
  1775. cpi->fn_ptr[BLOCK_8X16].vf = vpx_variance8x16;
  1776. cpi->fn_ptr[BLOCK_8X16].svf = vp8_sub_pixel_variance8x16;
  1777. cpi->fn_ptr[BLOCK_8X16].svf_halfpix_h = NULL;
  1778. cpi->fn_ptr[BLOCK_8X16].svf_halfpix_v = NULL;
  1779. cpi->fn_ptr[BLOCK_8X16].svf_halfpix_hv = NULL;
  1780. cpi->fn_ptr[BLOCK_8X16].sdx3f = vpx_sad8x16x3;
  1781. cpi->fn_ptr[BLOCK_8X16].sdx8f = vpx_sad8x16x8;
  1782. cpi->fn_ptr[BLOCK_8X16].sdx4df = vpx_sad8x16x4d;
  1783. cpi->fn_ptr[BLOCK_8X8].sdf = vpx_sad8x8;
  1784. cpi->fn_ptr[BLOCK_8X8].vf = vpx_variance8x8;
  1785. cpi->fn_ptr[BLOCK_8X8].svf = vp8_sub_pixel_variance8x8;
  1786. cpi->fn_ptr[BLOCK_8X8].svf_halfpix_h = NULL;
  1787. cpi->fn_ptr[BLOCK_8X8].svf_halfpix_v = NULL;
  1788. cpi->fn_ptr[BLOCK_8X8].svf_halfpix_hv = NULL;
  1789. cpi->fn_ptr[BLOCK_8X8].sdx3f = vpx_sad8x8x3;
  1790. cpi->fn_ptr[BLOCK_8X8].sdx8f = vpx_sad8x8x8;
  1791. cpi->fn_ptr[BLOCK_8X8].sdx4df = vpx_sad8x8x4d;
  1792. cpi->fn_ptr[BLOCK_4X4].sdf = vpx_sad4x4;
  1793. cpi->fn_ptr[BLOCK_4X4].vf = vpx_variance4x4;
  1794. cpi->fn_ptr[BLOCK_4X4].svf = vp8_sub_pixel_variance4x4;
  1795. cpi->fn_ptr[BLOCK_4X4].svf_halfpix_h = NULL;
  1796. cpi->fn_ptr[BLOCK_4X4].svf_halfpix_v = NULL;
  1797. cpi->fn_ptr[BLOCK_4X4].svf_halfpix_hv = NULL;
  1798. cpi->fn_ptr[BLOCK_4X4].sdx3f = vpx_sad4x4x3;
  1799. cpi->fn_ptr[BLOCK_4X4].sdx8f = vpx_sad4x4x8;
  1800. cpi->fn_ptr[BLOCK_4X4].sdx4df = vpx_sad4x4x4d;
  1801. #if ARCH_X86 || ARCH_X86_64
  1802. cpi->fn_ptr[BLOCK_16X16].copymem = vp8_copy32xn;
  1803. cpi->fn_ptr[BLOCK_16X8].copymem = vp8_copy32xn;
  1804. cpi->fn_ptr[BLOCK_8X16].copymem = vp8_copy32xn;
  1805. cpi->fn_ptr[BLOCK_8X8].copymem = vp8_copy32xn;
  1806. cpi->fn_ptr[BLOCK_4X4].copymem = vp8_copy32xn;
  1807. #endif
  1808. cpi->full_search_sad = vp8_full_search_sad;
  1809. cpi->diamond_search_sad = vp8_diamond_search_sad;
  1810. cpi->refining_search_sad = vp8_refining_search_sad;
  1811. /* make sure frame 1 is okay */
  1812. cpi->mb.error_bins[0] = cpi->common.MBs;
  1813. /* vp8cx_init_quantizer() is first called here. Add check in
  1814. * vp8cx_frame_init_quantizer() so that vp8cx_init_quantizer is only
  1815. * called later when needed. This will avoid unnecessary calls of
  1816. * vp8cx_init_quantizer() for every frame.
  1817. */
  1818. vp8cx_init_quantizer(cpi);
  1819. vp8_loop_filter_init(cm);
  1820. cpi->common.error.setjmp = 0;
  1821. #if CONFIG_MULTI_RES_ENCODING
  1822. /* Calculate # of MBs in a row in lower-resolution level image. */
  1823. if (cpi->oxcf.mr_encoder_id > 0)
  1824. vp8_cal_low_res_mb_cols(cpi);
  1825. #endif
  1826. /* setup RD costs to MACROBLOCK struct */
  1827. cpi->mb.mvcost[0] = &cpi->rd_costs.mvcosts[0][mv_max+1];
  1828. cpi->mb.mvcost[1] = &cpi->rd_costs.mvcosts[1][mv_max+1];
  1829. cpi->mb.mvsadcost[0] = &cpi->rd_costs.mvsadcosts[0][mvfp_max+1];
  1830. cpi->mb.mvsadcost[1] = &cpi->rd_costs.mvsadcosts[1][mvfp_max+1];
  1831. cal_mvsadcosts(cpi->mb.mvsadcost);
  1832. cpi->mb.mbmode_cost = cpi->rd_costs.mbmode_cost;
  1833. cpi->mb.intra_uv_mode_cost = cpi->rd_costs.intra_uv_mode_cost;
  1834. cpi->mb.bmode_costs = cpi->rd_costs.bmode_costs;
  1835. cpi->mb.inter_bmode_costs = cpi->rd_costs.inter_bmode_costs;
  1836. cpi->mb.token_costs = cpi->rd_costs.token_costs;
  1837. /* setup block ptrs & offsets */
  1838. vp8_setup_block_ptrs(&cpi->mb);
  1839. vp8_setup_block_dptrs(&cpi->mb.e_mbd);
  1840. return cpi;
  1841. }
  1842. void vp8_remove_compressor(VP8_COMP **ptr)
  1843. {
  1844. VP8_COMP *cpi = *ptr;
  1845. if (!cpi)
  1846. return;
  1847. if (cpi && (cpi->common.current_video_frame > 0))
  1848. {
  1849. #if !(CONFIG_REALTIME_ONLY)
  1850. if (cpi->pass == 2)
  1851. {
  1852. vp8_end_second_pass(cpi);
  1853. }
  1854. #endif
  1855. #ifdef VP8_ENTROPY_STATS
  1856. print_context_counters();
  1857. print_tree_update_probs();
  1858. print_mode_context();
  1859. #endif
  1860. #if CONFIG_INTERNAL_STATS
  1861. if (cpi->pass != 1)
  1862. {
  1863. FILE *f = fopen("opsnr.stt", "a");
  1864. double time_encoded = (cpi->last_end_time_stamp_seen
  1865. - cpi->first_time_stamp_ever) / 10000000.000;
  1866. double total_encode_time = (cpi->time_receive_data +
  1867. cpi->time_compress_data) / 1000.000;
  1868. double dr = (double)cpi->bytes * 8.0 / 1000.0 / time_encoded;
  1869. if (cpi->b_calculate_psnr)
  1870. {
  1871. if (cpi->oxcf.number_of_layers > 1)
  1872. {
  1873. int i;
  1874. fprintf(f, "Layer\tBitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
  1875. "GLPsnrP\tVPXSSIM\t\n");
  1876. for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
  1877. {
  1878. double dr = (double)cpi->bytes_in_layer[i] *
  1879. 8.0 / 1000.0 / time_encoded;
  1880. double samples = 3.0 / 2 * cpi->frames_in_layer[i] *
  1881. cpi->common.Width * cpi->common.Height;
  1882. double total_psnr =
  1883. vpx_sse_to_psnr(samples, 255.0,
  1884. cpi->total_error2[i]);
  1885. double total_psnr2 =
  1886. vpx_sse_to_psnr(samples, 255.0,
  1887. cpi->total_error2_p[i]);
  1888. double total_ssim = 100 * pow(cpi->sum_ssim[i] /
  1889. cpi->sum_weights[i], 8.0);
  1890. fprintf(f, "%5d\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
  1891. "%7.3f\t%7.3f\n",
  1892. i, dr,
  1893. cpi->sum_psnr[i] / cpi->frames_in_layer[i],
  1894. total_psnr,
  1895. cpi->sum_psnr_p[i] / cpi->frames_in_layer[i],
  1896. total_psnr2, total_ssim);
  1897. }
  1898. }
  1899. else
  1900. {
  1901. double samples = 3.0 / 2 * cpi->count *
  1902. cpi->common.Width * cpi->common.Height;
  1903. double total_psnr = vpx_sse_to_psnr(samples, 255.0,
  1904. cpi->total_sq_error);
  1905. double total_psnr2 = vpx_sse_to_psnr(samples, 255.0,
  1906. cpi->total_sq_error2);
  1907. double total_ssim = 100 * pow(cpi->summed_quality /
  1908. cpi->summed_weights, 8.0);
  1909. fprintf(f, "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
  1910. "GLPsnrP\tVPXSSIM\t Time(us)\n");
  1911. fprintf(f, "%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
  1912. "%7.3f\t%8.0f\n",
  1913. dr, cpi->total / cpi->count, total_psnr,
  1914. cpi->totalp / cpi->count, total_psnr2,
  1915. total_ssim, total_encode_time);
  1916. }
  1917. }
  1918. if (cpi->b_calculate_ssimg)
  1919. {
  1920. if (cpi->oxcf.number_of_layers > 1)
  1921. {
  1922. int i;
  1923. fprintf(f, "Layer\tBitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
  1924. "Time(us)\n");
  1925. for (i=0; i<(int)cpi->oxcf.number_of_layers; i++)
  1926. {
  1927. double dr = (double)cpi->bytes_in_layer[i] *
  1928. 8.0 / 1000.0 / time_encoded;
  1929. fprintf(f, "%5d\t%7.3f\t%6.4f\t"
  1930. "%6.4f\t%6.4f\t%6.4f\t%8.0f\n",
  1931. i, dr,
  1932. cpi->total_ssimg_y_in_layer[i] /
  1933. cpi->frames_in_layer[i],
  1934. cpi->total_ssimg_u_in_layer[i] /
  1935. cpi->frames_in_layer[i],
  1936. cpi->total_ssimg_v_in_layer[i] /
  1937. cpi->frames_in_layer[i],
  1938. cpi->total_ssimg_all_in_layer[i] /
  1939. cpi->frames_in_layer[i],
  1940. total_encode_time);
  1941. }
  1942. }
  1943. else
  1944. {
  1945. fprintf(f, "BitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t"
  1946. "Time(us)\n");
  1947. fprintf(f, "%7.3f\t%6.4f\t%6.4f\t%6.4f\t%6.4f\t%8.0f\n", dr,
  1948. cpi->total_ssimg_y / cpi->count,
  1949. cpi->total_ssimg_u / cpi->count,
  1950. cpi->total_ssimg_v / cpi->count,
  1951. cpi->total_ssimg_all / cpi->count, total_encode_time);
  1952. }
  1953. }
  1954. fclose(f);
  1955. #if 0
  1956. f = fopen("qskip.stt", "a");
  1957. fprintf(f, "minq:%d -maxq:%d skiptrue:skipfalse = %d:%d\n", cpi->oxcf.best_allowed_q, cpi->oxcf.worst_allowed_q, skiptruecount, skipfalsecount);
  1958. fclose(f);
  1959. #endif
  1960. }
  1961. #endif
  1962. #ifdef SPEEDSTATS
  1963. if (cpi->compressor_speed == 2)
  1964. {
  1965. int i;
  1966. FILE *f = fopen("cxspeed.stt", "a");
  1967. cnt_pm /= cpi->common.MBs;
  1968. for (i = 0; i < 16; i++)
  1969. fprintf(f, "%5d", frames_at_speed[i]);
  1970. fprintf(f, "\n");
  1971. fclose(f);
  1972. }
  1973. #endif
  1974. #ifdef MODE_STATS
  1975. {
  1976. extern int count_mb_seg[4];
  1977. FILE *f = fopen("modes.stt", "a");
  1978. double dr = (double)cpi->framerate * (double)bytes * (double)8 / (double)count / (double)1000 ;
  1979. fprintf(f, "intra_mode in Intra Frames:\n");
  1980. fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d\n", y_modes[0], y_modes[1], y_modes[2], y_modes[3], y_modes[4]);
  1981. fprintf(f, "UV:%8d, %8d, %8d, %8d\n", uv_modes[0], uv_modes[1], uv_modes[2], uv_modes[3]);
  1982. fprintf(f, "B: ");
  1983. {
  1984. int i;
  1985. for (i = 0; i < 10; i++)
  1986. fprintf(f, "%8d, ", b_modes[i]);
  1987. fprintf(f, "\n");
  1988. }
  1989. fprintf(f, "Modes in Inter Frames:\n");
  1990. fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d\n",
  1991. inter_y_modes[0], inter_y_modes[1], inter_y_modes[2], inter_y_modes[3], inter_y_modes[4],
  1992. inter_y_modes[5], inter_y_modes[6], inter_y_modes[7], inter_y_modes[8], inter_y_modes[9]);
  1993. fprintf(f, "UV:%8d, %8d, %8d, %8d\n", inter_uv_modes[0], inter_uv_modes[1], inter_uv_modes[2], inter_uv_modes[3]);
  1994. fprintf(f, "B: ");
  1995. {
  1996. int i;
  1997. for (i = 0; i < 15; i++)
  1998. fprintf(f, "%8d, ", inter_b_modes[i]);
  1999. fprintf(f, "\n");
  2000. }
  2001. fprintf(f, "P:%8d, %8d, %8d, %8d\n", count_mb_seg[0], count_mb_seg[1], count_mb_seg[2], count_mb_seg[3]);
  2002. fprintf(f, "PB:%8d, %8d, %8d, %8d\n", inter_b_modes[LEFT4X4], inter_b_modes[ABOVE4X4], inter_b_modes[ZERO4X4], inter_b_modes[NEW4X4]);
  2003. fclose(f);
  2004. }
  2005. #endif
  2006. #ifdef VP8_ENTROPY_STATS
  2007. {
  2008. int i, j, k;
  2009. FILE *fmode = fopen("modecontext.c", "w");
  2010. fprintf(fmode, "\n#include \"entropymode.h\"\n\n");
  2011. fprintf(fmode, "const unsigned int vp8_kf_default_bmode_counts ");
  2012. fprintf(fmode, "[VP8_BINTRAMODES] [VP8_BINTRAMODES] [VP8_BINTRAMODES] =\n{\n");
  2013. for (i = 0; i < 10; i++)
  2014. {
  2015. fprintf(fmode, " { /* Above Mode : %d */\n", i);
  2016. for (j = 0; j < 10; j++)
  2017. {
  2018. fprintf(fmode, " {");
  2019. for (k = 0; k < 10; k++)
  2020. {
  2021. if (!intra_mode_stats[i][j][k])
  2022. fprintf(fmode, " %5d, ", 1);
  2023. else
  2024. fprintf(fmode, " %5d, ", intra_mode_stats[i][j][k]);
  2025. }
  2026. fprintf(fmode, "}, /* left_mode %d */\n", j);
  2027. }
  2028. fprintf(fmode, " },\n");
  2029. }
  2030. fprintf(fmode, "};\n");
  2031. fclose(fmode);
  2032. }
  2033. #endif
  2034. #if defined(SECTIONBITS_OUTPUT)
  2035. if (0)
  2036. {
  2037. int i;
  2038. FILE *f = fopen("tokenbits.stt", "a");
  2039. for (i = 0; i < 28; i++)
  2040. fprintf(f, "%8d", (int)(Sectionbits[i] / 256));
  2041. fprintf(f, "\n");
  2042. fclose(f);
  2043. }
  2044. #endif
  2045. #if 0
  2046. {
  2047. printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
2048. printf("\n_frames receive_data encode_mb_row compress_frame Total\n");
  2049. printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame, cpi->time_receive_data / 1000, cpi->time_encode_mb_row / 1000, cpi->time_compress_data / 1000, (cpi->time_receive_data + cpi->time_compress_data) / 1000);
  2050. }
  2051. #endif
  2052. }
  2053. #if CONFIG_MULTITHREAD
  2054. vp8cx_remove_encoder_threads(cpi);
  2055. #endif
  2056. #if CONFIG_TEMPORAL_DENOISING
  2057. vp8_denoiser_free(&cpi->denoiser);
  2058. #endif
  2059. dealloc_compressor_data(cpi);
  2060. vpx_free(cpi->mb.ss);
  2061. vpx_free(cpi->tok);
  2062. vpx_free(cpi->cyclic_refresh_map);
  2063. vpx_free(cpi->consec_zero_last);
  2064. vpx_free(cpi->consec_zero_last_mvbias);
  2065. vp8_remove_common(&cpi->common);
  2066. vpx_free(cpi);
  2067. *ptr = 0;
  2068. #ifdef OUTPUT_YUV_SRC
  2069. fclose(yuv_file);
  2070. #endif
  2071. #ifdef OUTPUT_YUV_DENOISED
  2072. fclose(yuv_denoised_file);
  2073. #endif
  2074. #if 0
  2075. if (keyfile)
  2076. fclose(keyfile);
  2077. if (framepsnr)
  2078. fclose(framepsnr);
  2079. if (kf_list)
  2080. fclose(kf_list);
  2081. #endif
  2082. }
  2083. static uint64_t calc_plane_error(unsigned char *orig, int orig_stride,
  2084. unsigned char *recon, int recon_stride,
  2085. unsigned int cols, unsigned int rows)
  2086. {
  2087. unsigned int row, col;
  2088. uint64_t total_sse = 0;
  2089. int diff;
  2090. for (row = 0; row + 16 <= rows; row += 16)
  2091. {
  2092. for (col = 0; col + 16 <= cols; col += 16)
  2093. {
  2094. unsigned int sse;
  2095. vpx_mse16x16(orig + col, orig_stride,
  2096. recon + col, recon_stride,
  2097. &sse);
  2098. total_sse += sse;
  2099. }
  2100. /* Handle odd-sized width */
  2101. if (col < cols)
  2102. {
  2103. unsigned int border_row, border_col;
  2104. unsigned char *border_orig = orig;
  2105. unsigned char *border_recon = recon;
  2106. for (border_row = 0; border_row < 16; border_row++)
  2107. {
  2108. for (border_col = col; border_col < cols; border_col++)
  2109. {
  2110. diff = border_orig[border_col] - border_recon[border_col];
  2111. total_sse += diff * diff;
  2112. }
  2113. border_orig += orig_stride;
  2114. border_recon += recon_stride;
  2115. }
  2116. }
  2117. orig += orig_stride * 16;
  2118. recon += recon_stride * 16;
  2119. }
  2120. /* Handle odd-sized height */
  2121. for (; row < rows; row++)
  2122. {
  2123. for (col = 0; col < cols; col++)
  2124. {
  2125. diff = orig[col] - recon[col];
  2126. total_sse += diff * diff;
  2127. }
  2128. orig += orig_stride;
  2129. recon += recon_stride;
  2130. }
  2131. vp8_clear_system_state();
  2132. return total_sse;
  2133. }
  2134. static void generate_psnr_packet(VP8_COMP *cpi)
  2135. {
  2136. YV12_BUFFER_CONFIG *orig = cpi->Source;
  2137. YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
  2138. struct vpx_codec_cx_pkt pkt;
  2139. uint64_t sse;
  2140. int i;
  2141. unsigned int width = cpi->common.Width;
  2142. unsigned int height = cpi->common.Height;
  2143. pkt.kind = VPX_CODEC_PSNR_PKT;
  2144. sse = calc_plane_error(orig->y_buffer, orig->y_stride,
  2145. recon->y_buffer, recon->y_stride,
  2146. width, height);
  2147. pkt.data.psnr.sse[0] = sse;
  2148. pkt.data.psnr.sse[1] = sse;
  2149. pkt.data.psnr.samples[0] = width * height;
  2150. pkt.data.psnr.samples[1] = width * height;
  2151. width = (width + 1) / 2;
  2152. height = (height + 1) / 2;
  2153. sse = calc_plane_error(orig->u_buffer, orig->uv_stride,
  2154. recon->u_buffer, recon->uv_stride,
  2155. width, height);
  2156. pkt.data.psnr.sse[0] += sse;
  2157. pkt.data.psnr.sse[2] = sse;
  2158. pkt.data.psnr.samples[0] += width * height;
  2159. pkt.data.psnr.samples[2] = width * height;
  2160. sse = calc_plane_error(orig->v_buffer, orig->uv_stride,
  2161. recon->v_buffer, recon->uv_stride,
  2162. width, height);
  2163. pkt.data.psnr.sse[0] += sse;
  2164. pkt.data.psnr.sse[3] = sse;
  2165. pkt.data.psnr.samples[0] += width * height;
  2166. pkt.data.psnr.samples[3] = width * height;
  2167. for (i = 0; i < 4; i++)
  2168. pkt.data.psnr.psnr[i] = vpx_sse_to_psnr(pkt.data.psnr.samples[i], 255.0,
  2169. (double)(pkt.data.psnr.sse[i]));
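/* Illustrative note, assuming vpx_sse_to_psnr() implements the usual
 * 10 * log10(samples * peak^2 / sse) with peak = 255: if the SSE for a plane
 * happened to equal its sample count, the reported PSNR would be
 * 10 * log10(255 * 255), roughly 48.13 dB. */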
  2170. vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
  2171. }
  2172. int vp8_use_as_reference(VP8_COMP *cpi, int ref_frame_flags)
  2173. {
  2174. if (ref_frame_flags > 7)
  2175. return -1 ;
  2176. cpi->ref_frame_flags = ref_frame_flags;
  2177. return 0;
  2178. }
  2179. int vp8_update_reference(VP8_COMP *cpi, int ref_frame_flags)
  2180. {
  2181. if (ref_frame_flags > 7)
  2182. return -1 ;
  2183. cpi->common.refresh_golden_frame = 0;
  2184. cpi->common.refresh_alt_ref_frame = 0;
  2185. cpi->common.refresh_last_frame = 0;
  2186. if (ref_frame_flags & VP8_LAST_FRAME)
  2187. cpi->common.refresh_last_frame = 1;
  2188. if (ref_frame_flags & VP8_GOLD_FRAME)
  2189. cpi->common.refresh_golden_frame = 1;
  2190. if (ref_frame_flags & VP8_ALTR_FRAME)
  2191. cpi->common.refresh_alt_ref_frame = 1;
  2192. return 0;
  2193. }
  2194. int vp8_get_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
  2195. {
  2196. VP8_COMMON *cm = &cpi->common;
  2197. int ref_fb_idx;
  2198. if (ref_frame_flag == VP8_LAST_FRAME)
  2199. ref_fb_idx = cm->lst_fb_idx;
  2200. else if (ref_frame_flag == VP8_GOLD_FRAME)
  2201. ref_fb_idx = cm->gld_fb_idx;
  2202. else if (ref_frame_flag == VP8_ALTR_FRAME)
  2203. ref_fb_idx = cm->alt_fb_idx;
  2204. else
  2205. return -1;
  2206. vp8_yv12_copy_frame(&cm->yv12_fb[ref_fb_idx], sd);
  2207. return 0;
  2208. }
  2209. int vp8_set_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd)
  2210. {
  2211. VP8_COMMON *cm = &cpi->common;
  2212. int ref_fb_idx;
  2213. if (ref_frame_flag == VP8_LAST_FRAME)
  2214. ref_fb_idx = cm->lst_fb_idx;
  2215. else if (ref_frame_flag == VP8_GOLD_FRAME)
  2216. ref_fb_idx = cm->gld_fb_idx;
  2217. else if (ref_frame_flag == VP8_ALTR_FRAME)
  2218. ref_fb_idx = cm->alt_fb_idx;
  2219. else
  2220. return -1;
  2221. vp8_yv12_copy_frame(sd, &cm->yv12_fb[ref_fb_idx]);
  2222. return 0;
  2223. }
  2224. int vp8_update_entropy(VP8_COMP *cpi, int update)
  2225. {
  2226. VP8_COMMON *cm = &cpi->common;
  2227. cm->refresh_entropy_probs = update;
  2228. return 0;
  2229. }
  2230. #if defined(OUTPUT_YUV_SRC) || defined(OUTPUT_YUV_DENOISED)
  2231. void vp8_write_yuv_frame(FILE *yuv_file, YV12_BUFFER_CONFIG *s)
  2232. {
  2233. unsigned char *src = s->y_buffer;
  2234. int h = s->y_height;
  2235. do
  2236. {
  2237. fwrite(src, s->y_width, 1, yuv_file);
  2238. src += s->y_stride;
  2239. }
  2240. while (--h);
  2241. src = s->u_buffer;
  2242. h = s->uv_height;
  2243. do
  2244. {
  2245. fwrite(src, s->uv_width, 1, yuv_file);
  2246. src += s->uv_stride;
  2247. }
  2248. while (--h);
  2249. src = s->v_buffer;
  2250. h = s->uv_height;
  2251. do
  2252. {
  2253. fwrite(src, s->uv_width, 1, yuv_file);
  2254. src += s->uv_stride;
  2255. }
  2256. while (--h);
  2257. }
  2258. #endif
  2259. static void scale_and_extend_source(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi)
  2260. {
  2261. VP8_COMMON *cm = &cpi->common;
  2262. /* are we resizing the image */
  2263. if (cm->horiz_scale != 0 || cm->vert_scale != 0)
  2264. {
  2265. #if CONFIG_SPATIAL_RESAMPLING
  2266. int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
  2267. int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
  2268. int tmp_height;
  2269. if (cm->vert_scale == 3)
  2270. tmp_height = 9;
  2271. else
  2272. tmp_height = 11;
  2273. Scale2Ratio(cm->horiz_scale, &hr, &hs);
  2274. Scale2Ratio(cm->vert_scale, &vr, &vs);
  2275. vpx_scale_frame(sd, &cpi->scaled_source, cm->temp_scale_frame.y_buffer,
  2276. tmp_height, hs, hr, vs, vr, 0);
  2277. vp8_yv12_extend_frame_borders(&cpi->scaled_source);
  2278. cpi->Source = &cpi->scaled_source;
  2279. #endif
  2280. }
  2281. else
  2282. cpi->Source = sd;
  2283. }
  2284. static int resize_key_frame(VP8_COMP *cpi)
  2285. {
  2286. #if CONFIG_SPATIAL_RESAMPLING
  2287. VP8_COMMON *cm = &cpi->common;
  2288. /* Do we need to apply resampling for one pass cbr.
  2289. * In one pass this is more limited than in two pass cbr.
  2290. * The test and any change is only made once per key frame sequence.
  2291. */
  2292. if (cpi->oxcf.allow_spatial_resampling && (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER))
  2293. {
  2294. int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
  2295. int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);
  2296. int new_width, new_height;
  2297. /* If we are below the resample DOWN watermark then scale down a
  2298. * notch.
  2299. */
  2300. if (cpi->buffer_level < (cpi->oxcf.resample_down_water_mark * cpi->oxcf.optimal_buffer_level / 100))
  2301. {
  2302. cm->horiz_scale = (cm->horiz_scale < ONETWO) ? cm->horiz_scale + 1 : ONETWO;
  2303. cm->vert_scale = (cm->vert_scale < ONETWO) ? cm->vert_scale + 1 : ONETWO;
  2304. }
  2305. /* Should we now start scaling back up */
  2306. else if (cpi->buffer_level > (cpi->oxcf.resample_up_water_mark * cpi->oxcf.optimal_buffer_level / 100))
  2307. {
  2308. cm->horiz_scale = (cm->horiz_scale > NORMAL) ? cm->horiz_scale - 1 : NORMAL;
  2309. cm->vert_scale = (cm->vert_scale > NORMAL) ? cm->vert_scale - 1 : NORMAL;
  2310. }
  2311. /* Get the new height and width */
  2312. Scale2Ratio(cm->horiz_scale, &hr, &hs);
  2313. Scale2Ratio(cm->vert_scale, &vr, &vs);
  2314. new_width = ((hs - 1) + (cpi->oxcf.Width * hr)) / hs;
  2315. new_height = ((vs - 1) + (cpi->oxcf.Height * vr)) / vs;
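/* The rounding above is a ceiling division; e.g. with Width = 640 and a
 * 4/5 ratio (hr = 4, hs = 5), new_width = (4 + 640 * 4) / 5 = 512.
 */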
  2316. /* If the image size has changed we need to reallocate the buffers
  2317. * and resample the source image
  2318. */
  2319. if ((cm->Width != new_width) || (cm->Height != new_height))
  2320. {
  2321. cm->Width = new_width;
  2322. cm->Height = new_height;
  2323. vp8_alloc_compressor_data(cpi);
  2324. scale_and_extend_source(cpi->un_scaled_source, cpi);
  2325. return 1;
  2326. }
  2327. }
  2328. #endif
  2329. return 0;
  2330. }
  2331. static void update_alt_ref_frame_stats(VP8_COMP *cpi)
  2332. {
  2333. VP8_COMMON *cm = &cpi->common;
  2334. /* Select an interval before next GF or altref */
  2335. if (!cpi->auto_gold)
  2336. cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
  2337. if ((cpi->pass != 2) && cpi->frames_till_gf_update_due)
  2338. {
  2339. cpi->current_gf_interval = cpi->frames_till_gf_update_due;
  2340. /* Set the bits per frame that we should try and recover in
  2341. * subsequent inter frames to account for the extra GF spend...
2342. * note that this does not apply for GF updates that occur
  2343. * coincident with a key frame as the extra cost of key frames is
  2344. * dealt with elsewhere.
  2345. */
  2346. cpi->gf_overspend_bits += cpi->projected_frame_size;
  2347. cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
  2348. }
  2349. /* Update data structure that monitors level of reference to last GF */
  2350. memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
  2351. cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2352. /* this frame's refresh means subsequent frames don't refresh unless specified by the user */
  2353. cpi->frames_since_golden = 0;
  2354. /* Clear the alternate reference update pending flag. */
  2355. cpi->source_alt_ref_pending = 0;
  2356. /* Set the alternate reference frame active flag */
  2357. cpi->source_alt_ref_active = 1;
  2358. }
  2359. static void update_golden_frame_stats(VP8_COMP *cpi)
  2360. {
  2361. VP8_COMMON *cm = &cpi->common;
  2362. /* Update the Golden frame usage counts. */
  2363. if (cm->refresh_golden_frame)
  2364. {
  2365. /* Select an interval before next GF */
  2366. if (!cpi->auto_gold)
  2367. cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;
  2368. if ((cpi->pass != 2) && (cpi->frames_till_gf_update_due > 0))
  2369. {
  2370. cpi->current_gf_interval = cpi->frames_till_gf_update_due;
  2371. /* Set the bits per frame that we should try and recover in
  2372. * subsequent inter frames to account for the extra GF spend...
2373. * note that this does not apply for GF updates that occur
  2374. * coincident with a key frame as the extra cost of key frames
  2375. * is dealt with elsewhere.
  2376. */
  2377. if ((cm->frame_type != KEY_FRAME) && !cpi->source_alt_ref_active)
  2378. {
2379. /* Calculate GF bits to be recovered:
2380. * projected size minus the average per-frame bits available for
2381. * inter frames over the clip as a whole
  2382. */
  2383. cpi->gf_overspend_bits += (cpi->projected_frame_size - cpi->inter_frame_target);
  2384. }
  2385. cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
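/* Illustrative numbers for the recovery above (example values only): a GF
 * that costs projected_frame_size = 40000 bits against an
 * inter_frame_target of 15000 adds 25000 to gf_overspend_bits; with
 * frames_till_gf_update_due = 10 the per-frame recovery
 * (non_gf_bitrate_adjustment) is 2500 bits.
 */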
  2386. }
  2387. /* Update data structure that monitors level of reference to last GF */
  2388. memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
  2389. cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
2390. /* this frame's refresh means subsequent frames don't refresh unless
2391. * specified by the user
  2392. */
  2393. cm->refresh_golden_frame = 0;
  2394. cpi->frames_since_golden = 0;
  2395. cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
  2396. cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
  2397. cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
  2398. cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
  2399. /* ******** Fixed Q test code only ************ */
  2400. /* If we are going to use the ALT reference for the next group of
  2401. * frames set a flag to say so.
  2402. */
  2403. if (cpi->oxcf.fixed_q >= 0 &&
  2404. cpi->oxcf.play_alternate && !cpi->common.refresh_alt_ref_frame)
  2405. {
  2406. cpi->source_alt_ref_pending = 1;
  2407. cpi->frames_till_gf_update_due = cpi->baseline_gf_interval;
  2408. }
  2409. if (!cpi->source_alt_ref_pending)
  2410. cpi->source_alt_ref_active = 0;
  2411. /* Decrement count down till next gf */
  2412. if (cpi->frames_till_gf_update_due > 0)
  2413. cpi->frames_till_gf_update_due--;
  2414. }
  2415. else if (!cpi->common.refresh_alt_ref_frame)
  2416. {
  2417. /* Decrement count down till next gf */
  2418. if (cpi->frames_till_gf_update_due > 0)
  2419. cpi->frames_till_gf_update_due--;
  2420. if (cpi->frames_till_alt_ref_frame)
  2421. cpi->frames_till_alt_ref_frame --;
  2422. cpi->frames_since_golden ++;
  2423. if (cpi->frames_since_golden > 1)
  2424. {
  2425. cpi->recent_ref_frame_usage[INTRA_FRAME] +=
  2426. cpi->mb.count_mb_ref_frame_usage[INTRA_FRAME];
  2427. cpi->recent_ref_frame_usage[LAST_FRAME] +=
  2428. cpi->mb.count_mb_ref_frame_usage[LAST_FRAME];
  2429. cpi->recent_ref_frame_usage[GOLDEN_FRAME] +=
  2430. cpi->mb.count_mb_ref_frame_usage[GOLDEN_FRAME];
  2431. cpi->recent_ref_frame_usage[ALTREF_FRAME] +=
  2432. cpi->mb.count_mb_ref_frame_usage[ALTREF_FRAME];
  2433. }
  2434. }
  2435. }
  2436. /* This function updates the reference frame probability estimates that
  2437. * will be used during mode selection
  2438. */
  2439. static void update_rd_ref_frame_probs(VP8_COMP *cpi)
  2440. {
  2441. VP8_COMMON *cm = &cpi->common;
  2442. const int *const rfct = cpi->mb.count_mb_ref_frame_usage;
  2443. const int rf_intra = rfct[INTRA_FRAME];
  2444. const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];
  2445. if (cm->frame_type == KEY_FRAME)
  2446. {
  2447. cpi->prob_intra_coded = 255;
  2448. cpi->prob_last_coded = 128;
  2449. cpi->prob_gf_coded = 128;
  2450. }
  2451. else if (!(rf_intra + rf_inter))
  2452. {
  2453. cpi->prob_intra_coded = 63;
  2454. cpi->prob_last_coded = 128;
  2455. cpi->prob_gf_coded = 128;
  2456. }
  2457. /* update reference frame costs since we can do better than what we got
  2458. * last frame.
  2459. */
  2460. if (cpi->oxcf.number_of_layers == 1)
  2461. {
  2462. if (cpi->common.refresh_alt_ref_frame)
  2463. {
  2464. cpi->prob_intra_coded += 40;
  2465. if (cpi->prob_intra_coded > 255)
  2466. cpi->prob_intra_coded = 255;
  2467. cpi->prob_last_coded = 200;
  2468. cpi->prob_gf_coded = 1;
  2469. }
  2470. else if (cpi->frames_since_golden == 0)
  2471. {
  2472. cpi->prob_last_coded = 214;
  2473. }
  2474. else if (cpi->frames_since_golden == 1)
  2475. {
  2476. cpi->prob_last_coded = 192;
  2477. cpi->prob_gf_coded = 220;
  2478. }
  2479. else if (cpi->source_alt_ref_active)
  2480. {
  2481. cpi->prob_gf_coded -= 20;
  2482. if (cpi->prob_gf_coded < 10)
  2483. cpi->prob_gf_coded = 10;
  2484. }
  2485. if (!cpi->source_alt_ref_active)
  2486. cpi->prob_gf_coded = 255;
  2487. }
  2488. }
  2489. /* 1 = key, 0 = inter */
  2490. static int decide_key_frame(VP8_COMP *cpi)
  2491. {
  2492. VP8_COMMON *cm = &cpi->common;
  2493. int code_key_frame = 0;
  2494. cpi->kf_boost = 0;
  2495. if (cpi->Speed > 11)
  2496. return 0;
  2497. /* Clear down mmx registers */
  2498. vp8_clear_system_state();
  2499. if ((cpi->compressor_speed == 2) && (cpi->Speed >= 5) && (cpi->sf.RD == 0))
  2500. {
  2501. double change = 1.0 * abs((int)(cpi->mb.intra_error -
  2502. cpi->last_intra_error)) / (1 + cpi->last_intra_error);
  2503. double change2 = 1.0 * abs((int)(cpi->mb.prediction_error -
  2504. cpi->last_prediction_error)) / (1 + cpi->last_prediction_error);
  2505. double minerror = cm->MBs * 256;
  2506. cpi->last_intra_error = cpi->mb.intra_error;
  2507. cpi->last_prediction_error = cpi->mb.prediction_error;
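/* Worked illustration of the test below (example numbers only): with
 * mb.intra_error = 120000 and mb.prediction_error = 100000 the ratio test
 * gives 10 * 120000 / 100001 = 11 < 15 (intra cost below ~1.5x inter cost);
 * if intra_error also moved from 100000 to 60000 since the last frame,
 * change = 40000 / 100001 ~= 0.40 > 0.25, so a key frame is chosen provided
 * prediction_error also exceeds minerror (cm->MBs * 256).
 */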
  2508. if (10 * cpi->mb.intra_error / (1 + cpi->mb.prediction_error) < 15
  2509. && cpi->mb.prediction_error > minerror
  2510. && (change > .25 || change2 > .25))
  2511. {
  2512. /*(change > 1.4 || change < .75)&& cpi->this_frame_percent_intra > cpi->last_frame_percent_intra + 3*/
  2513. return 1;
  2514. }
  2515. return 0;
  2516. }
  2517. /* If the following are true we might as well code a key frame */
  2518. if (((cpi->this_frame_percent_intra == 100) &&
  2519. (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 2))) ||
  2520. ((cpi->this_frame_percent_intra > 95) &&
  2521. (cpi->this_frame_percent_intra >= (cpi->last_frame_percent_intra + 5))))
  2522. {
  2523. code_key_frame = 1;
  2524. }
  2525. /* in addition if the following are true and this is not a golden frame
2526. * then code a key frame. Note that on golden frames there often seems
2527. * to be a pop in intra usage anyway, hence this restriction is
  2528. * designed to prevent spurious key frames. The Intra pop needs to be
  2529. * investigated.
  2530. */
  2531. else if (((cpi->this_frame_percent_intra > 60) &&
  2532. (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 2))) ||
  2533. ((cpi->this_frame_percent_intra > 75) &&
  2534. (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 3 / 2))) ||
  2535. ((cpi->this_frame_percent_intra > 90) &&
  2536. (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 10))))
  2537. {
  2538. if (!cm->refresh_golden_frame)
  2539. code_key_frame = 1;
  2540. }
  2541. return code_key_frame;
  2542. }
  2543. #if !(CONFIG_REALTIME_ONLY)
  2544. static void Pass1Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned int *frame_flags)
  2545. {
  2546. (void) size;
  2547. (void) dest;
  2548. (void) frame_flags;
  2549. vp8_set_quantizer(cpi, 26);
  2550. vp8_first_pass(cpi);
  2551. }
  2552. #endif
  2553. #if 0
  2554. void write_cx_frame_to_file(YV12_BUFFER_CONFIG *frame, int this_frame)
  2555. {
  2556. /* write the frame */
  2557. FILE *yframe;
  2558. int i;
  2559. char filename[255];
  2560. sprintf(filename, "cx\\y%04d.raw", this_frame);
  2561. yframe = fopen(filename, "wb");
  2562. for (i = 0; i < frame->y_height; i++)
  2563. fwrite(frame->y_buffer + i * frame->y_stride, frame->y_width, 1, yframe);
  2564. fclose(yframe);
  2565. sprintf(filename, "cx\\u%04d.raw", this_frame);
  2566. yframe = fopen(filename, "wb");
  2567. for (i = 0; i < frame->uv_height; i++)
  2568. fwrite(frame->u_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
  2569. fclose(yframe);
  2570. sprintf(filename, "cx\\v%04d.raw", this_frame);
  2571. yframe = fopen(filename, "wb");
  2572. for (i = 0; i < frame->uv_height; i++)
  2573. fwrite(frame->v_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);
  2574. fclose(yframe);
  2575. }
  2576. #endif
  2577. /* return of 0 means drop frame */
2578. /* Function to test for conditions that indicate we should loop
  2579. * back and recode a frame.
  2580. */
  2581. static int recode_loop_test( VP8_COMP *cpi,
  2582. int high_limit, int low_limit,
  2583. int q, int maxq, int minq )
  2584. {
  2585. int force_recode = 0;
  2586. VP8_COMMON *cm = &cpi->common;
2587. /* Is frame recode allowed at all?
2588. * Yes if either recode mode 1 is selected, or mode 2 is selected
2589. * and the frame is a key frame, golden frame or alt_ref_frame
  2590. */
  2591. if ( (cpi->sf.recode_loop == 1) ||
  2592. ( (cpi->sf.recode_loop == 2) &&
  2593. ( (cm->frame_type == KEY_FRAME) ||
  2594. cm->refresh_golden_frame ||
  2595. cm->refresh_alt_ref_frame ) ) )
  2596. {
  2597. /* General over and under shoot tests */
  2598. if ( ((cpi->projected_frame_size > high_limit) && (q < maxq)) ||
  2599. ((cpi->projected_frame_size < low_limit) && (q > minq)) )
  2600. {
  2601. force_recode = 1;
  2602. }
  2603. /* Special Constrained quality tests */
  2604. else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
  2605. {
  2606. /* Undershoot and below auto cq level */
  2607. if ( (q > cpi->cq_target_quality) &&
  2608. (cpi->projected_frame_size <
  2609. ((cpi->this_frame_target * 7) >> 3)))
  2610. {
  2611. force_recode = 1;
  2612. }
  2613. /* Severe undershoot and between auto and user cq level */
  2614. else if ( (q > cpi->oxcf.cq_level) &&
  2615. (cpi->projected_frame_size < cpi->min_frame_bandwidth) &&
  2616. (cpi->active_best_quality > cpi->oxcf.cq_level))
  2617. {
  2618. force_recode = 1;
  2619. cpi->active_best_quality = cpi->oxcf.cq_level;
  2620. }
  2621. }
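/* Example of the constrained quality tests above (illustrative numbers):
 * with this_frame_target = 8000 the undershoot threshold is
 * (8000 * 7) >> 3 = 7000, so a projected size of 6500 while Q is still
 * above cq_target_quality forces a recode at a lower Q.
 */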
  2622. }
  2623. return force_recode;
  2624. }
  2625. static void update_reference_frames(VP8_COMP *cpi)
  2626. {
  2627. VP8_COMMON *cm = &cpi->common;
  2628. YV12_BUFFER_CONFIG *yv12_fb = cm->yv12_fb;
  2629. /* At this point the new frame has been encoded.
  2630. * If any buffer copy / swapping is signaled it should be done here.
  2631. */
  2632. if (cm->frame_type == KEY_FRAME)
  2633. {
  2634. yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME | VP8_ALTR_FRAME ;
  2635. yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
  2636. yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
  2637. cm->alt_fb_idx = cm->gld_fb_idx = cm->new_fb_idx;
  2638. cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
  2639. cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
  2640. }
  2641. else /* For non key frames */
  2642. {
  2643. if (cm->refresh_alt_ref_frame)
  2644. {
  2645. assert(!cm->copy_buffer_to_arf);
  2646. cm->yv12_fb[cm->new_fb_idx].flags |= VP8_ALTR_FRAME;
  2647. cm->yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
  2648. cm->alt_fb_idx = cm->new_fb_idx;
  2649. cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
  2650. }
  2651. else if (cm->copy_buffer_to_arf)
  2652. {
  2653. assert(!(cm->copy_buffer_to_arf & ~0x3));
  2654. if (cm->copy_buffer_to_arf == 1)
  2655. {
  2656. if(cm->alt_fb_idx != cm->lst_fb_idx)
  2657. {
  2658. yv12_fb[cm->lst_fb_idx].flags |= VP8_ALTR_FRAME;
  2659. yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
  2660. cm->alt_fb_idx = cm->lst_fb_idx;
  2661. cpi->current_ref_frames[ALTREF_FRAME] =
  2662. cpi->current_ref_frames[LAST_FRAME];
  2663. }
  2664. }
  2665. else /* if (cm->copy_buffer_to_arf == 2) */
  2666. {
  2667. if(cm->alt_fb_idx != cm->gld_fb_idx)
  2668. {
  2669. yv12_fb[cm->gld_fb_idx].flags |= VP8_ALTR_FRAME;
  2670. yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
  2671. cm->alt_fb_idx = cm->gld_fb_idx;
  2672. cpi->current_ref_frames[ALTREF_FRAME] =
  2673. cpi->current_ref_frames[GOLDEN_FRAME];
  2674. }
  2675. }
  2676. }
  2677. if (cm->refresh_golden_frame)
  2678. {
  2679. assert(!cm->copy_buffer_to_gf);
  2680. cm->yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME;
  2681. cm->yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
  2682. cm->gld_fb_idx = cm->new_fb_idx;
  2683. cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
  2684. }
  2685. else if (cm->copy_buffer_to_gf)
  2686. {
2687. assert(!(cm->copy_buffer_to_gf & ~0x3));
  2688. if (cm->copy_buffer_to_gf == 1)
  2689. {
  2690. if(cm->gld_fb_idx != cm->lst_fb_idx)
  2691. {
  2692. yv12_fb[cm->lst_fb_idx].flags |= VP8_GOLD_FRAME;
  2693. yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
  2694. cm->gld_fb_idx = cm->lst_fb_idx;
  2695. cpi->current_ref_frames[GOLDEN_FRAME] =
  2696. cpi->current_ref_frames[LAST_FRAME];
  2697. }
  2698. }
  2699. else /* if (cm->copy_buffer_to_gf == 2) */
  2700. {
  2701. if(cm->alt_fb_idx != cm->gld_fb_idx)
  2702. {
  2703. yv12_fb[cm->alt_fb_idx].flags |= VP8_GOLD_FRAME;
  2704. yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
  2705. cm->gld_fb_idx = cm->alt_fb_idx;
  2706. cpi->current_ref_frames[GOLDEN_FRAME] =
  2707. cpi->current_ref_frames[ALTREF_FRAME];
  2708. }
  2709. }
  2710. }
  2711. }
  2712. if (cm->refresh_last_frame)
  2713. {
  2714. cm->yv12_fb[cm->new_fb_idx].flags |= VP8_LAST_FRAME;
  2715. cm->yv12_fb[cm->lst_fb_idx].flags &= ~VP8_LAST_FRAME;
  2716. cm->lst_fb_idx = cm->new_fb_idx;
  2717. cpi->current_ref_frames[LAST_FRAME] = cm->current_video_frame;
  2718. }
  2719. #if CONFIG_TEMPORAL_DENOISING
  2720. if (cpi->oxcf.noise_sensitivity)
  2721. {
2722. /* we shouldn't have to keep multiple copies as we know in advance which
2723. * buffer we should start from - for now, to get something up and running,
2724. * I've chosen to copy the buffers
  2725. */
  2726. if (cm->frame_type == KEY_FRAME)
  2727. {
  2728. int i;
  2729. for (i = LAST_FRAME; i < MAX_REF_FRAMES; ++i)
  2730. vp8_yv12_copy_frame(cpi->Source,
  2731. &cpi->denoiser.yv12_running_avg[i]);
  2732. }
  2733. else /* For non key frames */
  2734. {
  2735. vp8_yv12_extend_frame_borders(
  2736. &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
  2737. if (cm->refresh_alt_ref_frame || cm->copy_buffer_to_arf)
  2738. {
  2739. vp8_yv12_copy_frame(
  2740. &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
  2741. &cpi->denoiser.yv12_running_avg[ALTREF_FRAME]);
  2742. }
  2743. if (cm->refresh_golden_frame || cm->copy_buffer_to_gf)
  2744. {
  2745. vp8_yv12_copy_frame(
  2746. &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
  2747. &cpi->denoiser.yv12_running_avg[GOLDEN_FRAME]);
  2748. }
  2749. if(cm->refresh_last_frame)
  2750. {
  2751. vp8_yv12_copy_frame(
  2752. &cpi->denoiser.yv12_running_avg[INTRA_FRAME],
  2753. &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
  2754. }
  2755. }
  2756. if (cpi->oxcf.noise_sensitivity == 4)
  2757. vp8_yv12_copy_frame(cpi->Source, &cpi->denoiser.yv12_last_source);
  2758. }
  2759. #endif
  2760. }
  2761. static int measure_square_diff_partial(YV12_BUFFER_CONFIG *source,
  2762. YV12_BUFFER_CONFIG *dest,
  2763. VP8_COMP *cpi)
  2764. {
  2765. int i, j;
  2766. int Total = 0;
  2767. int num_blocks = 0;
  2768. int skip = 2;
  2769. int min_consec_zero_last = 10;
  2770. int tot_num_blocks = (source->y_height * source->y_width) >> 8;
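/* (y_height * y_width) >> 8 is the frame size in 16x16 blocks;
 * e.g. 640x480 gives 307200 >> 8 = 1200 blocks.
 */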
  2771. unsigned char *src = source->y_buffer;
  2772. unsigned char *dst = dest->y_buffer;
2773. /* Loop through the Y plane, every |skip| blocks along rows and columns,
2774. * summing the square differences, and only for blocks that have been
2775. * in zero_last mode for at least |min_consec_zero_last| frames in a row.
  2776. */
  2777. for (i = 0; i < source->y_height; i += 16 * skip)
  2778. {
  2779. int block_index_row = (i >> 4) * cpi->common.mb_cols;
  2780. for (j = 0; j < source->y_width; j += 16 * skip)
  2781. {
  2782. int index = block_index_row + (j >> 4);
  2783. if (cpi->consec_zero_last[index] >= min_consec_zero_last) {
  2784. unsigned int sse;
  2785. Total += vpx_mse16x16(src + j,
  2786. source->y_stride,
  2787. dst + j, dest->y_stride,
  2788. &sse);
  2789. num_blocks++;
  2790. }
  2791. }
  2792. src += 16 * skip * source->y_stride;
  2793. dst += 16 * skip * dest->y_stride;
  2794. }
  2795. // Only return non-zero if we have at least ~1/16 samples for estimate.
  2796. if (num_blocks > (tot_num_blocks >> 4)) {
  2797. return (Total / num_blocks);
  2798. } else {
  2799. return 0;
  2800. }
  2801. }
  2802. #if CONFIG_TEMPORAL_DENOISING
  2803. static void process_denoiser_mode_change(VP8_COMP *cpi) {
  2804. const VP8_COMMON *const cm = &cpi->common;
  2805. int i, j;
  2806. int total = 0;
  2807. int num_blocks = 0;
  2808. // Number of blocks skipped along row/column in computing the
  2809. // nmse (normalized mean square error) of source.
  2810. int skip = 2;
  2811. // Only select blocks for computing nmse that have been encoded
  2812. // as ZERO LAST min_consec_zero_last frames in a row.
  2813. // Scale with number of temporal layers.
  2814. int min_consec_zero_last = 12 / cpi->oxcf.number_of_layers;
  2815. // Decision is tested for changing the denoising mode every
  2816. // num_mode_change times this function is called. Note that this
2817. // function is called every 8 frames, so (8 * num_mode_change) is the number
  2818. // of frames where denoising mode change is tested for switch.
  2819. int num_mode_change = 20;
  2820. // Framerate factor, to compensate for larger mse at lower framerates.
  2821. // Use ref_framerate, which is full source framerate for temporal layers.
  2822. // TODO(marpan): Adjust this factor.
  2823. int fac_framerate = cpi->ref_framerate < 25.0f ? 80 : 100;
  2824. int tot_num_blocks = cm->mb_rows * cm->mb_cols;
  2825. int ystride = cpi->Source->y_stride;
  2826. unsigned char *src = cpi->Source->y_buffer;
  2827. unsigned char *dst = cpi->denoiser.yv12_last_source.y_buffer;
  2828. static const unsigned char const_source[16] = {
  2829. 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
  2830. 128, 128, 128};
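/* const_source is a flat 16x16 block of 128s; measuring the variance of a
 * source block against it (with stride 0) yields, up to rounding, the
 * block's own activity, which is used below to normalise the
 * source-vs-previous-frame sse. e.g. sse = 400 over a block with
 * activity 100 contributes 4 to total.
 */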
  2831. int bandwidth = (int)(cpi->target_bandwidth);
  2832. // For temporal layers, use full bandwidth (top layer).
  2833. if (cpi->oxcf.number_of_layers > 1) {
  2834. LAYER_CONTEXT *lc = &cpi->layer_context[cpi->oxcf.number_of_layers - 1];
  2835. bandwidth = (int)(lc->target_bandwidth);
  2836. }
  2837. // Loop through the Y plane, every skip blocks along rows and columns,
  2838. // summing the normalized mean square error, only for blocks that have
2839. // been encoded as ZEROMV LAST for at least min_consec_zero_last frames in
  2840. // a row and have small sum difference between current and previous frame.
  2841. // Normalization here is by the contrast of the current frame block.
  2842. for (i = 0; i < cm->Height; i += 16 * skip) {
  2843. int block_index_row = (i >> 4) * cm->mb_cols;
  2844. for (j = 0; j < cm->Width; j += 16 * skip) {
  2845. int index = block_index_row + (j >> 4);
  2846. if (cpi->consec_zero_last[index] >= min_consec_zero_last) {
  2847. unsigned int sse;
  2848. const unsigned int var = vpx_variance16x16(src + j,
  2849. ystride,
  2850. dst + j,
  2851. ystride,
  2852. &sse);
  2853. // Only consider this block as valid for noise measurement
  2854. // if the sum_diff average of the current and previous frame
  2855. // is small (to avoid effects from lighting change).
  2856. if ((sse - var) < 128) {
  2857. unsigned int sse2;
  2858. const unsigned int act = vpx_variance16x16(src + j,
  2859. ystride,
  2860. const_source,
  2861. 0,
  2862. &sse2);
  2863. if (act > 0)
  2864. total += sse / act;
  2865. num_blocks++;
  2866. }
  2867. }
  2868. }
  2869. src += 16 * skip * ystride;
  2870. dst += 16 * skip * ystride;
  2871. }
  2872. total = total * fac_framerate / 100;
  2873. // Only consider this frame as valid sample if we have computed nmse over
  2874. // at least ~1/16 blocks, and Total > 0 (Total == 0 can happen if the
  2875. // application inputs duplicate frames, or contrast is all zero).
  2876. if (total > 0 &&
  2877. (num_blocks > (tot_num_blocks >> 4))) {
  2878. // Update the recursive mean square source_diff.
  2879. total = (total << 8) / num_blocks;
  2880. if (cpi->denoiser.nmse_source_diff_count == 0) {
  2881. // First sample in new interval.
  2882. cpi->denoiser.nmse_source_diff = total;
  2883. cpi->denoiser.qp_avg = cm->base_qindex;
  2884. } else {
  2885. // For subsequent samples, use average with weight ~1/4 for new sample.
  2886. cpi->denoiser.nmse_source_diff = (int)((total +
  2887. 3 * cpi->denoiser.nmse_source_diff) >> 2);
  2888. cpi->denoiser.qp_avg = (int)((cm->base_qindex +
  2889. 3 * cpi->denoiser.qp_avg) >> 2);
  2890. }
  2891. cpi->denoiser.nmse_source_diff_count++;
  2892. }
  2893. // Check for changing the denoiser mode, when we have obtained #samples =
  2894. // num_mode_change. Condition the change also on the bitrate and QP.
  2895. if (cpi->denoiser.nmse_source_diff_count == num_mode_change) {
  2896. // Check for going up: from normal to aggressive mode.
  2897. if ((cpi->denoiser.denoiser_mode == kDenoiserOnYUV) &&
  2898. (cpi->denoiser.nmse_source_diff >
  2899. cpi->denoiser.threshold_aggressive_mode) &&
  2900. (cpi->denoiser.qp_avg < cpi->denoiser.qp_threshold_up &&
  2901. bandwidth > cpi->denoiser.bitrate_threshold)) {
  2902. vp8_denoiser_set_parameters(&cpi->denoiser, kDenoiserOnYUVAggressive);
  2903. } else {
  2904. // Check for going down: from aggressive to normal mode.
  2905. if (((cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive) &&
  2906. (cpi->denoiser.nmse_source_diff <
  2907. cpi->denoiser.threshold_aggressive_mode)) ||
  2908. ((cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive) &&
  2909. (cpi->denoiser.qp_avg > cpi->denoiser.qp_threshold_down ||
  2910. bandwidth < cpi->denoiser.bitrate_threshold))) {
  2911. vp8_denoiser_set_parameters(&cpi->denoiser, kDenoiserOnYUV);
  2912. }
  2913. }
  2914. // Reset metric and counter for next interval.
  2915. cpi->denoiser.nmse_source_diff = 0;
  2916. cpi->denoiser.qp_avg = 0;
  2917. cpi->denoiser.nmse_source_diff_count = 0;
  2918. }
  2919. }
  2920. #endif
  2921. void vp8_loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm)
  2922. {
  2923. const FRAME_TYPE frame_type = cm->frame_type;
  2924. int update_any_ref_buffers = 1;
  2925. if (cpi->common.refresh_last_frame == 0 &&
  2926. cpi->common.refresh_golden_frame == 0 &&
  2927. cpi->common.refresh_alt_ref_frame == 0) {
  2928. update_any_ref_buffers = 0;
  2929. }
  2930. if (cm->no_lpf)
  2931. {
  2932. cm->filter_level = 0;
  2933. }
  2934. else
  2935. {
  2936. struct vpx_usec_timer timer;
  2937. vp8_clear_system_state();
  2938. vpx_usec_timer_start(&timer);
  2939. if (cpi->sf.auto_filter == 0) {
  2940. #if CONFIG_TEMPORAL_DENOISING
  2941. if (cpi->oxcf.noise_sensitivity && cm->frame_type != KEY_FRAME) {
  2942. // Use the denoised buffer for selecting base loop filter level.
  2943. // Denoised signal for current frame is stored in INTRA_FRAME.
  2944. // No denoising on key frames.
  2945. vp8cx_pick_filter_level_fast(
  2946. &cpi->denoiser.yv12_running_avg[INTRA_FRAME], cpi);
  2947. } else {
  2948. vp8cx_pick_filter_level_fast(cpi->Source, cpi);
  2949. }
  2950. #else
  2951. vp8cx_pick_filter_level_fast(cpi->Source, cpi);
  2952. #endif
  2953. } else {
  2954. #if CONFIG_TEMPORAL_DENOISING
  2955. if (cpi->oxcf.noise_sensitivity && cm->frame_type != KEY_FRAME) {
  2956. // Use the denoised buffer for selecting base loop filter level.
  2957. // Denoised signal for current frame is stored in INTRA_FRAME.
  2958. // No denoising on key frames.
  2959. vp8cx_pick_filter_level(
  2960. &cpi->denoiser.yv12_running_avg[INTRA_FRAME], cpi);
  2961. } else {
  2962. vp8cx_pick_filter_level(cpi->Source, cpi);
  2963. }
  2964. #else
  2965. vp8cx_pick_filter_level(cpi->Source, cpi);
  2966. #endif
  2967. }
  2968. if (cm->filter_level > 0)
  2969. {
  2970. vp8cx_set_alt_lf_level(cpi, cm->filter_level);
  2971. }
  2972. vpx_usec_timer_mark(&timer);
  2973. cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
  2974. }
  2975. #if CONFIG_MULTITHREAD
  2976. if (cpi->b_multi_threaded)
  2977. sem_post(&cpi->h_event_end_lpf); /* signal that we have set filter_level */
  2978. #endif
  2979. // No need to apply loop-filter if the encoded frame does not update
  2980. // any reference buffers.
  2981. if (cm->filter_level > 0 && update_any_ref_buffers)
  2982. {
  2983. vp8_loop_filter_frame(cm, &cpi->mb.e_mbd, frame_type);
  2984. }
  2985. vp8_yv12_extend_frame_borders(cm->frame_to_show);
  2986. }
  2987. static void encode_frame_to_data_rate
  2988. (
  2989. VP8_COMP *cpi,
  2990. unsigned long *size,
  2991. unsigned char *dest,
  2992. unsigned char* dest_end,
  2993. unsigned int *frame_flags
  2994. )
  2995. {
  2996. int Q;
  2997. int frame_over_shoot_limit;
  2998. int frame_under_shoot_limit;
  2999. int Loop = 0;
  3000. int loop_count;
  3001. VP8_COMMON *cm = &cpi->common;
  3002. int active_worst_qchanged = 0;
  3003. #if !(CONFIG_REALTIME_ONLY)
  3004. int q_low;
  3005. int q_high;
  3006. int zbin_oq_high;
  3007. int zbin_oq_low = 0;
  3008. int top_index;
  3009. int bottom_index;
  3010. int overshoot_seen = 0;
  3011. int undershoot_seen = 0;
  3012. #endif
  3013. int drop_mark = (int)(cpi->oxcf.drop_frames_water_mark *
  3014. cpi->oxcf.optimal_buffer_level / 100);
  3015. int drop_mark75 = drop_mark * 2 / 3;
  3016. int drop_mark50 = drop_mark / 4;
  3017. int drop_mark25 = drop_mark / 8;
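/* Illustrative numbers: with drop_frames_water_mark = 50 and
 * optimal_buffer_level = 6,000,000 bits, drop_mark = 3,000,000,
 * drop_mark75 = 2,000,000, drop_mark50 = 750,000 and drop_mark25 = 375,000
 * (i.e. despite the names these are 2/3, 1/4 and 1/8 of drop_mark).
 */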
  3018. /* Clear down mmx registers to allow floating point in what follows */
  3019. vp8_clear_system_state();
  3020. #if CONFIG_MULTITHREAD
3021. /* wait for the loopfilter thread of the last picture to finish */
  3022. if (cpi->b_lpf_running)
  3023. {
  3024. sem_wait(&cpi->h_event_end_lpf);
  3025. cpi->b_lpf_running = 0;
  3026. }
  3027. #endif
  3028. if(cpi->force_next_frame_intra)
  3029. {
  3030. cm->frame_type = KEY_FRAME; /* delayed intra frame */
  3031. cpi->force_next_frame_intra = 0;
  3032. }
  3033. /* For an alt ref frame in 2 pass we skip the call to the second pass
  3034. * function that sets the target bandwidth
  3035. */
  3036. #if !(CONFIG_REALTIME_ONLY)
  3037. if (cpi->pass == 2)
  3038. {
  3039. if (cpi->common.refresh_alt_ref_frame)
  3040. {
  3041. /* Per frame bit target for the alt ref frame */
  3042. cpi->per_frame_bandwidth = cpi->twopass.gf_bits;
  3043. /* per second target bitrate */
  3044. cpi->target_bandwidth = (int)(cpi->twopass.gf_bits *
  3045. cpi->output_framerate);
  3046. }
  3047. }
  3048. else
  3049. #endif
  3050. cpi->per_frame_bandwidth = (int)(cpi->target_bandwidth / cpi->output_framerate);
  3051. /* Default turn off buffer to buffer copying */
  3052. cm->copy_buffer_to_gf = 0;
  3053. cm->copy_buffer_to_arf = 0;
  3054. /* Clear zbin over-quant value and mode boost values. */
  3055. cpi->mb.zbin_over_quant = 0;
  3056. cpi->mb.zbin_mode_boost = 0;
  3057. /* Enable or disable mode based tweaking of the zbin
  3058. * For 2 Pass Only used where GF/ARF prediction quality
  3059. * is above a threshold
  3060. */
  3061. cpi->mb.zbin_mode_boost_enabled = 1;
  3062. if (cpi->pass == 2)
  3063. {
  3064. if ( cpi->gfu_boost <= 400 )
  3065. {
  3066. cpi->mb.zbin_mode_boost_enabled = 0;
  3067. }
  3068. }
  3069. /* Current default encoder behaviour for the altref sign bias */
  3070. if (cpi->source_alt_ref_active)
  3071. cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
  3072. else
  3073. cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 0;
  3074. /* Check to see if a key frame is signaled
  3075. * For two pass with auto key frame enabled cm->frame_type may already
  3076. * be set, but not for one pass.
  3077. */
  3078. if ((cm->current_video_frame == 0) ||
  3079. (cm->frame_flags & FRAMEFLAGS_KEY) ||
  3080. (cpi->oxcf.auto_key && (cpi->frames_since_key % cpi->key_frame_frequency == 0)))
  3081. {
  3082. /* Key frame from VFW/auto-keyframe/first frame */
  3083. cm->frame_type = KEY_FRAME;
  3084. #if CONFIG_TEMPORAL_DENOISING
  3085. if (cpi->oxcf.noise_sensitivity == 4) {
  3086. // For adaptive mode, reset denoiser to normal mode on key frame.
  3087. vp8_denoiser_set_parameters(&cpi->denoiser, kDenoiserOnYUV);
  3088. }
  3089. #endif
  3090. }
  3091. #if CONFIG_MULTI_RES_ENCODING
  3092. if (cpi->oxcf.mr_total_resolutions > 1) {
  3093. LOWER_RES_FRAME_INFO* low_res_frame_info
  3094. = (LOWER_RES_FRAME_INFO*)cpi->oxcf.mr_low_res_mode_info;
  3095. if (cpi->oxcf.mr_encoder_id) {
  3096. // TODO(marpan): This constraint shouldn't be needed, as we would like
  3097. // to allow for key frame setting (forced or periodic) defined per
  3098. // spatial layer. For now, keep this in.
  3099. cm->frame_type = low_res_frame_info->frame_type;
  3100. // Check if lower resolution is available for motion vector reuse.
  3101. if(cm->frame_type != KEY_FRAME)
  3102. {
  3103. cpi->mr_low_res_mv_avail = 1;
  3104. cpi->mr_low_res_mv_avail &= !(low_res_frame_info->is_frame_dropped);
  3105. if (cpi->ref_frame_flags & VP8_LAST_FRAME)
  3106. cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[LAST_FRAME]
  3107. == low_res_frame_info->low_res_ref_frames[LAST_FRAME]);
  3108. if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
  3109. cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[GOLDEN_FRAME]
  3110. == low_res_frame_info->low_res_ref_frames[GOLDEN_FRAME]);
  3111. // Don't use altref to determine whether low res is available.
  3112. // TODO (marpan): Should we make this type of condition on a
  3113. // per-reference frame basis?
  3114. /*
  3115. if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
  3116. cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[ALTREF_FRAME]
  3117. == low_res_frame_info->low_res_ref_frames[ALTREF_FRAME]);
  3118. */
  3119. }
  3120. }
  3121. // On a key frame: For the lowest resolution, keep track of the key frame
  3122. // counter value. For the higher resolutions, reset the current video
  3123. // frame counter to that of the lowest resolution.
3124. // This is done to handle the case where we may stop/start encoding
  3125. // higher layer(s). The restart-encoding of higher layer is only signaled
  3126. // by a key frame for now.
  3127. // TODO (marpan): Add flag to indicate restart-encoding of higher layer.
  3128. if (cm->frame_type == KEY_FRAME) {
  3129. if (cpi->oxcf.mr_encoder_id) {
  3130. // If the initial starting value of the buffer level is zero (this can
  3131. // happen because we may have not started encoding this higher stream),
  3132. // then reset it to non-zero value based on |starting_buffer_level|.
  3133. if (cpi->common.current_video_frame == 0 && cpi->buffer_level == 0) {
  3134. unsigned int i;
  3135. cpi->bits_off_target = cpi->oxcf.starting_buffer_level;
  3136. cpi->buffer_level = cpi->oxcf.starting_buffer_level;
  3137. for (i = 0; i < cpi->oxcf.number_of_layers; i++) {
  3138. LAYER_CONTEXT *lc = &cpi->layer_context[i];
  3139. lc->bits_off_target = lc->starting_buffer_level;
  3140. lc->buffer_level = lc->starting_buffer_level;
  3141. }
  3142. }
  3143. cpi->common.current_video_frame =
  3144. low_res_frame_info->key_frame_counter_value;
  3145. } else {
  3146. low_res_frame_info->key_frame_counter_value =
  3147. cpi->common.current_video_frame;
  3148. }
  3149. }
  3150. }
  3151. #endif
  3152. // Find the reference frame closest to the current frame.
  3153. cpi->closest_reference_frame = LAST_FRAME;
  3154. if(cm->frame_type != KEY_FRAME) {
  3155. int i;
  3156. MV_REFERENCE_FRAME closest_ref = INTRA_FRAME;
  3157. if (cpi->ref_frame_flags & VP8_LAST_FRAME) {
  3158. closest_ref = LAST_FRAME;
  3159. } else if (cpi->ref_frame_flags & VP8_GOLD_FRAME) {
  3160. closest_ref = GOLDEN_FRAME;
  3161. } else if (cpi->ref_frame_flags & VP8_ALTR_FRAME) {
  3162. closest_ref = ALTREF_FRAME;
  3163. }
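/* The loop below walks MV_REFERENCE_FRAME indices 1..3 (LAST, GOLDEN,
 * ALTREF); ref_frame_flags uses the bit flags VP8_LAST_FRAME = 1,
 * VP8_GOLD_FRAME = 2 and VP8_ALTR_FRAME = 4, hence the (i == 3) ? 4 : i
 * conversion for the altref bit.
 */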
  3164. for(i = 1; i <= 3; i++) {
  3165. vpx_ref_frame_type_t ref_frame_type = (vpx_ref_frame_type_t)
  3166. ((i == 3) ? 4 : i);
  3167. if (cpi->ref_frame_flags & ref_frame_type) {
  3168. if ((cm->current_video_frame - cpi->current_ref_frames[i]) <
  3169. (cm->current_video_frame - cpi->current_ref_frames[closest_ref])) {
  3170. closest_ref = i;
  3171. }
  3172. }
  3173. }
  3174. cpi->closest_reference_frame = closest_ref;
  3175. }
  3176. /* Set various flags etc to special state if it is a key frame */
  3177. if (cm->frame_type == KEY_FRAME)
  3178. {
  3179. int i;
  3180. // Set the loop filter deltas and segmentation map update
  3181. setup_features(cpi);
  3182. /* The alternate reference frame cannot be active for a key frame */
  3183. cpi->source_alt_ref_active = 0;
  3184. /* Reset the RD threshold multipliers to default of * 1 (128) */
  3185. for (i = 0; i < MAX_MODES; i++)
  3186. {
  3187. cpi->mb.rd_thresh_mult[i] = 128;
  3188. }
  3189. // Reset the zero_last counter to 0 on key frame.
  3190. memset(cpi->consec_zero_last, 0, cm->mb_rows * cm->mb_cols);
  3191. memset(cpi->consec_zero_last_mvbias, 0,
  3192. (cpi->common.mb_rows * cpi->common.mb_cols));
  3193. }
  3194. #if 0
  3195. /* Experimental code for lagged compress and one pass
  3196. * Initialise one_pass GF frames stats
  3197. * Update stats used for GF selection
  3198. */
  3199. {
  3200. cpi->one_pass_frame_index = cm->current_video_frame % MAX_LAG_BUFFERS;
  3201. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frames_so_far = 0;
  3202. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_intra_error = 0.0;
  3203. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_coded_error = 0.0;
  3204. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_inter = 0.0;
  3205. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_motion = 0.0;
  3206. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr = 0.0;
  3207. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr_abs = 0.0;
  3208. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc = 0.0;
  3209. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc_abs = 0.0;
  3210. }
  3211. #endif
  3212. update_rd_ref_frame_probs(cpi);
  3213. if (cpi->drop_frames_allowed)
  3214. {
  3215. /* The reset to decimation 0 is only done here for one pass.
  3216. * Once it is set two pass leaves decimation on till the next kf.
  3217. */
  3218. if ((cpi->buffer_level > drop_mark) && (cpi->decimation_factor > 0))
  3219. cpi->decimation_factor --;
  3220. if (cpi->buffer_level > drop_mark75 && cpi->decimation_factor > 0)
  3221. cpi->decimation_factor = 1;
  3222. else if (cpi->buffer_level < drop_mark25 && (cpi->decimation_factor == 2 || cpi->decimation_factor == 3))
  3223. {
  3224. cpi->decimation_factor = 3;
  3225. }
  3226. else if (cpi->buffer_level < drop_mark50 && (cpi->decimation_factor == 1 || cpi->decimation_factor == 2))
  3227. {
  3228. cpi->decimation_factor = 2;
  3229. }
  3230. else if (cpi->buffer_level < drop_mark75 && (cpi->decimation_factor == 0 || cpi->decimation_factor == 1))
  3231. {
  3232. cpi->decimation_factor = 1;
  3233. }
  3234. }
  3235. /* The following decimates the frame rate according to a regular
  3236. * pattern (i.e. to 1/2 or 2/3 frame rate) This can be used to help
  3237. * prevent buffer under-run in CBR mode. Alternatively it might be
  3238. * desirable in some situations to drop frame rate but throw more bits
  3239. * at each frame.
  3240. *
  3241. * Note that dropping a key frame can be problematic if spatial
  3242. * resampling is also active
  3243. */
  3244. if (cpi->decimation_factor > 0)
  3245. {
  3246. switch (cpi->decimation_factor)
  3247. {
  3248. case 1:
  3249. cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 3 / 2;
  3250. break;
  3251. case 2:
  3252. cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
  3253. break;
  3254. case 3:
  3255. cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
  3256. break;
  3257. }
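/* Example: a per_frame_bandwidth of 10000 bits becomes 15000 with
 * decimation factor 1 and 12500 with factors 2 or 3, compensating the
 * frames that are actually coded for those that are dropped.
 */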
  3258. /* Note that we should not throw out a key frame (especially when
  3259. * spatial resampling is enabled).
  3260. */
  3261. if (cm->frame_type == KEY_FRAME)
  3262. {
  3263. cpi->decimation_count = cpi->decimation_factor;
  3264. }
  3265. else if (cpi->decimation_count > 0)
  3266. {
  3267. cpi->decimation_count --;
  3268. cpi->bits_off_target += cpi->av_per_frame_bandwidth;
  3269. if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
  3270. cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
  3271. #if CONFIG_MULTI_RES_ENCODING
  3272. vp8_store_drop_frame_info(cpi);
  3273. #endif
  3274. cm->current_video_frame++;
  3275. cpi->frames_since_key++;
  3276. // We advance the temporal pattern for dropped frames.
  3277. cpi->temporal_pattern_counter++;
  3278. #if CONFIG_INTERNAL_STATS
  3279. cpi->count ++;
  3280. #endif
  3281. cpi->buffer_level = cpi->bits_off_target;
  3282. if (cpi->oxcf.number_of_layers > 1)
  3283. {
  3284. unsigned int i;
  3285. /* Propagate bits saved by dropping the frame to higher
  3286. * layers
  3287. */
  3288. for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
  3289. {
  3290. LAYER_CONTEXT *lc = &cpi->layer_context[i];
  3291. lc->bits_off_target += (int)(lc->target_bandwidth /
  3292. lc->framerate);
  3293. if (lc->bits_off_target > lc->maximum_buffer_size)
  3294. lc->bits_off_target = lc->maximum_buffer_size;
  3295. lc->buffer_level = lc->bits_off_target;
  3296. }
  3297. }
  3298. return;
  3299. }
  3300. else
  3301. cpi->decimation_count = cpi->decimation_factor;
  3302. }
  3303. else
  3304. cpi->decimation_count = 0;
  3305. /* Decide how big to make the frame */
  3306. if (!vp8_pick_frame_size(cpi))
  3307. {
3308. /* TODO: the two drop_frame-and-return code paths could be combined. */
  3309. #if CONFIG_MULTI_RES_ENCODING
  3310. vp8_store_drop_frame_info(cpi);
  3311. #endif
  3312. cm->current_video_frame++;
  3313. cpi->frames_since_key++;
  3314. // We advance the temporal pattern for dropped frames.
  3315. cpi->temporal_pattern_counter++;
  3316. return;
  3317. }
  3318. /* Reduce active_worst_allowed_q for CBR if our buffer is getting too full.
  3319. * This has a knock on effect on active best quality as well.
  3320. * For CBR if the buffer reaches its maximum level then we can no longer
  3321. * save up bits for later frames so we might as well use them up
  3322. * on the current frame.
  3323. */
  3324. if ((cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) &&
  3325. (cpi->buffer_level >= cpi->oxcf.optimal_buffer_level) && cpi->buffered_mode)
  3326. {
  3327. /* Max adjustment is 1/4 */
  3328. int Adjustment = cpi->active_worst_quality / 4;
  3329. if (Adjustment)
  3330. {
  3331. int buff_lvl_step;
  3332. if (cpi->buffer_level < cpi->oxcf.maximum_buffer_size)
  3333. {
  3334. buff_lvl_step = (int)
  3335. ((cpi->oxcf.maximum_buffer_size -
  3336. cpi->oxcf.optimal_buffer_level) /
  3337. Adjustment);
  3338. if (buff_lvl_step)
  3339. Adjustment = (int)
  3340. ((cpi->buffer_level -
  3341. cpi->oxcf.optimal_buffer_level) /
  3342. buff_lvl_step);
  3343. else
  3344. Adjustment = 0;
  3345. }
  3346. cpi->active_worst_quality -= Adjustment;
  3347. if(cpi->active_worst_quality < cpi->active_best_quality)
  3348. cpi->active_worst_quality = cpi->active_best_quality;
  3349. }
  3350. }
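/* Illustrative numbers for the adjustment above: with
 * active_worst_quality = 40 the maximum Adjustment is 10; if
 * optimal_buffer_level = 6,000,000, maximum_buffer_size = 12,000,000 and
 * buffer_level = 9,000,000 then buff_lvl_step = 600,000, Adjustment = 5
 * and active_worst_quality drops to 35.
 */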
  3351. /* Set an active best quality and if necessary active worst quality
  3352. * There is some odd behavior for one pass here that needs attention.
  3353. */
  3354. if ( (cpi->pass == 2) || (cpi->ni_frames > 150))
  3355. {
  3356. vp8_clear_system_state();
  3357. Q = cpi->active_worst_quality;
  3358. if ( cm->frame_type == KEY_FRAME )
  3359. {
  3360. if ( cpi->pass == 2 )
  3361. {
  3362. if (cpi->gfu_boost > 600)
  3363. cpi->active_best_quality = kf_low_motion_minq[Q];
  3364. else
  3365. cpi->active_best_quality = kf_high_motion_minq[Q];
  3366. /* Special case for key frames forced because we have reached
  3367. * the maximum key frame interval. Here force the Q to a range
  3368. * based on the ambient Q to reduce the risk of popping
  3369. */
  3370. if ( cpi->this_key_frame_forced )
  3371. {
  3372. if ( cpi->active_best_quality > cpi->avg_frame_qindex * 7/8)
  3373. cpi->active_best_quality = cpi->avg_frame_qindex * 7/8;
  3374. else if ( cpi->active_best_quality < cpi->avg_frame_qindex >> 2 )
  3375. cpi->active_best_quality = cpi->avg_frame_qindex >> 2;
  3376. }
  3377. }
  3378. /* One pass more conservative */
  3379. else
  3380. cpi->active_best_quality = kf_high_motion_minq[Q];
  3381. }
  3382. else if (cpi->oxcf.number_of_layers==1 &&
  3383. (cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame))
  3384. {
  3385. /* Use the lower of cpi->active_worst_quality and recent
  3386. * average Q as basis for GF/ARF Q limit unless last frame was
  3387. * a key frame.
  3388. */
  3389. if ( (cpi->frames_since_key > 1) &&
  3390. (cpi->avg_frame_qindex < cpi->active_worst_quality) )
  3391. {
  3392. Q = cpi->avg_frame_qindex;
  3393. }
3394. /* For constrained quality don't allow Q less than the cq level */
  3395. if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
  3396. (Q < cpi->cq_target_quality) )
  3397. {
  3398. Q = cpi->cq_target_quality;
  3399. }
  3400. if ( cpi->pass == 2 )
  3401. {
  3402. if ( cpi->gfu_boost > 1000 )
  3403. cpi->active_best_quality = gf_low_motion_minq[Q];
  3404. else if ( cpi->gfu_boost < 400 )
  3405. cpi->active_best_quality = gf_high_motion_minq[Q];
  3406. else
  3407. cpi->active_best_quality = gf_mid_motion_minq[Q];
3408. /* Constrained quality uses a slightly lower active best. */
  3409. if ( cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY )
  3410. {
  3411. cpi->active_best_quality =
  3412. cpi->active_best_quality * 15/16;
  3413. }
  3414. }
  3415. /* One pass more conservative */
  3416. else
  3417. cpi->active_best_quality = gf_high_motion_minq[Q];
  3418. }
  3419. else
  3420. {
  3421. cpi->active_best_quality = inter_minq[Q];
3422. /* For the constant/constrained quality mode we don't want
  3423. * q to fall below the cq level.
  3424. */
  3425. if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
  3426. (cpi->active_best_quality < cpi->cq_target_quality) )
  3427. {
  3428. /* If we are strongly undershooting the target rate in the last
  3429. * frames then use the user passed in cq value not the auto
  3430. * cq value.
  3431. */
  3432. if ( cpi->rolling_actual_bits < cpi->min_frame_bandwidth )
  3433. cpi->active_best_quality = cpi->oxcf.cq_level;
  3434. else
  3435. cpi->active_best_quality = cpi->cq_target_quality;
  3436. }
  3437. }
3438. /* If CBR and the buffer is full then it is reasonable to allow
  3439. * higher quality on the frames to prevent bits just going to waste.
  3440. */
  3441. if (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)
  3442. {
3443. /* Note that the use of >= here eliminates the risk of a divide
3444. * by 0 error in the else if clause
  3445. */
  3446. if (cpi->buffer_level >= cpi->oxcf.maximum_buffer_size)
  3447. cpi->active_best_quality = cpi->best_quality;
  3448. else if (cpi->buffer_level > cpi->oxcf.optimal_buffer_level)
  3449. {
  3450. int Fraction = (int)
  3451. (((cpi->buffer_level - cpi->oxcf.optimal_buffer_level) * 128)
  3452. / (cpi->oxcf.maximum_buffer_size -
  3453. cpi->oxcf.optimal_buffer_level));
  3454. int min_qadjustment = ((cpi->active_best_quality -
  3455. cpi->best_quality) * Fraction) / 128;
  3456. cpi->active_best_quality -= min_qadjustment;
  3457. }
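/* Example of the adjustment above: a buffer_level halfway between the
 * optimal and maximum levels gives Fraction = 64, so min_qadjustment
 * removes half of the gap between active_best_quality and best_quality.
 */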
  3458. }
  3459. }
  3460. /* Make sure constrained quality mode limits are adhered to for the first
  3461. * few frames of one pass encodes
  3462. */
  3463. else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY)
  3464. {
  3465. if ( (cm->frame_type == KEY_FRAME) ||
  3466. cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame )
  3467. {
  3468. cpi->active_best_quality = cpi->best_quality;
  3469. }
  3470. else if (cpi->active_best_quality < cpi->cq_target_quality)
  3471. {
  3472. cpi->active_best_quality = cpi->cq_target_quality;
  3473. }
  3474. }
  3475. /* Clip the active best and worst quality values to limits */
  3476. if (cpi->active_worst_quality > cpi->worst_quality)
  3477. cpi->active_worst_quality = cpi->worst_quality;
  3478. if (cpi->active_best_quality < cpi->best_quality)
  3479. cpi->active_best_quality = cpi->best_quality;
  3480. if ( cpi->active_worst_quality < cpi->active_best_quality )
  3481. cpi->active_worst_quality = cpi->active_best_quality;
  3482. /* Determine initial Q to try */
  3483. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3484. #if !(CONFIG_REALTIME_ONLY)
  3485. /* Set highest allowed value for Zbin over quant */
  3486. if (cm->frame_type == KEY_FRAME)
  3487. zbin_oq_high = 0;
  3488. else if ((cpi->oxcf.number_of_layers == 1) && ((cm->refresh_alt_ref_frame ||
  3489. (cm->refresh_golden_frame && !cpi->source_alt_ref_active))))
  3490. {
  3491. zbin_oq_high = 16;
  3492. }
  3493. else
  3494. zbin_oq_high = ZBIN_OQ_MAX;
  3495. #endif
  3496. /* Setup background Q adjustment for error resilient mode.
  3497. * For multi-layer encodes only enable this for the base layer.
  3498. */
  3499. if (cpi->cyclic_refresh_mode_enabled)
  3500. {
  3501. // Special case for screen_content_mode with golden frame updates.
  3502. int disable_cr_gf = (cpi->oxcf.screen_content_mode == 2 &&
  3503. cm->refresh_golden_frame);
  3504. if (cpi->current_layer == 0 && cpi->force_maxqp == 0 && !disable_cr_gf)
  3505. cyclic_background_refresh(cpi, Q, 0);
  3506. else
  3507. disable_segmentation(cpi);
  3508. }
  3509. vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
  3510. #if !(CONFIG_REALTIME_ONLY)
  3511. /* Limit Q range for the adaptive loop. */
  3512. bottom_index = cpi->active_best_quality;
  3513. top_index = cpi->active_worst_quality;
  3514. q_low = cpi->active_best_quality;
  3515. q_high = cpi->active_worst_quality;
  3516. #endif
  3517. vp8_save_coding_context(cpi);
  3518. loop_count = 0;
  3519. scale_and_extend_source(cpi->un_scaled_source, cpi);
  3520. #if CONFIG_TEMPORAL_DENOISING && CONFIG_POSTPROC
  3521. // Option to apply spatial blur under the aggressive or adaptive
  3522. // (temporal denoising) mode.
  3523. if (cpi->oxcf.noise_sensitivity >= 3) {
  3524. if (cpi->denoiser.denoise_pars.spatial_blur != 0) {
  3525. vp8_de_noise(cm, cpi->Source, cpi->Source,
  3526. cpi->denoiser.denoise_pars.spatial_blur, 1, 0, 0);
  3527. }
  3528. }
  3529. #endif
  3530. #if !(CONFIG_REALTIME_ONLY) && CONFIG_POSTPROC && !(CONFIG_TEMPORAL_DENOISING)
  3531. if (cpi->oxcf.noise_sensitivity > 0)
  3532. {
  3533. unsigned char *src;
  3534. int l = 0;
  3535. switch (cpi->oxcf.noise_sensitivity)
  3536. {
  3537. case 1:
  3538. l = 20;
  3539. break;
  3540. case 2:
  3541. l = 40;
  3542. break;
  3543. case 3:
  3544. l = 60;
  3545. break;
  3546. case 4:
  3547. l = 80;
  3548. break;
  3549. case 5:
  3550. l = 100;
  3551. break;
  3552. case 6:
  3553. l = 150;
  3554. break;
  3555. }
  3556. if (cm->frame_type == KEY_FRAME)
  3557. {
  3558. vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0, 1);
  3559. }
  3560. else
  3561. {
  3562. vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0, 1);
  3563. src = cpi->Source->y_buffer;
  3564. if (cpi->Source->y_stride < 0)
  3565. {
  3566. src += cpi->Source->y_stride * (cpi->Source->y_height - 1);
  3567. }
  3568. }
  3569. }
  3570. #endif
  3571. #ifdef OUTPUT_YUV_SRC
  3572. vp8_write_yuv_frame(yuv_file, cpi->Source);
  3573. #endif
  3574. do
  3575. {
  3576. vp8_clear_system_state();
  3577. vp8_set_quantizer(cpi, Q);
  3578. /* setup skip prob for costing in mode/mv decision */
  3579. if (cpi->common.mb_no_coeff_skip)
  3580. {
  3581. cpi->prob_skip_false = cpi->base_skip_false_prob[Q];
  3582. if (cm->frame_type != KEY_FRAME)
  3583. {
  3584. if (cpi->common.refresh_alt_ref_frame)
  3585. {
  3586. if (cpi->last_skip_false_probs[2] != 0)
  3587. cpi->prob_skip_false = cpi->last_skip_false_probs[2];
  3588. /*
  3589. if(cpi->last_skip_false_probs[2]!=0 && abs(Q- cpi->last_skip_probs_q[2])<=16 )
  3590. cpi->prob_skip_false = cpi->last_skip_false_probs[2];
  3591. else if (cpi->last_skip_false_probs[2]!=0)
  3592. cpi->prob_skip_false = (cpi->last_skip_false_probs[2] + cpi->prob_skip_false ) / 2;
  3593. */
  3594. }
  3595. else if (cpi->common.refresh_golden_frame)
  3596. {
  3597. if (cpi->last_skip_false_probs[1] != 0)
  3598. cpi->prob_skip_false = cpi->last_skip_false_probs[1];
  3599. /*
  3600. if(cpi->last_skip_false_probs[1]!=0 && abs(Q- cpi->last_skip_probs_q[1])<=16 )
  3601. cpi->prob_skip_false = cpi->last_skip_false_probs[1];
  3602. else if (cpi->last_skip_false_probs[1]!=0)
  3603. cpi->prob_skip_false = (cpi->last_skip_false_probs[1] + cpi->prob_skip_false ) / 2;
  3604. */
  3605. }
  3606. else
  3607. {
  3608. if (cpi->last_skip_false_probs[0] != 0)
  3609. cpi->prob_skip_false = cpi->last_skip_false_probs[0];
  3610. /*
  3611. if(cpi->last_skip_false_probs[0]!=0 && abs(Q- cpi->last_skip_probs_q[0])<=16 )
  3612. cpi->prob_skip_false = cpi->last_skip_false_probs[0];
  3613. else if(cpi->last_skip_false_probs[0]!=0)
  3614. cpi->prob_skip_false = (cpi->last_skip_false_probs[0] + cpi->prob_skip_false ) / 2;
  3615. */
  3616. }
3617. /* as this is for a cost estimate, let's make sure it does not
3618. * go extreme either way
  3619. */
  3620. if (cpi->prob_skip_false < 5)
  3621. cpi->prob_skip_false = 5;
  3622. if (cpi->prob_skip_false > 250)
  3623. cpi->prob_skip_false = 250;
  3624. if (cpi->oxcf.number_of_layers == 1 && cpi->is_src_frame_alt_ref)
  3625. cpi->prob_skip_false = 1;
  3626. }
  3627. #if 0
  3628. if (cpi->pass != 1)
  3629. {
  3630. FILE *f = fopen("skip.stt", "a");
  3631. fprintf(f, "%d, %d, %4d ", cpi->common.refresh_golden_frame, cpi->common.refresh_alt_ref_frame, cpi->prob_skip_false);
  3632. fclose(f);
  3633. }
  3634. #endif
  3635. }
  3636. if (cm->frame_type == KEY_FRAME)
  3637. {
  3638. if(resize_key_frame(cpi))
  3639. {
  3640. /* If the frame size has changed, need to reset Q, quantizer,
  3641. * and background refresh.
  3642. */
  3643. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3644. if (cpi->cyclic_refresh_mode_enabled)
  3645. {
  3646. if (cpi->current_layer==0)
  3647. cyclic_background_refresh(cpi, Q, 0);
  3648. else
  3649. disable_segmentation(cpi);
  3650. }
  3651. // Reset the zero_last counter to 0 on key frame.
  3652. memset(cpi->consec_zero_last, 0, cm->mb_rows * cm->mb_cols);
  3653. memset(cpi->consec_zero_last_mvbias, 0,
  3654. (cpi->common.mb_rows * cpi->common.mb_cols));
  3655. vp8_set_quantizer(cpi, Q);
  3656. }
  3657. vp8_setup_key_frame(cpi);
  3658. }
  3659. #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
  3660. {
  3661. if(cpi->oxcf.error_resilient_mode)
  3662. cm->refresh_entropy_probs = 0;
  3663. if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS)
  3664. {
  3665. if (cm->frame_type == KEY_FRAME)
  3666. cm->refresh_entropy_probs = 1;
  3667. }
  3668. if (cm->refresh_entropy_probs == 0)
  3669. {
  3670. /* save a copy for later refresh */
  3671. memcpy(&cm->lfc, &cm->fc, sizeof(cm->fc));
  3672. }
  3673. vp8_update_coef_context(cpi);
  3674. vp8_update_coef_probs(cpi);
  3675. /* transform / motion compensation build reconstruction frame
  3676. * +pack coef partitions
  3677. */
  3678. vp8_encode_frame(cpi);
  3679. /* cpi->projected_frame_size is not needed for RT mode */
  3680. }
  3681. #else
  3682. /* transform / motion compensation build reconstruction frame */
  3683. vp8_encode_frame(cpi);
  3684. if (cpi->oxcf.screen_content_mode == 2) {
  3685. if (vp8_drop_encodedframe_overshoot(cpi, Q))
  3686. return;
  3687. }
  3688. cpi->projected_frame_size -= vp8_estimate_entropy_savings(cpi);
  3689. cpi->projected_frame_size = (cpi->projected_frame_size > 0) ? cpi->projected_frame_size : 0;
  3690. #endif
  3691. vp8_clear_system_state();
  3692. /* Test to see if the stats generated for this frame indicate that
  3693. * we should have coded a key frame (assuming that we didn't)!
  3694. */
  3695. if (cpi->pass != 2 && cpi->oxcf.auto_key && cm->frame_type != KEY_FRAME
  3696. && cpi->compressor_speed != 2)
  3697. {
  3698. #if !(CONFIG_REALTIME_ONLY)
  3699. if (decide_key_frame(cpi))
  3700. {
  3701. /* Reset all our sizing numbers and recode */
  3702. cm->frame_type = KEY_FRAME;
  3703. vp8_pick_frame_size(cpi);
  3704. /* Clear the Alt reference frame active flag when we have
  3705. * a key frame
  3706. */
  3707. cpi->source_alt_ref_active = 0;
  3708. // Set the loop filter deltas and segmentation map update
  3709. setup_features(cpi);
  3710. vp8_restore_coding_context(cpi);
  3711. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3712. vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit);
  3713. /* Limit Q range for the adaptive loop. */
  3714. bottom_index = cpi->active_best_quality;
  3715. top_index = cpi->active_worst_quality;
  3716. q_low = cpi->active_best_quality;
  3717. q_high = cpi->active_worst_quality;
  3718. loop_count++;
  3719. Loop = 1;
  3720. continue;
  3721. }
  3722. #endif
  3723. }
  3724. vp8_clear_system_state();
  3725. if (frame_over_shoot_limit == 0)
  3726. frame_over_shoot_limit = 1;
3727. /* Are we overshooting and up against the limit of active max Q? */
  3728. if (((cpi->pass != 2) || (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) &&
  3729. (Q == cpi->active_worst_quality) &&
  3730. (cpi->active_worst_quality < cpi->worst_quality) &&
  3731. (cpi->projected_frame_size > frame_over_shoot_limit))
  3732. {
  3733. int over_size_percent = ((cpi->projected_frame_size - frame_over_shoot_limit) * 100) / frame_over_shoot_limit;
  3734. /* If so is there any scope for relaxing it */
  3735. while ((cpi->active_worst_quality < cpi->worst_quality) && (over_size_percent > 0))
  3736. {
  3737. cpi->active_worst_quality++;
  3738. /* Assume 1 qstep = about 4% on frame size. */
  3739. over_size_percent = (int)(over_size_percent * 0.96);
  3740. }
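/* Each pass of the loop above raises the active max Q by one step and
 * shrinks the estimated overshoot by ~4% (e.g. an estimate of 4 becomes
 * (int)(4 * 0.96) = 3), so the cap is relaxed until the estimate reaches 0
 * or worst_quality is hit.
 */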
  3741. #if !(CONFIG_REALTIME_ONLY)
  3742. top_index = cpi->active_worst_quality;
  3743. #endif
  3744. /* If we have updated the active max Q do not call
  3745. * vp8_update_rate_correction_factors() this loop.
  3746. */
  3747. active_worst_qchanged = 1;
  3748. }
  3749. else
  3750. active_worst_qchanged = 0;
  3751. #if !(CONFIG_REALTIME_ONLY)
  3752. /* Special case handling for forced key frames */
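/* A forced key frame is judged against the ambient reconstruction error of
 * the surrounding frames: if its error exceeds 7/8 of ambient_err the Q
 * ceiling (q_high) is pulled down, if it is below half of ambient_err the
 * floor (q_low) is raised, and Q is bisected between the two until it stops
 * changing.
 */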
  3753. if ( (cm->frame_type == KEY_FRAME) && cpi->this_key_frame_forced )
  3754. {
  3755. int last_q = Q;
  3756. int kf_err = vp8_calc_ss_err(cpi->Source,
  3757. &cm->yv12_fb[cm->new_fb_idx]);
  3758. /* The key frame is not good enough */
  3759. if ( kf_err > ((cpi->ambient_err * 7) >> 3) )
  3760. {
  3761. /* Lower q_high */
  3762. q_high = (Q > q_low) ? (Q - 1) : q_low;
  3763. /* Adjust Q */
  3764. Q = (q_high + q_low) >> 1;
  3765. }
  3766. /* The key frame is much better than the previous frame */
  3767. else if ( kf_err < (cpi->ambient_err >> 1) )
  3768. {
  3769. /* Raise q_low */
  3770. q_low = (Q < q_high) ? (Q + 1) : q_high;
  3771. /* Adjust Q */
  3772. Q = (q_high + q_low + 1) >> 1;
  3773. }
  3774. /* Clamp Q to upper and lower limits: */
  3775. if (Q > q_high)
  3776. Q = q_high;
  3777. else if (Q < q_low)
  3778. Q = q_low;
  3779. Loop = Q != last_q;
  3780. }
  3781. /* Is the projected frame size out of range and are we allowed
3782. * to attempt a recode?
  3783. */
  3784. else if ( recode_loop_test( cpi,
  3785. frame_over_shoot_limit, frame_under_shoot_limit,
  3786. Q, top_index, bottom_index ) )
  3787. {
  3788. int last_q = Q;
  3789. int Retries = 0;
  3790. /* Frame size out of permitted range. Update correction factor
  3791. * & compute new Q to try...
  3792. */
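/* Strategy: while only one side of the target has been seen, Q is re-derived
 * from the rate correction factors; once both an overshoot and an undershoot
 * have been observed the loop switches to a straight bisection between q_low
 * and q_high (or between zbin_oq_low/zbin_oq_high when Q is pinned at MAXQ).
 */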
  3793. /* Frame is too large */
  3794. if (cpi->projected_frame_size > cpi->this_frame_target)
  3795. {
3796. /* Raise q_low to at least the current value */
  3797. q_low = (Q < q_high) ? (Q + 1) : q_high;
  3798. /* If we are using over quant do the same for zbin_oq_low */
  3799. if (cpi->mb.zbin_over_quant > 0)
  3800. zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
  3801. (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
  3802. if (undershoot_seen)
  3803. {
  3804. /* Update rate_correction_factor unless
  3805. * cpi->active_worst_quality has changed.
  3806. */
  3807. if (!active_worst_qchanged)
  3808. vp8_update_rate_correction_factors(cpi, 1);
  3809. Q = (q_high + q_low + 1) / 2;
  3810. /* Adjust cpi->zbin_over_quant (only allowed when Q
  3811. * is max)
  3812. */
  3813. if (Q < MAXQ)
  3814. cpi->mb.zbin_over_quant = 0;
  3815. else
  3816. {
  3817. zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ?
  3818. (cpi->mb.zbin_over_quant + 1) : zbin_oq_high;
  3819. cpi->mb.zbin_over_quant =
  3820. (zbin_oq_high + zbin_oq_low) / 2;
  3821. }
  3822. }
  3823. else
  3824. {
  3825. /* Update rate_correction_factor unless
  3826. * cpi->active_worst_quality has changed.
  3827. */
  3828. if (!active_worst_qchanged)
  3829. vp8_update_rate_correction_factors(cpi, 0);
  3830. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3831. while (((Q < q_low) ||
  3832. (cpi->mb.zbin_over_quant < zbin_oq_low)) &&
  3833. (Retries < 10))
  3834. {
  3835. vp8_update_rate_correction_factors(cpi, 0);
  3836. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3837. Retries ++;
  3838. }
  3839. }
  3840. overshoot_seen = 1;
  3841. }
  3842. /* Frame is too small */
  3843. else
  3844. {
  3845. if (cpi->mb.zbin_over_quant == 0)
  3846. /* Lower q_high if not using over quant */
  3847. q_high = (Q > q_low) ? (Q - 1) : q_low;
  3848. else
  3849. /* else lower zbin_oq_high */
  3850. zbin_oq_high = (cpi->mb.zbin_over_quant > zbin_oq_low) ?
  3851. (cpi->mb.zbin_over_quant - 1) : zbin_oq_low;
  3852. if (overshoot_seen)
  3853. {
  3854. /* Update rate_correction_factor unless
  3855. * cpi->active_worst_quality has changed.
  3856. */
  3857. if (!active_worst_qchanged)
  3858. vp8_update_rate_correction_factors(cpi, 1);
  3859. Q = (q_high + q_low) / 2;
  3860. /* Adjust cpi->zbin_over_quant (only allowed when Q
  3861. * is max)
  3862. */
  3863. if (Q < MAXQ)
  3864. cpi->mb.zbin_over_quant = 0;
  3865. else
  3866. cpi->mb.zbin_over_quant =
  3867. (zbin_oq_high + zbin_oq_low) / 2;
  3868. }
  3869. else
  3870. {
  3871. /* Update rate_correction_factor unless
  3872. * cpi->active_worst_quality has changed.
  3873. */
  3874. if (!active_worst_qchanged)
  3875. vp8_update_rate_correction_factors(cpi, 0);
  3876. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3877. /* Special case reset for qlow for constrained quality.
  3878. * This should only trigger where there is very substantial
  3879. * undershoot on a frame and the auto cq level is above
3880. * the user's passed-in value.
  3881. */
  3882. if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
  3883. (Q < q_low) )
  3884. {
  3885. q_low = Q;
  3886. }
  3887. while (((Q > q_high) ||
  3888. (cpi->mb.zbin_over_quant > zbin_oq_high)) &&
  3889. (Retries < 10))
  3890. {
  3891. vp8_update_rate_correction_factors(cpi, 0);
  3892. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3893. Retries ++;
  3894. }
  3895. }
  3896. undershoot_seen = 1;
  3897. }
  3898. /* Clamp Q to upper and lower limits: */
  3899. if (Q > q_high)
  3900. Q = q_high;
  3901. else if (Q < q_low)
  3902. Q = q_low;
  3903. /* Clamp cpi->zbin_over_quant */
  3904. cpi->mb.zbin_over_quant = (cpi->mb.zbin_over_quant < zbin_oq_low) ?
  3905. zbin_oq_low : (cpi->mb.zbin_over_quant > zbin_oq_high) ?
  3906. zbin_oq_high : cpi->mb.zbin_over_quant;
  3907. Loop = Q != last_q;
  3908. }
  3909. else
  3910. #endif
  3911. Loop = 0;
  3912. if (cpi->is_src_frame_alt_ref)
  3913. Loop = 0;
  3914. if (Loop == 1)
  3915. {
  3916. vp8_restore_coding_context(cpi);
  3917. loop_count++;
  3918. #if CONFIG_INTERNAL_STATS
  3919. cpi->tot_recode_hits++;
  3920. #endif
  3921. }
  3922. }
  3923. while (Loop == 1);
  3924. #if 0
  3925. /* Experimental code for lagged and one pass
  3926. * Update stats used for one pass GF selection
  3927. */
  3928. {
  3929. cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_coded_error = (double)cpi->prediction_error;
  3930. cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_intra_error = (double)cpi->intra_error;
  3931. cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_pcnt_inter = (double)(100 - cpi->this_frame_percent_intra) / 100.0;
  3932. }
  3933. #endif
  3934. /* Special case code to reduce pulsing when key frames are forced at a
  3935. * fixed interval. Note the reconstruction error if it is the frame before
3936. * the forced key frame
  3937. */
  3938. if ( cpi->next_key_frame_forced && (cpi->twopass.frames_to_key == 0) )
  3939. {
  3940. cpi->ambient_err = vp8_calc_ss_err(cpi->Source,
  3941. &cm->yv12_fb[cm->new_fb_idx]);
  3942. }
3943. /* This frame's MVs are saved and will be used in the next frame's MV predictor.
3944. * The saved last-frame arrays have one more row (added at the bottom) and one
3945. * more column (added at the right) than cm->mip. The edge elements are initialized to 0.
  3946. */
  3947. #if CONFIG_MULTI_RES_ENCODING
  3948. if(!cpi->oxcf.mr_encoder_id && cm->show_frame)
  3949. #else
  3950. if(cm->show_frame) /* do not save for altref frame */
  3951. #endif
  3952. {
  3953. int mb_row;
  3954. int mb_col;
  3955. /* Point to beginning of allocated MODE_INFO arrays. */
  3956. MODE_INFO *tmp = cm->mip;
  3957. if(cm->frame_type != KEY_FRAME)
  3958. {
  3959. for (mb_row = 0; mb_row < cm->mb_rows+1; mb_row ++)
  3960. {
  3961. for (mb_col = 0; mb_col < cm->mb_cols+1; mb_col ++)
  3962. {
  3963. if(tmp->mbmi.ref_frame != INTRA_FRAME)
  3964. cpi->lfmv[mb_col + mb_row*(cm->mode_info_stride+1)].as_int = tmp->mbmi.mv.as_int;
  3965. cpi->lf_ref_frame_sign_bias[mb_col + mb_row*(cm->mode_info_stride+1)] = cm->ref_frame_sign_bias[tmp->mbmi.ref_frame];
  3966. cpi->lf_ref_frame[mb_col + mb_row*(cm->mode_info_stride+1)] = tmp->mbmi.ref_frame;
  3967. tmp++;
  3968. }
  3969. }
  3970. }
  3971. }
  3972. /* Count last ref frame 0,0 usage on current encoded frame. */
  3973. {
  3974. int mb_row;
  3975. int mb_col;
  3976. /* Point to beginning of MODE_INFO arrays. */
  3977. MODE_INFO *tmp = cm->mi;
  3978. cpi->zeromv_count = 0;
  3979. if(cm->frame_type != KEY_FRAME)
  3980. {
  3981. for (mb_row = 0; mb_row < cm->mb_rows; mb_row ++)
  3982. {
  3983. for (mb_col = 0; mb_col < cm->mb_cols; mb_col ++)
  3984. {
  3985. if (tmp->mbmi.mode == ZEROMV &&
  3986. tmp->mbmi.ref_frame == LAST_FRAME)
  3987. cpi->zeromv_count++;
  3988. tmp++;
  3989. }
  3990. tmp++;
  3991. }
  3992. }
  3993. }
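/* zeromv_count records how many macroblocks were coded as ZEROMV against the
 * last frame; it is converted to a percentage of cm->MBs (lf_zeromv_pct) at
 * the start of the next vp8_get_compressed_data() call.
 */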
  3994. #if CONFIG_MULTI_RES_ENCODING
  3995. vp8_cal_dissimilarity(cpi);
  3996. #endif
3997. /* Update the GF usage maps.
  3998. * This is done after completing the compression of a frame when all
  3999. * modes etc. are finalized but before loop filter
  4000. */
  4001. if (cpi->oxcf.number_of_layers == 1)
  4002. vp8_update_gf_useage_maps(cpi, cm, &cpi->mb);
  4003. if (cm->frame_type == KEY_FRAME)
  4004. cm->refresh_last_frame = 1;
  4005. #if 0
  4006. {
  4007. FILE *f = fopen("gfactive.stt", "a");
  4008. fprintf(f, "%8d %8d %8d %8d %8d\n", cm->current_video_frame, (100 * cpi->gf_active_count) / (cpi->common.mb_rows * cpi->common.mb_cols), cpi->this_iiratio, cpi->next_iiratio, cm->refresh_golden_frame);
  4009. fclose(f);
  4010. }
  4011. #endif
  4012. /* For inter frames the current default behavior is that when
  4013. * cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer
  4014. * This is purely an encoder decision at present.
  4015. */
  4016. if (!cpi->oxcf.error_resilient_mode && cm->refresh_golden_frame)
  4017. cm->copy_buffer_to_arf = 2;
  4018. else
  4019. cm->copy_buffer_to_arf = 0;
  4020. cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx];
  4021. #if CONFIG_TEMPORAL_DENOISING
  4022. // Get some measure of the amount of noise, by measuring the (partial) mse
  4023. // between source and denoised buffer, for y channel. Partial refers to
  4024. // computing the sse for a sub-sample of the frame (i.e., skip x blocks along row/column),
4025. // and only for blocks in that set that are coded consecutively in ZEROMV_LAST mode.
  4026. // Do this every ~8 frames, to further reduce complexity.
  4027. // TODO(marpan): Keep this for now for the case cpi->oxcf.noise_sensitivity < 4,
  4028. // should be removed in favor of the process_denoiser_mode_change() function below.
  4029. if (cpi->oxcf.noise_sensitivity > 0 &&
  4030. cpi->oxcf.noise_sensitivity < 4 &&
  4031. !cpi->oxcf.screen_content_mode &&
  4032. cpi->frames_since_key%8 == 0 &&
  4033. cm->frame_type != KEY_FRAME) {
  4034. cpi->mse_source_denoised = measure_square_diff_partial(
  4035. &cpi->denoiser.yv12_running_avg[INTRA_FRAME], cpi->Source, cpi);
  4036. }
  4037. // For the adaptive denoising mode (noise_sensitivity == 4), sample the mse
  4038. // of source diff (between current and previous frame), and determine if we
  4039. // should switch the denoiser mode. Sampling refers to computing the mse for
  4040. // a sub-sample of the frame (i.e., skip x blocks along row/column), and
  4041. // only for blocks in that set that have used ZEROMV LAST, along with some
  4042. // constraint on the sum diff between blocks. This process is called every
  4043. // ~8 frames, to further reduce complexity.
  4044. if (cpi->oxcf.noise_sensitivity == 4 &&
  4045. !cpi->oxcf.screen_content_mode &&
  4046. cpi->frames_since_key % 8 == 0 &&
  4047. cm->frame_type != KEY_FRAME) {
  4048. process_denoiser_mode_change(cpi);
  4049. }
  4050. #endif
  4051. #if CONFIG_MULTITHREAD
  4052. if (cpi->b_multi_threaded)
  4053. {
  4054. /* start loopfilter in separate thread */
  4055. sem_post(&cpi->h_event_start_lpf);
  4056. cpi->b_lpf_running = 1;
  4057. }
  4058. else
  4059. #endif
  4060. {
  4061. vp8_loopfilter_frame(cpi, cm);
  4062. }
  4063. update_reference_frames(cpi);
  4064. #ifdef OUTPUT_YUV_DENOISED
  4065. vp8_write_yuv_frame(yuv_denoised_file,
  4066. &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
  4067. #endif
  4068. #if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
  4069. if (cpi->oxcf.error_resilient_mode)
  4070. {
  4071. cm->refresh_entropy_probs = 0;
  4072. }
  4073. #endif
  4074. #if CONFIG_MULTITHREAD
4075. /* wait until filter_level is picked so that we can continue with stream packing */
  4076. if (cpi->b_multi_threaded)
  4077. sem_wait(&cpi->h_event_end_lpf);
  4078. #endif
  4079. /* build the bitstream */
  4080. vp8_pack_bitstream(cpi, dest, dest_end, size);
  4081. #if CONFIG_MULTITHREAD
  4082. /* if PSNR packets are generated we have to wait for the lpf */
  4083. if (cpi->b_lpf_running && cpi->b_calculate_psnr)
  4084. {
  4085. sem_wait(&cpi->h_event_end_lpf);
  4086. cpi->b_lpf_running = 0;
  4087. }
  4088. #endif
  4089. /* Move storing frame_type out of the above loop since it is also
  4090. * needed in motion search besides loopfilter */
  4091. cm->last_frame_type = cm->frame_type;
  4092. /* Update rate control heuristics */
  4093. cpi->total_byte_count += (*size);
  4094. cpi->projected_frame_size = (*size) << 3;
  4095. if (cpi->oxcf.number_of_layers > 1)
  4096. {
  4097. unsigned int i;
  4098. for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
  4099. cpi->layer_context[i].total_byte_count += (*size);
  4100. }
  4101. if (!active_worst_qchanged)
  4102. vp8_update_rate_correction_factors(cpi, 2);
  4103. cpi->last_q[cm->frame_type] = cm->base_qindex;
  4104. if (cm->frame_type == KEY_FRAME)
  4105. {
  4106. vp8_adjust_key_frame_context(cpi);
  4107. }
  4108. /* Keep a record of ambient average Q. */
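/* This is a rounded exponential moving average: one quarter of the new
 * base_qindex blended with three quarters of the previous average.
 */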
  4109. if (cm->frame_type != KEY_FRAME)
  4110. cpi->avg_frame_qindex = (2 + 3 * cpi->avg_frame_qindex + cm->base_qindex) >> 2;
  4111. /* Keep a record from which we can calculate the average Q excluding
  4112. * GF updates and key frames
  4113. */
  4114. if ((cm->frame_type != KEY_FRAME) && ((cpi->oxcf.number_of_layers > 1) ||
  4115. (!cm->refresh_golden_frame && !cm->refresh_alt_ref_frame)))
  4116. {
  4117. cpi->ni_frames++;
  4118. /* Calculate the average Q for normal inter frames (not key or GFU
  4119. * frames).
  4120. */
  4121. if ( cpi->pass == 2 )
  4122. {
  4123. cpi->ni_tot_qi += Q;
  4124. cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
  4125. }
  4126. else
  4127. {
  4128. /* Damp value for first few frames */
  4129. if (cpi->ni_frames > 150 )
  4130. {
  4131. cpi->ni_tot_qi += Q;
  4132. cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
  4133. }
  4134. /* For one pass, early in the clip ... average the current frame Q
4135. * value with the worst Q entered by the user as a dampening measure
  4136. */
  4137. else
  4138. {
  4139. cpi->ni_tot_qi += Q;
  4140. cpi->ni_av_qi = ((cpi->ni_tot_qi / cpi->ni_frames) + cpi->worst_quality + 1) / 2;
  4141. }
  4142. /* If the average Q is higher than what was used in the last
  4143. * frame (after going through the recode loop to keep the frame
  4144. * size within range) then use the last frame value - 1. The -1
4145. * is designed to stop Q, and hence the data rate, from
4146. * progressively falling away during difficult sections, but at
4147. * the same time reduce the number of iterations around the
  4148. * recode loop.
  4149. */
  4150. if (Q > cpi->ni_av_qi)
  4151. cpi->ni_av_qi = Q - 1;
  4152. }
  4153. }
  4154. /* Update the buffer level variable. */
  4155. /* Non-viewable frames are a special case and are treated as pure overhead. */
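/* The buffer level follows a leaky-bucket model: each shown frame credits one
 * frame's worth of the target bandwidth and drains the actual bits spent.
 * For example, at 500 kbps and 25 fps av_per_frame_bandwidth is 20000 bits,
 * so a 26000-bit frame lowers bits_off_target by 6000 bits.
 */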
  4156. if ( !cm->show_frame )
  4157. cpi->bits_off_target -= cpi->projected_frame_size;
  4158. else
  4159. cpi->bits_off_target += cpi->av_per_frame_bandwidth - cpi->projected_frame_size;
  4160. /* Clip the buffer level to the maximum specified buffer size */
  4161. if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size)
  4162. cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
  4163. // If the frame dropper is not enabled, don't let the buffer level go below
  4164. // some threshold, given here by -|maximum_buffer_size|. For now we only do
  4165. // this for screen content input.
  4166. if (cpi->drop_frames_allowed == 0 && cpi->oxcf.screen_content_mode &&
  4167. cpi->bits_off_target < -cpi->oxcf.maximum_buffer_size)
  4168. cpi->bits_off_target = -cpi->oxcf.maximum_buffer_size;
  4169. /* Rolling monitors of whether we are over or underspending used to
  4170. * help regulate min and Max Q in two pass.
  4171. */
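/* Both pairs are exponential moving averages: the short monitors weight new
 * data by 1/4 (a window of roughly four frames) and the long monitors by
 * 1/32 (roughly thirty-two frames); the +2 / +16 terms round the division.
 */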
  4172. cpi->rolling_target_bits = ((cpi->rolling_target_bits * 3) + cpi->this_frame_target + 2) / 4;
  4173. cpi->rolling_actual_bits = ((cpi->rolling_actual_bits * 3) + cpi->projected_frame_size + 2) / 4;
  4174. cpi->long_rolling_target_bits = ((cpi->long_rolling_target_bits * 31) + cpi->this_frame_target + 16) / 32;
  4175. cpi->long_rolling_actual_bits = ((cpi->long_rolling_actual_bits * 31) + cpi->projected_frame_size + 16) / 32;
  4176. /* Actual bits spent */
  4177. cpi->total_actual_bits += cpi->projected_frame_size;
  4178. /* Debug stats */
  4179. cpi->total_target_vs_actual += (cpi->this_frame_target - cpi->projected_frame_size);
  4180. cpi->buffer_level = cpi->bits_off_target;
  4181. /* Propagate values to higher temporal layers */
  4182. if (cpi->oxcf.number_of_layers > 1)
  4183. {
  4184. unsigned int i;
  4185. for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++)
  4186. {
  4187. LAYER_CONTEXT *lc = &cpi->layer_context[i];
  4188. int bits_off_for_this_layer =
  4189. (int)(lc->target_bandwidth / lc->framerate -
  4190. cpi->projected_frame_size);
  4191. lc->bits_off_target += bits_off_for_this_layer;
  4192. /* Clip buffer level to maximum buffer size for the layer */
  4193. if (lc->bits_off_target > lc->maximum_buffer_size)
  4194. lc->bits_off_target = lc->maximum_buffer_size;
  4195. lc->total_actual_bits += cpi->projected_frame_size;
  4196. lc->total_target_vs_actual += bits_off_for_this_layer;
  4197. lc->buffer_level = lc->bits_off_target;
  4198. }
  4199. }
4200. /* Update the bits left for the kf and gf groups to account for overshoot
  4201. * or undershoot on these frames
  4202. */
  4203. if (cm->frame_type == KEY_FRAME)
  4204. {
  4205. cpi->twopass.kf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
  4206. if (cpi->twopass.kf_group_bits < 0)
  4207. cpi->twopass.kf_group_bits = 0 ;
  4208. }
  4209. else if (cm->refresh_golden_frame || cm->refresh_alt_ref_frame)
  4210. {
  4211. cpi->twopass.gf_group_bits += cpi->this_frame_target - cpi->projected_frame_size;
  4212. if (cpi->twopass.gf_group_bits < 0)
  4213. cpi->twopass.gf_group_bits = 0 ;
  4214. }
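/* Record the skip (all-zero coefficient) probability and the Q it was
 * measured at, keyed by which reference was refreshed, so that later frames
 * of the same type can be seeded with a matching estimate.
 */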
  4215. if (cm->frame_type != KEY_FRAME)
  4216. {
  4217. if (cpi->common.refresh_alt_ref_frame)
  4218. {
  4219. cpi->last_skip_false_probs[2] = cpi->prob_skip_false;
  4220. cpi->last_skip_probs_q[2] = cm->base_qindex;
  4221. }
  4222. else if (cpi->common.refresh_golden_frame)
  4223. {
  4224. cpi->last_skip_false_probs[1] = cpi->prob_skip_false;
  4225. cpi->last_skip_probs_q[1] = cm->base_qindex;
  4226. }
  4227. else
  4228. {
  4229. cpi->last_skip_false_probs[0] = cpi->prob_skip_false;
  4230. cpi->last_skip_probs_q[0] = cm->base_qindex;
  4231. /* update the baseline */
  4232. cpi->base_skip_false_prob[cm->base_qindex] = cpi->prob_skip_false;
  4233. }
  4234. }
  4235. #if 0 && CONFIG_INTERNAL_STATS
  4236. {
  4237. FILE *f = fopen("tmp.stt", "a");
  4238. vp8_clear_system_state();
  4239. if (cpi->twopass.total_left_stats.coded_error != 0.0)
  4240. fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
  4241. "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
  4242. "%8.2lf %"PRId64" %10.3lf %10"PRId64" %8d\n",
  4243. cpi->common.current_video_frame, cpi->this_frame_target,
  4244. cpi->projected_frame_size,
  4245. (cpi->projected_frame_size - cpi->this_frame_target),
  4246. cpi->total_target_vs_actual,
  4247. cpi->buffer_level,
  4248. (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
  4249. cpi->total_actual_bits, cm->base_qindex,
  4250. cpi->active_best_quality, cpi->active_worst_quality,
  4251. cpi->ni_av_qi, cpi->cq_target_quality,
  4252. cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
  4253. cm->frame_type, cpi->gfu_boost,
  4254. cpi->twopass.est_max_qcorrection_factor,
  4255. cpi->twopass.bits_left,
  4256. cpi->twopass.total_left_stats.coded_error,
  4257. (double)cpi->twopass.bits_left /
  4258. cpi->twopass.total_left_stats.coded_error,
  4259. cpi->tot_recode_hits);
  4260. else
  4261. fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
  4262. "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
  4263. "%8.2lf %"PRId64" %10.3lf %8d\n",
  4264. cpi->common.current_video_frame, cpi->this_frame_target,
  4265. cpi->projected_frame_size,
  4266. (cpi->projected_frame_size - cpi->this_frame_target),
  4267. cpi->total_target_vs_actual,
  4268. cpi->buffer_level,
  4269. (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
  4270. cpi->total_actual_bits, cm->base_qindex,
  4271. cpi->active_best_quality, cpi->active_worst_quality,
  4272. cpi->ni_av_qi, cpi->cq_target_quality,
  4273. cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
  4274. cm->frame_type, cpi->gfu_boost,
  4275. cpi->twopass.est_max_qcorrection_factor,
  4276. cpi->twopass.bits_left,
  4277. cpi->twopass.total_left_stats.coded_error,
  4278. cpi->tot_recode_hits);
  4279. fclose(f);
  4280. {
  4281. FILE *fmodes = fopen("Modes.stt", "a");
  4282. fprintf(fmodes, "%6d:%1d:%1d:%1d ",
  4283. cpi->common.current_video_frame,
  4284. cm->frame_type, cm->refresh_golden_frame,
  4285. cm->refresh_alt_ref_frame);
  4286. fprintf(fmodes, "\n");
  4287. fclose(fmodes);
  4288. }
  4289. }
  4290. #endif
  4291. if (cm->refresh_golden_frame == 1)
  4292. cm->frame_flags = cm->frame_flags | FRAMEFLAGS_GOLDEN;
  4293. else
  4294. cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_GOLDEN;
  4295. if (cm->refresh_alt_ref_frame == 1)
  4296. cm->frame_flags = cm->frame_flags | FRAMEFLAGS_ALTREF;
  4297. else
  4298. cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_ALTREF;
  4299. if (cm->refresh_last_frame & cm->refresh_golden_frame)
  4300. /* both refreshed */
  4301. cpi->gold_is_last = 1;
  4302. else if (cm->refresh_last_frame ^ cm->refresh_golden_frame)
  4303. /* 1 refreshed but not the other */
  4304. cpi->gold_is_last = 0;
  4305. if (cm->refresh_last_frame & cm->refresh_alt_ref_frame)
  4306. /* both refreshed */
  4307. cpi->alt_is_last = 1;
  4308. else if (cm->refresh_last_frame ^ cm->refresh_alt_ref_frame)
  4309. /* 1 refreshed but not the other */
  4310. cpi->alt_is_last = 0;
  4311. if (cm->refresh_alt_ref_frame & cm->refresh_golden_frame)
  4312. /* both refreshed */
  4313. cpi->gold_is_alt = 1;
  4314. else if (cm->refresh_alt_ref_frame ^ cm->refresh_golden_frame)
  4315. /* 1 refreshed but not the other */
  4316. cpi->gold_is_alt = 0;
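/* Where two references share the same buffer the duplicate flag is cleared
 * from ref_frame_flags, so the next frame's mode and motion search does not
 * evaluate the identical reconstruction twice.
 */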
  4317. cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
  4318. if (cpi->gold_is_last)
  4319. cpi->ref_frame_flags &= ~VP8_GOLD_FRAME;
  4320. if (cpi->alt_is_last)
  4321. cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
  4322. if (cpi->gold_is_alt)
  4323. cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
  4324. if (!cpi->oxcf.error_resilient_mode)
  4325. {
  4326. if (cpi->oxcf.play_alternate && cm->refresh_alt_ref_frame && (cm->frame_type != KEY_FRAME))
  4327. /* Update the alternate reference frame stats as appropriate. */
  4328. update_alt_ref_frame_stats(cpi);
  4329. else
  4330. /* Update the Golden frame stats as appropriate. */
  4331. update_golden_frame_stats(cpi);
  4332. }
  4333. if (cm->frame_type == KEY_FRAME)
  4334. {
  4335. /* Tell the caller that the frame was coded as a key frame */
  4336. *frame_flags = cm->frame_flags | FRAMEFLAGS_KEY;
  4337. /* As this frame is a key frame the next defaults to an inter frame. */
  4338. cm->frame_type = INTER_FRAME;
  4339. cpi->last_frame_percent_intra = 100;
  4340. }
  4341. else
  4342. {
  4343. *frame_flags = cm->frame_flags&~FRAMEFLAGS_KEY;
  4344. cpi->last_frame_percent_intra = cpi->this_frame_percent_intra;
  4345. }
  4346. /* Clear the one shot update flags for segmentation map and mode/ref
  4347. * loop filter deltas.
  4348. */
  4349. cpi->mb.e_mbd.update_mb_segmentation_map = 0;
  4350. cpi->mb.e_mbd.update_mb_segmentation_data = 0;
  4351. cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
4352. /* Don't increment frame counters if this was an altref buffer update,
4353. * not a real frame
  4354. */
  4355. if (cm->show_frame)
  4356. {
  4357. cm->current_video_frame++;
  4358. cpi->frames_since_key++;
  4359. cpi->temporal_pattern_counter++;
  4360. }
  4361. /* reset to normal state now that we are done. */
  4362. #if 0
  4363. {
  4364. char filename[512];
  4365. FILE *recon_file;
  4366. sprintf(filename, "enc%04d.yuv", (int) cm->current_video_frame);
  4367. recon_file = fopen(filename, "wb");
  4368. fwrite(cm->yv12_fb[cm->lst_fb_idx].buffer_alloc,
  4369. cm->yv12_fb[cm->lst_fb_idx].frame_size, 1, recon_file);
  4370. fclose(recon_file);
  4371. }
  4372. #endif
  4373. /* DEBUG */
  4374. /* vp8_write_yuv_frame("encoder_recon.yuv", cm->frame_to_show); */
  4375. }
  4376. #if !(CONFIG_REALTIME_ONLY)
  4377. static void Pass2Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned char * dest_end, unsigned int *frame_flags)
  4378. {
  4379. if (!cpi->common.refresh_alt_ref_frame)
  4380. vp8_second_pass(cpi);
  4381. encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
  4382. cpi->twopass.bits_left -= 8 * *size;
  4383. if (!cpi->common.refresh_alt_ref_frame)
  4384. {
  4385. double two_pass_min_rate = (double)(cpi->oxcf.target_bandwidth
  4386. *cpi->oxcf.two_pass_vbrmin_section / 100);
  4387. cpi->twopass.bits_left += (int64_t)(two_pass_min_rate / cpi->framerate);
  4388. }
  4389. }
  4390. #endif
  4391. int vp8_receive_raw_frame(VP8_COMP *cpi, unsigned int frame_flags, YV12_BUFFER_CONFIG *sd, int64_t time_stamp, int64_t end_time)
  4392. {
  4393. struct vpx_usec_timer timer;
  4394. int res = 0;
  4395. vpx_usec_timer_start(&timer);
  4396. /* Reinit the lookahead buffer if the frame size changes */
  4397. if (sd->y_width != cpi->oxcf.Width || sd->y_height != cpi->oxcf.Height)
  4398. {
  4399. assert(cpi->oxcf.lag_in_frames < 2);
  4400. dealloc_raw_frame_buffers(cpi);
  4401. alloc_raw_frame_buffers(cpi);
  4402. }
  4403. if(vp8_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
  4404. frame_flags, cpi->active_map_enabled ? cpi->active_map : NULL))
  4405. res = -1;
  4406. vpx_usec_timer_mark(&timer);
  4407. cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
  4408. return res;
  4409. }
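/* A frame counts as a "reference" if it updates any reference buffer or any
 * persistent decoder state (entropy probabilities, segmentation map or data,
 * loop filter deltas). Frames that do not are marked droppable after packing.
 */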
  4410. static int frame_is_reference(const VP8_COMP *cpi)
  4411. {
  4412. const VP8_COMMON *cm = &cpi->common;
  4413. const MACROBLOCKD *xd = &cpi->mb.e_mbd;
  4414. return cm->frame_type == KEY_FRAME || cm->refresh_last_frame
  4415. || cm->refresh_golden_frame || cm->refresh_alt_ref_frame
  4416. || cm->copy_buffer_to_gf || cm->copy_buffer_to_arf
  4417. || cm->refresh_entropy_probs
  4418. || xd->mode_ref_lf_delta_update
  4419. || xd->update_mb_segmentation_map || xd->update_mb_segmentation_data;
  4420. }
  4421. int vp8_get_compressed_data(VP8_COMP *cpi, unsigned int *frame_flags, unsigned long *size, unsigned char *dest, unsigned char *dest_end, int64_t *time_stamp, int64_t *time_end, int flush)
  4422. {
  4423. VP8_COMMON *cm;
  4424. struct vpx_usec_timer tsctimer;
  4425. struct vpx_usec_timer ticktimer;
  4426. struct vpx_usec_timer cmptimer;
  4427. YV12_BUFFER_CONFIG *force_src_buffer = NULL;
  4428. if (!cpi)
  4429. return -1;
  4430. cm = &cpi->common;
  4431. if (setjmp(cpi->common.error.jmp))
  4432. {
  4433. cpi->common.error.setjmp = 0;
  4434. vp8_clear_system_state();
  4435. return VPX_CODEC_CORRUPT_FRAME;
  4436. }
  4437. cpi->common.error.setjmp = 1;
  4438. vpx_usec_timer_start(&cmptimer);
  4439. cpi->source = NULL;
  4440. #if !(CONFIG_REALTIME_ONLY)
  4441. /* Should we code an alternate reference frame */
  4442. if (cpi->oxcf.error_resilient_mode == 0 &&
  4443. cpi->oxcf.play_alternate &&
  4444. cpi->source_alt_ref_pending)
  4445. {
  4446. if ((cpi->source = vp8_lookahead_peek(cpi->lookahead,
  4447. cpi->frames_till_gf_update_due,
  4448. PEEK_FORWARD)))
  4449. {
  4450. cpi->alt_ref_source = cpi->source;
  4451. if (cpi->oxcf.arnr_max_frames > 0)
  4452. {
  4453. vp8_temporal_filter_prepare_c(cpi,
  4454. cpi->frames_till_gf_update_due);
  4455. force_src_buffer = &cpi->alt_ref_buffer;
  4456. }
  4457. cpi->frames_till_alt_ref_frame = cpi->frames_till_gf_update_due;
  4458. cm->refresh_alt_ref_frame = 1;
  4459. cm->refresh_golden_frame = 0;
  4460. cm->refresh_last_frame = 0;
  4461. cm->show_frame = 0;
  4462. /* Clear Pending alt Ref flag. */
  4463. cpi->source_alt_ref_pending = 0;
  4464. cpi->is_src_frame_alt_ref = 0;
  4465. }
  4466. }
  4467. #endif
  4468. if (!cpi->source)
  4469. {
4470. /* Read the last frame's source if we are encoding the first pass. */
  4471. if (cpi->pass == 1 && cm->current_video_frame > 0)
  4472. {
  4473. if((cpi->last_source = vp8_lookahead_peek(cpi->lookahead, 1,
  4474. PEEK_BACKWARD)) == NULL)
  4475. return -1;
  4476. }
  4477. if ((cpi->source = vp8_lookahead_pop(cpi->lookahead, flush)))
  4478. {
  4479. cm->show_frame = 1;
  4480. cpi->is_src_frame_alt_ref = cpi->alt_ref_source
  4481. && (cpi->source == cpi->alt_ref_source);
  4482. if(cpi->is_src_frame_alt_ref)
  4483. cpi->alt_ref_source = NULL;
  4484. }
  4485. }
  4486. if (cpi->source)
  4487. {
  4488. cpi->Source = force_src_buffer ? force_src_buffer : &cpi->source->img;
  4489. cpi->un_scaled_source = cpi->Source;
  4490. *time_stamp = cpi->source->ts_start;
  4491. *time_end = cpi->source->ts_end;
  4492. *frame_flags = cpi->source->flags;
  4493. if (cpi->pass == 1 && cm->current_video_frame > 0)
  4494. {
  4495. cpi->last_frame_unscaled_source = &cpi->last_source->img;
  4496. }
  4497. }
  4498. else
  4499. {
  4500. *size = 0;
  4501. #if !(CONFIG_REALTIME_ONLY)
  4502. if (flush && cpi->pass == 1 && !cpi->twopass.first_pass_done)
  4503. {
  4504. vp8_end_first_pass(cpi); /* get last stats packet */
  4505. cpi->twopass.first_pass_done = 1;
  4506. }
  4507. #endif
  4508. return -1;
  4509. }
  4510. if (cpi->source->ts_start < cpi->first_time_stamp_ever)
  4511. {
  4512. cpi->first_time_stamp_ever = cpi->source->ts_start;
  4513. cpi->last_end_time_stamp_seen = cpi->source->ts_start;
  4514. }
  4515. /* adjust frame rates based on timestamps given */
  4516. if (cm->show_frame)
  4517. {
  4518. int64_t this_duration;
  4519. int step = 0;
  4520. if (cpi->source->ts_start == cpi->first_time_stamp_ever)
  4521. {
  4522. this_duration = cpi->source->ts_end - cpi->source->ts_start;
  4523. step = 1;
  4524. }
  4525. else
  4526. {
  4527. int64_t last_duration;
  4528. this_duration = cpi->source->ts_end - cpi->last_end_time_stamp_seen;
  4529. last_duration = cpi->last_end_time_stamp_seen
  4530. - cpi->last_time_stamp_seen;
  4531. /* do a step update if the duration changes by 10% */
  4532. if (last_duration)
  4533. step = (int)(((this_duration - last_duration) *
  4534. 10 / last_duration));
  4535. }
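/* A non-zero step means the frame duration moved by 10% or more (or this is
 * the first frame), in which case the reference frame rate snaps straight to
 * 1 / this_duration; otherwise it is blended into roughly one second of
 * history below. The 10000000.0 constant reflects timestamps expressed in
 * tenths of a microsecond.
 */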
  4536. if (this_duration)
  4537. {
  4538. if (step)
  4539. cpi->ref_framerate = 10000000.0 / this_duration;
  4540. else
  4541. {
  4542. double avg_duration, interval;
  4543. /* Average this frame's rate into the last second's average
  4544. * frame rate. If we haven't seen 1 second yet, then average
  4545. * over the whole interval seen.
  4546. */
  4547. interval = (double)(cpi->source->ts_end -
  4548. cpi->first_time_stamp_ever);
  4549. if(interval > 10000000.0)
  4550. interval = 10000000;
  4551. avg_duration = 10000000.0 / cpi->ref_framerate;
  4552. avg_duration *= (interval - avg_duration + this_duration);
  4553. avg_duration /= interval;
  4554. cpi->ref_framerate = 10000000.0 / avg_duration;
  4555. }
  4556. #if CONFIG_MULTI_RES_ENCODING
  4557. if (cpi->oxcf.mr_total_resolutions > 1) {
  4558. LOWER_RES_FRAME_INFO* low_res_frame_info = (LOWER_RES_FRAME_INFO*)
  4559. cpi->oxcf.mr_low_res_mode_info;
  4560. // Frame rate should be the same for all spatial layers in
  4561. // multi-res-encoding (simulcast), so we constrain the frame for
4562. // higher layers to be that of the lowest resolution. This is needed
4563. // as the application may decide to skip encoding a high layer and
  4564. // then start again, in which case a big jump in time-stamps will
  4565. // be received for that high layer, which will yield an incorrect
  4566. // frame rate (from time-stamp adjustment in above calculation).
  4567. if (cpi->oxcf.mr_encoder_id) {
  4568. cpi->ref_framerate = low_res_frame_info->low_res_framerate;
  4569. }
  4570. else {
  4571. // Keep track of frame rate for lowest resolution.
  4572. low_res_frame_info->low_res_framerate = cpi->ref_framerate;
  4573. }
  4574. }
  4575. #endif
  4576. if (cpi->oxcf.number_of_layers > 1)
  4577. {
  4578. unsigned int i;
  4579. /* Update frame rates for each layer */
  4580. assert(cpi->oxcf.number_of_layers <= VPX_TS_MAX_LAYERS);
  4581. for (i = 0; i < cpi->oxcf.number_of_layers &&
  4582. i < VPX_TS_MAX_LAYERS; ++i)
  4583. {
  4584. LAYER_CONTEXT *lc = &cpi->layer_context[i];
  4585. lc->framerate = cpi->ref_framerate /
  4586. cpi->oxcf.rate_decimator[i];
  4587. }
  4588. }
  4589. else
  4590. vp8_new_framerate(cpi, cpi->ref_framerate);
  4591. }
  4592. cpi->last_time_stamp_seen = cpi->source->ts_start;
  4593. cpi->last_end_time_stamp_seen = cpi->source->ts_end;
  4594. }
  4595. if (cpi->oxcf.number_of_layers > 1)
  4596. {
  4597. int layer;
  4598. update_layer_contexts (cpi);
  4599. /* Restore layer specific context & set frame rate */
  4600. if (cpi->temporal_layer_id >= 0) {
  4601. layer = cpi->temporal_layer_id;
  4602. } else {
  4603. layer = cpi->oxcf.layer_id[
  4604. cpi->temporal_pattern_counter % cpi->oxcf.periodicity];
  4605. }
  4606. restore_layer_context (cpi, layer);
  4607. vp8_new_framerate(cpi, cpi->layer_context[layer].framerate);
  4608. }
  4609. if (cpi->compressor_speed == 2)
  4610. {
  4611. vpx_usec_timer_start(&tsctimer);
  4612. vpx_usec_timer_start(&ticktimer);
  4613. }
  4614. cpi->lf_zeromv_pct = (cpi->zeromv_count * 100)/cm->MBs;
  4615. #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
  4616. {
  4617. int i;
  4618. const int num_part = (1 << cm->multi_token_partition);
  4619. /* the available bytes in dest */
  4620. const unsigned long dest_size = dest_end - dest;
  4621. const int tok_part_buff_size = (dest_size * 9) / (10 * num_part);
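/* Carve up the output buffer: the first tenth is reserved for the control
 * partition (frame header plus mode/mv data) and the remaining nine tenths
 * are split evenly between the num_part token partitions.
 */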
  4622. unsigned char *dp = dest;
  4623. cpi->partition_d[0] = dp;
  4624. dp += dest_size/10; /* reserve 1/10 for control partition */
  4625. cpi->partition_d_end[0] = dp;
  4626. for(i = 0; i < num_part; i++)
  4627. {
  4628. cpi->partition_d[i + 1] = dp;
  4629. dp += tok_part_buff_size;
  4630. cpi->partition_d_end[i + 1] = dp;
  4631. }
  4632. }
  4633. #endif
  4634. /* start with a 0 size frame */
  4635. *size = 0;
  4636. /* Clear down mmx registers */
  4637. vp8_clear_system_state();
  4638. cm->frame_type = INTER_FRAME;
  4639. cm->frame_flags = *frame_flags;
  4640. #if 0
  4641. if (cm->refresh_alt_ref_frame)
  4642. {
  4643. cm->refresh_golden_frame = 0;
  4644. cm->refresh_last_frame = 0;
  4645. }
  4646. else
  4647. {
  4648. cm->refresh_golden_frame = 0;
  4649. cm->refresh_last_frame = 1;
  4650. }
  4651. #endif
  4652. /* find a free buffer for the new frame */
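/* A buffer whose flags field is zero is not currently referenced as the
 * last, golden, alt-ref or new frame, so it is free for reuse; the assert
 * below checks that one such buffer was found.
 */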
  4653. {
  4654. int i = 0;
  4655. for(; i < NUM_YV12_BUFFERS; i++)
  4656. {
  4657. if(!cm->yv12_fb[i].flags)
  4658. {
  4659. cm->new_fb_idx = i;
  4660. break;
  4661. }
  4662. }
  4663. assert(i < NUM_YV12_BUFFERS );
  4664. }
  4665. #if !(CONFIG_REALTIME_ONLY)
  4666. if (cpi->pass == 1)
  4667. {
  4668. Pass1Encode(cpi, size, dest, frame_flags);
  4669. }
  4670. else if (cpi->pass == 2)
  4671. {
  4672. Pass2Encode(cpi, size, dest, dest_end, frame_flags);
  4673. }
  4674. else
  4675. #endif
  4676. encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
  4677. if (cpi->compressor_speed == 2)
  4678. {
  4679. unsigned int duration, duration2;
  4680. vpx_usec_timer_mark(&tsctimer);
  4681. vpx_usec_timer_mark(&ticktimer);
  4682. duration = (int)(vpx_usec_timer_elapsed(&ticktimer));
  4683. duration2 = (unsigned int)((double)duration / 2);
  4684. if (cm->frame_type != KEY_FRAME)
  4685. {
  4686. if (cpi->avg_encode_time == 0)
  4687. cpi->avg_encode_time = duration;
  4688. else
  4689. cpi->avg_encode_time = (7 * cpi->avg_encode_time + duration) >> 3;
  4690. }
  4691. if (duration2)
  4692. {
  4693. {
  4694. if (cpi->avg_pick_mode_time == 0)
  4695. cpi->avg_pick_mode_time = duration2;
  4696. else
  4697. cpi->avg_pick_mode_time = (7 * cpi->avg_pick_mode_time + duration2) >> 3;
  4698. }
  4699. }
  4700. }
  4701. if (cm->refresh_entropy_probs == 0)
  4702. {
  4703. memcpy(&cm->fc, &cm->lfc, sizeof(cm->fc));
  4704. }
  4705. /* Save the contexts separately for alt ref, gold and last. */
  4706. /* (TODO jbb -> Optimize this with pointers to avoid extra copies. ) */
  4707. if(cm->refresh_alt_ref_frame)
  4708. memcpy(&cpi->lfc_a, &cm->fc, sizeof(cm->fc));
  4709. if(cm->refresh_golden_frame)
  4710. memcpy(&cpi->lfc_g, &cm->fc, sizeof(cm->fc));
  4711. if(cm->refresh_last_frame)
  4712. memcpy(&cpi->lfc_n, &cm->fc, sizeof(cm->fc));
4713. /* if it's a dropped frame, honor the requests on subsequent frames */
  4714. if (*size > 0)
  4715. {
  4716. cpi->droppable = !frame_is_reference(cpi);
  4717. /* return to normal state */
  4718. cm->refresh_entropy_probs = 1;
  4719. cm->refresh_alt_ref_frame = 0;
  4720. cm->refresh_golden_frame = 0;
  4721. cm->refresh_last_frame = 1;
  4722. cm->frame_type = INTER_FRAME;
  4723. }
  4724. /* Save layer specific state */
  4725. if (cpi->oxcf.number_of_layers > 1)
  4726. save_layer_context (cpi);
  4727. vpx_usec_timer_mark(&cmptimer);
  4728. cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);
  4729. if (cpi->b_calculate_psnr && cpi->pass != 1 && cm->show_frame)
  4730. {
  4731. generate_psnr_packet(cpi);
  4732. }
  4733. #if CONFIG_INTERNAL_STATS
  4734. if (cpi->pass != 1)
  4735. {
  4736. cpi->bytes += *size;
  4737. if (cm->show_frame)
  4738. {
  4739. cpi->common.show_frame_mi = cpi->common.mi;
  4740. cpi->count ++;
  4741. if (cpi->b_calculate_psnr)
  4742. {
  4743. uint64_t ye,ue,ve;
  4744. double frame_psnr;
  4745. YV12_BUFFER_CONFIG *orig = cpi->Source;
  4746. YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
  4747. unsigned int y_width = cpi->common.Width;
  4748. unsigned int y_height = cpi->common.Height;
  4749. unsigned int uv_width = (y_width + 1) / 2;
  4750. unsigned int uv_height = (y_height + 1) / 2;
  4751. int y_samples = y_height * y_width;
  4752. int uv_samples = uv_height * uv_width;
  4753. int t_samples = y_samples + 2 * uv_samples;
  4754. double sq_error;
  4755. ye = calc_plane_error(orig->y_buffer, orig->y_stride,
  4756. recon->y_buffer, recon->y_stride, y_width, y_height);
  4757. ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
  4758. recon->u_buffer, recon->uv_stride, uv_width, uv_height);
  4759. ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
  4760. recon->v_buffer, recon->uv_stride, uv_width, uv_height);
  4761. sq_error = (double)(ye + ue + ve);
  4762. frame_psnr = vpx_sse_to_psnr(t_samples, 255.0, sq_error);
  4763. cpi->total_y += vpx_sse_to_psnr(y_samples, 255.0, (double)ye);
  4764. cpi->total_u += vpx_sse_to_psnr(uv_samples, 255.0, (double)ue);
  4765. cpi->total_v += vpx_sse_to_psnr(uv_samples, 255.0, (double)ve);
  4766. cpi->total_sq_error += sq_error;
  4767. cpi->total += frame_psnr;
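/* Per-frame PSNR is derived from the combined Y/U/V squared error with a
 * peak value of 255; the running totals feed the end-of-run internal stats
 * report.
 */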
  4768. #if CONFIG_POSTPROC
  4769. {
  4770. YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer;
  4771. double sq_error2;
  4772. double frame_psnr2, frame_ssim2 = 0;
  4773. double weight = 0;
  4774. vp8_deblock(cm, cm->frame_to_show, &cm->post_proc_buffer, cm->filter_level * 10 / 6, 1, 0);
  4775. vp8_clear_system_state();
  4776. ye = calc_plane_error(orig->y_buffer, orig->y_stride,
  4777. pp->y_buffer, pp->y_stride, y_width, y_height);
  4778. ue = calc_plane_error(orig->u_buffer, orig->uv_stride,
  4779. pp->u_buffer, pp->uv_stride, uv_width, uv_height);
  4780. ve = calc_plane_error(orig->v_buffer, orig->uv_stride,
  4781. pp->v_buffer, pp->uv_stride, uv_width, uv_height);
  4782. sq_error2 = (double)(ye + ue + ve);
  4783. frame_psnr2 = vpx_sse_to_psnr(t_samples, 255.0, sq_error2);
  4784. cpi->totalp_y += vpx_sse_to_psnr(y_samples,
  4785. 255.0, (double)ye);
  4786. cpi->totalp_u += vpx_sse_to_psnr(uv_samples,
  4787. 255.0, (double)ue);
  4788. cpi->totalp_v += vpx_sse_to_psnr(uv_samples,
  4789. 255.0, (double)ve);
  4790. cpi->total_sq_error2 += sq_error2;
  4791. cpi->totalp += frame_psnr2;
  4792. frame_ssim2 = vp8_calc_ssim(cpi->Source,
  4793. &cm->post_proc_buffer, 1, &weight);
  4794. cpi->summed_quality += frame_ssim2 * weight;
  4795. cpi->summed_weights += weight;
  4796. if (cpi->oxcf.number_of_layers > 1)
  4797. {
  4798. unsigned int i;
  4799. for (i=cpi->current_layer;
  4800. i<cpi->oxcf.number_of_layers; i++)
  4801. {
  4802. cpi->frames_in_layer[i]++;
  4803. cpi->bytes_in_layer[i] += *size;
  4804. cpi->sum_psnr[i] += frame_psnr;
  4805. cpi->sum_psnr_p[i] += frame_psnr2;
  4806. cpi->total_error2[i] += sq_error;
  4807. cpi->total_error2_p[i] += sq_error2;
  4808. cpi->sum_ssim[i] += frame_ssim2 * weight;
  4809. cpi->sum_weights[i] += weight;
  4810. }
  4811. }
  4812. }
  4813. #endif
  4814. }
  4815. if (cpi->b_calculate_ssimg)
  4816. {
  4817. double y, u, v, frame_all;
  4818. frame_all = vp8_calc_ssimg(cpi->Source, cm->frame_to_show,
  4819. &y, &u, &v);
  4820. if (cpi->oxcf.number_of_layers > 1)
  4821. {
  4822. unsigned int i;
  4823. for (i=cpi->current_layer;
  4824. i<cpi->oxcf.number_of_layers; i++)
  4825. {
  4826. if (!cpi->b_calculate_psnr)
  4827. cpi->frames_in_layer[i]++;
  4828. cpi->total_ssimg_y_in_layer[i] += y;
  4829. cpi->total_ssimg_u_in_layer[i] += u;
  4830. cpi->total_ssimg_v_in_layer[i] += v;
  4831. cpi->total_ssimg_all_in_layer[i] += frame_all;
  4832. }
  4833. }
  4834. else
  4835. {
  4836. cpi->total_ssimg_y += y;
  4837. cpi->total_ssimg_u += u;
  4838. cpi->total_ssimg_v += v;
  4839. cpi->total_ssimg_all += frame_all;
  4840. }
  4841. }
  4842. }
  4843. }
  4844. #if 0
  4845. if (cpi->common.frame_type != 0 && cpi->common.base_qindex == cpi->oxcf.worst_allowed_q)
  4846. {
  4847. skiptruecount += cpi->skip_true_count;
  4848. skipfalsecount += cpi->skip_false_count;
  4849. }
  4850. #endif
  4851. #if 0
  4852. if (cpi->pass != 1)
  4853. {
  4854. FILE *f = fopen("skip.stt", "a");
  4855. fprintf(f, "frame:%4d flags:%4x Q:%4d P:%4d Size:%5d\n", cpi->common.current_video_frame, *frame_flags, cpi->common.base_qindex, cpi->prob_skip_false, *size);
  4856. if (cpi->is_src_frame_alt_ref == 1)
  4857. fprintf(f, "skipcount: %4d framesize: %d\n", cpi->skip_true_count , *size);
  4858. fclose(f);
  4859. }
  4860. #endif
  4861. #endif
  4862. cpi->common.error.setjmp = 0;
  4863. return 0;
  4864. }
  4865. int vp8_get_preview_raw_frame(VP8_COMP *cpi, YV12_BUFFER_CONFIG *dest, vp8_ppflags_t *flags)
  4866. {
  4867. if (cpi->common.refresh_alt_ref_frame)
  4868. return -1;
  4869. else
  4870. {
  4871. int ret;
  4872. #if CONFIG_MULTITHREAD
  4873. if(cpi->b_lpf_running)
  4874. {
  4875. sem_wait(&cpi->h_event_end_lpf);
  4876. cpi->b_lpf_running = 0;
  4877. }
  4878. #endif
  4879. #if CONFIG_POSTPROC
  4880. cpi->common.show_frame_mi = cpi->common.mi;
  4881. ret = vp8_post_proc_frame(&cpi->common, dest, flags);
  4882. #else
  4883. (void)flags;
  4884. if (cpi->common.frame_to_show)
  4885. {
  4886. *dest = *cpi->common.frame_to_show;
  4887. dest->y_width = cpi->common.Width;
  4888. dest->y_height = cpi->common.Height;
  4889. dest->uv_height = cpi->common.Height / 2;
  4890. ret = 0;
  4891. }
  4892. else
  4893. {
  4894. ret = -1;
  4895. }
  4896. #endif
  4897. vp8_clear_system_state();
  4898. return ret;
  4899. }
  4900. }
  4901. int vp8_set_roimap(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols, int delta_q[4], int delta_lf[4], unsigned int threshold[4])
  4902. {
  4903. signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
  4904. int internal_delta_q[MAX_MB_SEGMENTS];
  4905. const int range = 63;
  4906. int i;
  4907. // This method is currently incompatible with the cyclic refresh method
  4908. if ( cpi->cyclic_refresh_mode_enabled )
  4909. return -1;
4910. // Check that the number of rows and columns match
  4911. if (cpi->common.mb_rows != rows || cpi->common.mb_cols != cols)
  4912. return -1;
  4913. // Range check the delta Q values and convert the external Q range values
  4914. // to internal ones.
  4915. if ( (abs(delta_q[0]) > range) || (abs(delta_q[1]) > range) ||
  4916. (abs(delta_q[2]) > range) || (abs(delta_q[3]) > range) )
  4917. return -1;
  4918. // Range check the delta lf values
  4919. if ( (abs(delta_lf[0]) > range) || (abs(delta_lf[1]) > range) ||
  4920. (abs(delta_lf[2]) > range) || (abs(delta_lf[3]) > range) )
  4921. return -1;
  4922. if (!map)
  4923. {
  4924. disable_segmentation(cpi);
  4925. return 0;
  4926. }
  4927. // Translate the external delta q values to internal values.
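// q_trans[] maps the external 0..63 quantizer scale onto the internal
// 0..127 qindex scale used by base_qindex; the sign of each delta is
// preserved across the mapping.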
  4928. for ( i = 0; i < MAX_MB_SEGMENTS; i++ )
  4929. internal_delta_q[i] =
  4930. ( delta_q[i] >= 0 ) ? q_trans[delta_q[i]] : -q_trans[-delta_q[i]];
  4931. /* Set the segmentation Map */
  4932. set_segmentation_map(cpi, map);
  4933. /* Activate segmentation. */
  4934. enable_segmentation(cpi);
  4935. /* Set up the quant segment data */
  4936. feature_data[MB_LVL_ALT_Q][0] = internal_delta_q[0];
  4937. feature_data[MB_LVL_ALT_Q][1] = internal_delta_q[1];
  4938. feature_data[MB_LVL_ALT_Q][2] = internal_delta_q[2];
  4939. feature_data[MB_LVL_ALT_Q][3] = internal_delta_q[3];
4940. /* Set up the loop filter segment data */
  4941. feature_data[MB_LVL_ALT_LF][0] = delta_lf[0];
  4942. feature_data[MB_LVL_ALT_LF][1] = delta_lf[1];
  4943. feature_data[MB_LVL_ALT_LF][2] = delta_lf[2];
  4944. feature_data[MB_LVL_ALT_LF][3] = delta_lf[3];
  4945. cpi->segment_encode_breakout[0] = threshold[0];
  4946. cpi->segment_encode_breakout[1] = threshold[1];
  4947. cpi->segment_encode_breakout[2] = threshold[2];
  4948. cpi->segment_encode_breakout[3] = threshold[3];
  4949. /* Initialise the feature data structure */
  4950. set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
  4951. return 0;
  4952. }
  4953. int vp8_set_active_map(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols)
  4954. {
  4955. if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols)
  4956. {
  4957. if (map)
  4958. {
  4959. memcpy(cpi->active_map, map, rows * cols);
  4960. cpi->active_map_enabled = 1;
  4961. }
  4962. else
  4963. cpi->active_map_enabled = 0;
  4964. return 0;
  4965. }
  4966. else
  4967. {
  4968. return -1 ;
  4969. }
  4970. }
  4971. int vp8_set_internal_size(VP8_COMP *cpi, VPX_SCALING horiz_mode, VPX_SCALING vert_mode)
  4972. {
  4973. if (horiz_mode <= ONETWO)
  4974. cpi->common.horiz_scale = horiz_mode;
  4975. else
  4976. return -1;
  4977. if (vert_mode <= ONETWO)
  4978. cpi->common.vert_scale = vert_mode;
  4979. else
  4980. return -1;
  4981. return 0;
  4982. }
  4983. int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest)
  4984. {
  4985. int i, j;
  4986. int Total = 0;
  4987. unsigned char *src = source->y_buffer;
  4988. unsigned char *dst = dest->y_buffer;
4989. /* Loop through the Y plane of the raw and reconstructed data, summing
4990. * the squared differences.
  4991. */
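/* The total is a Y-plane sum of squared errors accumulated in 16x16 blocks
 * via vpx_mse16x16(); encode_frame_to_data_rate() uses it to derive
 * ambient_err for the forced key frame quality test.
 */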
  4992. for (i = 0; i < source->y_height; i += 16)
  4993. {
  4994. for (j = 0; j < source->y_width; j += 16)
  4995. {
  4996. unsigned int sse;
  4997. Total += vpx_mse16x16(src + j, source->y_stride,
  4998. dst + j, dest->y_stride, &sse);
  4999. }
  5000. src += 16 * source->y_stride;
  5001. dst += 16 * dest->y_stride;
  5002. }
  5003. return Total;
  5004. }
  5005. int vp8_get_quantizer(VP8_COMP *cpi)
  5006. {
  5007. return cpi->common.base_qindex;
  5008. }