
encantar.js (1003 KB)

430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062120622206232062420625206262062720628206292063020631206322063320634206352063620637206382063920640206412064220643206442064520646206472064820649206502065120652206532065420655206562065720658206592066020661206622066320664206652066620667206682066920670206712067220673206742067520676206772067820679206802068120682206832068420685206862068720688206892069020691206922069320694206952069620697206982069920700207012070220703207042070520706207072
070820709207102071120712207132071420715207162071720718207192072020721207222072320724207252072620727207282072920730207312073220733207342073520736207372073820739207402074120742207432074420745207462074720748207492075020751207522075320754207552075620757207582075920760207612076220763207642076520766207672076820769207702077120772207732077420775207762077720778207792078020781207822078320784207852078620787207882078920790207912079220793207942079520796207972079820799208002080120802208032080420805208062080720808208092081020811208122081320814208152081620817208182081920820208212082220823208242082520826208272082820829208302083120832208332083420835208362083720838208392084020841208422084320844208452084620847208482084920850208512085220853208542085520856208572085820859208602086120862208632086420865208662086720868208692087020871208722087320874208752087620877208782087920880208812088220883208842088520886208872088820889208902089120892208932089420895208962089720898208992090020901209022090320904209052090620907209082090920910209112091220913209142091520916209172091820919209202092120922209232092420925209262092720928209292093020931209322093320934209352093620937209382093920940209412094220943209442094520946209472094820949209502095120952209532095420955209562095720958209592096020961209622096320964209652096620967209682096920970209712097220973209742097520976209772097820979209802098120982209832098420985209862098720988209892099020991209922099320994209952099620997209982099921000210012100221003210042100521006210072100821009210102101121012210132101421015210162101721018210192102021021210222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182
141921420214212142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182121822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038220392204022041220422204322044220452204622047220482204922050220512205222053220542205522056220572205822059220602206122062220632206422065220662206722068220692207022071220722207322074220752207622077220782207922080220812208222083220842208522086220872208822089220902209122092220932209422095220962209722098220992210022101221022210322104221052210622107221082210922110221112211222113221142211522116221172211822119221202212122122221232212422125221262212722128221292
213022131221322213322134221352213622137221382213922140221412214222143221442214522146221472214822149221502215122152221532215422155221562215722158221592216022161221622216322164221652216622167221682216922170221712217222173221742217522176221772217822179221802218122182221832218422185221862218722188221892219022191221922219322194221952219622197221982219922200222012220222203222042220522206222072220822209222102221122212222132221422215222162221722218222192222022221222222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212262222623226242262522626226272262822629226302263122632226332263422635226362263722638226392264022641226422264322644226452264622647226482264922650226512265222653226542265522656226572265822659226602266122662226632266422665226662266722668226692267022671226722267322674226752267622677226782267922680226812268222683226842268522686226872268822689226902269122692226932269422695226962269722698226992270022701227022270322704227052270622707227082270922710227112271222713227142271522716227172271822719227202272122722227232272422725227262272722728227292273022731227322273322734227352273622737227382273922740227412274222743227442274522746227472274822749227502275122752227532275422755227562275722758227592276022761227622276322764227652276622767227682276922770227712277222773227742277522776227772277822779227802278122782227832278422785227862278722788227892279022791227922279322794227952279622797227982279922800228012280222803228042280522806228072280822809228102281122812228132281422815228162281722818228192282022821228222282322824228252282622827228282282922830228312283222833228342283522836228372283822839228402
284122842228432284422845228462284722848228492285022851228522285322854228552285622857228582285922860228612286222863228642286522866228672286822869228702287122872228732287422875228762287722878228792288022881228822288322884228852288622887228882288922890228912289222893228942289522896228972289822899229002290122902229032290422905229062290722908229092291022911229122291322914229152291622917229182291922920229212292222923229242292522926229272292822929229302293122932229332293422935229362293722938229392294022941229422294322944229452294622947229482294922950229512295222953229542295522956229572295822959229602296122962229632296422965229662296722968229692297022971229722297322974229752297622977229782297922980229812298222983229842298522986229872298822989229902299122992229932299422995229962299722998229992300023001230022300323004230052300623007230082300923010230112301223013230142301523016230172301823019230202302123022230232302423025230262302723028230292303023031230322303323034230352303623037230382303923040230412304223043230442304523046230472304823049230502305123052230532305423055230562305723058230592306023061230622306323064230652306623067230682306923070230712307223073230742307523076230772307823079230802308123082230832308423085230862308723088230892309023091230922309323094230952309623097230982309923100231012310223103231042310523106231072310823109231102311123112231132311423115231162311723118231192312023121231222312323124231252312623127231282312923130231312313223133231342313523136231372313823139231402314123142231432314423145231462314723148231492315023151231522315323154231552315623157231582315923160231612316223163231642316523166231672316823169231702317123172231732317423175231762317723178231792318023181231822318323184231852318623187231882318923190231912319223193231942319523196231972319823199232002320123202232032320423205232062320723208232092321023211232122321323214232152321623217232182321923220232212322223223232242322523226232272322823229232302323123232232332323423235232362323723238232392324023241232422324323244232452324623247232482324923250232512325223253232542325523256232572325823259232602326123262232632326423265232662326723268232692327023271232722327323274232752327623277232782327923280232812328223283232842328523286232872328823289232902329123292232932329423295232962329723298232992330023301233022330323304233052330623307233082330923310233112331223313233142331523316233172331823319233202332123322233232332423325233262332723328233292333023331233322333323334233352333623337233382333923340233412334223343233442334523346233472334823349233502335123352233532335423355233562335723358233592336023361233622336323364233652336623367233682336923370233712337223373233742337523376233772337823379233802338123382233832338423385233862338723388233892339023391233922339323394233952339623397233982339923400234012340223403234042340523406234072340823409234102341123412234132341423415234162341723418234192342023421234222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512
355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212382223823238242382523826238272382823829238302383123832238332383423835238362383723838238392384023841238422384323844238452384623847238482384923850238512385223853238542385523856238572385823859238602386123862238632386423865238662386723868238692387023871238722387323874238752387623877238782387923880238812388223883238842388523886238872388823889238902389123892238932389423895238962389723898238992390023901239022390323904239052390623907239082390923910239112391223913239142391523916239172391823919239202392123922239232392423925239262392723928239292393023931239322393323934239352393623937239382393923940239412394223943239442394523946239472394823949239502395123952239532395423955239562395723958239592396023961239622396323964239652396623967239682396923970239712397223973239742397523976239772397823979239802398123982239832398423985239862398723988239892399023991239922399323994239952399623997239982399924000240012400224003240042400524006240072400824009240102401124012240132401424015240162401724018240192402024021240222402324024240252402624027240282402924030240312403224033240342403524036240372403824039240402404124042240432404424045240462404724048240492405024051240522405324054240552405624057240582405924060240612406224063240642406524066240672406824069240702407124072240732407424075240762407724078240792408024081240822408324084240852408624087240882408924090240912409224093240942409524096240972409824099241002410124102241032410424105241062410724108241092411024111241122411324114241152411624117241182411924120241212412224123241242412524126241272412824129241302413124132241332413424135241362413724138241392414024141241422414324144241452414624147241482414924150241512415224153241542415524156241572415824159241602416124162241632416424165241662416724168241692417024171241722417324174241752417624177241782417924180241812418224183241842418524186241872418824189241902419124192241932419424195241962419724198241992420024201242022420324204242052420624207242082420924210242112421224213242142421524216242172421824219242202422124222242232422424225242262422724228242292423024231242322423324234242352423624237242382423924240242412424224243242442424524246242472424824249242502425124252242532425424255242562425724258242592426024261242622
  1. /*!
  2. * encantAR.js version 0.3.0
  3. * GPU-accelerated Augmented Reality for the web
  4. * Copyright 2022-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
  5. * https://github.com/alemart/encantar-js
  6. *
  7. * @license LGPL-3.0-or-later
  8. * Date: 2024-09-02T19:15:04.498Z
  9. */
  10. (function webpackUniversalModuleDefinition(root, factory) {
  11. if(typeof exports === 'object' && typeof module === 'object')
  12. module.exports = factory();
  13. else if(typeof define === 'function' && define.amd)
  14. define([], factory);
  15. else if(typeof exports === 'object')
  16. exports["AR"] = factory();
  17. else
  18. root["AR"] = factory();
  19. })(self, () => {
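/*
 * Loading sketch (illustrative only; not part of the original bundle).
 * The UMD wrapper above exposes the library as module.exports under
 * CommonJS, registers it via define() under AMD, and otherwise attaches
 * it to the host object as the global `AR`:
 *
 *   <script src="encantar.js"></script>
 *   <script>
 *     console.log(typeof AR); // "object" -- the bundle's public namespace
 *   </script>
 */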
  20. return /******/ (() => { // webpackBootstrap
  21. /******/ var __webpack_modules__ = ({
  22. /***/ 774:
  23. /***/ ((module) => {
  24. /*!
  25. * Speedy Vision version 0.9.1
  26. * GPU-accelerated Computer Vision for JavaScript
  27. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com> (https://github.com/alemart)
  28. * https://github.com/alemart/speedy-vision
  29. *
  30. * @license Apache-2.0
  31. * Date: 2024-07-03T02:16:25.769Z
  32. */
  33. (function webpackUniversalModuleDefinition(root, factory) {
  34. if(true)
  35. module.exports = factory();
  36. else {}
  37. })(self, () => {
  38. return /******/ (() => { // webpackBootstrap
  39. /******/ var __webpack_modules__ = ({
  40. /***/ 2199:
  41. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_791__) => {
  42. "use strict";
  43. /* harmony export */ __nested_webpack_require_791__.d(__nested_webpack_exports__, {
  44. /* harmony export */ w: () => (/* binding */ Settings)
  45. /* harmony export */ });
  46. /* harmony import */ var _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_791__(6634);
  47. /* harmony import */ var _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_791__(1001);
  48. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_791__(9037);
  49. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_791__(8581);
  50. /*
  51. * speedy-vision.js
  52. * GPU-accelerated Computer Vision for JavaScript
  53. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  54. *
  55. * Licensed under the Apache License, Version 2.0 (the "License");
  56. * you may not use this file except in compliance with the License.
  57. * You may obtain a copy of the License at
  58. *
  59. * http://www.apache.org/licenses/LICENSE-2.0
  60. *
  61. * Unless required by applicable law or agreed to in writing, software
  62. * distributed under the License is distributed on an "AS IS" BASIS,
  63. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  64. * See the License for the specific language governing permissions and
  65. * limitations under the License.
  66. *
  67. * settings.js
  68. * Global settings
  69. */
  70. /** @typedef {import('../gpu/speedy-gl').PowerPreference} PowerPreference */
  71. /** @typedef {"raf" | "asap"} GPUPollingMode */
  72. /** @typedef {"default" | "none" | "diagnostic"} LoggingMode */
  73. /** @type {GPUPollingMode} Default GPU polling mode */
  74. const DEFAULT_GPU_POLLING_MODE = 'raf';
  75. /** @type {GPUPollingMode} GPU polling mode */
  76. let gpuPollingMode = DEFAULT_GPU_POLLING_MODE;
  77. /** @type {LoggingMode} logging mode */
  78. let loggingMode = 'default';
  79. /**
  80. * Global settings
  81. */
  82. class Settings extends _speedy_namespace__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyNamespace */ .Q {
  83. /**
  84. * Power preference of the WebGL context
  85. * @returns {PowerPreference}
  86. */
  87. static get powerPreference() {
  88. return _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference;
  89. }
  90. /**
  91. * Power preference of the WebGL context
  92. * @param {PowerPreference} value
  93. */
  94. static set powerPreference(value) {
  95. _gpu_speedy_gl__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyGL */ .c.powerPreference = value;
  96. }
  97. /**
  98. * GPU polling mode
  99. * @returns {GPUPollingMode}
  100. */
  101. static get gpuPollingMode() {
  102. return gpuPollingMode;
  103. }
  104. /**
  105. * GPU polling mode
  106. * @param {GPUPollingMode} value
  107. */
  108. static set gpuPollingMode(value) {
  109. if (value !== 'raf' && value !== 'asap') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid GPU polling mode: "${value}"`);
  110. gpuPollingMode = value;
  111. }
  112. /**
  113. * Logging mode
  114. * @returns {LoggingMode}
  115. */
  116. static get logging() {
  117. return loggingMode;
  118. }
  119. /**
  120. * Logging mode
  121. * @param {LoggingMode} mode
  122. */
  123. static set logging(mode) {
  124. if (mode !== 'default' && mode !== 'none' && mode !== 'diagnostic') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid logging mode: "${mode}"`);else if (mode === 'diagnostic') _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log('%c DIAGNOSTIC MODE ', 'background:red;color:white;font-size:36pt;font-weight:bold');
  125. loggingMode = mode;
  126. }
  127. }
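/*
 * Usage sketch for the Settings class above (illustrative only; not part
 * of the original bundle). gpuPollingMode and logging validate their input
 * and throw an IllegalArgumentError on unknown values; powerPreference is
 * forwarded to SpeedyGL:
 *
 *   Settings.powerPreference = 'low-power';  // WebGL-style hint (assumed value)
 *   Settings.gpuPollingMode  = 'asap';       // 'raf' (default) or 'asap'
 *   Settings.logging         = 'diagnostic'; // 'default' | 'none' | 'diagnostic'
 */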
  128. /***/ }),
  129. /***/ 6306:
  130. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_4248__) => {
  131. "use strict";
  132. /* harmony export */ __nested_webpack_require_4248__.d(__nested_webpack_exports__, {
  133. /* harmony export */ r: () => (/* binding */ SpeedyMatrixExpr)
  134. /* harmony export */ });
  135. /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_4248__(6465);
  136. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_4248__(9037);
  137. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_4248__(8581);
  138. /*
  139. * speedy-vision.js
  140. * GPU-accelerated Computer Vision for JavaScript
  141. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  142. *
  143. * Licensed under the Apache License, Version 2.0 (the "License");
  144. * you may not use this file except in compliance with the License.
  145. * You may obtain a copy of the License at
  146. *
  147. * http://www.apache.org/licenses/LICENSE-2.0
  148. *
  149. * Unless required by applicable law or agreed to in writing, software
  150. * distributed under the License is distributed on an "AS IS" BASIS,
  151. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  152. * See the License for the specific language governing permissions and
  153. * limitations under the License.
  154. *
  155. * speedy-matrix-expr.js
  156. * Symbolic matrix expressions
  157. */
  158. /** @typedef {import('./speedy-matrix').SpeedyMatrixDtype} SpeedyMatrixDtype */
  159. /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferType} SpeedyMatrixBufferType */
  160. /** @typedef {import('./speedy-matrix').SpeedyMatrixBufferTypeConstructor} SpeedyMatrixBufferTypeConstructor */
  161. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
  162. /** @typedef {Object<SpeedyMatrixDtype,SpeedyMatrixBufferTypeConstructor>} Dtype2BufferType */
  163. /** @const {Dtype2BufferType} */
  164. const DTYPE_TO_BUFFER_TYPE = Object.freeze({
  165. 'float32': Float32Array
  166. });
  167. /**
  168. * @abstract Matrix expression
  169. * It's an opaque object representing an algebraic
  170. * expression. It has no data attached to it.
  171. */
  172. class SpeedyMatrixExpr {
  173. /**
  174. * Constructor
  175. * @param {number} rows
  176. * @param {number} columns
  177. * @param {SpeedyMatrixDtype} dtype
  178. */
  179. constructor(rows, columns, dtype) {
  180. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(rows > 0 && columns > 0);
  181. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(dtype === SpeedyMatrixExpr.DEFAULT_DTYPE); // we only support float32 for now
  182. /** @type {number} number of rows */
  183. this._rows = rows | 0;
  184. /** @type {number} number of columns */
  185. this._columns = columns | 0;
  186. /** @type {SpeedyMatrixDtype} data type */
  187. this._dtype = dtype;
  188. }
  189. /**
  190. * Number of rows
  191. * @returns {number}
  192. */
  193. get rows() {
  194. return this._rows;
  195. }
  196. /**
  197. * Number of columns
  198. * @returns {number}
  199. */
  200. get columns() {
  201. return this._columns;
  202. }
  203. /**
  204. * Data type
  205. * @returns {SpeedyMatrixDtype}
  206. */
  207. get dtype() {
  208. return this._dtype;
  209. }
  210. /**
  211. * Default data type
  212. * @returns {SpeedyMatrixDtype}
  213. */
  214. static get DEFAULT_DTYPE() {
  215. return 'float32';
  216. }
  217. /**
  218. * Buffer types
  219. * @returns {Dtype2BufferType}
  220. */
  221. static get BUFFER_TYPE() {
  222. return DTYPE_TO_BUFFER_TYPE;
  223. }
  224. /**
  225. * Matrix addition
  226. * @param {SpeedyMatrixExpr} expr
  227. * @returns {SpeedyMatrixExpr}
  228. */
  229. plus(expr) {
  230. return new SpeedyMatrixAddExpr(this, expr);
  231. }
  232. /**
  233. * Matrix subtraction
  234. * @param {SpeedyMatrixExpr} expr
  235. * @returns {SpeedyMatrixExpr}
  236. */
  237. minus(expr) {
  238. return new SpeedyMatrixSubtractExpr(this, expr);
  239. }
  240. /**
  241. * Matrix multiplication
  242. * @param {SpeedyMatrixExpr|number} expr
  243. * @returns {SpeedyMatrixExpr}
  244. */
  245. times(expr) {
  246. if (typeof expr === 'number') return new SpeedyMatrixScaleExpr(this, expr);else return new SpeedyMatrixMultiplyExpr(this, expr);
  247. }
  248. /**
  249. * Matrix transposition
  250. * @returns {SpeedyMatrixExpr}
  251. */
  252. transpose() {
  253. return new SpeedyMatrixTransposeExpr(this);
  254. }
  255. /**
  256. * Matrix inversion
  257. * @returns {SpeedyMatrixExpr}
  258. */
  259. inverse() {
  260. return new SpeedyMatrixInvertExpr(this);
  261. }
  262. /**
  263. * Component-wise multiplication
  264. * @param {SpeedyMatrixExpr} expr
  265. * @returns {SpeedyMatrixExpr}
  266. */
  267. compMult(expr) {
  268. return new SpeedyMatrixCompMultExpr(this, expr);
  269. }
  270. /**
  271. * Left division: A \ b, which is equivalent to (pseudo-)inverse(A) * b
  272. * @param {SpeedyMatrixExpr} expr
  273. * @returns {SpeedyMatrixExpr}
  274. */
  275. ldiv(expr) {
  276. return new SpeedyMatrixLdivExpr(this, expr);
  277. }
  278. /**
  279. * Returns a human-readable string representation of the matrix expression
  280. * @returns {string}
  281. */
  282. toString() {
  283. return `SpeedyMatrixExpr(rows=${this.rows}, columns=${this.columns})`;
  284. }
  285. /**
  286. * Evaluate this expression
  287. * @abstract
  288. * @param {WebAssembly.Instance} wasm
  289. * @param {SpeedyMatrixWASMMemory} memory
  290. * @returns {SpeedyMatrix}
  291. */
  292. _evaluate(wasm, memory) {
  293. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  294. }
  295. }
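/*
 * Expression-building sketch (illustrative only; not part of the original
 * bundle). SpeedyMatrixExpr is lazy: plus(), minus(), times(), transpose(),
 * inverse(), compMult() and ldiv() only build a tree of expression nodes;
 * nothing is computed until the tree is evaluated against the WASM routines
 * (see SpeedyMatrix.setTo() / setToSync() further below). Assuming A and B
 * are SpeedyMatrix instances of the same shape:
 *
 *   const expr = A.plus(B).times(2).transpose(); // still just an expression
 *   const C = SpeedyMatrix.Zeros(expr.rows, expr.columns);
 *   C.setToSync(expr); // runs the WASM kernels and stores the result in C
 */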
  296. const {
  297. SpeedyMatrix
  298. } = __nested_webpack_require_4248__(4188);
  299. /**
  300. * @abstract operation storing a temporary matrix
  301. */
  302. class SpeedyMatrixTempExpr extends SpeedyMatrixExpr {
  303. /**
  304. * Constructor
  305. * @param {number} rows
  306. * @param {number} columns
  307. * @param {SpeedyMatrixDtype} dtype
  308. */
  309. constructor(rows, columns, dtype) {
  310. super(rows, columns, dtype);
  311. /** @type {SpeedyMatrix} holds the results of a computation */
  312. this._tempMatrix = SpeedyMatrix.Zeros(this.rows, this.columns, this.dtype);
  313. }
  314. }
  315. /**
  316. * @abstract unary operation
  317. */
  318. class SpeedyMatrixUnaryOperationExpr extends SpeedyMatrixTempExpr {
  319. /**
  320. * Constructor
  321. * @param {number} rows rows of the output matrix
  322. * @param {number} columns columns of the output matrix
  323. * @param {SpeedyMatrixExpr} operand
  324. */
  325. constructor(rows, columns, operand) {
  326. super(rows, columns, operand.dtype);
  327. /** @type {SpeedyMatrixExpr} operand */
  328. this._operand = operand;
  329. }
  330. /**
  331. * Evaluate this expression
  332. * @param {WebAssembly.Instance} wasm
  333. * @param {SpeedyMatrixWASMMemory} memory
  334. * @returns {SpeedyMatrix}
  335. */
  336. _evaluate(wasm, memory) {
  337. const operand = this._operand._evaluate(wasm, memory);
  338. const result = this._tempMatrix;
  339. // allocate matrices
  340. const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
  341. const operandptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, operand);
  342. // copy operand to WASM memory
  343. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, operandptr, operand);
  344. // run the WASM routine
  345. this._compute(wasm, memory, resultptr, operandptr);
  346. // copy result from WASM memory
  347. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
  348. // deallocate matrices
  349. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, operandptr);
  350. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
  351. // done!
  352. return result;
  353. }
  354. /**
  355. * Compute the result of this operation
  356. * @abstract
  357. * @param {WebAssembly.Instance} wasm
  358. * @param {SpeedyMatrixWASMMemory} memory
  359. * @param {number} resultptr pointer to Mat32
  360. * @param {number} operandptr pointer to Mat32
  361. */
  362. _compute(wasm, memory, resultptr, operandptr) {
  363. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  364. }
  365. }
  366. /**
  367. * @abstract binary operation
  368. */
  369. class SpeedyMatrixBinaryOperationExpr extends SpeedyMatrixTempExpr {
  370. /**
  371. * Constructor
  372. * @param {number} rows rows of the output matrix
  373. * @param {number} columns columns of the output matrix
  374. * @param {SpeedyMatrixExpr} left left operand
  375. * @param {SpeedyMatrixExpr} right right operand
  376. */
  377. constructor(rows, columns, left, right) {
  378. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.dtype === right.dtype);
  379. super(rows, columns, left.dtype);
  380. /** @type {SpeedyMatrixExpr} left operand */
  381. this._left = left;
  382. /** @type {SpeedyMatrixExpr} right operand */
  383. this._right = right;
  384. }
  385. /**
  386. * Evaluate this expression
  387. * @param {WebAssembly.Instance} wasm
  388. * @param {SpeedyMatrixWASMMemory} memory
  389. * @returns {SpeedyMatrix}
  390. */
  391. _evaluate(wasm, memory) {
  392. const left = this._left._evaluate(wasm, memory);
  393. const right = this._right._evaluate(wasm, memory);
  394. const result = this._tempMatrix;
  395. // allocate matrices
  396. const resultptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, result);
  397. const leftptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, left);
  398. const rightptr = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.allocateMat32(wasm, memory, right);
  399. // copy input matrices to WASM memory
  400. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, leftptr, left);
  401. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyToMat32(wasm, memory, rightptr, right);
  402. // run the WASM routine
  403. this._compute(wasm, memory, resultptr, leftptr, rightptr);
  404. // copy output matrix from WASM memory
  405. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.copyFromMat32(wasm, memory, resultptr, result);
  406. // deallocate matrices
  407. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, rightptr);
  408. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, leftptr);
  409. _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixWASM */ .U.deallocateMat32(wasm, memory, resultptr);
  410. // done!
  411. return result;
  412. }
  413. /**
  414. * Compute the result of this operation
  415. * @abstract
  416. * @param {WebAssembly.Instance} wasm
  417. * @param {SpeedyMatrixWASMMemory} memory
  418. * @param {number} resultptr pointer to Mat32
  419. * @param {number} leftptr pointer to Mat32
  420. * @param {number} rightptr pointer to Mat32
  421. */
  422. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  423. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .AbstractMethodError */ .aQ();
  424. }
  425. }
  426. /**
  427. * Transpose matrix
  428. */
  429. class SpeedyMatrixTransposeExpr extends SpeedyMatrixUnaryOperationExpr {
  430. /**
  431. * Constructor
  432. * @param {SpeedyMatrixExpr} operand
  433. */
  434. constructor(operand) {
  435. super(operand.columns, operand.rows, operand);
  436. }
  437. /**
  438. * Compute result = operand^T
  439. * @param {WebAssembly.Instance} wasm
  440. * @param {SpeedyMatrixWASMMemory} memory
  441. * @param {number} resultptr pointer to Mat32
  442. * @param {number} operandptr pointer to Mat32
  443. */
  444. _compute(wasm, memory, resultptr, operandptr) {
  445. wasm.exports.Mat32_transpose(resultptr, operandptr);
  446. }
  447. }
  448. /**
  449. * Invert square matrix
  450. */
  451. class SpeedyMatrixInvertExpr extends SpeedyMatrixUnaryOperationExpr {
  452. /**
  453. * Constructor
  454. * @param {SpeedyMatrixExpr} operand
  455. */
  456. constructor(operand) {
  457. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(operand.rows === operand.columns);
  458. super(operand.rows, operand.columns, operand);
  459. /** @type {number} size of the matrix */
  460. this._size = operand.rows;
  461. }
  462. /**
  463. * Compute result = operand ^ (-1)
  464. * @param {WebAssembly.Instance} wasm
  465. * @param {SpeedyMatrixWASMMemory} memory
  466. * @param {number} resultptr pointer to Mat32
  467. * @param {number} operandptr pointer to Mat32
  468. */
  469. _compute(wasm, memory, resultptr, operandptr) {
  470. switch (this._size) {
  471. case 0:
  472. break;
  473. case 1:
  474. wasm.exports.Mat32_inverse1(resultptr, operandptr);
  475. break;
  476. case 2:
  477. wasm.exports.Mat32_inverse2(resultptr, operandptr);
  478. break;
  479. case 3:
  480. wasm.exports.Mat32_inverse3(resultptr, operandptr);
  481. break;
  482. default:
  483. wasm.exports.Mat32_qr_inverse(resultptr, operandptr);
  484. break;
  485. }
  486. }
  487. }
  488. /**
  489. * Multiply matrix by a scalar value
  490. */
  491. class SpeedyMatrixScaleExpr extends SpeedyMatrixUnaryOperationExpr {
  492. /**
  493. * Constructor
  494. * @param {SpeedyMatrixExpr} operand
  495. * @param {number} scalar
  496. */
  497. constructor(operand, scalar) {
  498. super(operand.rows, operand.columns, operand);
  499. /** @type {number} scalar value */
  500. this._scalar = +scalar;
  501. }
  502. /**
  503. * Compute result = scalar * operand
  504. * @param {WebAssembly.Instance} wasm
  505. * @param {SpeedyMatrixWASMMemory} memory
  506. * @param {number} resultptr pointer to Mat32
  507. * @param {number} operandptr pointer to Mat32
  508. */
  509. _compute(wasm, memory, resultptr, operandptr) {
  510. wasm.exports.Mat32_scale(resultptr, operandptr, this._scalar);
  511. }
  512. }
  513. /**
  514. * Matrix addition
  515. */
  516. class SpeedyMatrixAddExpr extends SpeedyMatrixBinaryOperationExpr {
  517. /**
  518. * Constructor
  519. * @param {SpeedyMatrixExpr} left left operand
  520. * @param {SpeedyMatrixExpr} right right operand
  521. */
  522. constructor(left, right) {
  523. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  524. super(left.rows, left.columns, left, right);
  525. }
  526. /**
  527. * Compute result = left + right
  528. * @param {WebAssembly.Instance} wasm
  529. * @param {SpeedyMatrixWASMMemory} memory
  530. * @param {number} resultptr pointer to Mat32
  531. * @param {number} leftptr pointer to Mat32
  532. * @param {number} rightptr pointer to Mat32
  533. */
  534. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  535. wasm.exports.Mat32_add(resultptr, leftptr, rightptr);
  536. }
  537. }
  538. /**
  539. * Matrix subtraction
  540. */
  541. class SpeedyMatrixSubtractExpr extends SpeedyMatrixBinaryOperationExpr {
  542. /**
  543. * Constructor
  544. * @param {SpeedyMatrixExpr} left left operand
  545. * @param {SpeedyMatrixExpr} right right operand
  546. */
  547. constructor(left, right) {
  548. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  549. super(left.rows, left.columns, left, right);
  550. }
  551. /**
  552. * Compute result = left - right
  553. * @param {WebAssembly.Instance} wasm
  554. * @param {SpeedyMatrixWASMMemory} memory
  555. * @param {number} resultptr pointer to Mat32
  556. * @param {number} leftptr pointer to Mat32
  557. * @param {number} rightptr pointer to Mat32
  558. */
  559. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  560. wasm.exports.Mat32_subtract(resultptr, leftptr, rightptr);
  561. }
  562. }
  563. /**
  564. * Matrix multiplication
  565. */
  566. class SpeedyMatrixMultiplyExpr extends SpeedyMatrixBinaryOperationExpr {
  567. /**
  568. * Constructor
  569. * @param {SpeedyMatrixExpr} left left operand
  570. * @param {SpeedyMatrixExpr} right right operand
  571. */
  572. constructor(left, right) {
  573. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.columns === right.rows);
  574. super(left.rows, right.columns, left, right);
  575. }
  576. /**
  577. * Compute result = left * right
  578. * @param {WebAssembly.Instance} wasm
  579. * @param {SpeedyMatrixWASMMemory} memory
  580. * @param {number} resultptr pointer to Mat32
  581. * @param {number} leftptr pointer to Mat32
  582. * @param {number} rightptr pointer to Mat32
  583. */
  584. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  585. wasm.exports.Mat32_multiply(resultptr, leftptr, rightptr);
  586. }
  587. }
  588. /**
  589. * Component-wise multiplication
  590. */
  591. class SpeedyMatrixCompMultExpr extends SpeedyMatrixBinaryOperationExpr {
  592. /**
  593. * Constructor
  594. * @param {SpeedyMatrixExpr} left left operand
  595. * @param {SpeedyMatrixExpr} right right operand
  596. */
  597. constructor(left, right) {
  598. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(left.rows === right.rows && left.columns === right.columns);
  599. super(right.rows, right.columns, left, right);
  600. }
  601. /**
  602. * Compute result = left <compMult> right
  603. * @param {WebAssembly.Instance} wasm
  604. * @param {SpeedyMatrixWASMMemory} memory
  605. * @param {number} resultptr pointer to Mat32
  606. * @param {number} leftptr pointer to Mat32
  607. * @param {number} rightptr pointer to Mat32
  608. */
  609. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  610. wasm.exports.Mat32_compmult(resultptr, leftptr, rightptr);
  611. }
  612. }
  613. /**
  614. * Left-division. A \ b is equivalent to (pseudo-)inverse(A) * b
  615. */
  616. class SpeedyMatrixLdivExpr extends SpeedyMatrixBinaryOperationExpr {
  617. /**
  618. * Constructor
  619. * @param {SpeedyMatrixExpr} left left operand
  620. * @param {SpeedyMatrixExpr} right right operand
  621. */
  622. constructor(left, right) {
  623. const m = left.rows,
  624. n = left.columns;
  625. // TODO right doesn't need to be a column vector
  626. _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.assert(m >= n && right.rows === m && right.columns === 1);
  627. super(n, 1, left, right);
  628. }
  629. /**
  630. * Compute result = left \ right
  631. * @param {WebAssembly.Instance} wasm
  632. * @param {SpeedyMatrixWASMMemory} memory
  633. * @param {number} resultptr pointer to Mat32
  634. * @param {number} leftptr pointer to Mat32
  635. * @param {number} rightptr pointer to Mat32
  636. */
  637. _compute(wasm, memory, resultptr, leftptr, rightptr) {
  638. wasm.exports.Mat32_qr_ols(resultptr, leftptr, rightptr, 2);
  639. }
  640. }
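/*
 * Least-squares sketch (illustrative only; not part of the original
 * bundle). ldiv() maps to the Mat32_qr_ols WASM routine: for an m x n
 * matrix A with m >= n and an m x 1 column vector b, A.ldiv(b) is the
 * n x 1 least-squares solution of A x = b. Assuming the WASM routines
 * have already been loaded (see SpeedyMatrix.ready()):
 *
 *   const A = SpeedyMatrix.Create(3, 2, [1, 1, 1,  0, 1, 2]); // column-major
 *   const b = SpeedyMatrix.Create(3, 1, [1, 2, 4]);
 *   const x = SpeedyMatrix.Zeros(2, 1);
 *   x.setToSync(A.ldiv(b)); // x minimizes ||A x - b||
 */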
  641. /***/ }),
  642. /***/ 6465:
  643. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_21592__) => {
  644. "use strict";
  645. /* harmony export */ __nested_webpack_require_21592__.d(__nested_webpack_exports__, {
  646. /* harmony export */ U: () => (/* binding */ SpeedyMatrixWASM)
  647. /* harmony export */ });
  648. /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_21592__(9192);
  649. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_21592__(8581);
  650. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_21592__(9037);
  651. /* harmony import */ var _utils_globals__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_21592__(3816);
  652. /*
  653. * speedy-vision.js
  654. * GPU-accelerated Computer Vision for JavaScript
  655. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  656. *
  657. * Licensed under the Apache License, Version 2.0 (the "License");
  658. * you may not use this file except in compliance with the License.
  659. * You may obtain a copy of the License at
  660. *
  661. * http://www.apache.org/licenses/LICENSE-2.0
  662. *
  663. * Unless required by applicable law or agreed to in writing, software
  664. * distributed under the License is distributed on an "AS IS" BASIS,
  665. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  666. * See the License for the specific language governing permissions and
  667. * limitations under the License.
  668. *
  669. * speedy-matrix-wasm.js
  670. * WebAssembly bridge
  671. */
  672. /** @typedef {import('./speedy-matrix').SpeedyMatrix} SpeedyMatrix */
  673. /**
  674. * @typedef {object} SpeedyMatrixWASMMemory a union-like helper for accessing a WebAssembly.Memory object
  675. * @property {object} as
  676. * @property {WebAssembly.Memory} as.object
  677. * @property {Uint8Array} as.uint8
  678. * @property {Int32Array} as.int32
  679. * @property {Uint32Array} as.uint32
  680. * @property {Float32Array} as.float32
  681. * @property {Float64Array} as.float64
  682. */
  683. /**
  684. * @typedef {object} SpeedyMatrixWASMHandle
  685. * @property {WebAssembly.Instance} wasm
  686. * @property {SpeedyMatrixWASMMemory} memory
  687. * @property {WebAssembly.Module} module
  688. */
  689. /** @type {Uint8Array} WebAssembly binary */
  690. const WASM_BINARY = __nested_webpack_require_21592__(3575);
  691. /** @type {WebAssembly.Instance|null} WebAssembly Instance, to be loaded asynchronously */
  692. let _instance = null;
  693. /** @type {WebAssembly.Module|null} WebAssembly Module, to be loaded asynchronously */
  694. let _module = null;
  695. /** @type {SpeedyMatrixWASMMemory} Augmented WebAssembly Memory object */
  696. const _memory = (mem => ({
  697. as: {
  698. object: mem,
  699. uint8: new Uint8Array(mem.buffer),
  700. int32: new Int32Array(mem.buffer),
  701. uint32: new Uint32Array(mem.buffer),
  702. float32: new Float32Array(mem.buffer),
  703. float64: new Float64Array(mem.buffer)
  704. }
  705. }))(typeof WebAssembly === 'undefined' ? new Uint8Array(1024) :
  706. // use a filler
  707. new WebAssembly.Memory({
  708. initial: 16,
  709. // 1 MB
  710. maximum: 256
  711. }));
  712. /**
  713. * WebAssembly utilities
  714. */
  715. class SpeedyMatrixWASM {
  716. /**
  717. * Gets you the WASM instance, augmented memory & module
  718. * @returns {SpeedyPromise<SpeedyMatrixWASMHandle>}
  719. */
  720. static ready() {
  721. // Check if WebAssembly is supported
  722. if (typeof WebAssembly === 'undefined') return _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM('This application requires WebAssembly. Please update your system.'));
  723. // Endianness check
  724. if (!_utils_globals__WEBPACK_IMPORTED_MODULE_3__.LITTLE_ENDIAN) return _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .NotSupportedError */ .EM(`Can't run WebAssembly code: not in a little-endian machine!`));
  725. // Get the WASM instance
  726. return new _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i((resolve, reject) => {
  727. SpeedyMatrixWASM._ready(resolve, reject);
  728. });
  729. }
  730. /**
  731. * Synchronously gets you the WASM instance, augmented memory & module
  732. * @returns {SpeedyMatrixWASMHandle}
  733. */
  734. static get handle() {
  735. if (!_instance || !_module) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't get WASM handle: routines not yet loaded`);
  736. return {
  737. wasm: _instance,
  738. memory: _memory,
  739. module: _module
  740. };
  741. }
  742. /**
  743. * Gets you the WASM imports bound to a memory object
  744. * @param {SpeedyMatrixWASMMemory} memory
  745. * @returns {Object<string,Function>}
  746. */
  747. static imports(memory) {
  748. const obj = new SpeedyMatrixWASMImports(memory);
  749. return Object.getOwnPropertyNames(SpeedyMatrixWASMImports.prototype).filter(property => typeof obj[property] === 'function' && property !== 'constructor').reduce((imports, methodName) => (imports[methodName] = obj[methodName], imports), Object.create(null));
  750. }
  751. /**
  752. * Allocate a Mat32 in WebAssembly memory without copying any data
  753. * @param {WebAssembly.Instance} wasm
  754. * @param {SpeedyMatrixWASMMemory} memory
  755. * @param {SpeedyMatrix} matrix
  756. * @returns {number} pointer to the new Mat32
  757. */
  758. static allocateMat32(wasm, memory, matrix) {
  759. const dataptr = wasm.exports.malloc(matrix.data.byteLength);
  760. const matptr = wasm.exports.Mat32_create(matrix.rows, matrix.columns, matrix.step0, matrix.step1, matrix._data.length, dataptr);
  761. return matptr;
  762. }
  763. /**
  764. * Deallocate a Mat32 in WebAssembly
  765. * @param {WebAssembly.Instance} wasm
  766. * @param {SpeedyMatrixWASMMemory} memory
  767. * @param {number} matptr pointer to the allocated Mat32
  768. * @returns {number} NULL
  769. */
  770. static deallocateMat32(wasm, memory, matptr) {
  771. const dataptr = wasm.exports.Mat32_data(matptr);
  772. wasm.exports.free(matptr);
  773. wasm.exports.free(dataptr);
  774. return 0;
  775. }
  776. /**
  777. * Copy the data of a matrix to a WebAssembly Mat32
  778. * @param {WebAssembly.Instance} wasm
  779. * @param {SpeedyMatrixWASMMemory} memory
  780. * @param {number} matptr pointer to a Mat32
  781. * @param {SpeedyMatrix} matrix
  782. * @returns {number} matptr
  783. */
  784. static copyToMat32(wasm, memory, matptr, matrix) {
  785. // We assume the following:
  786. // 1. the host uses little-endian byte ordering (just like WebAssembly)
  787. // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
  788. // 3. the data type is float32
  789. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
  790. //matrix.dtype === 'float32' &&
  791. matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr));
  792. const dataptr = wasm.exports.Mat32_data(matptr);
  793. memory.as.float32.set(matrix.data, dataptr / Float32Array.BYTES_PER_ELEMENT);
  794. return matptr;
  795. }
  796. /**
  797. * Copy the data of a WebAssembly Mat32 to a matrix
  798. * @param {WebAssembly.Instance} wasm
  799. * @param {SpeedyMatrixWASMMemory} memory
  800. * @param {number} matptr pointer to a Mat32
  801. * @param {SpeedyMatrix} matrix
  802. * @returns {number} matptr
  803. */
  804. static copyFromMat32(wasm, memory, matptr, matrix) {
  805. // We assume the following:
  806. // 1. the host uses little-endian byte ordering (just like WebAssembly)
  807. // 2. the allocated pointers are 4-byte aligned (the bump allocator guarantees this)
  808. // 3. the data type is float32
  809. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.assert(
  810. //matrix.dtype === 'float32' &&
  811. matrix.data.byteLength === wasm.exports.Mat32_dataSize(matptr));
  812. const base = wasm.exports.Mat32_data(matptr) / Float32Array.BYTES_PER_ELEMENT;
  813. for (let offset = matrix.data.length - 1; offset >= 0; offset--) matrix.data[offset] = memory.as.float32[base + offset];
  814. return matptr;
  815. }
  816. /**
  817. * Polls the WebAssembly instance until it's ready
  818. * @param {function(SpeedyMatrixWASMHandle): void} resolve
  819. * @param {function(Error): void} reject
  820. * @param {number} [counter]
  821. */
  822. static _ready(resolve, reject, counter = 1000) {
  823. if (_instance !== null && _module !== null) resolve({
  824. wasm: _instance,
  825. memory: _memory,
  826. module: _module
  827. });else if (counter <= 0) reject(new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .TimeoutError */ .MU(`Can't load WASM routines`));else setTimeout(SpeedyMatrixWASM._ready, 0, resolve, reject, counter - 1);
  828. }
  829. }
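/*
 * Readiness sketch (illustrative only; not part of the original bundle).
 * The WebAssembly module is instantiated asynchronously by loadWASM()
 * below, so callers wait on ready() before touching the synchronous
 * `handle` getter, which throws a WebAssemblyError until then:
 *
 *   SpeedyMatrixWASM.ready().then(({ wasm, memory, module }) => {
 *     // wasm.exports now holds the Mat32_* routines bound to `memory`
 *   });
 */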
  830. /**
  831. * Methods called from WASM
  832. */
  833. class SpeedyMatrixWASMImports {
  834. /**
  835. * Constructor
  836. * @param {SpeedyMatrixWASMMemory} memory will be bound to this object
  837. */
  838. constructor(memory) {
  839. // find all methods of this object
  840. const methodNames = Object.getOwnPropertyNames(this.constructor.prototype).filter(property => typeof this[property] === 'function').filter(property => property !== 'constructor');
  841. // bind all methods to this object
  842. methodNames.forEach(methodName => {
  843. this[methodName] = this[methodName].bind(this);
  844. });
  845. /** @type {SpeedyMatrixWASMMemory} WASM memory */
  846. this.memory = memory;
  847. /** @type {CStringUtils} utilities related to C strings */
  848. this.cstring = new CStringUtils(memory);
  849. // done!
  850. return Object.freeze(this);
  851. }
  852. /**
  853. * Prints a message
  854. * @param {number} ptr pointer to char
  855. */
  856. print(ptr) {
  857. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(this.cstring.get(ptr));
  858. }
  859. /**
  860. * Throws an error
  861. * @param {number} ptr pointer to char
  862. */
  863. fatal(ptr) {
  864. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(this.cstring.get(ptr));
  865. }
  866. /**
  867. * Fills a memory segment with a byte
  868. * @param {number} value byte
  869. * @param {number} start memory address, inclusive
  870. * @param {number} end memory address greater than start, exclusive
  871. */
  872. bytefill(value, start, end) {
  873. this.memory.as.uint8.fill(value, start, end);
  874. }
  875. /**
  876. * Copy a memory segment to another segment
  877. * @param {number} target memory address, where we'll start writing
  878. * @param {number} start memory address, where we'll start copying (inclusive)
  879. * @param {number} end memory address, where we'll end the copy (exclusive)
  880. */
  881. copyWithin(target, start, end) {
  882. this.memory.as.uint8.copyWithin(target, start, end);
  883. }
  884. }
  885. /**
  886. * Utilities related to C strings
  887. */
  888. class CStringUtils {
  889. /**
  890. * Constructor
  891. * @param {SpeedyMatrixWASMMemory} memory
  892. */
  893. constructor(memory) {
  894. /** @type {TextDecoder} */
  895. this._decoder = new TextDecoder('utf-8');
  896. /** @type {SpeedyMatrixWASMMemory} */
  897. this._memory = memory;
  898. }
  899. /**
  900. * Convert a C string to a JavaScript string
  901. * @param {number} ptr pointer to char
  902. * @returns {string}
  903. */
  904. get(ptr) {
  905. const byte = this._memory.as.uint8;
  906. const size = this._memory.as.uint8.byteLength;
  907. let p = ptr;
  908. while (p < size && 0 !== byte[p]) ++p;
  909. return this._decoder.decode(byte.subarray(ptr, p));
  910. }
  911. }
  912. /**
  913. * WebAssembly loader
  914. * @param {SpeedyMatrixWASMMemory} memory
  915. */
  916. (function loadWASM(memory) {
  917. const base64decode = data => Uint8Array.from(atob(data), v => v.charCodeAt(0));
  918. // Skip if WebAssembly is unsupported
  919. if (typeof WebAssembly === 'undefined') return;
  920. // Load the WASM binary
  921. _speedy_promise__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyPromise */ .i.resolve(WASM_BINARY).then(data => base64decode(data)).then(bytes => WebAssembly.instantiate(bytes, {
  922. env: Object.assign({
  923. memory: memory.as.object
  924. }, SpeedyMatrixWASM.imports(memory))
  925. })).then(wasm => {
  926. _instance = wasm.instance;
  927. _module = wasm.module;
  928. wasm.instance.exports.srand(Date.now() * 0.001 & 0xffffffff); // srand(time(NULL))
  929. _utils_utils__WEBPACK_IMPORTED_MODULE_2__/* .Utils */ .A.log(`The WebAssembly routines have been loaded!`);
  930. }).catch(err => {
  931. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_1__/* .WebAssemblyError */ .NO(`Can't load the WebAssembly routines: ${err}`, err);
  932. });
  933. })(_memory);
  934. /***/ }),
  935. /***/ 4188:
  936. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_33268__) => {
  937. "use strict";
  938. __nested_webpack_require_33268__.r(__nested_webpack_exports__);
  939. /* harmony export */ __nested_webpack_require_33268__.d(__nested_webpack_exports__, {
  940. /* harmony export */ SpeedyMatrix: () => (/* binding */ SpeedyMatrix)
  941. /* harmony export */ });
  942. /* harmony import */ var _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_33268__(6306);
  943. /* harmony import */ var _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_33268__(6465);
  944. /* harmony import */ var _speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_33268__(9192);
  945. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_33268__(9037);
  946. /*
  947. * speedy-vision.js
  948. * GPU-accelerated Computer Vision for JavaScript
  949. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  950. *
  951. * Licensed under the Apache License, Version 2.0 (the "License");
  952. * you may not use this file except in compliance with the License.
  953. * You may obtain a copy of the License at
  954. *
  955. * http://www.apache.org/licenses/LICENSE-2.0
  956. *
  957. * Unless required by applicable law or agreed to in writing, software
  958. * distributed under the License is distributed on an "AS IS" BASIS,
  959. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  960. * See the License for the specific language governing permissions and
  961. * limitations under the License.
  962. *
  963. * speedy-matrix.js
  964. * Matrix class
  965. */
  966. /** @typedef {"float32"} SpeedyMatrixDtype Matrix data type */
  967. /** @typedef {Float32Array} SpeedyMatrixBufferType Buffer type */
  968. /** @typedef {Float32ArrayConstructor} SpeedyMatrixBufferTypeConstructor Buffer class */
  969. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMMemory} SpeedyMatrixWASMMemory */
  970. /** @typedef {import('./speedy-matrix-wasm').SpeedyMatrixWASMHandle} SpeedyMatrixWASMHandle */
  971. /**
  972. * Matrix class
  973. */
  974. class SpeedyMatrix extends _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r {
  975. /**
  976. * @private
  977. *
  978. * Low-level constructor
  979. * @param {number} rows number of rows
  980. * @param {number} columns number of columns
  981. * @param {number} step0 step size between two consecutive elements (e.g., 1)
  982. * @param {number} step1 step size between two consecutive columns (e.g., rows)
  983. * @param {SpeedyMatrixBufferType} data entries in column-major format
  984. */
  985. constructor(rows, columns, step0, step1, data) {
  986. super(rows, columns, _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE);
  987. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.constructor === _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[this.dtype]);
  988. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(step0 > 0 && step1 >= step0);
  989. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(data.length + rows * columns === 0 ||
  990. // empty matrix and empty buffer, or
  991. data.length === 1 + step0 * (rows - 1) + step1 * (columns - 1) // correctly sized buffer
  992. );
  993. /** @type {number} step size between two consecutive elements */
  994. this._step0 = step0 | 0;
  995. /** @type {number} step size between two consecutive columns */
  996. this._step1 = step1 | 0;
  997. /** @type {SpeedyMatrixBufferType} buffer containing the entries of the matrix in column-major order */
  998. this._data = data;
  999. }
  1000. /**
  1001. * Create a new matrix with the specified size and entries
  1002. * @param {number} rows number of rows
  1003. * @param {number} columns number of columns
  1004. * @param {number[]} entries in column-major format
  1005. * @param {SpeedyMatrixDtype} [dtype] data type
  1006. * @returns {SpeedyMatrix}
  1007. */
  1008. static Create(rows, columns, entries, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1009. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1010. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns === entries.length, `Can't create matrix: expected ${rows * columns} entries, but found ${entries.length}`);
  1011. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1012. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [entries]));
  1013. }
  1014. /**
  1015. * Create a new matrix filled with zeros with the specified size
  1016. * @param {number} rows number of rows
  1017. * @param {number} [columns] number of columns
  1018. * @param {SpeedyMatrixDtype} [dtype] data type
  1019. * @returns {SpeedyMatrix}
  1020. */
  1021. static Zeros(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1022. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1023. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1024. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]));
  1025. }
  1026. /**
  1027. * Create a new matrix filled with ones with the specified size
  1028. * @param {number} rows number of rows
  1029. * @param {number} [columns] number of columns
  1030. * @param {SpeedyMatrixDtype} [dtype] data type
  1031. * @returns {SpeedyMatrix}
  1032. */
  1033. static Ones(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1034. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1035. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1036. return new SpeedyMatrix(rows, columns, 1, rows, Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]).fill(1));
  1037. }
  1038. /**
  1039. * Create a new identity matrix with the specified size
  1040. * @param {number} rows number of rows
  1041. * @param {number} [columns] number of columns
  1042. * @param {SpeedyMatrixDtype} [dtype] data type
  1043. * @returns {SpeedyMatrix}
  1044. */
  1045. static Eye(rows, columns = rows, dtype = _speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.DEFAULT_DTYPE) {
  1046. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(rows * columns > 0, `Can't create a matrix without a shape`);
  1047. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(Object.prototype.hasOwnProperty.call(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE, dtype), `Invalid dtype: "${dtype}"`);
  1048. const data = Reflect.construct(_speedy_matrix_expr__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyMatrixExpr */ .r.BUFFER_TYPE[dtype], [rows * columns]);
  1049. for (let j = Math.min(rows, columns) - 1; j >= 0; j--) data[j * rows + j] = 1;
  1050. return new SpeedyMatrix(rows, columns, 1, rows, data);
  1051. }
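/*
 * Construction sketch (illustrative only; not part of the original
 * bundle). The factories above take entries in column-major order and
 * currently support only the 'float32' dtype:
 *
 *   const M = SpeedyMatrix.Create(2, 2, [1, 2, 3, 4]); // [[1, 3], [2, 4]]
 *   const Z = SpeedyMatrix.Zeros(3);                   // 3x3 zero matrix
 *   const I = SpeedyMatrix.Eye(3);                     // 3x3 identity
 *   M.at(0, 1); // 3 (row 0, column 1)
 */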
  1052. /**
  1053. * Evaluate an expression synchronously and store the result in a new matrix
  1054. * @param {SpeedyMatrixExpr} expr matrix expression
  1055. * @returns {SpeedyMatrix}
  1056. */
  1057. static From(expr) {
  1058. return SpeedyMatrix.Zeros(expr.rows, expr.columns, expr.dtype).setToSync(expr);
  1059. }
  1060. /**
  1061. * Returns a promise that resolves immediately if the WebAssembly routines
  1062. * are ready to be used, or as soon as they do become ready
  1063. * @returns {SpeedyPromise<void>}
  1064. */
  1065. static ready() {
  1066. return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => void 0);
  1067. }
  1068. /**
  1069. * Get the underlying buffer
  1070. * @returns {SpeedyMatrixBufferType}
  1071. */
  1072. get data() {
  1073. return this._data;
  1074. }
  1075. /**
  1076. * Row-step
  1077. * @returns {number} defaults to 1
  1078. */
  1079. get step0() {
  1080. return this._step0;
  1081. }
  1082. /**
  1083. * Column-step
  1084. * @returns {number} defaults to this.rows
  1085. */
  1086. get step1() {
  1087. return this._step1;
  1088. }
  1089. /**
  1090. * Extract a block from this matrix. Use a shared underlying buffer
  1091. * @param {number} firstRow
  1092. * @param {number} lastRow
  1093. * @param {number} firstColumn
  1094. * @param {number} lastColumn
  1095. * @returns {SpeedyMatrix}
  1096. */
  1097. block(firstRow, lastRow, firstColumn, lastColumn) {
  1098. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(firstRow <= lastRow && firstColumn <= lastColumn, `Invalid indices: [${firstRow}:${lastRow},${firstColumn}:${lastColumn}]`);
  1099. // ensure that the indices are within bounds
  1100. firstRow = Math.max(firstRow, 0);
  1101. lastRow = Math.min(lastRow, this._rows - 1);
  1102. firstColumn = Math.max(firstColumn, 0);
  1103. lastColumn = Math.min(lastColumn, this._columns - 1);
  1104. // compute the dimensions of the new submatrix
  1105. const rows = lastRow - firstRow + 1;
  1106. const columns = lastColumn - firstColumn + 1;
  1107. // obtain the relevant portion of the data
  1108. const step0 = this._step0,
  1109. step1 = this._step1;
  1110. const begin = firstRow * step0 + firstColumn * step1; // inclusive
  1111. const end = 1 + lastRow * step0 + lastColumn * step1; // exclusive
  1112. // create new matrix
  1113. return new SpeedyMatrix(rows, columns, step0, step1, this._data.subarray(begin, end));
  1114. }
  1115. /**
  1116. * Extract a row from this matrix
  1117. * @param {number} index 0-based
  1118. * @returns {SpeedyMatrix}
  1119. */
  1120. row(index) {
  1121. return this.block(index, index, 0, this._columns - 1);
  1122. }
  1123. /**
  1124. * Extract a column from this matrix
  1125. * @param {number} index 0-based
  1126. * @returns {SpeedyMatrix}
  1127. */
  1128. column(index) {
  1129. return this.block(0, this._rows - 1, index, index);
  1130. }
  1131. /**
  1132. * Extract the main diagonal from this matrix
  1133. * @returns {SpeedyMatrix} as a column-vector
  1134. */
  1135. diagonal() {
  1136. const diagsize = Math.min(this._rows, this._columns);
  1137. // compute the dimensions of the new submatrix
  1138. const rows = diagsize; // make it a column vector
  1139. const columns = 1;
  1140. // obtain the relevant portion of the data
  1141. const diagstep = this._step0 + this._step1; // jump a row and a column
  1142. const begin = 0; // inclusive
  1143. const end = 1 + (diagsize - 1) * diagstep; // exclusive
  1144. // create new matrix
  1145. return new SpeedyMatrix(rows, columns, diagstep, diagstep, this._data.subarray(begin, end));
  1146. }
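/*
 * View sketch (illustrative only; not part of the original bundle).
 * block(), row(), column() and diagonal() return matrices that share the
 * underlying buffer, so writing through a view mutates the parent:
 *
 *   const M = SpeedyMatrix.Eye(3);
 *   const d = M.diagonal();  // 3x1 view over M's main diagonal
 *   d.fillSync(7);           // M's diagonal entries are now 7
 */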
  1147. /**
  1148. * Read a single entry of this matrix
  1149. * @param {number} row 0-based index
  1150. * @param {number} column 0-based index
  1151. * @returns {number}
  1152. */
  1153. at(row, column) {
  1154. if (row >= 0 && row < this._rows && column >= 0 && column < this._columns) return this._data[this._step0 * row + this._step1 * column];else return Number.NaN;
  1155. }
  1156. /**
  1157. * Read the entries of the matrix in column-major format
  1158. * @returns {number[]}
  1159. */
  1160. read() {
  1161. const entries = new Array(this._rows * this._columns);
  1162. const step0 = this._step0,
  1163. step1 = this._step1;
  1164. let i = 0;
  1165. for (let column = 0; column < this._columns; column++) {
  1166. for (let row = 0; row < this._rows; row++) entries[i++] = this._data[row * step0 + column * step1];
  1167. }
  1168. return entries;
  1169. }
  1170. /**
  1171. * Returns a human-readable string representation of the matrix
  1172. * @returns {string}
  1173. */
  1174. toString() {
  1175. const DECIMALS = 5;
  1176. const rows = this.rows,
  1177. columns = this.columns;
  1178. const entries = this.read();
  1179. const mat = /** @type {number[][]} */new Array(rows);
  1180. for (let i = 0; i < rows; i++) {
  1181. mat[i] = new Array(columns);
  1182. for (let j = 0; j < columns; j++) mat[i][j] = entries[j * rows + i];
  1183. }
  1184. const fix = x => x.toFixed(DECIMALS);
  1185. const fmt = mat.map(row => ' ' + row.map(fix).join(', ')).join(',\n');
  1186. const str = `SpeedyMatrix(rows=${rows}, columns=${columns}, data=[\n${fmt}\n])`;
  1187. return str;
  1188. }
  1189. /**
  1190. * Set the contents of this matrix to the result of an expression
  1191. * @param {SpeedyMatrixExpr} expr matrix expression
  1192. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
  1193. */
  1194. setTo(expr) {
  1195. return _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.ready().then(_ => {
  1196. // TODO: add support for WebWorkers
  1197. return this.setToSync(expr);
  1198. });
  1199. }
  1200. /**
  1201. * Synchronously set the contents of this matrix to the result of an expression
  1202. * @param {SpeedyMatrixExpr} expr matrix expression
  1203. * @returns {SpeedyMatrix} this
  1204. */
  1205. setToSync(expr) {
  1206. const {
  1207. wasm,
  1208. memory
  1209. } = _speedy_matrix_wasm__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyMatrixWASM */ .U.handle;
  1210. // evaluate the expression
  1211. const result = expr._evaluate(wasm, memory);
  1212. /*
  1213. // shallow copy the results to this matrix
  1214. // limitation: can't handle blocks properly
  1215. // (a tree-like structure could be useful)
  1216. this._rows = result.rows;
  1217. this._columns = result.columns;
  1218. //this._dtype = result.dtype;
  1219. this._data = result.data;
  1220. this._step0 = result.step0;
  1221. this._step1 = result.step1;
  1222. */
  1223. // validate shape
  1224. _utils_utils__WEBPACK_IMPORTED_MODULE_3__/* .Utils */ .A.assert(this._rows === result._rows && this._columns === result._columns && this.dtype === result.dtype, `Can't set the values of a ${this.rows} x ${this.columns} ${this.dtype} matrix to those of a ${result.rows} x ${result.columns} ${result.dtype} matrix`);
  1225. // deep copy
  1226. const step0 = this._step0,
  1227. step1 = this._step1,
  1228. rstep0 = result._step0,
  1229. rstep1 = result._step1;
  1230. if (step0 === rstep0 && step1 === rstep1 && this._data.length === result._data.length) {
  1231. // fast copy
  1232. this._data.set(result._data);
  1233. } else {
  1234. // copy each element
  1235. for (let column = this._columns - 1; column >= 0; column--) {
  1236. for (let row = this._rows - 1; row >= 0; row--) this._data[row * step0 + column * step1] = result._data[row * rstep0 + column * rstep1];
  1237. }
  1238. }
  1239. // done!
  1240. return this;
  1241. }
  1242. /**
  1243. * Fill this matrix with a scalar value
  1244. * @param {number} value
  1245. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to this
  1246. */
  1247. fill(value) {
  1248. this.fillSync(value);
  1249. return _speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i.resolve(this);
  1250. }
  1251. /**
  1252. * Synchronously fill this matrix with a scalar value
  1253. * @param {number} value
  1254. * @returns {SpeedyMatrix} this
  1255. */
  1256. fillSync(value) {
  1257. value = +value;
  1258. if (this._rows * this._columns === this._data.length) {
  1259. this._data.fill(value);
  1260. return this;
  1261. }
  1262. for (let column = 0; column < this._columns; column++) {
  1263. for (let row = 0; row < this._rows; row++) {
  1264. this._data[row * this._step0 + column * this._step1] = value;
  1265. }
  1266. }
  1267. return this;
  1268. }
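/*
 * Illustrative sketch (not part of the library): fillSync() takes the fast path
 * (TypedArray.prototype.fill) only when the view is contiguous, i.e., when
 * rows * columns matches the length of the underlying buffer; otherwise it
 * writes each entry through the strides. fill() is the promisified variant.
 * Using the hypothetical 3x3 matrix A of the sketches above:
 *
 * A.fillSync(0);                   // contiguous: a single TypedArray fill
 * A.column(1).fillSync(7);         // contiguous sub-buffer of length 3
 * A.block(0, 1, 0, 1).fillSync(5); // strided: element-by-element writes
 * A.fill(0).then(m => console.log(m.toString()));
 */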
  1269. /**
  1270. * Evaluate this expression
  1271. * @param {WebAssembly.Instance} wasm
  1272. * @param {SpeedyMatrixWASMMemory} memory
  1273. * @returns {SpeedyMatrix}
  1274. */
  1275. _evaluate(wasm, memory) {
  1276. return this;
  1277. }
  1278. }
  1279. /***/ }),
  1280. /***/ 6634:
  1281. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_48547__) => {
  1282. "use strict";
  1283. /* harmony export */ __nested_webpack_require_48547__.d(__nested_webpack_exports__, {
  1284. /* harmony export */ Q: () => (/* binding */ SpeedyNamespace)
  1285. /* harmony export */ });
  1286. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_48547__(8581);
  1287. /*
  1288. * speedy-vision.js
  1289. * GPU-accelerated Computer Vision for JavaScript
  1290. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1291. *
  1292. * Licensed under the Apache License, Version 2.0 (the "License");
  1293. * you may not use this file except in compliance with the License.
  1294. * You may obtain a copy of the License at
  1295. *
  1296. * http://www.apache.org/licenses/LICENSE-2.0
  1297. *
  1298. * Unless required by applicable law or agreed to in writing, software
  1299. * distributed under the License is distributed on an "AS IS" BASIS,
  1300. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1301. * See the License for the specific language governing permissions and
  1302. * limitations under the License.
  1303. *
  1304. * speedy-namespace.js
  1305. * Symbolizes a namespace
  1306. */
  1307. /**
  1308. * An abstract namespace
  1309. * @abstract
  1310. */
  1311. class SpeedyNamespace {
  1312. /**
  1313. * Namespaces can't be instantiated.
  1314. * Only static methods are allowed.
  1315. * @abstract
  1316. * @throws SpeedyError
  1317. */
  1318. constructor() {
  1319. // only static methods are allowed
  1320. throw new _utils_errors__WEBPACK_IMPORTED_MODULE_0__/* .AbstractMethodError */ .aQ(`Namespaces can't be instantiated`);
  1321. }
  1322. }
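/*
 * Illustrative sketch (not part of the library): SpeedyNamespace is meant to be
 * extended by grouping classes that expose only static members. MyFilters and
 * identity() below are hypothetical names:
 *
 * class MyFilters extends SpeedyNamespace {
 *     static identity(x) { return x; }
 * }
 *
 * MyFilters.identity(42); // ok: static method
 * new MyFilters();        // throws: namespaces can't be instantiated
 */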
  1323. /***/ }),
  1324. /***/ 9192:
  1325. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_50059__) => {
  1326. "use strict";
  1327. /* harmony export */ __nested_webpack_require_50059__.d(__nested_webpack_exports__, {
  1328. /* harmony export */ i: () => (/* binding */ SpeedyPromise)
  1329. /* harmony export */ });
  1330. /*
  1331. * speedy-vision.js
  1332. * GPU-accelerated Computer Vision for JavaScript
  1333. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1334. *
  1335. * Licensed under the Apache License, Version 2.0 (the "License");
  1336. * you may not use this file except in compliance with the License.
  1337. * You may obtain a copy of the License at
  1338. *
  1339. * http://www.apache.org/licenses/LICENSE-2.0
  1340. *
  1341. * Unless required by applicable law or agreed to in writing, software
  1342. * distributed under the License is distributed on an "AS IS" BASIS,
  1343. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1344. * See the License for the specific language governing permissions and
  1345. * limitations under the License.
  1346. *
  1347. * speedy-promise.js
  1348. * Speedy Promises: a fast implementation of Promises
  1349. */
  1350. const PENDING = 0;
  1351. const FULFILLED = 1;
  1352. const REJECTED = 2;
  1353. const SUSPEND_ASYNC = 1;
  1354. const asap = typeof queueMicrotask !== 'undefined' && queueMicrotask ||
  1355. // browsers
  1356. typeof process !== 'undefined' && process.nextTick || (
  1357. // node.js
  1358. f => Promise.resolve().then(() => f())); // most compatible
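/*
 * Illustrative sketch (not part of the library): asap() schedules a callback to
 * run right after the current synchronous code (and before timers), using the
 * first available of queueMicrotask, process.nextTick or a resolved Promise:
 *
 * asap(() => console.log('asap'));
 * setTimeout(() => console.log('timer'), 0);
 * console.log('sync');
 * // expected order: sync, asap, timer
 */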
  1359. /**
  1360. * SpeedyPromise: Super Fast Promises. SpeedyPromises can
  1361. * interoperate with ES6 Promises. This implementation is
  1362. * based on the Promises/A+ specification.
  1363. * @template T
  1364. */
  1365. class SpeedyPromise {
  1366. /**
  1367. * Constructor
  1368. * @param {function(function(T=): void, function(Error): void): void} callback
  1369. */
  1370. constructor(callback) {
  1371. this._state = PENDING;
  1372. this._value = undefined;
  1373. this._onFulfillment = null;
  1374. this._onRejection = null;
  1375. this._children = 0;
  1376. this[0] = this;
  1377. this._parent = undefined;
  1378. this._flags = 0;
  1379. this._fulfill = this._fulfill.bind(this);
  1380. this._reject = this._reject.bind(this);
  1381. this._resolve = this._resolve.bind(this);
  1382. this._broadcastIfAsync = this._broadcastIfAsync.bind(this);
  1383. callback(this._fulfill, this._reject);
  1384. }
  1385. /**
1386. * Set up handlers
  1387. * @template U, V=never
  1388. * @param {null|undefined|(function(T): U|PromiseLike<U>|SpeedyPromise<U>)} onFulfillment called when the SpeedyPromise is fulfilled
  1389. * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
  1390. * @returns {SpeedyPromise<U>}
  1391. */
  1392. then(onFulfillment, onRejection = null) {
  1393. const child = new SpeedyPromise(this._nop);
  1394. child._onFulfillment = typeof onFulfillment === 'function' && onFulfillment;
  1395. child._onRejection = typeof onRejection === 'function' && onRejection;
  1396. child._parent = this;
  1397. this[this._children++] = child; // attach child
  1398. this._flags &= ~SUSPEND_ASYNC; // restore the async behavior
  1399. this._notify();
  1400. return child;
  1401. }
  1402. /**
1403. * Set up a rejection handler
  1404. * @template U, V=never
  1405. * @param {null|undefined|(function(Error): V|PromiseLike<V>|SpeedyPromise<V>)} [onRejection] called when the SpeedyPromise is rejected
  1406. * @returns {SpeedyPromise<V>}
  1407. */
  1408. catch(onRejection) {
  1409. return this.then(null, onRejection);
  1410. }
  1411. /**
  1412. * Execute a callback when the promise is settled
  1413. * (i.e., fulfilled or rejected)
  1414. * @param {function(): void} onFinally
  1415. * @returns {SpeedyPromise<T>}
  1416. */
  1417. finally(onFinally) {
  1418. const fn = val => {
  1419. onFinally();
  1420. return val;
  1421. };
  1422. return this.then(fn, fn);
  1423. }
  1424. /**
  1425. * Start the computation immediately, synchronously.
  1426. * Can't afford to spend any time at all waiting for micro-tasks, etc.
  1427. * @returns {SpeedyPromise<T>} this
  1428. */
  1429. turbocharge() {
  1430. let my = this;
  1431. // suspend the async behavior
  1432. this._flags |= SUSPEND_ASYNC;
  1433. while (my._parent !== undefined) {
  1434. my = my._parent;
  1435. my._flags |= SUSPEND_ASYNC;
  1436. }
  1437. // notify the children of the root
  1438. my._notify(); // will be synchronous
  1439. // return this SpeedyPromise
  1440. return this;
  1441. }
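/*
 * Illustrative sketch (not part of the library): turbocharge() walks up to the
 * root of the chain, sets SUSPEND_ASYNC on every ancestor and notifies the root
 * synchronously, so handlers of settled promises run before it returns:
 *
 * let value = null;
 * SpeedyPromise.resolve(42).then(x => { value = x; }).turbocharge();
 * console.log(value); // 42 - the fulfillment handler has already run
 */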
  1442. /**
  1443. * Convert to string
  1444. * @returns {string}
  1445. */
  1446. toString() {
  1447. switch (this._state) {
  1448. case PENDING:
  1449. return `SpeedyPromise { <pending> }`;
  1450. case FULFILLED:
  1451. return `SpeedyPromise { <fulfilled> ${this._value} }`;
  1452. case REJECTED:
  1453. return `SpeedyPromise { <rejected> ${this._value} }`;
  1454. default:
  1455. return '';
  1456. }
  1457. }
  1458. /**
  1459. * Symbol.toStringTag
  1460. * @returns {string}
  1461. */
  1462. get [Symbol.toStringTag]() {
  1463. return 'SpeedyPromise';
  1464. }
  1465. /**
  1466. * Creates a resolved SpeedyPromise
  1467. * @template U
  1468. * @param {U} [value]
  1469. * @returns {SpeedyPromise<U>}
  1470. */
  1471. static resolve(value) {
  1472. const promise = new SpeedyPromise(this._snop);
  1473. if (typeof value === 'object' && value !== null && 'then' in value || typeof value === 'function' && 'then' in value) {
  1474. // resolve asynchronously
  1475. promise._resolve(value);
  1476. } else {
  1477. // fulfill synchronously
  1478. promise._value = value;
  1479. promise._state = FULFILLED;
  1480. }
  1481. return promise;
  1482. }
  1483. /**
  1484. * Creates a rejected SpeedyPromise
  1485. * @template U
  1486. * @param {Error} reason
  1487. * @returns {SpeedyPromise<U>}
  1488. */
  1489. static reject(reason) {
  1490. const promise = new SpeedyPromise(this._snop);
  1491. promise._value = reason;
  1492. promise._state = REJECTED;
  1493. return promise;
  1494. }
  1495. /**
  1496. * Returns a SpeedyPromise that resolves to an array
  1497. * containing the results of the input promises/values,
  1498. * in their given order. The returned SpeedyPromise will
  1499. * resolve if all input promises resolve, or reject if
  1500. * any input promise rejects.
  1501. * @template U
  1502. * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
  1503. * @returns {SpeedyPromise<U[]>}
  1504. *
  1505. * FIXME iterables need not be all <U>
  1506. */
  1507. static all(iterable) {
  1508. return new SpeedyPromise((resolve, reject) => {
  1509. const input = [];
  1510. // get elements
  1511. for (const element of iterable) input.push(element);
  1512. // resolve synchronously if there are no elements
  1513. const length = input.length;
  1514. if (length == 0) {
  1515. resolve([]);
  1516. return;
  1517. }
  1518. // resolve asynchronously
  1519. let counter = length;
  1520. const output = new Array(length);
  1521. const partialResolve = i => val => {
  1522. output[i] = val;
  1523. if (0 == --counter) resolve(output);
  1524. };
  1525. for (let i = 0; i < length; i++) {
  1526. const element = input[i];
  1527. if (element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype) element.then(partialResolve(i), reject);else SpeedyPromise.resolve(element).then(partialResolve(i), reject);
  1528. }
  1529. });
  1530. }
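/*
 * Illustrative sketch (not part of the library): all() mirrors Promise.all()
 * and accepts a mix of SpeedyPromises, native Promises and plain values; the
 * results keep the order of the input:
 *
 * SpeedyPromise.all([
 *     SpeedyPromise.resolve(1),
 *     Promise.resolve(2),
 *     3
 * ]).then(values => console.log(values)); // [1, 2, 3]
 */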
  1531. /**
  1532. * Returns a promise that gets fulfilled or rejected as soon
  1533. * as the first promise in the iterable gets fulfilled or
  1534. * rejected (with its value/reason).
  1535. * @template U
  1536. * @param {Iterable<U>|Iterable<SpeedyPromise<U>>|Iterable<Promise<U>>} iterable e.g., a SpeedyPromise[], a thenable[]
  1537. * @returns {SpeedyPromise<U>}
  1538. */
  1539. static race(iterable) {
  1540. return new SpeedyPromise((resolve, reject) => {
  1541. const input = [];
  1542. // get elements
  1543. for (const element of iterable) input.push(element);
  1544. // if the iterable is empty, the promise
  1545. // will be pending forever...
  1546. // resolve asynchronously
  1547. const length = input.length;
  1548. for (let i = 0; i < length; i++) {
  1549. const element = input[i];
  1550. if (element.__proto__ === SpeedyPromise.prototype || element.__proto__ === Promise.prototype) element.then(resolve, reject);else SpeedyPromise.resolve(element).then(resolve, reject);
  1551. }
  1552. });
  1553. }
  1554. /**
  1555. * Fulfill this promise with a value
  1556. * @param {T} value
  1557. */
  1558. _fulfill(value) {
  1559. this._setState(FULFILLED, value);
  1560. }
  1561. /**
  1562. * Reject this promise with a reason
  1563. * @param {Error} reason
  1564. */
  1565. _reject(reason) {
  1566. this._setState(REJECTED, reason);
  1567. }
  1568. /**
  1569. * Set the state and the value of this promise
  1570. * @param {number} state
  1571. * @param {T|Error} value
  1572. */
  1573. _setState(state, value) {
  1574. // the promise is already fulfilled or rejected
  1575. if (this._state != PENDING) return;
  1576. // set the new state
  1577. this._state = state;
  1578. this._value = value;
  1579. this._notify();
  1580. }
  1581. /**
  1582. * Notify my children that this promise is no
  1583. * longer pending. This is an async operation:
1584. * my children will be notified "as soon
  1585. * as possible" (it will be scheduled).
  1586. * We may force this to be synchronous, though
  1587. */
  1588. _notify() {
  1589. // nothing to do
  1590. if (this._state == PENDING) return;
  1591. // have we turbocharged this promise?
  1592. if (this._flags & SUSPEND_ASYNC) {
  1593. this._broadcast(); // execute synchronously
  1594. return;
  1595. }
1596. // schedule an asynchronous broadcast (default behavior)
  1597. asap(this._broadcastIfAsync);
  1598. }
  1599. /**
  1600. * Helper method
  1601. */
  1602. _broadcastIfAsync() {
1603. // we may have scheduled an asynchronous broadcast at some
1604. // point, but turbocharged the promise later
  1605. if (!(this._flags & SUSPEND_ASYNC)) this._broadcast();
  1606. }
  1607. /**
  1608. * Tell my children that this promise
  1609. * is either fulfilled or rejected.
  1610. * This is a synchronous operation
  1611. */
  1612. _broadcast() {
  1613. const children = this._children;
  1614. const state = this._state;
  1615. if (state === FULFILLED) {
  1616. for (let i = 0; i < children; i++) {
  1617. const child = this[i];
  1618. const callback = child._onFulfillment;
  1619. try {
  1620. if (callback) {
  1621. if (callback !== child._nop) {
  1622. child._resolve(callback(this._value)); // promise resolution procedure
  1623. child._onFulfillment = child._nop; // will not be called again
  1624. }
  1625. } else child._fulfill(this._value);
  1626. } catch (e) {
  1627. child._reject(e);
  1628. }
  1629. }
  1630. } else if (state === REJECTED) {
  1631. for (let i = 0; i < children; i++) {
  1632. const child = this[i];
  1633. const callback = child._onRejection;
  1634. try {
  1635. if (callback) {
  1636. if (callback !== child._nop) {
  1637. child._resolve(callback(this._value)); // promise resolution procedure
  1638. child._onRejection = child._nop; // will not be called again
  1639. }
  1640. } else child._reject(this._value);
  1641. } catch (e) {
  1642. child._reject(e);
  1643. }
  1644. }
  1645. }
  1646. }
  1647. /**
  1648. * Promise Resolution Procedure
  1649. * based on the Promises/A+ spec
  1650. * @param {T} x
  1651. */
  1652. _resolve(x) {
  1653. if (typeof x !== 'object' && typeof x !== 'function' || x === null) {
  1654. // if(x !== Object(x))
  1655. this._fulfill(x);
  1656. return;
  1657. }
  1658. if (x === this) throw new TypeError(); // Circular reference
  1659. if (x.__proto__ === SpeedyPromise.prototype || x.__proto__ === Promise.prototype) {
  1660. x.then(this._resolve, this._reject);
  1661. return;
  1662. }
  1663. try {
  1664. const then = x.then;
  1665. if (typeof then === 'function') {
  1666. let resolve = this._resolve,
  1667. reject = this._reject;
  1668. try {
  1669. then.call(x, y => {
  1670. resolve(y);
  1671. resolve = reject = this._nop;
  1672. }, r => {
  1673. reject(r);
  1674. resolve = reject = this._nop;
  1675. });
  1676. } catch (e) {
  1677. if (resolve !== this._nop && reject !== this._nop) this._reject(e);
  1678. }
  1679. } else {
  1680. this._fulfill(x);
  1681. }
  1682. } catch (e) {
  1683. this._reject(e);
  1684. }
  1685. }
  1686. /**
  1687. * No-operation
  1688. */
  1689. _nop() {}
  1690. /**
  1691. * Static no-operation
  1692. */
  1693. static _snop() {}
  1694. }
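/*
 * Illustrative sketch (not part of the library): SpeedyPromises interoperate
 * with native Promises and other thenables through the resolution procedure
 * above, and expose the usual then/catch/finally chain:
 *
 * SpeedyPromise.resolve(Promise.resolve('ok')) // adopts the native promise
 *     .then(value => { throw new Error(value); })
 *     .catch(err => err.message)
 *     .finally(() => console.log('settled'))
 *     .then(msg => console.log(msg));          // logs 'settled', then 'ok'
 */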
  1695. //module.exports = { SpeedyPromise };
  1696. /*
  1697. // Uncomment to test performance with regular Promises
  1698. module.exports = { SpeedyPromise: Promise };
  1699. Promise.prototype.turbocharge = function() { return this };
  1700. */
  1701. /***/ }),
  1702. /***/ 9420:
  1703. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_61794__) => {
  1704. "use strict";
  1705. // EXPORTS
  1706. __nested_webpack_require_61794__.d(__nested_webpack_exports__, {
  1707. gx: () => (/* binding */ createShader),
  1708. bf: () => (/* binding */ importShader)
  1709. });
  1710. // UNUSED EXPORTS: ShaderDeclaration, ShaderDeclarationBuilder
  1711. // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
  1712. var speedy_gl = __nested_webpack_require_61794__(1001);
  1713. // EXTERNAL MODULE: ./src/utils/utils.js
  1714. var utils = __nested_webpack_require_61794__(9037);
  1715. // EXTERNAL MODULE: ./src/utils/types.js
  1716. var types = __nested_webpack_require_61794__(6049);
  1717. // EXTERNAL MODULE: ./src/utils/errors.js
  1718. var errors = __nested_webpack_require_61794__(8581);
  1719. ;// CONCATENATED MODULE: ./src/gpu/shader-preprocessor.js
  1720. function _wrapRegExp() { _wrapRegExp = function (e, r) { return new BabelRegExp(e, void 0, r); }; var e = RegExp.prototype, r = new WeakMap(); function BabelRegExp(e, t, p) { var o = RegExp(e, t); return r.set(o, p || r.get(e)), _setPrototypeOf(o, BabelRegExp.prototype); } function buildGroups(e, t) { var p = r.get(t); return Object.keys(p).reduce(function (r, t) { var o = p[t]; if ("number" == typeof o) r[t] = e[o];else { for (var i = 0; void 0 === e[o[i]] && i + 1 < o.length;) i++; r[t] = e[o[i]]; } return r; }, Object.create(null)); } return _inherits(BabelRegExp, RegExp), BabelRegExp.prototype.exec = function (r) { var t = e.exec.call(this, r); if (t) { t.groups = buildGroups(t, this); var p = t.indices; p && (p.groups = buildGroups(p, this)); } return t; }, BabelRegExp.prototype[Symbol.replace] = function (t, p) { if ("string" == typeof p) { var o = r.get(this); return e[Symbol.replace].call(this, t, p.replace(/\$<([^>]+)>/g, function (e, r) { var t = o[r]; return "$" + (Array.isArray(t) ? t.join("$") : t); })); } if ("function" == typeof p) { var i = this; return e[Symbol.replace].call(this, t, function () { var e = arguments; return "object" != typeof e[e.length - 1] && (e = [].slice.call(e)).push(buildGroups(e, i)), p.apply(this, e); }); } return e[Symbol.replace].call(this, t, p); }, _wrapRegExp.apply(this, arguments); }
  1721. function _inherits(t, e) { if ("function" != typeof e && null !== e) throw new TypeError("Super expression must either be null or a function"); t.prototype = Object.create(e && e.prototype, { constructor: { value: t, writable: !0, configurable: !0 } }), Object.defineProperty(t, "prototype", { writable: !1 }), e && _setPrototypeOf(t, e); }
  1722. function _setPrototypeOf(t, e) { return _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function (t, e) { return t.__proto__ = e, t; }, _setPrototypeOf(t, e); }
  1723. /*
  1724. * speedy-vision.js
  1725. * GPU-accelerated Computer Vision for JavaScript
  1726. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1727. *
  1728. * Licensed under the Apache License, Version 2.0 (the "License");
  1729. * you may not use this file except in compliance with the License.
  1730. * You may obtain a copy of the License at
  1731. *
  1732. * http://www.apache.org/licenses/LICENSE-2.0
  1733. *
  1734. * Unless required by applicable law or agreed to in writing, software
  1735. * distributed under the License is distributed on an "AS IS" BASIS,
  1736. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1737. * See the License for the specific language governing permissions and
  1738. * limitations under the License.
  1739. *
  1740. * shader-preprocessor.js
  1741. * Custom preprocessor for shaders
  1742. */
  1743. /** @typedef {Object<string,number>} ShaderPreprocessorTemplateOfConstants */
  1744. /** @typedef {import('./shader-declaration').ShaderDeclarationPreprocessorConstants} ShaderPreprocessorConstants */
  1745. // Import numeric globals
  1746. const globals = __nested_webpack_require_61794__(3816);
  1747. const numericGlobals = /** @type {ShaderPreprocessorTemplateOfConstants} */
  1748. Object.keys(globals).filter(key => typeof globals[key] == 'number').reduce((obj, key) => (obj[key] = globals[key], obj), {});
  1749. /** @type {ShaderPreprocessorTemplateOfConstants} Constants available to all shaders */
  1750. const basicConstants = Object.freeze(Object.assign(Object.assign({}, numericGlobals), {}, {
  1751. // fragment shader
  1752. 'FS_USE_CUSTOM_PRECISION': 0,
  1753. // use default precision settings
  1754. 'FS_OUTPUT_TYPE': 0,
  1755. // normalized RGBA
  1756. // colors
  1757. 'PIXELCOMPONENT_RED': types/* PixelComponent */.kQ.RED,
  1758. 'PIXELCOMPONENT_GREEN': types/* PixelComponent */.kQ.GREEN,
  1759. 'PIXELCOMPONENT_BLUE': types/* PixelComponent */.kQ.BLUE,
  1760. 'PIXELCOMPONENT_ALPHA': types/* PixelComponent */.kQ.ALPHA
  1761. }));
  1762. /** @type {function(string,string):ShaderPreprocessorTemplateOfConstants} Platform-related constants available to all shaders */
  1763. const platformConstants = (platform, glRenderer) => Object.freeze({
  1764. 'APPLE': /(Mac|iOS|iPhone|iPad|iPod)/i.test(platform) | 0,
  1765. // "MacIntel", "macOS", "iOS", "iPhone", "iPad"...
  1766. 'APPLE_GPU': /Apple/.test(glRenderer) | 0,
  1767. // the renderer is always "Apple GPU" on Safari and on Epiphany at the time of this writing; on Chrome, it may be "Apple M1" for example...
  1768. 'INTEL_GRAPHICS': /Intel.*Graphics/.test(glRenderer) | 0 // Intel[(R)] ... [HD] Graphics xyz ...
  1769. });
  1770. // Regular Expressions
  1771. const commentsRegex = [/\/\*(.|\s)*?\*\//g, /\/\/.*$/gm];
  1772. const includeRegex = /^\s*@\s*include\s+"(.*?)"/gm;
  1773. const constantRegex = /@(\w+)@/g;
  1774. const unrollRegex = [/*#__PURE__*/_wrapRegExp(/@\s*unroll\s+?for\s*\(\s*(int|)\s*(\w+)\s*=\s*(\x2D?\d+|\w+)\s*;\s*\2\s*(<=?)\s*(\x2D?\d+|\w+)\s*;\s*\2\s*\+\+()\s*\)\s*\{\s*([\s\S]+?)\s*\}/g, {
  1775. counter: 2
  1776. }), /*#__PURE__*/_wrapRegExp(/@\s*unroll\s+?for\s*\(\s*(int|)\s*(\w+)\s*=\s*(\x2D?\d+|\w+)\s*;\s*\2\s*(<=?)\s*(\x2D?\d+|\w+)\s*;\s*\2\s*\+=\s*(\x2D?\d+)\s*\)\s*\{\s*([\s\S]+?)\s*\}/g, {
  1777. counter: 2
  1778. })];
  1779. /**
  1780. * Custom preprocessor for the shaders
  1781. */
  1782. class ShaderPreprocessor {
  1783. /**
  1784. * Runs the preprocessor and generates GLSL code
  1785. * @param {ShaderPreprocessorConstants} defines user-provided preprocessor constants for this shader
  1786. * @param {string} infix annotated GLSL code
  1787. * @param {string} [prefix]
  1788. * @param {string} [suffix]
  1789. * @returns {string} preprocessed GLSL code
  1790. */
  1791. static generateGLSL(defines, infix, prefix = null, suffix = null) {
  1792. //
  1793. // The preprocessor will remove comments from GLSL code,
  1794. // include requested GLSL files and import global constants
  1795. // defined for all shaders (see above)
  1796. //
  1797. const errors = []; // compile-time errors
  1798. const constants = generateConstants(defines);
  1799. const annotatedGLSL = generateUnprocessedGLSL(defines, infix, prefix, suffix);
  1800. return unrollLoops(annotatedGLSL.replace(commentsRegex[0], '').replace(commentsRegex[1], '').replace(constantRegex, (_, name) => String(
  1801. // Replace preprocessor @CONSTANTS@ by their numeric values
  1802. constants.has(name) ? Number(constants.get(name)) : (errors.push(`Undefined constant ${name}`), 0))).replace(includeRegex, (_, filename) =>
  1803. // Included files may include other files.
  1804. // XXX no cycle detection!
  1805. ShaderPreprocessor.generateGLSL(defines, readfileSync(filename))), defines) + errors.map(msg => `\n#error ${msg}\n`).join('');
  1806. }
  1807. }
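/*
 * Illustrative sketch (not part of the library, and assuming the GL context has
 * already been initialized, since platform constants are queried): the defines
 * are passed as a Map, each entry is emitted as a #define, and @NAME@ tokens
 * are replaced by their numeric values. The GLSL body below is hypothetical:
 *
 * const defines = new Map([ ['KERNEL_SIZE', 5] ]);
 * const glsl = ShaderPreprocessor.generateGLSL(defines, `
 *     uniform sampler2D image;
 *     void main() { color = vec4(float(@KERNEL_SIZE@)); }
 * `);
 * // '#define KERNEL_SIZE 5' is prepended and '@KERNEL_SIZE@' becomes '5'
 */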
  1808. /**
  1809. * Generate GLSL code based on the input arguments
  1810. * @param {ShaderPreprocessorConstants} defines
  1811. * @param {string} infix
  1812. * @param {string} [prefix]
  1813. * @param {string} [suffix]
  1814. * @returns {string} GLSL code
  1815. */
  1816. function generateUnprocessedGLSL(defines, infix, prefix = null, suffix = null) {
  1817. const parts = [];
  1818. if (prefix !== null) parts.push(prefix);
  1819. for (const [key, value] of defines) parts.push(`#define ${key} ${Number(value)}`);
  1820. parts.push(infix);
  1821. if (suffix !== null) parts.push(suffix);
  1822. return parts.join('\n');
  1823. }
  1824. /**
  1825. * Generate pre-processor constants. Constants provided by the
  1826. * user have higher priority than globally available constants.
  1827. * @param {ShaderPreprocessorConstants} defines user-provided
  1828. * @returns {ShaderPreprocessorConstants}
  1829. */
  1830. function generateConstants(defines) {
  1831. utils/* Utils */.A.assert(speedy_gl/* SpeedyGL */.c.isInitialized());
  1832. const myConstants = /** @type {ShaderPreprocessorConstants} */new Map();
  1833. const globalConstants = Object.assign(Object.create(null), basicConstants, platformConstants(utils/* Utils */.A.platformString(), speedy_gl/* SpeedyGL */.c.instance.renderer));
  1834. // globally available constants have lower priority
  1835. for (const key in globalConstants) {
  1836. //if(Object.prototype.hasOwnProperty.call(globalConstants, key))
  1837. myConstants.set(key, globalConstants[key]);
  1838. }
  1839. // user-defined constants have higher priority
  1840. for (const [key, value] of defines) myConstants.set(key, value);
  1841. // done!
  1842. return myConstants;
  1843. }
  1844. /**
  1845. * Reads a shader from the shaders/include/ folder
  1846. * @param {string} filename
  1847. * @returns {string}
  1848. */
  1849. function readfileSync(filename) {
  1850. if (String(filename).match(/^[a-zA-Z0-9_-]+\.glsl$/)) return __nested_webpack_require_61794__(5235)("./" + filename);
  1851. throw new errors/* FileNotFoundError */.kG(`Shader preprocessor: can't read file "${filename}"`);
  1852. }
  1853. /**
  1854. * Unroll for loops in our own preprocessor
  1855. * @param {string} code
  1856. * @param {ShaderPreprocessorConstants} defines
  1857. * @returns {string}
  1858. */
  1859. function unrollLoops(code, defines) {
  1860. //
  1861. // Currently, only integer for loops with positive step values
  1862. // can be unrolled. (TODO: negative step values?)
  1863. //
  1864. // The current implementation does not support curly braces
  1865. // inside unrolled loops. You may define macros to get around
  1866. // this, but do you actually need to unroll such loops?
  1867. //
  1868. // Loops that don't fit the supported pattern will crash
  1869. // the preprocessor if you try to unroll them.
  1870. //
  1871. const fn = unroll.bind(defines); // CRAZY!
  1872. const n = unrollRegex.length;
  1873. for (let i = 0; i < n; i++) code = code.replace(unrollRegex[i], fn);
  1874. return code;
  1875. }
  1876. /**
  1877. * Unroll a loop pattern (regexp)
  1878. * @param {string} match the matched for loop
  1879. * @param {string} type
  1880. * @param {string} counter
  1881. * @param {string} start
  1882. * @param {string} cmp
  1883. * @param {string} end
  1884. * @param {string} step
  1885. * @param {string} loopcode
  1886. * @returns {string} unrolled loop
  1887. */
  1888. function unroll(match, type, counter, start, cmp, end, step, loopcode) {
  1889. const defines = /** @type {ShaderPreprocessorConstants} */this;
  1890. // check if the loop limits are numeric constants or #defined numbers from the outside
  1891. const hasStart = Number.isFinite(+start) || defines.has(start);
  1892. const hasEnd = Number.isFinite(+end) || defines.has(end);
  1893. if (!hasStart || !hasEnd) {
  1894. if (defines.size > 0) throw new errors/* ParseError */.mB(`Can't unroll loop: unknown limits (start=${start}, end=${end}). Code:\n\n${match}`);else return match; // don't unroll now, because defines is empty - maybe we'll succeed in the next pass
  1895. }
  1896. // parse and validate limits & step
  1897. let istart = defines.has(start) ? defines.get(start) : parseInt(start);
  1898. let iend = defines.has(end) ? defines.get(end) : parseInt(end);
  1899. let istep = step.length == 0 ? 1 : parseInt(step);
  1900. utils/* Utils */.A.assert(istart <= iend && istep > 0);
  1901. /*
  1902. // debug
1903. console.log(`Found "${match}"`);
  1904. console.log(`type="${type}"`);
  1905. console.log(`counter="${counter}"`);
  1906. console.log(`start="${start}"`);
  1907. console.log(`cmp="${cmp}"`);
  1908. console.log(`end="${end}"`);
  1909. console.log(`step="${step}"`);
  1910. console.log(`loopcode="${loopcode}"`)
  1911. console.log('Defines:', defines);
  1912. */
  1913. // continue statements are not supported inside unrolled loops
  1914. // and will generate a compiler error. Using break is ok.
  1915. const hasBreak = loopcode.match(/\bbreak\s*;/) !== null;
  1916. // create a new scope
  1917. let unrolledCode = hasBreak ? 'switch(1) { default:\n' : '{\n';
  1918. // declare counter
  1919. unrolledCode += `${type} ${counter};\n`;
  1920. // unroll loop
  1921. iend += cmp == '<=' ? 1 : 0;
  1922. for (let i = istart; i < iend; i += istep) unrolledCode += `{\n${counter} = ${i};\n${loopcode}\n}\n`;
  1923. // close scope
  1924. unrolledCode += '}\n';
  1925. //console.log('Unrolled code:\n\n' + unrolledCode);
  1926. // done!
  1927. return unrolledCode;
  1928. }
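/*
 * Illustrative sketch (not part of the library): the @unroll directive expands
 * an integer for loop with constant (or #defined) limits into repeated copies
 * of its body - wrapped in a switch(1) block when the body contains a break,
 * so that break still works. This annotated GLSL:
 *
 *     @unroll
 *     for(int i = 0; i < 3; i++) {
 *         sum += pixelAtShortOffset(image, ivec2(i, 0));
 *     }
 *
 * is rewritten roughly as:
 *
 *     {
 *     int i;
 *     { i = 0; sum += pixelAtShortOffset(image, ivec2(i, 0)); }
 *     { i = 1; sum += pixelAtShortOffset(image, ivec2(i, 0)); }
 *     { i = 2; sum += pixelAtShortOffset(image, ivec2(i, 0)); }
 *     }
 */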
  1929. ;// CONCATENATED MODULE: ./src/gpu/shader-declaration.js
  1930. /*
  1931. * speedy-vision.js
  1932. * GPU-accelerated Computer Vision for JavaScript
  1933. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  1934. *
  1935. * Licensed under the Apache License, Version 2.0 (the "License");
  1936. * you may not use this file except in compliance with the License.
  1937. * You may obtain a copy of the License at
  1938. *
  1939. * http://www.apache.org/licenses/LICENSE-2.0
  1940. *
  1941. * Unless required by applicable law or agreed to in writing, software
  1942. * distributed under the License is distributed on an "AS IS" BASIS,
  1943. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  1944. * See the License for the specific language governing permissions and
  1945. * limitations under the License.
  1946. *
  1947. * shader-declaration.js
  1948. * Encapsulates a shader declaration
  1949. */
  1950. const DEFAULT_ATTRIBUTES = Object.freeze({
  1951. position: 'a_position',
  1952. texCoord: 'a_texCoord'
  1953. });
  1954. const DEFAULT_ATTRIBUTES_LOCATION = Object.freeze({
  1955. position: 0,
  1956. // use location 0; see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  1957. texCoord: 1
  1958. });
  1959. const DEFAULT_VERTEX_SHADER_PREFIX = `#version 300 es
  1960. precision highp float;
  1961. precision highp int;
  1962. layout (location=${DEFAULT_ATTRIBUTES_LOCATION.position}) in vec2 ${DEFAULT_ATTRIBUTES.position};
  1963. layout (location=${DEFAULT_ATTRIBUTES_LOCATION.texCoord}) in vec2 ${DEFAULT_ATTRIBUTES.texCoord};
  1964. out highp vec2 texCoord;
  1965. uniform highp vec2 texSize;
  1966. #define vsinit() \
  1967. gl_Position = vec4(${DEFAULT_ATTRIBUTES.position}, 0.0f, 1.0f); \
  1968. texCoord = ${DEFAULT_ATTRIBUTES.texCoord};
  1969. \n\n`;
  1970. const DEFAULT_VERTEX_SHADER = `#define vsmain() ;`;
  1971. const DEFAULT_VERTEX_SHADER_SUFFIX = `\n\nvoid main() { vsinit(); vsmain(); }\n`;
  1972. const DEFAULT_FRAGMENT_SHADER_PREFIX = `#version 300 es
  1973. #if @FS_USE_CUSTOM_PRECISION@ == 0
  1974. precision mediump float; // ~float16
  1975. precision mediump sampler2D;
  1976. precision highp int; // int32
  1977. #endif
  1978. #if @FS_OUTPUT_TYPE@ == 0
  1979. #define OUT_TYPE mediump vec4
  1980. #elif @FS_OUTPUT_TYPE@ == 1
  1981. #define OUT_TYPE mediump ivec4
  1982. #elif @FS_OUTPUT_TYPE@ == 2
  1983. #define OUT_TYPE mediump uvec4
  1984. #else
  1985. #error Unknown FS_OUTPUT_TYPE
  1986. #endif
  1987. out OUT_TYPE color;
  1988. in highp vec2 texCoord;
  1989. uniform highp vec2 texSize;
  1990. @include "global.glsl"\n\n`;
  1991. const PRIVATE_TOKEN = Symbol();
  1992. /** @typedef {string} ShaderDeclarationUnprocessedGLSL */
  1993. /** @typedef {string[]} ShaderDeclarationArgumentList */
  1994. /** @typedef {Map<string,string>} ShaderDeclarationUniformTypes */
  1995. /** @typedef {Map<string,number>} ShaderDeclarationPreprocessorConstants */
  1996. /**
  1997. * Shader Declaration
  1998. * @abstract
  1999. */
  2000. class ShaderDeclaration {
  2001. /**
  2002. * @private Constructor
  2003. * @param {Symbol} privateToken
  2004. * @param {ShaderDeclarationArgumentList} argumentList
  2005. * @param {ShaderDeclarationPreprocessorConstants} defines
  2006. * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
  2007. * @param {ShaderDeclarationUnprocessedGLSL} vsSource unprocessed GLSL code of the vertex shader
  2008. */
  2009. constructor(privateToken, argumentList, defines, fsSource, vsSource) {
  2010. // private constructor!
  2011. if (privateToken !== PRIVATE_TOKEN) throw new errors/* IllegalOperationError */.Er();
  2012. /** @type {ShaderDeclarationArgumentList} an ordered list of uniform names */
  2013. this._arguments = [...argumentList];
  2014. /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
  2015. this._defines = new Map(defines);
  2016. /** @type {string} preprocessed source code of the fragment shader */
  2017. this._fragmentSource = ShaderPreprocessor.generateGLSL(this._defines, fsSource, DEFAULT_FRAGMENT_SHADER_PREFIX);
  2018. /** @type {string} preprocessed source code of the vertex shader */
  2019. this._vertexSource = ShaderPreprocessor.generateGLSL(this._defines, vsSource, DEFAULT_VERTEX_SHADER_PREFIX, DEFAULT_VERTEX_SHADER_SUFFIX);
  2020. /** @type {ShaderDeclarationUniformTypes} it maps uniform names to their types */
  2021. this._uniforms = this._autodetectUniforms(this._fragmentSource + '\n' + this._vertexSource);
  2022. // validate arguments
  2023. this._validateArguments(this._arguments, this._uniforms);
  2024. }
  2025. /**
  2026. * Return the preprocessed GLSL source code of the fragment shader
  2027. * @returns {string}
  2028. */
  2029. get fragmentSource() {
  2030. return this._fragmentSource;
  2031. }
  2032. /**
  2033. * Return the preprocessed GLSL source code of the vertex shader
  2034. * @returns {string}
  2035. */
  2036. get vertexSource() {
  2037. return this._vertexSource;
  2038. }
  2039. /**
  2040. * Get the names of the vertex shader attributes
  2041. * @returns {typeof DEFAULT_ATTRIBUTES}
  2042. */
  2043. get attributes() {
  2044. return DEFAULT_ATTRIBUTES;
  2045. }
  2046. /**
  2047. * Get the pre-defined locations of the vertex shader attributes
  2048. * @returns {typeof DEFAULT_ATTRIBUTES_LOCATION}
  2049. */
  2050. get locationOfAttributes() {
  2051. return DEFAULT_ATTRIBUTES_LOCATION;
  2052. }
  2053. /**
  2054. * Names of the arguments that will be passed to the Shader,
  2055. * corresponding to GLSL uniforms, in the order they will be passed
  2056. * @returns {string[]}
  2057. */
  2058. get arguments() {
  2059. return [].concat(this._arguments);
  2060. }
  2061. /**
  2062. * Names of the uniforms declared in the shader
  2063. * @returns {string[]}
  2064. */
  2065. get uniforms() {
  2066. return Array.from(this._uniforms.keys());
  2067. }
  2068. /**
  2069. * The GLSL type of a uniform variable declared in the shader
  2070. * @param {string} name
  2071. * @returns {string}
  2072. */
  2073. uniformType(name) {
  2074. if (!this._uniforms.has(name)) throw new errors/* IllegalArgumentError */.qw(`Unrecognized uniform variable: "${name}"`);
  2075. return this._uniforms.get(name);
  2076. }
  2077. /**
  2078. * The value of an externally defined constant, i.e., via withDefines()
  2079. * @param {string} name
  2080. * @returns {number}
  2081. */
  2082. definedConstant(name) {
  2083. if (!this._defines.has(name)) throw new errors/* IllegalArgumentError */.qw(`Unrecognized externally defined constant: "${name}"`);
  2084. return this._defines.get(name);
  2085. }
  2086. /**
  2087. * Parses a GLSL source and detects the uniform variables,
  2088. * as well as their types
  2089. * @param {string} preprocessedSource
  2090. * @returns {ShaderDeclarationUniformTypes} specifies the types of all uniforms
  2091. */
  2092. _autodetectUniforms(preprocessedSource) {
  2093. const sourceWithoutComments = preprocessedSource; // assume we've preprocessed the source already
  2094. const regex = /^\s*uniform\s+(highp\s+|mediump\s+|lowp\s+)?(\w+)\s+([^;]+)/gm;
  2095. const uniforms = /** @type {ShaderDeclarationUniformTypes} */new Map();
  2096. let match;
  2097. while ((match = regex.exec(sourceWithoutComments)) !== null) {
  2098. const type = match[2];
  2099. const names = match[3].split(',').map(name => name.trim()).filter(name => name); // trim & remove empty names
  2100. for (const name of names) {
  2101. if (name.endsWith(']')) {
  2102. // is it an array?
  2103. if (!(match = name.match(/(\w+)\s*\[\s*(\d+)\s*\]$/))) throw new errors/* ParseError */.mB(`Unspecified array length for uniform "${name}" in the shader`);
  2104. // read array name & size
  2105. const [array, size] = [match[1], Number(match[2])];
  2106. // register uniforms
  2107. for (let i = 0; i < size; i++) uniforms.set(`${array}[${i}]`, type);
  2108. } else {
  2109. // register a regular uniform
  2110. if (!uniforms.has(name) || uniforms.get(name) === type) uniforms.set(name, type);else throw new errors/* IllegalOperationError */.Er(`Redefinition of uniform "${name}" in the shader`);
  2111. }
  2112. }
  2113. }
  2114. return uniforms;
  2115. }
  2116. /**
  2117. * Checks if all the arguments of the shader declaration are backed by a
  2118. * uniform variable in GLSL code
  2119. * @param {ShaderDeclarationArgumentList} argumentList
  2120. * @param {ShaderDeclarationUniformTypes} uniforms
  2121. * @throws {IllegalArgumentError}
  2122. */
  2123. _validateArguments(argumentList, uniforms) {
  2124. for (const argname of argumentList) {
  2125. if (!uniforms.has(argname)) {
  2126. if (!uniforms.has(argname + '[0]')) throw new errors/* IllegalArgumentError */.qw(`Argument "${argname}" has not been declared in the shader`);
  2127. }
  2128. }
  2129. }
  2130. }
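/*
 * Illustrative sketch (not part of the library, and assuming an initialized GL
 * context so that the preprocessor can run): uniforms are autodetected from the
 * preprocessed GLSL and declared arrays are expanded entry by entry, which is
 * also why an argument named after an array validates against its "[0]" entry.
 * The shader body below is hypothetical:
 *
 * const decl = createShader(`
 *     uniform sampler2D image;
 *     uniform mediump float kernel[3];
 *     void main() { color = threadPixel(image) * kernel[0]; }
 * `).withArguments('image', 'kernel').build();
 *
 * decl.uniformType('image');     // 'sampler2D'
 * decl.uniformType('kernel[1]'); // 'float'
 * decl.uniforms;                 // includes 'image', 'kernel[0]'..'kernel[2]', 'texSize', ...
 */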
  2131. /**
  2132. * A ShaderDeclaration that has its GLSL code stored in-memory
  2133. */
  2134. class MemoryShaderDeclaration extends ShaderDeclaration {
  2135. /**
  2136. * @private Constructor
  2137. * @param {Symbol} privateToken
  2138. * @param {ShaderDeclarationArgumentList} argumentList
  2139. * @param {ShaderDeclarationPreprocessorConstants} defines
  2140. * @param {ShaderDeclarationUnprocessedGLSL} fsSource unprocessed GLSL code of the fragment shader
  2141. * @param {ShaderDeclarationUnprocessedGLSL} [vsSource] unprocessed GLSL code of the vertex shader
  2142. */
  2143. constructor(privateToken, argumentList, defines, fsSource, vsSource = DEFAULT_VERTEX_SHADER) {
  2144. super(privateToken, argumentList, defines, fsSource, vsSource);
  2145. /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the fragment shader */
  2146. this._fsUnprocessedSource = String(fsSource);
  2147. /** @type {ShaderDeclarationUnprocessedGLSL} unprocessed GLSL code of the vertex shader */
  2148. this._vsUnprocessedSource = String(vsSource);
  2149. }
  2150. }
  2151. /**
  2152. * A ShaderDeclaration that has its GLSL code stored in a file
  2153. */
  2154. class FileShaderDeclaration extends ShaderDeclaration {
  2155. /**
  2156. * @private Constructor
  2157. * @param {Symbol} privateToken
  2158. * @param {ShaderDeclarationArgumentList} argumentList
  2159. * @param {ShaderDeclarationPreprocessorConstants} defines
  2160. * @param {string} fsFilepath path to the file of the unprocessed GLSL code of the fragment shader
  2161. * @param {string} [vsFilepath] path to the file of the unprocessed GLSL code of the vertex shader
  2162. */
  2163. constructor(privateToken, argumentList, defines, fsFilepath, vsFilepath = '') {
  2164. // validate paths
  2165. if (!String(fsFilepath).match(/^[a-zA-Z0-9_\-/]+\.glsl$/)) throw new errors/* FileNotFoundError */.kG(`Can't import fragment shader at "${fsFilepath}"`);else if (vsFilepath != '' && !String(vsFilepath).match(/^[a-zA-Z0-9_\-/]+\.vs\.glsl$/)) throw new errors/* FileNotFoundError */.kG(`Can't import vertex shader at "${vsFilepath}"`);
  2166. // import files
  2167. const fsSource = __nested_webpack_require_61794__(4606)("./" + String(fsFilepath));
  2168. const vsSource = vsFilepath != '' ? __nested_webpack_require_61794__(4606)("./" + String(vsFilepath)) : DEFAULT_VERTEX_SHADER;
  2169. // super class
  2170. super(privateToken, argumentList, defines, fsSource, vsSource);
  2171. /** @type {string} filepath of the fragment shader */
  2172. this._fsFilepath = String(fsFilepath);
  2173. /** @type {string} filepath of the vertex shader */
  2174. this._vsFilepath = String(vsFilepath);
  2175. }
  2176. /**
  2177. * Return the preprocessed GLSL source code of the fragment shader
  2178. * @returns {string}
  2179. */
  2180. get fragmentSource() {
  2181. // we override this method to include the filepath. The motivation
  2182. // is to easily identify the file when debugging compiling errors.
  2183. return this._addHeader('// File: ' + this._fsFilepath, super.fragmentSource);
  2184. }
  2185. /**
  2186. * Return the preprocessed GLSL source code of the vertex shader
  2187. * @returns {string}
  2188. */
  2189. get vertexSource() {
  2190. // we override this method to include the filepath. The motivation
  2191. // is to easily identify the file when debugging compiling errors.
  2192. return this._addHeader('// File: ' + (this._vsFilepath != '' ? this._vsFilepath : '(default-vs) ' + this._fsFilepath), super.vertexSource);
  2193. }
  2194. /**
  2195. * Add a header to a GLSL code
  2196. * @param {string} header code to be added
  2197. * @param {string} src pre-processed GLSL code
  2198. * @returns {string} src with an added header
  2199. */
  2200. _addHeader(header, src) {
  2201. utils/* Utils */.A.assert(header.startsWith('//') && !header.includes('\n'));
  2202. const j = src.indexOf('\n');
  2203. const versionDirective = src.substr(0, j);
  2204. const body = src.substr(j);
  2205. utils/* Utils */.A.assert(versionDirective.startsWith('#version '));
  2206. const head = versionDirective + '\n' + header;
  2207. return head + body;
  2208. }
  2209. }
  2210. /**
  2211. * A builder of a ShaderDeclaration
  2212. * @abstract
  2213. */
  2214. class ShaderDeclarationBuilder {
  2215. /**
  2216. * @private Constructor
  2217. * @param {Symbol} privateToken
  2218. */
  2219. constructor(privateToken) {
  2220. if (privateToken !== PRIVATE_TOKEN) throw new errors/* IllegalOperationError */.Er(); // private constructor!
  2221. /** @type {string[]} ordered list of uniform names */
  2222. this._arguments = [];
  2223. /** @type {ShaderDeclarationPreprocessorConstants} externally #defined pre-processor constants */
  2224. this._defines = new Map();
  2225. }
  2226. /**
  2227. * Specify the list & order of arguments to be
  2228. * passed to the shader
  2229. * @param {string[]} args argument names
  2230. * @returns {this}
  2231. */
  2232. withArguments(...args) {
  2233. // the list of arguments may be declared only once
  2234. if (this._arguments.length > 0) throw new errors/* IllegalOperationError */.Er(`Redefinition of shader arguments`);
  2235. // get arguments
  2236. for (let j = 0; j < args.length; j++) this._arguments.push(String(args[j]));
  2237. // done!
  2238. return this;
  2239. }
  2240. /**
  2241. * Specify a set of #defines to be prepended to the shader
  2242. * @param {Object<string,number>} defines key-value pairs
  2243. * @returns {this}
  2244. */
  2245. withDefines(defines) {
  2246. // the list of #defines may be defined only once
  2247. if (this._defines.size > 0) throw new errors/* IllegalOperationError */.Er(`Redefinition of externally defined constants of a shader`);
  2248. // store and write the #defines
  2249. const keys = Object.keys(defines);
  2250. for (const key of keys) {
  2251. const value = Number(defines[key]); // force numeric values (just in case)
  2252. this._defines.set(key, value);
  2253. }
  2254. // done!
  2255. return this;
  2256. }
  2257. /**
  2258. * Build a ShaderDeclaration
  2259. * @returns {ShaderDeclaration}
  2260. */
  2261. build() {
  2262. throw new errors/* AbstractMethodError */.aQ();
  2263. }
  2264. }
  2265. /**
  2266. * A builder of a MemoryShaderDeclaration
  2267. */
  2268. class MemoryShaderDeclarationBuilder extends ShaderDeclarationBuilder {
  2269. /**
  2270. * @private Constructor
  2271. * @param {Symbol} privateToken
  2272. * @param {ShaderDeclarationUnprocessedGLSL} fsSource
  2273. * @param {ShaderDeclarationUnprocessedGLSL} [vsSource]
  2274. */
  2275. constructor(privateToken, fsSource, vsSource) {
  2276. super(privateToken);
  2277. /** @type {ShaderDeclarationUnprocessedGLSL} the unprocessed GLSL code of the fragment shader */
  2278. this._fsSource = String(fsSource);
  2279. /** @type {ShaderDeclarationUnprocessedGLSL|undefined} the unprocessed GLSL code of the vertex shader */
  2280. this._vsSource = vsSource !== undefined ? String(vsSource) : undefined;
  2281. }
  2282. /**
  2283. * Build a MemoryShaderDeclaration
  2284. * @returns {ShaderDeclaration}
  2285. */
  2286. build() {
  2287. return new MemoryShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsSource, this._vsSource);
  2288. }
  2289. }
  2290. /**
  2291. * A builder of a FileShaderDeclaration
  2292. */
  2293. class FileShaderDeclarationBuilder extends ShaderDeclarationBuilder {
  2294. /**
  2295. * @private Constructor
  2296. * @param {Symbol} privateToken
  2297. * @param {string} fsFilepath
  2298. * @param {string} [vsFilepath]
  2299. */
  2300. constructor(privateToken, fsFilepath, vsFilepath) {
  2301. super(privateToken);
  2302. /** @type {string} path to the unprocessed GLSL code of the fragment shader */
  2303. this._fsFilepath = String(fsFilepath);
  2304. /** @type {string|undefined} path to the unprocessed GLSL code of the vertex shader */
  2305. this._vsFilepath = vsFilepath !== undefined ? String(vsFilepath) : undefined;
  2306. }
  2307. /**
  2308. * Build a FileShaderDeclaration
  2309. * @returns {ShaderDeclaration}
  2310. */
  2311. build() {
  2312. return new FileShaderDeclaration(PRIVATE_TOKEN, this._arguments, this._defines, this._fsFilepath, this._vsFilepath);
  2313. }
  2314. }
  2315. /**
  2316. * Import a ShaderDeclaration from a GLSL file
  2317. * @param {string} filepath relative to the shaders/ folder (a .glsl file)
  2318. * @param {string} [vsfilepath] optional vertex shader (a .vs.glsl file)
  2319. * @returns {ShaderDeclaration}
  2320. */
  2321. function importShader(filepath, vsfilepath = undefined) {
  2322. return new FileShaderDeclarationBuilder(PRIVATE_TOKEN, filepath, vsfilepath);
  2323. }
  2324. /**
  2325. * Create a ShaderDeclaration from a GLSL source code
  2326. * @param {string} source fragment shader
  2327. * @param {string} [vssource] optional vertex shader
  2328. * @returns {ShaderDeclaration}
  2329. */
  2330. function createShader(source, vssource = undefined) {
  2331. return new MemoryShaderDeclarationBuilder(PRIVATE_TOKEN, source, vssource);
  2332. }
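/*
 * Illustrative sketch (not part of the library, and assuming an initialized GL
 * context): both factories return a builder; chain withArguments() and, if
 * needed, withDefines(), then call build() to obtain the ShaderDeclaration.
 * The GLSL body below is hypothetical:
 *
 * const decl = createShader(`
 *     uniform sampler2D image;
 *     uniform float threshold;
 *     void main() {
 *         vec4 pixel = threadPixel(image);
 *         color = vec4(step(threshold, pixel.rgb), pixel.a);
 *     }
 * `)
 * .withArguments('image', 'threshold')
 * .withDefines({ MY_CONSTANT: 1 })
 * .build();
 */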
  2333. /***/ }),
  2334. /***/ 1672:
  2335. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_89681__) => {
  2336. "use strict";
  2337. __nested_webpack_require_89681__.r(__nested_webpack_exports__);
  2338. /* harmony export */ __nested_webpack_require_89681__.d(__nested_webpack_exports__, {
  2339. /* harmony export */ conv2D: () => (/* binding */ conv2D),
  2340. /* harmony export */ convX: () => (/* binding */ convX),
  2341. /* harmony export */ convY: () => (/* binding */ convY)
  2342. /* harmony export */ });
  2343. /* harmony import */ var _shader_declaration__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_89681__(9420);
  2344. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_89681__(9037);
  2345. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_89681__(8581);
  2346. /*
  2347. * speedy-vision.js
  2348. * GPU-accelerated Computer Vision for JavaScript
  2349. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2350. *
  2351. * Licensed under the Apache License, Version 2.0 (the "License");
  2352. * you may not use this file except in compliance with the License.
  2353. * You may obtain a copy of the License at
  2354. *
  2355. * http://www.apache.org/licenses/LICENSE-2.0
  2356. *
  2357. * Unless required by applicable law or agreed to in writing, software
  2358. * distributed under the License is distributed on an "AS IS" BASIS,
  2359. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2360. * See the License for the specific language governing permissions and
  2361. * limitations under the License.
  2362. *
  2363. * convolution.js
  2364. * Convolution shader generators
  2365. */
  2366. /**
  2367. * Generate a 2D convolution with a square kernel
  2368. * @param {number[]} kernel convolution kernel
  2369. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2370. * @returns {ShaderDeclarationBuilder}
  2371. */
  2372. function conv2D(kernel, normalizationConstant = 1.0) {
  2373. const kernel32 = new Float32Array(kernel.map(x => +x * +normalizationConstant));
  2374. const kSize = Math.sqrt(kernel32.length) | 0;
  2375. const N = kSize >> 1; // idiv 2
  2376. // validate input
  2377. if (kSize < 1 || kSize % 2 == 0) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 2D convolution with an invalid kSize of ${kSize}`);else if (kSize * kSize != kernel32.length) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Invalid 2D convolution kernel of ${kernel32.length} elements (expected: square)`);
  2378. // select the appropriate pixel function
  2379. const pixelAtOffset = N <= 7 ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
  2380. // code generator
  2381. const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.cartesian(_utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N), _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N)).map(cur => fn(kernel32[(cur[0] + N) * kSize + (cur[1] + N)], cur[0], cur[1])).join('\n');
  2382. const generateCode = (k, dy, dx) => `
  2383. result += ${pixelAtOffset}(image, ivec2(${-dx | 0}, ${-dy | 0})) * float(${+k});
  2384. `;
  2385. // shader
  2386. const source = `
  2387. uniform sampler2D image;
  2388. void main()
  2389. {
  2390. float alpha = threadPixel(image).a;
  2391. vec4 result = vec4(0.0f);
  2392. ${foreachKernelElement(generateCode)}
  2393. color = vec4(result.rgb, alpha);
  2394. }
  2395. `;
  2396. // done!
  2397. return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
  2398. }
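/*
 * Illustrative sketch (not part of the library): conv2D() takes a flattened
 * square kernel and emits one pixelAtShortOffset()/pixelAtLongOffset() term per
 * kernel entry. A 3x3 sharpen filter, with the default normalization of 1.0
 * (build() assumes an initialized GL context):
 *
 * const sharpen = conv2D([
 *      0, -1,  0,
 *     -1,  5, -1,
 *      0, -1,  0
 * ]).build();
 */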
  2399. /**
  2400. * Generate a 1D convolution function on the x-axis
  2401. * @param {number[]} kernel convolution kernel
  2402. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2403. * @returns {ShaderDeclarationBuilder}
  2404. */
  2405. function convX(kernel, normalizationConstant = 1.0) {
  2406. return conv1D('x', kernel, normalizationConstant);
  2407. }
  2408. /**
  2409. * Generate a 1D convolution function on the y-axis
  2410. * @param {number[]} kernel convolution kernel
  2411. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2412. * @returns {ShaderDeclarationBuilder}
  2413. */
  2414. function convY(kernel, normalizationConstant = 1.0) {
  2415. return conv1D('y', kernel, normalizationConstant);
  2416. }
  2417. /**
  2418. * 1D convolution function generator
  2419. * @param {string} axis either "x" or "y"
  2420. * @param {number[]} kernel convolution kernel
  2421. * @param {number} [normalizationConstant] will be multiplied by all kernel entries
  2422. * @returns {ShaderDeclarationBuilder}
  2423. */
  2424. function conv1D(axis, kernel, normalizationConstant = 1.0) {
  2425. const kernel32 = new Float32Array(kernel.map(x => +x * +normalizationConstant));
  2426. const kSize = kernel32.length;
  2427. const N = kSize >> 1; // idiv 2
  2428. // validate input
  2429. if (kSize < 1 || kSize % 2 == 0) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform a 1D convolution with an invalid kSize of ${kSize}`);else if (axis != 'x' && axis != 'y') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_2__/* .IllegalArgumentError */ .qw(`Can't perform 1D convolution: invalid axis "${axis}"`); // this should never happen
  2430. // select the appropriate pixel function
  2431. const pixelAtOffset = N <= 7 ? 'pixelAtShortOffset' : 'pixelAtLongOffset';
  2432. // code generator
  2433. const foreachKernelElement = fn => _utils_utils__WEBPACK_IMPORTED_MODULE_1__/* .Utils */ .A.symmetricRange(N).reduce((acc, cur) => acc + fn(kernel32[cur + N], cur), '');
  2434. const generateCode = (k, i) => axis == 'x' ? `
  2435. pixel += ${pixelAtOffset}(image, ivec2(${-i | 0}, 0)) * float(${+k});
  2436. ` : `
  2437. pixel += ${pixelAtOffset}(image, ivec2(0, ${-i | 0})) * float(${+k});
  2438. `;
  2439. // shader
  2440. const source = `
  2441. uniform sampler2D image;
  2442. void main()
  2443. {
  2444. float alpha = threadPixel(image).a;
  2445. vec4 pixel = vec4(0.0f);
  2446. ${foreachKernelElement(generateCode)}
  2447. color = vec4(pixel.rgb, alpha);
  2448. }
  2449. `;
  2450. // done!
  2451. return (0,_shader_declaration__WEBPACK_IMPORTED_MODULE_0__/* .createShader */ .gx)(source).withArguments('image');
  2452. }
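// Illustrative usage sketch (assumes access to the unminified bindings convX, convY and
// Utils.gaussianKernel as named in this bundle; this is not a public-facing API):
//
// const kernel = Utils.gaussianKernel(1.0, 5);   // 5-tap 1D Gaussian, sigma = 1
// const blurX = convX(kernel);                   // horizontal pass
// const blurY = convY(kernel);                   // vertical pass
// // Running blurX followed by blurY is equivalent to convolving with the 5x5
// // outer-product kernel, because the 2D Gaussian kernel is separable.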
  2453. /***/ }),
  2454. /***/ 1001:
  2455. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_95546__) => {
  2456. "use strict";
  2457. /* harmony export */ __nested_webpack_require_95546__.d(__nested_webpack_exports__, {
  2458. /* harmony export */ c: () => (/* binding */ SpeedyGL)
  2459. /* harmony export */ });
  2460. /* harmony import */ var _utils_utils__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_95546__(9037);
  2461. /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_95546__(2199);
  2462. /* harmony import */ var _utils_observable__WEBPACK_IMPORTED_MODULE_4__ = __nested_webpack_require_95546__(3211);
  2463. /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_95546__(9192);
  2464. /* harmony import */ var _utils_errors__WEBPACK_IMPORTED_MODULE_3__ = __nested_webpack_require_95546__(8581);
  2465. /*
  2466. * speedy-vision.js
  2467. * GPU-accelerated Computer Vision for JavaScript
  2468. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2469. *
  2470. * Licensed under the Apache License, Version 2.0 (the "License");
  2471. * you may not use this file except in compliance with the License.
  2472. * You may obtain a copy of the License at
  2473. *
  2474. * http://www.apache.org/licenses/LICENSE-2.0
  2475. *
  2476. * Unless required by applicable law or agreed to in writing, software
  2477. * distributed under the License is distributed on an "AS IS" BASIS,
  2478. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2479. * See the License for the specific language governing permissions and
  2480. * limitations under the License.
  2481. *
  2482. * speedy-gl.js
  2483. * A wrapper around the WebGL Rendering Context
  2484. */
  2485. /** @typedef {'default' | 'low-power' | 'high-performance'} PowerPreference */
  2486. // Constants
  2487. const SINGLETON_KEY = Symbol();
  2488. const DEFAULT_POWER_PREFERENCE = 'default';
  2489. //
  2490. // We use a small canvas to improve the performance
  2491. // of createImageBitmap() on Firefox.
  2492. //
  2493. // A large canvas (2048x2048) causes a FPS drop, even
  2494. // if we only extract a small region of it (this is
  2495. // unlike Chrome, which is fast).
  2496. //
  2497. // Note: we automatically increase the size of the
  2498. // canvas (as needed) when rendering to it.
  2499. //
  2500. const CANVAS_WIDTH = 16,
  2501. CANVAS_HEIGHT = 16;
  2502. /** @type {SpeedyGL} Singleton */
  2503. let instance = null;
  2504. /** @type {PowerPreference} power preference */
  2505. let powerPreference = DEFAULT_POWER_PREFERENCE;
  2506. /**
  2507. * A wrapper around a WebGL Rendering Context
  2508. */
  2509. class SpeedyGL extends _utils_observable__WEBPACK_IMPORTED_MODULE_4__/* .Observable */ .c {
  2510. /**
  2511. * Constructor
  2512. * @param {Symbol} key
  2513. * @private
  2514. */
  2515. constructor(key) {
  2516. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.assert(key === SINGLETON_KEY);
  2517. super();
  2518. /** @type {boolean} internal flag */
  2519. this._reinitializeOnContextLoss = true;
  2520. /** @type {HTMLCanvasElement} internal canvas */
  2521. this._canvas = this._createCanvas(this._reinitialize.bind(this));
  2522. /** @type {WebGL2RenderingContext} WebGL rendering context */
  2523. this._gl = this._createContext(this._canvas);
  2524. /** @type {string} vendor string of the video driver */
  2525. this._vendor = '';
  2526. /** @type {string} renderer string of the video driver */
  2527. this._renderer = '';
  2528. // read driver info
  2529. this._readDriverInfo();
  2530. // log driver info
  2531. if (_core_settings__WEBPACK_IMPORTED_MODULE_1__/* .Settings */ .w.logging === 'diagnostic') this._logDriverInfo();
  2532. }
  2533. /**
  2534. * Get Singleton
  2535. * @returns {SpeedyGL}
  2536. */
  2537. static get instance() {
  2538. return instance || (instance = new SpeedyGL(SINGLETON_KEY));
  2539. }
  2540. /**
  2541. * The WebGL Rendering Context
  2542. * Be careful not to cache this rendering context, as it may be lost!
  2543. * @returns {WebGL2RenderingContext}
  2544. */
  2545. get gl() {
  2546. return this._gl;
  2547. }
  2548. /**
  2549. * The internal canvas
  2550. * @returns {HTMLCanvasElement}
  2551. */
  2552. get canvas() {
  2553. return this._canvas;
  2554. }
  2555. /**
  2556. * Renderer string of the video driver
  2557. * @returns {string}
  2558. */
  2559. get renderer() {
  2560. return this._renderer;
  2561. }
  2562. /**
  2563. * Vendor string of the video driver
  2564. * @returns {string}
  2565. */
  2566. get vendor() {
  2567. return this._vendor;
  2568. }
  2569. /**
  2570. * Create a WebGL-capable canvas
  2571. * @param {Function} reinitialize to be called if we get a WebGL context loss event
  2572. * @returns {HTMLCanvasElement}
  2573. */
  2574. _createCanvas(reinitialize) {
  2575. const canvas = _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.createCanvas(CANVAS_WIDTH, CANVAS_HEIGHT);
  2576. canvas.addEventListener('webglcontextlost', ev => {
  2577. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Lost WebGL2 context`);
  2578. setTimeout(reinitialize, 0);
  2579. ev.preventDefault();
  2580. }, false);
  2581. /*canvas.addEventListener('webglcontextrestored', ev => {
  2582. Utils.warning(`Restored WebGL2 context`);
  2583. ev.preventDefault();
  2584. }, false);*/
  2585. return canvas;
  2586. }
  2587. /**
  2588. * Create a WebGL2 Rendering Context
  2589. * @param {HTMLCanvasElement} canvas
  2590. * @returns {WebGL2RenderingContext}
  2591. */
  2592. _createContext(canvas) {
  2593. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log(`Creating a ${powerPreference} WebGL2 rendering context...`);
  2594. // does the browser support WebGL2?
  2595. if (typeof WebGL2RenderingContext === 'undefined') throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`This application requires WebGL2. Please update your system.`);
  2596. const gl = canvas.getContext('webgl2', {
  2597. premultipliedAlpha: false,
  2598. preserveDrawingBuffer: false,
  2599. powerPreference: powerPreference,
  2600. alpha: true,
  2601. // see https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#avoid_alphafalse_which_can_be_expensive
  2602. antialias: false,
  2603. depth: false,
  2604. stencil: false,
  2605. desynchronized: true
  2606. });
  2607. if (!gl) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM(`Can't create a WebGL2 Rendering Context. Try a different browser!`);
  2608. return gl;
  2609. }
  2610. /**
  2611. * Reinitialize WebGL
  2612. */
  2613. _reinitialize() {
  2614. // disable reinitialization?
  2615. if (!this._reinitializeOnContextLoss) return;
  2616. // warning
  2617. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.warning(`Reinitializing WebGL2...`);
  2618. // create new canvas
  2619. this._canvas.remove();
  2620. this._canvas = this._createCanvas(this._reinitialize.bind(this));
  2621. // create new context
  2622. this._gl = this._createContext(this._canvas);
  2623. // is this needed?
  2624. this._readDriverInfo();
  2625. // notify observers: we have a new context!
  2626. // we need to recreate all textures...
  2627. this._notify();
  2628. }
  2629. /**
  2630. * Read debugging information about the video driver of the user
  2631. */
  2632. _readDriverInfo() {
  2633. // Depending on the privacy settings of the browser, this information
  2634. // may be unavailable. When available, it may not be entirely correct.
  2635. // See https://developer.mozilla.org/en-US/docs/Web/API/WEBGL_debug_renderer_info
  2636. const gl = this._gl;
  2637. let debugInfo = null;
  2638. if (navigator.userAgent.includes('Firefox')) {
  2639. this._vendor = ''; //gl.getParameter(gl.VENDOR); // not useful
  2640. this._renderer = gl.getParameter(gl.RENDERER); // only useful on Firefox, apparently
  2641. } else if (null != (debugInfo = gl.getExtension('WEBGL_debug_renderer_info'))) {
  2642. this._vendor = gl.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL);
  2643. this._renderer = gl.getParameter(debugInfo.UNMASKED_RENDERER_WEBGL);
  2644. } else {
  2645. this._vendor = ''; // unavailable information
  2646. this._renderer = '';
  2647. }
  2648. }
  2649. /**
  2650. * Log debugging information about the video driver and the platform
  2651. */
  2652. _logDriverInfo() {
  2653. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('Platform: ' + _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.platformString());
  2654. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL vendor: ' + this.vendor);
  2655. _utils_utils__WEBPACK_IMPORTED_MODULE_0__/* .Utils */ .A.log('GL renderer: ' + this.renderer);
  2656. }
  2657. /**
  2658. * Lose the WebGL context. This is used to manually
  2659. * free resources, and also for purposes of testing
  2660. * @returns {WEBGL_lose_context}
  2661. */
  2662. loseContext() {
  2663. const gl = this._gl;
  2664. // find the appropriate extension
  2665. const ext = gl.getExtension('WEBGL_lose_context');
  2666. if (!ext) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .NotSupportedError */ .EM('WEBGL_lose_context extension is unavailable');
  2667. // nothing to do?
  2668. if (gl.isContextLost()) return ext;
  2669. // disable reinitialization
  2670. this._reinitializeOnContextLoss = false;
  2671. // lose context
  2672. ext.loseContext();
  2673. // done!
  2674. return ext;
  2675. }
  2676. /**
  2677. * Lose & restore the WebGL context
  2678. * @param {number} [secondsToRestore]
  2679. * @return {SpeedyPromise<WEBGL_lose_context>} resolves as soon as the context is restored
  2680. */
  2681. loseAndRestoreContext(secondsToRestore = 1) {
  2682. const ms = Math.max(secondsToRestore, 0) * 1000;
  2683. const ext = this.loseContext();
  2684. return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_2__/* .SpeedyPromise */ .i(resolve => {
  2685. setTimeout(() => {
  2686. //ext.restoreContext();
  2687. this._reinitializeOnContextLoss = true;
  2688. this._reinitialize();
  2689. setTimeout(() => resolve(ext), 0); // next frame
  2690. }, ms);
  2691. });
  2692. }
  2693. /**
  2694. * Power preference for the WebGL context
  2695. * @returns {PowerPreference}
  2696. */
  2697. static get powerPreference() {
  2698. return powerPreference;
  2699. }
  2700. /**
  2701. * Power preference for the WebGL context
  2702. * @param {PowerPreference} value
  2703. */
  2704. static set powerPreference(value) {
  2705. // validate
  2706. if (!(value === 'default' || value === 'low-power' || value === 'high-performance')) throw new _utils_errors__WEBPACK_IMPORTED_MODULE_3__/* .IllegalArgumentError */ .qw(`Invalid powerPreference: "${value}"`);
  2707. // the power preference should be set before we create the WebGL context
  2708. if (instance == null || powerPreference !== value) {
  2709. powerPreference = value;
  2710. // recreate the context if it already exists. Experimental.
  2711. if (instance != null) instance.loseAndRestoreContext();
  2712. }
  2713. }
  2714. /**
  2715. * Check if an instance of SpeedyGL has already been created
  2716. * @returns {boolean}
  2717. */
  2718. static isInitialized() {
  2719. return instance != null;
  2720. }
  2721. }
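// Illustrative usage sketch (assumes the unminified SpeedyGL binding exported by this module;
// this mirrors how the singleton is consumed elsewhere in the bundle):
//
// SpeedyGL.powerPreference = 'high-performance'; // best set before the context is created
// const speedyGL = SpeedyGL.instance;            // lazily creates the canvas + WebGL2 context
// speedyGL.subscribe(() => {
//     // the context was lost and reinitialized: textures and programs tied to the
//     // old context must be recreated against the new speedyGL.gl
// });
// const gl = speedyGL.gl;                        // do not cache this across frames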
  2722. /***/ }),
  2723. /***/ 8581:
  2724. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_105678__) => {
  2725. "use strict";
  2726. /* harmony export */ __nested_webpack_require_105678__.d(__nested_webpack_exports__, {
  2727. /* harmony export */ EM: () => (/* binding */ NotSupportedError),
  2728. /* harmony export */ Er: () => (/* binding */ IllegalOperationError),
  2729. /* harmony export */ FJ: () => (/* binding */ ResourceNotLoadedError),
  2730. /* harmony export */ MU: () => (/* binding */ TimeoutError),
  2731. /* harmony export */ NO: () => (/* binding */ WebAssemblyError),
  2732. /* harmony export */ Uk: () => (/* binding */ AccessDeniedError),
  2733. /* harmony export */ aQ: () => (/* binding */ AbstractMethodError),
  2734. /* harmony export */ kG: () => (/* binding */ FileNotFoundError),
  2735. /* harmony export */ l: () => (/* binding */ OutOfMemoryError),
  2736. /* harmony export */ mB: () => (/* binding */ ParseError),
  2737. /* harmony export */ pf: () => (/* binding */ AssertionError),
  2738. /* harmony export */ qw: () => (/* binding */ IllegalArgumentError),
  2739. /* harmony export */ wB: () => (/* binding */ GLError),
  2740. /* harmony export */ xB: () => (/* binding */ SpeedyError)
  2741. /* harmony export */ });
  2742. /* unused harmony export NotImplementedError */
  2743. /*
  2744. * speedy-vision.js
  2745. * GPU-accelerated Computer Vision for JavaScript
  2746. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  2747. *
  2748. * Licensed under the Apache License, Version 2.0 (the "License");
  2749. * you may not use this file except in compliance with the License.
  2750. * You may obtain a copy of the License at
  2751. *
  2752. * http://www.apache.org/licenses/LICENSE-2.0
  2753. *
  2754. * Unless required by applicable law or agreed to in writing, software
  2755. * distributed under the License is distributed on an "AS IS" BASIS,
  2756. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  2757. * See the License for the specific language governing permissions and
  2758. * limitations under the License.
  2759. *
  2760. * errors.js
  2761. * Error classes
  2762. */
  2763. /** @typedef {SpeedyError|Error|null} SpeedyErrorCause */
  2764. /**
  2765. * Generic error class for Speedy
  2766. */
  2767. class SpeedyError extends Error {
  2768. /**
  2769. * Class constructor
  2770. * @param {string} message message text
  2771. * @param {SpeedyErrorCause} [cause] cause of the error
  2772. */
  2773. constructor(message, cause = null) {
  2774. super([message, cause ? cause.toString() : '[speedy-vision.js]'].join('\n-> '));
  2775. /** @type {SpeedyErrorCause} cause of the error */
  2776. this._cause = cause;
  2777. }
  2778. /**
  2779. * Error name
  2780. * @returns {string}
  2781. */
  2782. get name() {
  2783. return this.constructor.name;
  2784. }
  2785. /**
  2786. * Set error name (ignored)
  2787. * @param {string} _ ignored
  2788. */
  2789. set name(_) {
  2790. void 0;
  2791. }
  2792. /**
  2793. * Get the cause of the error. Available if
  2794. * it has been specified in the constructor
  2795. * @returns {SpeedyErrorCause}
  2796. */
  2797. get cause() {
  2798. return this._cause;
  2799. }
  2800. }
  2801. /**
  2802. * Unsupported operation error
  2803. * The requested operation is not supported
  2804. */
  2805. class NotSupportedError extends SpeedyError {
  2806. /**
  2807. * Class constructor
  2808. * @param {string} [message] additional text
  2809. * @param {SpeedyErrorCause} [cause] cause of the error
  2810. */
  2811. constructor(message = '', cause = null) {
  2812. super(`Unsupported operation. ${message}`, cause);
  2813. }
  2814. }
  2815. /**
  2816. * Not implemented error
  2817. * The called method is not implemented
  2818. */
  2819. class NotImplementedError extends SpeedyError {
  2820. /**
  2821. * Class constructor
  2822. * @param {string} [message] additional text
  2823. * @param {SpeedyErrorCause} [cause] cause of the error
  2824. */
  2825. constructor(message = '', cause = null) {
  2826. super(`Method not implemented. ${message}`, cause);
  2827. }
  2828. }
  2829. /**
  2830. * WebGL error
  2831. */
  2832. class GLError extends SpeedyError {
  2833. /**
  2834. * Class constructor
  2835. * @param {string} [message] additional text
  2836. * @param {SpeedyErrorCause} [cause] cause of the error
  2837. */
  2838. constructor(message = '', cause = null) {
  2839. super(`WebGL error. ${message}`, cause);
  2840. }
  2841. /**
  2842. * Get an error object describing the latest WebGL error
  2843. * @param {WebGL2RenderingContext} gl
  2844. * @returns {GLError}
  2845. */
  2846. static from(gl) {
  2847. const recognizedErrors = ['NO_ERROR', 'INVALID_ENUM', 'INVALID_VALUE', 'INVALID_OPERATION', 'INVALID_FRAMEBUFFER_OPERATION', 'OUT_OF_MEMORY', 'CONTEXT_LOST_WEBGL'];
  2848. const glError = gl.getError();
  2849. const message = recognizedErrors.find(error => gl[error] == glError) || 'Unknown';
  2850. return new GLError(message);
  2851. }
  2852. }
  2853. /**
  2854. * AbstractMethodError
  2855. * Thrown when one tries to call an abstract method
  2856. */
  2857. class AbstractMethodError extends SpeedyError {
  2858. /**
  2859. * Class constructor
  2860. * @param {string} [message] additional text
  2861. * @param {SpeedyErrorCause} [cause] cause of the error
  2862. */
  2863. constructor(message = '', cause = null) {
  2864. super(`Can't call abstract method. ${message}`, cause);
  2865. }
  2866. }
  2867. /**
  2868. * Illegal argument error
  2869. * A method has received one or more illegal arguments
  2870. */
  2871. class IllegalArgumentError extends SpeedyError {
  2872. /**
  2873. * Class constructor
  2874. * @param {string} [message] additional text
  2875. * @param {SpeedyErrorCause} [cause] cause of the error
  2876. */
  2877. constructor(message = '', cause = null) {
  2878. super(`Illegal argument. ${message}`, cause);
  2879. }
  2880. }
  2881. /**
  2882. * Illegal operation error
  2883. * The method arguments are valid, but the method can't
2884. * be called due to the current state of the object
  2885. */
  2886. class IllegalOperationError extends SpeedyError {
  2887. /**
  2888. * Class constructor
  2889. * @param {string} [message] additional text
  2890. * @param {SpeedyErrorCause} [cause] cause of the error
  2891. */
  2892. constructor(message = '', cause = null) {
  2893. super(`Illegal operation. ${message}`, cause);
  2894. }
  2895. }
  2896. /**
  2897. * Out of memory
  2898. */
  2899. class OutOfMemoryError extends SpeedyError {
  2900. /**
  2901. * Class constructor
  2902. * @param {string} [message] additional text
  2903. * @param {SpeedyErrorCause} [cause] cause of the error
  2904. */
  2905. constructor(message = '', cause = null) {
  2906. super(`Out of memory. ${message}`, cause);
  2907. }
  2908. }
  2909. /**
  2910. * File not found error
  2911. */
  2912. class FileNotFoundError extends SpeedyError {
  2913. /**
  2914. * Class constructor
  2915. * @param {string} [message] additional text
  2916. * @param {SpeedyErrorCause} [cause] cause of the error
  2917. */
  2918. constructor(message = '', cause = null) {
  2919. super(`File not found. ${message}`, cause);
  2920. }
  2921. }
  2922. /**
  2923. * Resource not loaded error
  2924. */
  2925. class ResourceNotLoadedError extends SpeedyError {
  2926. /**
  2927. * Class constructor
  2928. * @param {string} [message] additional text
  2929. * @param {SpeedyErrorCause} [cause] cause of the error
  2930. */
  2931. constructor(message = '', cause = null) {
  2932. super(`Resource not loaded. ${message}`, cause);
  2933. }
  2934. }
  2935. /**
  2936. * Timeout error
  2937. */
  2938. class TimeoutError extends SpeedyError {
  2939. /**
  2940. * Class constructor
  2941. * @param {string} [message] additional text
  2942. * @param {SpeedyErrorCause} [cause] cause of the error
  2943. */
  2944. constructor(message = '', cause = null) {
  2945. super(`Timeout error. ${message}`, cause);
  2946. }
  2947. }
  2948. /**
  2949. * Parse error
  2950. */
  2951. class ParseError extends SpeedyError {
  2952. /**
  2953. * Class constructor
  2954. * @param {string} [message] additional text
  2955. * @param {SpeedyErrorCause} [cause] cause of the error
  2956. */
  2957. constructor(message = '', cause = null) {
  2958. super(`Parse error. ${message}`, cause);
  2959. }
  2960. }
  2961. /**
  2962. * Assertion error
  2963. */
  2964. class AssertionError extends SpeedyError {
  2965. /**
  2966. * Class constructor
  2967. * @param {string} [message] additional text
  2968. * @param {SpeedyErrorCause} [cause] cause of the error
  2969. */
  2970. constructor(message = '', cause = null) {
  2971. super(`Assertion failed. ${message}`, cause);
  2972. }
  2973. }
  2974. /**
  2975. * Access denied
  2976. */
  2977. class AccessDeniedError extends SpeedyError {
  2978. /**
  2979. * Class constructor
  2980. * @param {string} [message] additional text
  2981. * @param {SpeedyErrorCause} [cause] cause of the error
  2982. */
  2983. constructor(message = '', cause = null) {
  2984. super(`Access denied. ${message}`, cause);
  2985. }
  2986. }
  2987. /**
  2988. * WebAssembly error
  2989. */
  2990. class WebAssemblyError extends SpeedyError {
  2991. /**
  2992. * Class constructor
  2993. * @param {string} [message] additional text
  2994. * @param {SpeedyErrorCause} [cause] cause of the error
  2995. */
  2996. constructor(message = '', cause = null) {
  2997. super(`WebAssembly error. ${message}`, cause);
  2998. }
  2999. }
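// Illustrative sketch of the cause-chaining behavior defined above (hypothetical call site;
// `gl` stands for an existing WebGL2RenderingContext, and the class names are the ones
// exported by this module):
//
// try {
//     throw GLError.from(gl);                            // e.g. "WebGL error. INVALID_OPERATION"
// } catch (cause) {
//     const err = new IllegalOperationError(`Can't run the shader`, cause);
//     // err.message joins both messages with '\n-> ', err.cause === cause,
//     // and err.name === 'IllegalOperationError' (via the name getter above)
// }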
  3000. /***/ }),
  3001. /***/ 3816:
  3002. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_113692__) => {
  3003. "use strict";
  3004. __nested_webpack_require_113692__.r(__nested_webpack_exports__);
  3005. /* harmony export */ __nested_webpack_require_113692__.d(__nested_webpack_exports__, {
  3006. /* harmony export */ DEFAULT_ENCODER_CAPACITY: () => (/* binding */ DEFAULT_ENCODER_CAPACITY),
  3007. /* harmony export */ FIX_BITS: () => (/* binding */ FIX_BITS),
  3008. /* harmony export */ FIX_RESOLUTION: () => (/* binding */ FIX_RESOLUTION),
  3009. /* harmony export */ LITTLE_ENDIAN: () => (/* binding */ LITTLE_ENDIAN),
  3010. /* harmony export */ LOG2_MAX_DESCRIPTOR_SIZE: () => (/* binding */ LOG2_MAX_DESCRIPTOR_SIZE),
  3011. /* harmony export */ LOG2_PYRAMID_MAX_SCALE: () => (/* binding */ LOG2_PYRAMID_MAX_SCALE),
  3012. /* harmony export */ MATCH_INDEX_BITS: () => (/* binding */ MATCH_INDEX_BITS),
  3013. /* harmony export */ MATCH_INDEX_MASK: () => (/* binding */ MATCH_INDEX_MASK),
  3014. /* harmony export */ MATCH_MAX_DISTANCE: () => (/* binding */ MATCH_MAX_DISTANCE),
  3015. /* harmony export */ MATCH_MAX_INDEX: () => (/* binding */ MATCH_MAX_INDEX),
  3016. /* harmony export */ MAX_DESCRIPTOR_SIZE: () => (/* binding */ MAX_DESCRIPTOR_SIZE),
  3017. /* harmony export */ MAX_ENCODER_CAPACITY: () => (/* binding */ MAX_ENCODER_CAPACITY),
  3018. /* harmony export */ MAX_TEXTURE_LENGTH: () => (/* binding */ MAX_TEXTURE_LENGTH),
  3019. /* harmony export */ MIN_ENCODER_LENGTH: () => (/* binding */ MIN_ENCODER_LENGTH),
  3020. /* harmony export */ MIN_KEYPOINT_SIZE: () => (/* binding */ MIN_KEYPOINT_SIZE),
  3021. /* harmony export */ PYRAMID_MAX_LEVELS: () => (/* binding */ PYRAMID_MAX_LEVELS),
  3022. /* harmony export */ PYRAMID_MAX_SCALE: () => (/* binding */ PYRAMID_MAX_SCALE)
  3023. /* harmony export */ });
  3024. /*
  3025. * speedy-vision.js
  3026. * GPU-accelerated Computer Vision for JavaScript
  3027. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3028. *
  3029. * Licensed under the Apache License, Version 2.0 (the "License");
  3030. * you may not use this file except in compliance with the License.
  3031. * You may obtain a copy of the License at
  3032. *
  3033. * http://www.apache.org/licenses/LICENSE-2.0
  3034. *
  3035. * Unless required by applicable law or agreed to in writing, software
  3036. * distributed under the License is distributed on an "AS IS" BASIS,
  3037. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3038. * See the License for the specific language governing permissions and
  3039. * limitations under the License.
  3040. *
  3041. * globals.js
  3042. * Global constants
  3043. */
  3044. // -----------------------------------------------------------------
  3045. // IMAGE PYRAMIDS & SCALE-SPACE
  3046. // -----------------------------------------------------------------
  3047. /** @type {number} The maximum number of levels in a pyramid, considering a scale factor of 2x between levels */
  3048. const PYRAMID_MAX_LEVELS = 8;
  3049. /** @type {number} The base-2 logarithm of PYRAMID_MAX_SCALE */
  3050. const LOG2_PYRAMID_MAX_SCALE = 0;
  3051. /** @type {number} The maximum supported scale for a pyramid level */
  3052. const PYRAMID_MAX_SCALE = 1 << LOG2_PYRAMID_MAX_SCALE;
  3053. // -----------------------------------------------------------------
  3054. // FIXED-POINT MATH
  3055. // -----------------------------------------------------------------
  3056. /** @type {number} How many bits do we use to store fractional data? */
  3057. const FIX_BITS = 3; // step size: 0.125 = 1/2^FIX_BITS
  3058. /** @type {number} Fixed-point resolution */
  3059. const FIX_RESOLUTION = 1 << FIX_BITS; // float(2^(FIX_BITS))
  3060. // -----------------------------------------------------------------
  3061. // TEXTURE LIMITS
  3062. // -----------------------------------------------------------------
  3063. /** @type {number} Maximum texture length (width, height) */
  3064. const MAX_TEXTURE_LENGTH = (1 << 16 - FIX_BITS) - 1; // must be 2^n - 1 due to keypoint encoding
  3065. // -----------------------------------------------------------------
  3066. // KEYPOINTS
  3067. // -----------------------------------------------------------------
  3068. /** @type {number} Size of a keypoint header, in bytes (must be divisible by 4) */
  3069. const MIN_KEYPOINT_SIZE = 8;
  3070. /** @type {number} Minimum length of a keypoint encoder, in pixels (encodes at least 1 keypoint) */
  3071. const MIN_ENCODER_LENGTH = 2; // capacity computations are based on this // Math.ceil(Math.sqrt(MIN_KEYPOINT_SIZE / 4));
  3072. /** @type {number} Maximum number of keypoints we can encode (the actual length of the encoder may vary) */
  3073. const MAX_ENCODER_CAPACITY = 8192;
  3074. /** @type {number} Default capacity of a keypoint encoder (64x64 texture with 2 pixels per keypoint) */
  3075. const DEFAULT_ENCODER_CAPACITY = 2048;
  3076. /** @type {number} log2 of MAX_DESCRIPTOR_SIZE */
  3077. const LOG2_MAX_DESCRIPTOR_SIZE = 6;
  3078. /** @type {number} maximum size of a keypoint descriptor, in bytes */
  3079. const MAX_DESCRIPTOR_SIZE = 1 << LOG2_MAX_DESCRIPTOR_SIZE;
  3080. /** @type {number} How many bits will we use when encoding the index of a keypoint match? */
  3081. const MATCH_INDEX_BITS = 32 - (LOG2_MAX_DESCRIPTOR_SIZE + 3); // 32 - log2(MAX_DESCRIPTOR_SIZE * 8)
  3082. /** @type {number} Bitwise mask to extract a keypoint index from an encoded match */
  3083. const MATCH_INDEX_MASK = (1 << MATCH_INDEX_BITS) - 1;
  3084. /** @type {number} Maximum size of the database of keypoints for matching */
  3085. const MATCH_MAX_INDEX = (1 << MATCH_INDEX_BITS) - 1;
  3086. /** @type {number} The maximum distance that can be stored in a match */
  3087. const MATCH_MAX_DISTANCE = (1 << 32 - MATCH_INDEX_BITS) - 1;
  3088. // -----------------------------------------------------------------
  3089. // MISC
  3090. // -----------------------------------------------------------------
3091. /** @type {boolean} Are we on a little-endian machine? */
  3092. const LITTLE_ENDIAN = function () {
  3093. return 0xCAFE === new Uint16Array(new Uint8Array([0xFE, 0xCA]).buffer)[0];
  3094. }();
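// Worked values implied by the constants above (plain arithmetic, for reference):
//
// FIX_RESOLUTION      = 1 << 3              = 8        (fixed-point step = 1/8 pixel)
// MAX_TEXTURE_LENGTH  = (1 << (16 - 3)) - 1 = 8191
// MAX_DESCRIPTOR_SIZE = 1 << 6              = 64 bytes
// MATCH_INDEX_BITS    = 32 - (6 + 3)        = 23
// MATCH_INDEX_MASK    = MATCH_MAX_INDEX     = 2^23 - 1 = 8388607
// MATCH_MAX_DISTANCE  = 2^(32 - 23) - 1     = 511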
  3095. /***/ }),
  3096. /***/ 3211:
  3097. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_119275__) => {
  3098. "use strict";
  3099. /* harmony export */ __nested_webpack_require_119275__.d(__nested_webpack_exports__, {
  3100. /* harmony export */ c: () => (/* binding */ Observable)
  3101. /* harmony export */ });
  3102. /*
  3103. * speedy-vision.js
  3104. * GPU-accelerated Computer Vision for JavaScript
  3105. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3106. *
  3107. * Licensed under the Apache License, Version 2.0 (the "License");
  3108. * you may not use this file except in compliance with the License.
  3109. * You may obtain a copy of the License at
  3110. *
  3111. * http://www.apache.org/licenses/LICENSE-2.0
  3112. *
  3113. * Unless required by applicable law or agreed to in writing, software
  3114. * distributed under the License is distributed on an "AS IS" BASIS,
  3115. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3116. * See the License for the specific language governing permissions and
  3117. * limitations under the License.
  3118. *
  3119. * observable.js
  3120. * Observer design pattern
  3121. */
  3122. /**
  3123. * Implementation of the Observer design pattern
  3124. * @abstract
  3125. */
  3126. class Observable {
  3127. /**
  3128. * Constructor
  3129. */
  3130. constructor() {
  3131. /** @type {Function[]} subscribers / callbacks */
  3132. this._subscribers = [];
  3133. /** @type {object[]} "this" pointers */
  3134. this._thisptr = [];
  3135. /** @type {Array<any[]>} function arguments */
  3136. this._args = [];
  3137. }
  3138. /**
  3139. * Add subscriber
  3140. * @param {Function} fn callback
  3141. * @param {object} [thisptr] "this" pointer to be used when invoking the callback
  3142. * @param {...any} args arguments to be passed to the callback
  3143. */
  3144. subscribe(fn, thisptr, ...args) {
  3145. this._subscribers.push(fn);
  3146. this._thisptr.push(thisptr);
  3147. this._args.push(args);
  3148. }
  3149. /**
  3150. * Remove subscriber
  3151. * @param {Function} fn previously added callback
  3152. * @param {object} [thisptr] "this" pointer
  3153. */
  3154. unsubscribe(fn, thisptr) {
  3155. for (let j = this._subscribers.length - 1; j >= 0; j--) {
  3156. if (this._subscribers[j] === fn && this._thisptr[j] === thisptr) {
  3157. this._subscribers.splice(j, 1);
  3158. this._thisptr.splice(j, 1);
  3159. this._args.splice(j, 1);
  3160. break;
  3161. }
  3162. }
  3163. }
  3164. /**
  3165. * Notify all subscribers about a state change
  3166. * @protected
  3167. */
  3168. _notify() {
  3169. for (let i = 0; i < this._subscribers.length; i++) this._subscribers[i].apply(this._thisptr[i], this._args[i]);
  3170. }
  3171. }
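// Illustrative usage sketch of the Observable contract (hypothetical subclass):
//
// class Counter extends Observable {
//     constructor() { super(); this._value = 0; }
//     increment() { this._value++; this._notify(); }    // notify all subscribers
//     get value() { return this._value; }
// }
//
// const counter = new Counter();
// const onChange = () => console.log('value is now', counter.value);
// counter.subscribe(onChange);       // optionally: subscribe(fn, thisptr, ...args)
// counter.increment();               // logs "value is now 1"
// counter.unsubscribe(onChange);     // removes the (fn, thisptr) pair added above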
  3172. /***/ }),
  3173. /***/ 6049:
  3174. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_121659__) => {
  3175. "use strict";
  3176. /* harmony export */ __nested_webpack_require_121659__.d(__nested_webpack_exports__, {
  3177. /* harmony export */ f5: () => (/* binding */ ImageFormat),
  3178. /* harmony export */ kQ: () => (/* binding */ PixelComponent),
  3179. /* harmony export */ kg: () => (/* binding */ ColorComponentId),
  3180. /* harmony export */ zu: () => (/* binding */ MediaType)
  3181. /* harmony export */ });
  3182. /*
  3183. * speedy-vision.js
  3184. * GPU-accelerated Computer Vision for JavaScript
  3185. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3186. *
  3187. * Licensed under the Apache License, Version 2.0 (the "License");
  3188. * you may not use this file except in compliance with the License.
  3189. * You may obtain a copy of the License at
  3190. *
  3191. * http://www.apache.org/licenses/LICENSE-2.0
  3192. *
  3193. * Unless required by applicable law or agreed to in writing, software
  3194. * distributed under the License is distributed on an "AS IS" BASIS,
  3195. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3196. * See the License for the specific language governing permissions and
  3197. * limitations under the License.
  3198. *
  3199. * types.js
  3200. * Types & formats
  3201. */
  3202. /**
  3203. * Media types
  3204. * @enum {Symbol}
  3205. */
  3206. const MediaType = Object.freeze({
  3207. Image: Symbol('Image'),
  3208. Video: Symbol('Video'),
  3209. Canvas: Symbol('Canvas'),
  3210. OffscreenCanvas: Symbol('OffscreenCanvas'),
  3211. Bitmap: Symbol('Bitmap'),
  3212. Data: Symbol('Data')
  3213. });
  3214. /**
  3215. * Image formats
  3216. * @enum {Symbol}
  3217. */
  3218. const ImageFormat = Object.freeze({
  3219. RGBA: Symbol('RGBA'),
  3220. GREY: Symbol('GREY')
  3221. });
  3222. /**
  3223. * Pixel component (bitwise flags)
  3224. * @typedef {number} PixelComponent
  3225. */
  3226. const PixelComponent = Object.freeze({
  3227. RED: 1,
  3228. GREEN: 2,
  3229. BLUE: 4,
  3230. ALPHA: 8,
  3231. ALL: 15 // = RED | GREEN | BLUE | ALPHA
  3232. });
  3233. /**
  3234. * Component ID utility
  3235. */
  3236. const ColorComponentId = Object.freeze({
  3237. [PixelComponent.RED]: 0,
  3238. [PixelComponent.GREEN]: 1,
  3239. [PixelComponent.BLUE]: 2,
  3240. [PixelComponent.ALPHA]: 3
  3241. });
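// Illustrative sketch of how the bitwise flags above compose (plain JavaScript):
//
// const components = PixelComponent.RED | PixelComponent.ALPHA;   // = 1 | 8 = 9
// const hasGreen = (components & PixelComponent.GREEN) !== 0;     // false
// const redChannelIndex = ColorComponentId[PixelComponent.RED];   // 0 (RGBA order)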
  3242. /***/ }),
  3243. /***/ 9037:
  3244. /***/ ((__unused_webpack_module, __nested_webpack_exports__, __nested_webpack_require_123644__) => {
  3245. "use strict";
  3246. /* harmony export */ __nested_webpack_require_123644__.d(__nested_webpack_exports__, {
  3247. /* harmony export */ A: () => (/* binding */ Utils)
  3248. /* harmony export */ });
  3249. /* harmony import */ var _errors__WEBPACK_IMPORTED_MODULE_0__ = __nested_webpack_require_123644__(8581);
  3250. /* harmony import */ var _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__ = __nested_webpack_require_123644__(9192);
  3251. /* harmony import */ var _core_settings__WEBPACK_IMPORTED_MODULE_2__ = __nested_webpack_require_123644__(2199);
  3252. /*
  3253. * speedy-vision.js
  3254. * GPU-accelerated Computer Vision for JavaScript
  3255. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  3256. *
  3257. * Licensed under the Apache License, Version 2.0 (the "License");
  3258. * you may not use this file except in compliance with the License.
  3259. * You may obtain a copy of the License at
  3260. *
  3261. * http://www.apache.org/licenses/LICENSE-2.0
  3262. *
  3263. * Unless required by applicable law or agreed to in writing, software
  3264. * distributed under the License is distributed on an "AS IS" BASIS,
  3265. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  3266. * See the License for the specific language governing permissions and
  3267. * limitations under the License.
  3268. *
  3269. * utils.js
  3270. * Generic utilities
  3271. */
  3272. /**
  3273. * Generic utilities
  3274. */
  3275. class Utils {
  3276. /**
  3277. * Generates a warning
  3278. * @param {string} text message text
  3279. * @param {...string} args optional text
  3280. */
  3281. static warning(text, ...args) {
  3282. //if(Settings.logging === 'default' || Settings.logging === 'diagnostic') // TODO: warnings & errors only?
  3283. if (_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none') console.warn('[speedy-vision] ' + text, ...args);
  3284. }
  3285. /**
  3286. * Logs a message
  3287. * @param {string} text message text
  3288. * @param {...string} args optional text
  3289. */
  3290. static log(text, ...args) {
  3291. if (_core_settings__WEBPACK_IMPORTED_MODULE_2__/* .Settings */ .w.logging !== 'none') console.log('[speedy-vision] ' + text, ...args);
  3292. }
  3293. /**
  3294. * Assertion
  3295. * @param {boolean} expr expression
  3296. * @param {string} [text] error message
  3297. * @throws {AssertionError}
  3298. */
  3299. static assert(expr, text = '') {
  3300. if (!expr) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AssertionError */ .pf(text);
  3301. }
  3302. /**
  3303. * Gets the names of the arguments of the specified function
  3304. * @param {Function} fun
  3305. * @returns {string[]}
  3306. */
  3307. static functionArguments(fun) {
  3308. const code = fun.toString();
  3309. const regex = code.startsWith('function') ? 'function\\s.*\\(([^)]*)\\)' : code.startsWith('(') ? '\\(([^)]*)\\).*=>' : '([^=]+).*=>';
  3310. const match = new RegExp(regex).exec(code);
  3311. if (match !== null) {
  3312. const args = match[1].replace(/\/\*.*?\*\//g, ''); // remove comments
  3313. return args.split(',').map(argname => argname.replace(/=.*$/, '').trim() // remove default params & trim
  3314. ).filter(argname => argname // handle trailing commas
  3315. );
  3316. } else throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .ParseError */ .mB(`Can't detect function arguments of ${code}`);
  3317. }
  3318. /**
  3319. * Get all property descriptors from an object,
  3320. * traversing its entire prototype chain
  3321. * @param {object} obj
  3322. * @returns {object}
  3323. */
  3324. static getAllPropertyDescriptors(obj) {
  3325. if (obj) {
  3326. const proto = Object.getPrototypeOf(obj);
  3327. return Object.assign(Object.assign({}, Utils.getAllPropertyDescriptors(proto)), Object.getOwnPropertyDescriptors(obj));
  3328. } else return Object.create(null);
  3329. }
  3330. /**
  3331. * Creates a HTMLCanvasElement with the given dimensions
  3332. * @param {number} width in pixels
  3333. * @param {number} height in pixels
  3334. * @returns {HTMLCanvasElement}
  3335. */
  3336. static createCanvas(width, height) {
  3337. const canvas = document.createElement('canvas');
  3338. canvas.width = width;
  3339. canvas.height = height;
  3340. return canvas;
  3341. }
  3342. /**
  3343. * Generate a 1D gaussian kernel with custom sigma
  3344. * Tip: use kernelSize >= (5 * sigma), kernelSize odd
  3345. * @param {number} sigma gaussian sigma
  3346. * @param {number} [kernelSize] kernel size, odd number
  3347. * @param {boolean} [normalized] normalize entries so that their sum is 1
  3348. * @returns {number[]}
  3349. */
  3350. static gaussianKernel(sigma, kernelSize = 0, normalized = true) {
  3351. /*
  3352. * Let G(x) be a Gaussian function centered at 0 with fixed sigma:
  3353. *
  3354. * G(x) = (1 / (sigma * sqrt(2 * pi))) * exp(-(x / (sqrt(2) * sigma))^2)
  3355. *
  3356. * In addition, let f(p) be a kernel value at pixel p, -k/2 <= p <= k/2:
  3357. *
  3358. * f(p) = \int_{p - 0.5}^{p + 0.5} G(x) dx (integrate around p)
  3359. * = \int_{0}^{p + 0.5} G(x) dx - \int_{0}^{p - 0.5} G(x) dx
  3360. *
  3361. * Setting a constant c := sqrt(2) * sigma, it follows that:
  3362. *
  3363. * f(p) = (1 / 2c) * (erf((p + 0.5) / c) - erf((p - 0.5) / c))
  3364. */
  3365. // default kernel size
  3366. if (kernelSize == 0) {
  3367. kernelSize = Math.ceil(5.0 * sigma) | 0;
  3368. kernelSize += 1 - kernelSize % 2;
  3369. }
  3370. // validate input
  3371. kernelSize |= 0;
  3372. if (kernelSize < 1 || kernelSize % 2 == 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid kernel size given to gaussianKernel: ${kernelSize} x 1`);else if (sigma <= 0.0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Invalid sigma given to gaussianKernel: ${sigma}`);
  3373. // function erf(x) = -erf(-x) can be approximated numerically. See:
  3374. // https://en.wikipedia.org/wiki/Error_function#Numerical_approximations
  3375. const kernel = new Array(kernelSize);
  3376. // set constants
  3377. const N = kernelSize >> 1; // integer (floor, div 2)
  3378. const c = +sigma * 1.4142135623730951; // sigma * sqrt(2)
  3379. const m = 0.3275911;
  3380. const a1 = 0.254829592;
  3381. const a2 = -0.284496736;
  3382. const a3 = 1.421413741;
  3383. const a4 = -1.453152027;
  3384. const a5 = 1.061405429;
  3385. // compute the kernel
  3386. let sum = 0.0;
  3387. for (let j = 0; j < kernelSize; j++) {
  3388. let xa = (j - N + 0.5) / c;
  3389. let xb = (j - N - 0.5) / c;
  3390. let sa = 1.0,
  3391. sb = 1.0;
  3392. if (xa < 0.0) {
  3393. sa = -1.0;
  3394. xa = -xa;
  3395. }
  3396. if (xb < 0.0) {
  3397. sb = -1.0;
  3398. xb = -xb;
  3399. }
  3400. const ta = 1.0 / (1.0 + m * xa);
  3401. const tb = 1.0 / (1.0 + m * xb);
  3402. const pa = ((((a5 * ta + a4) * ta + a3) * ta + a2) * ta + a1) * ta;
  3403. const pb = ((((a5 * tb + a4) * tb + a3) * tb + a2) * tb + a1) * tb;
  3404. const ya = 1.0 - pa * Math.exp(-xa * xa);
  3405. const yb = 1.0 - pb * Math.exp(-xb * xb);
  3406. const erfa = sa * ya;
  3407. const erfb = sb * yb;
  3408. const fp = (erfa - erfb) / (2.0 * c);
  3409. kernel[j] = fp;
  3410. sum += fp;
  3411. }
  3412. // normalize the kernel
  3413. if (normalized) {
  3414. for (let j = 0; j < kernelSize; j++) kernel[j] /= sum;
  3415. }
  3416. // done!
  3417. return kernel;
  3418. }
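// Illustrative result (sigma = 1, kernelSize = 5, normalized = true):
// Utils.gaussianKernel(1.0, 5) returns approximately
//     [0.0613, 0.2448, 0.3877, 0.2448, 0.0613]
// i.e. each entry integrates the Gaussian over a 1-pixel bin (via the erf approximation
// above) and the entries sum to 1.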
  3419. /**
  3420. * Generate a 2D kernel in column-major format using two separable 1D kernels
  3421. * @param {number[]} ka 1D kernel
  3422. * @param {number[]} [kb]
  3423. * @returns {number[]}
  3424. */
  3425. static kernel2d(ka, kb = ka) {
  3426. const ksize = ka.length;
3427. Utils.assert(ka.length == kb.length);
  3428. Utils.assert(ksize >= 1 && ksize % 2 == 1);
  3429. // compute the outer product ka x kb
  3430. let kernel2d = new Array(ksize * ksize),
  3431. k = 0;
  3432. for (let col = 0; col < ksize; col++) {
  3433. for (let row = 0; row < ksize; row++) kernel2d[k++] = ka[row] * kb[col];
  3434. }
  3435. return kernel2d;
  3436. }
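// Illustrative result (column-major outer product, as computed above, with kb defaulting to ka):
// Utils.kernel2d([1, 2, 1]) returns [1, 2, 1,  2, 4, 2,  1, 2, 1]   // a 3x3 kernel, flattened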
  3437. /**
  3438. * Cartesian product a x b: [ [ai, bj] for all i, j ]
  3439. * @param {number[]} a
  3440. * @param {number[]} b
  3441. * @returns {Array<[number,number]>}
  3442. */
  3443. static cartesian(a, b) {
  3444. return [].concat(...a.map(a => b.map(b => [a, b])));
  3445. }
  3446. /**
  3447. * Symmetric range
  3448. * @param {number} n non-negative integer
  3449. * @returns {number[]} [ -n, ..., n ]
  3450. */
  3451. static symmetricRange(n) {
  3452. if ((n |= 0) < 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a non-negative integer as input`);
  3453. return [...Array(2 * n + 1).keys()].map(x => x - n);
  3454. }
  3455. /**
  3456. * Compute the [0, n) range of integers
  3457. * @param {number} n positive integer
  3458. * @returns {number[]} [ 0, 1, ..., n-1 ]
  3459. */
  3460. static range(n) {
  3461. if ((n |= 0) <= 0) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .IllegalArgumentError */ .qw(`Expected a positive integer as input`);
  3462. return [...Array(n).keys()];
  3463. }
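// Illustrative results of the small range helpers above:
// Utils.symmetricRange(2)        -> [-2, -1, 0, 1, 2]
// Utils.range(3)                 -> [0, 1, 2]
// Utils.cartesian([0, 1], [5])   -> [[0, 5], [1, 5]]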
  3464. /**
  3465. * Shuffle in-place
  3466. * @template T
  3467. * @param {T[]} arr
  3468. * @returns {T[]} arr
  3469. */
  3470. static shuffle(arr) {
  3471. const len = arr.length;
  3472. const m = len - 1;
3473. // Fisher-Yates
  3474. for (let i = 0; i < m; i++) {
  3475. const j = i + (Math.random() * (len - i) | 0); // i <= j < arr.length
  3476. if (i !== j) {
  3477. const t = arr[i];
  3478. arr[i] = arr[j];
  3479. arr[j] = t;
  3480. }
  3481. }
  3482. return arr;
  3483. }
  3484. /**
  3485. * Flatten an array (1 level only)
  3486. * @template U
  3487. * @param {U[]} array
  3488. * @returns {U[]}
  3489. */
  3490. static flatten(array) {
  3491. //return array.flat();
  3492. //return array.reduce((arr, val) => arr.concat(val), []);
  3493. const flat = [];
  3494. for (let i = 0, n = array.length; i < n; i++) {
  3495. const entry = array[i];
  3496. if (Array.isArray(entry)) {
  3497. for (let j = 0, m = entry.length; j < m; j++) flat.push(entry[j]);
  3498. } else flat.push(entry);
  3499. }
  3500. return flat;
  3501. }
  3502. /**
  3503. * Decode a 16-bit float from a
  3504. * unsigned 16-bit integer
  3505. * @param {number} uint16
  3506. * @returns {number}
  3507. */
  3508. static decodeFloat16(uint16) {
  3509. // decode according to sec 2.1.2
  3510. // 16-Bit Floating Point Numbers
  3511. // of the OpenGL ES 3 spec
  3512. const s = (uint16 & 0xFFFF) >> 15; // sign bit
  3513. const e = (uint16 & 0x7FFF) >> 10; // exponent
  3514. const m = uint16 & 0x3FF; // mantissa
  3515. const sign = 1 - 2 * s; // (-1)^s
  3516. if (e == 0) return m == 0 ? sign * 0.0 : sign * m * 5.960464477539063e-8; // zero / subnormal
  3517. else if (e == 31) return m == 0 ? sign * Number.POSITIVE_INFINITY : Number.NaN;
  3518. const f = e >= 15 ? 1 << e - 15 : 1.0 / (1 << 15 - e); // 2^(e-15)
  3519. return sign * f * (1.0 + m * 0.0009765625); // normal
  3520. }
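// Illustrative decodings (IEEE 754 half-precision bit patterns, per the spec cited above):
// Utils.decodeFloat16(0x3C00) ->  1.0     (sign 0, exponent 15, mantissa 0)
// Utils.decodeFloat16(0xC000) -> -2.0
// Utils.decodeFloat16(0x3800) ->  0.5
// Utils.decodeFloat16(0x0000) ->  0.0     (0x7C00 -> +Infinity, nonzero mantissa with e = 31 -> NaN)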
  3521. /**
  3522. * Wrapper around getUserMedia()
  3523. * @param {MediaStreamConstraints} [constraints] will be passed to getUserMedia()
  3524. * @returns {SpeedyPromise<HTMLVideoElement>}
  3525. */
  3526. static requestCameraStream(constraints = {
  3527. audio: false,
  3528. video: true
  3529. }) {
  3530. Utils.log('Accessing the webcam...');
  3531. if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) throw new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM('Unsupported browser: no mediaDevices.getUserMedia()');
  3532. return new _core_speedy_promise__WEBPACK_IMPORTED_MODULE_1__/* .SpeedyPromise */ .i((resolve, reject) => {
  3533. navigator.mediaDevices.getUserMedia(constraints).then(stream => {
  3534. const video = document.createElement('video');
  3535. video.onloadedmetadata = () => {
  3536. video.play();
  3537. Utils.log(`The camera is on! Resolution: ${video.videoWidth} x ${video.videoHeight}`);
  3538. resolve(video);
  3539. };
  3540. video.setAttribute('playsinline', '');
  3541. video.setAttribute('autoplay', '');
  3542. if (constraints.audio === false || constraints.audio === undefined) video.setAttribute('muted', '');
  3543. video.srcObject = stream;
  3544. }).catch(err => {
  3545. if (err.name === 'NotAllowedError') {
  3546. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .AccessDeniedError */ .Uk(`Please give access to the camera and reload the page.`, err));
  3547. } else if (err.name === 'OverconstrainedError' || err.name === 'NotFoundError') {
  3548. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .NotSupportedError */ .EM(`Can't access the webcam with the requested constraints: ${JSON.stringify(constraints)}.`, err));
  3549. } else {
  3550. reject(new _errors__WEBPACK_IMPORTED_MODULE_0__/* .SpeedyError */ .xB(`Can't access the webcam.`, err));
  3551. }
  3552. });
  3553. });
  3554. }
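// Illustrative usage sketch (browser context only; the constraints object is the standard
// MediaStreamConstraints dictionary passed straight to getUserMedia, and the returned
// SpeedyPromise is assumed to accept the usual two-argument then):
//
// Utils.requestCameraStream({ audio: false, video: { width: 640, height: 480 } })
//      .then(video => document.body.appendChild(video),
//            err => console.error(err.message));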
  3555. /**
  3556. * Format binary data as a string with hex values
  3557. * @param {ArrayBuffer} bytes
  3558. * @returns {string}
  3559. */
  3560. static formatBinaryData(bytes) {
  3561. const uint8 = new Uint8Array(bytes);
  3562. const array = Array.from(uint8, b => b.toString(16).padStart(2, '0'));
  3563. return array.join(' ');
  3564. }
  3565. /**
  3566. * Returns a string containing platform brand information
  3567. * @returns {string}
  3568. */
  3569. static platformString() {
  3570. // navigator.userAgent is easily and often spoofed, and thus is unreliable
  3571. // use the NavigatorUAData interface if available
  3572. if (typeof navigator.userAgentData === 'object') {
  3573. // use only low entropy data, so we don't need to ask the permission
  3574. // of the user to read this string
  3575. return navigator.userAgentData.platform;
  3576. }
  3577. // navigator.platform is deprecated. It can be spoofed on Firefox, but,
3578. // at the time of this writing, there is apparently no alternative.
  3579. return navigator.platform;
  3580. }
  3581. }
  3582. /***/ }),
  3583. /***/ 5235:
  3584. /***/ ((module, __unused_webpack_exports, __nested_webpack_require_136472__) => {
  3585. var map = {
  3586. "./colors.glsl": 8609,
  3587. "./filters.glsl": 4672,
  3588. "./fixed-point.glsl": 9778,
  3589. "./float16.glsl": 8710,
  3590. "./global.glsl": 2434,
  3591. "./int32.glsl": 439,
  3592. "./keypoint-descriptors.glsl": 8545,
  3593. "./keypoint-matches.glsl": 6762,
  3594. "./keypoints.glsl": 7639,
  3595. "./math.glsl": 431,
  3596. "./platform.glsl": 6822,
  3597. "./pyramids.glsl": 2728,
  3598. "./subpixel.glsl": 6823
  3599. };
  3600. function webpackContext(req) {
  3601. var id = webpackContextResolve(req);
  3602. return __nested_webpack_require_136472__(id);
  3603. }
  3604. function webpackContextResolve(req) {
  3605. if(!__nested_webpack_require_136472__.o(map, req)) {
  3606. var e = new Error("Cannot find module '" + req + "'");
  3607. e.code = 'MODULE_NOT_FOUND';
  3608. throw e;
  3609. }
  3610. return map[req];
  3611. }
  3612. webpackContext.keys = function webpackContextKeys() {
  3613. return Object.keys(map);
  3614. };
  3615. webpackContext.resolve = webpackContextResolve;
  3616. module.exports = webpackContext;
  3617. webpackContext.id = 5235;
  3618. /***/ }),
  3619. /***/ 4606:
  3620. /***/ ((module, __unused_webpack_exports, __nested_webpack_require_137422__) => {
  3621. var map = {
  3622. "./filters/convolution": 1672,
  3623. "./filters/convolution.js": 1672,
  3624. "./filters/convolution1d.glsl": 8211,
  3625. "./filters/convolution2d.glsl": 7360,
  3626. "./filters/fast-median.glsl": 8191,
  3627. "./filters/nightvision.glsl": 4438,
  3628. "./filters/normalize-image.glsl": 5867,
  3629. "./filters/rgb2grey.glsl": 9252,
  3630. "./include/colors.glsl": 8609,
  3631. "./include/filters.glsl": 4672,
  3632. "./include/fixed-point.glsl": 9778,
  3633. "./include/float16.glsl": 8710,
  3634. "./include/global.glsl": 2434,
  3635. "./include/int32.glsl": 439,
  3636. "./include/keypoint-descriptors.glsl": 8545,
  3637. "./include/keypoint-matches.glsl": 6762,
  3638. "./include/keypoints.glsl": 7639,
  3639. "./include/math.glsl": 431,
  3640. "./include/platform.glsl": 6822,
  3641. "./include/pyramids.glsl": 2728,
  3642. "./include/subpixel.glsl": 6823,
  3643. "./keypoints/allocate-descriptors.glsl": 1341,
  3644. "./keypoints/allocate-extra.glsl": 7833,
  3645. "./keypoints/apply-homography.glsl": 2352,
  3646. "./keypoints/bf-knn.glsl": 7541,
  3647. "./keypoints/clip-border.glsl": 4868,
  3648. "./keypoints/clip.glsl": 5591,
  3649. "./keypoints/distance-filter.glsl": 191,
  3650. "./keypoints/encode-keypoint-long-offsets.glsl": 5467,
  3651. "./keypoints/encode-keypoint-offsets.glsl": 336,
  3652. "./keypoints/encode-keypoint-positions.glsl": 8968,
  3653. "./keypoints/encode-keypoint-properties.glsl": 1733,
  3654. "./keypoints/encode-keypoints.glsl": 9674,
  3655. "./keypoints/encode-null-keypoints.glsl": 2090,
  3656. "./keypoints/fast.glsl": 1855,
  3657. "./keypoints/fast.vs.glsl": 4824,
  3658. "./keypoints/hamming-distance-filter.glsl": 2381,
  3659. "./keypoints/harris-cutoff.glsl": 6060,
  3660. "./keypoints/harris.glsl": 9974,
  3661. "./keypoints/knn-init.glsl": 3047,
  3662. "./keypoints/knn-transfer.glsl": 3266,
  3663. "./keypoints/laplacian.glsl": 8018,
  3664. "./keypoints/lk.glsl": 3168,
  3665. "./keypoints/lookup-of-locations.glsl": 3890,
  3666. "./keypoints/lookup-of-locations.vs.glsl": 8647,
  3667. "./keypoints/lsh-knn.glsl": 4776,
  3668. "./keypoints/mix-keypoints.glsl": 2648,
  3669. "./keypoints/nonmax-scale.glsl": 8825,
  3670. "./keypoints/nonmax-space.glsl": 5693,
  3671. "./keypoints/nonmax-suppression.glsl": 9280,
  3672. "./keypoints/orb-descriptor.glsl": 9108,
  3673. "./keypoints/orb-orientation.glsl": 7137,
  3674. "./keypoints/refine-scale.glsl": 9739,
  3675. "./keypoints/score-findmax.glsl": 8231,
  3676. "./keypoints/shuffle.glsl": 2518,
  3677. "./keypoints/sort-keypoints.glsl": 8096,
  3678. "./keypoints/subpixel-refinement.glsl": 5795,
  3679. "./keypoints/transfer-flow.glsl": 3169,
  3680. "./keypoints/transfer-orientation.glsl": 1337,
  3681. "./keypoints/transfer-to-extra.glsl": 6187,
  3682. "./keypoints/upload-keypoints.glsl": 477,
  3683. "./pyramids/downsample2.glsl": 4050,
  3684. "./pyramids/upsample2.glsl": 5545,
  3685. "./transforms/additive-mix.glsl": 7113,
  3686. "./transforms/resize.glsl": 1202,
  3687. "./transforms/warp-perspective.glsl": 7971,
  3688. "./utils/copy-components.glsl": 6122,
  3689. "./utils/copy-raster.glsl": 371,
  3690. "./utils/copy.glsl": 7307,
  3691. "./utils/fill-components.glsl": 8614,
  3692. "./utils/fill.glsl": 6271,
  3693. "./utils/flip-y.vs.glsl": 3016,
  3694. "./utils/scan-minmax2d.glsl": 3630,
  3695. "./utils/sobel-derivatives.glsl": 8508,
  3696. "./utils/sobel-derivatives.vs.glsl": 8073
  3697. };
  3698. function webpackContext(req) {
  3699. var id = webpackContextResolve(req);
  3700. return __nested_webpack_require_137422__(id);
  3701. }
  3702. function webpackContextResolve(req) {
  3703. if(!__nested_webpack_require_137422__.o(map, req)) {
  3704. var e = new Error("Cannot find module '" + req + "'");
  3705. e.code = 'MODULE_NOT_FOUND';
  3706. throw e;
  3707. }
  3708. return map[req];
  3709. }
  3710. webpackContext.keys = function webpackContextKeys() {
  3711. return Object.keys(map);
  3712. };
  3713. webpackContext.resolve = webpackContextResolve;
  3714. module.exports = webpackContext;
  3715. webpackContext.id = 4606;
  3716. /***/ }),
  3717. /***/ 8211:
  3718. /***/ ((module) => {
  3719. module.exports = "#if !defined(KERNEL_SIZE) || !defined(AXIS) || (AXIS != 0 && AXIS != 1)\n#error Undefined KERNEL_SIZE / AXIS\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE@];\nconst ivec2 axis = ivec2(1-AXIS, AXIS);\n#define S(x,k) result += pixelAtShortOffset(image, ivec2((x),(x)) * axis) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE == 3\nS(-1, 2);\nS( 0, 1);\nS( 1, 0);\n#elif KERNEL_SIZE == 5\nS(-2, 4);\nS(-1, 3);\nS( 0, 2);\nS( 1, 1);\nS( 2, 0);\n#elif KERNEL_SIZE == 7\nS(-3, 6);\nS(-2, 5);\nS(-1, 4);\nS( 0, 3);\nS( 1, 2);\nS( 2, 1);\nS( 3, 0);\n#elif KERNEL_SIZE == 9\nS(-4, 8);\nS(-3, 7);\nS(-2, 6);\nS(-1, 5);\nS( 0, 4);\nS( 1, 3);\nS( 2, 2);\nS( 3, 1);\nS( 4, 0);\n#elif KERNEL_SIZE == 11\nS(-5, 10);\nS(-4, 9);\nS(-3, 8);\nS(-2, 7);\nS(-1, 6);\nS( 0, 5);\nS( 1, 4);\nS( 2, 3);\nS( 3, 2);\nS( 4, 1);\nS( 5, 0);\n#elif KERNEL_SIZE == 13\nS(-6, 12);\nS(-5, 11);\nS(-4, 10);\nS(-3, 9);\nS(-2, 8);\nS(-1, 7);\nS( 0, 6);\nS( 1, 5);\nS( 2, 4);\nS( 3, 3);\nS( 4, 2);\nS( 5, 1);\nS( 6, 0);\n#elif KERNEL_SIZE == 15\nS(-7, 14);\nS(-6, 13);\nS(-5, 12);\nS(-4, 11);\nS(-3, 10);\nS(-2, 9);\nS(-1, 8);\nS( 0, 7);\nS( 1, 6);\nS( 2, 5);\nS( 3, 4);\nS( 4, 3);\nS( 5, 2);\nS( 6, 1);\nS( 7, 0);\n#else\n#error Invalid parameters\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
  3720. /***/ }),
  3721. /***/ 7360:
  3722. /***/ ((module) => {
  3723. module.exports = "#ifndef KERNEL_SIZE_SQUARED\n#error Must define KERNEL_SIZE_SQUARED\n#endif\nuniform sampler2D image;\nuniform float kernel[@KERNEL_SIZE_SQUARED@];\n#define S(x,y,k) result += pixelAtShortOffset(image, ivec2((x),(y))) * kernel[k]\nvoid main()\n{\nvec4 result = vec4(0.0f);\n#if KERNEL_SIZE_SQUARED == 9\nS(-1,-1, 8);\nS(-1, 0, 7);\nS(-1, 1, 6);\nS( 0,-1, 5);\nS( 0, 0, 4);\nS( 0, 1, 3);\nS( 1,-1, 2);\nS( 1, 0, 1);\nS( 1, 1, 0);\n#elif KERNEL_SIZE_SQUARED == 25\nS(-2,-2, 24);\nS(-2,-1, 23);\nS(-2, 0, 22);\nS(-2, 1, 21);\nS(-2, 2, 20);\nS(-1,-2, 19);\nS(-1,-1, 18);\nS(-1, 0, 17);\nS(-1, 1, 16);\nS(-1, 2, 15);\nS( 0,-2, 14);\nS( 0,-1, 13);\nS( 0, 0, 12);\nS( 0, 1, 11);\nS( 0, 2, 10);\nS( 1,-2, 9);\nS( 1,-1, 8);\nS( 1, 0, 7);\nS( 1, 1, 6);\nS( 1, 2, 5);\nS( 2,-2, 4);\nS( 2,-1, 3);\nS( 2, 0, 2);\nS( 2, 1, 1);\nS( 2, 2, 0);\n#elif KERNEL_SIZE_SQUARED == 49\nS(-3,-3, 48);\nS(-3,-2, 47);\nS(-3,-1, 46);\nS(-3, 0, 45);\nS(-3, 1, 44);\nS(-3, 2, 43);\nS(-3, 3, 42);\nS(-2,-3, 41);\nS(-2,-2, 40);\nS(-2,-1, 39);\nS(-2, 0, 38);\nS(-2, 1, 37);\nS(-2, 2, 36);\nS(-2, 3, 35);\nS(-1,-3, 34);\nS(-1,-2, 33);\nS(-1,-1, 32);\nS(-1, 0, 31);\nS(-1, 1, 30);\nS(-1, 2, 29);\nS(-1, 3, 28);\nS( 0,-3, 27);\nS( 0,-2, 26);\nS( 0,-1, 25);\nS( 0, 0, 24);\nS( 0, 1, 23);\nS( 0, 2, 22);\nS( 0, 3, 21);\nS( 1,-3, 20);\nS( 1,-2, 19);\nS( 1,-1, 18);\nS( 1, 0, 17);\nS( 1, 1, 16);\nS( 1, 2, 15);\nS( 1, 3, 14);\nS( 2,-3, 13);\nS( 2,-2, 12);\nS( 2,-1, 11);\nS( 2, 0, 10);\nS( 2, 1, 9);\nS( 2, 2, 8);\nS( 2, 3, 7);\nS( 3,-3, 6);\nS( 3,-2, 5);\nS( 3,-1, 4);\nS( 3, 0, 3);\nS( 3, 1, 2);\nS( 3, 2, 1);\nS( 3, 3, 0);\n#else\n#error Invalid KERNEL_SIZE_SQUARED\n#endif\ncolor = vec4(result.rgb, 1.0f);\n}"
  3724. /***/ }),
  3725. /***/ 8191:
  3726. /***/ ((module) => {
  3727. module.exports = "uniform sampler2D image;\n#define X(i,j) t = vec2(min(p[i], p[j]), max(p[i], p[j])); p[i] = t.x; p[j] = t.y;\n#define S(i,x,y) p[i] = pixelAtShortOffset(image, ivec2((x),(y))).g\nvoid main()\n{\nfloat median;\nvec2 t;\n#if !defined(KERNEL_SIZE)\n#error Must define KERNEL_SIZE\n#elif KERNEL_SIZE == 3\nfloat p[9];\nS(0,-1,-1);\nS(1, 0,-1);\nS(2, 1,-1);\nS(3,-1, 0);\nS(4, 0, 0);\nS(5, 1, 0);\nS(6,-1, 1);\nS(7, 0, 1);\nS(8, 1, 1);\nX(1,2);X(4,5);X(7,8);X(0,1);X(3,4);X(6,7);X(1,2);X(4,5);X(7,8);X(0,3);X(5,8);X(4,7);X(3,6);X(1,4);X(2,5);X(4,7);X(4,2);X(6,4);X(4,2);\nmedian = p[4];\n#elif KERNEL_SIZE == 5\nfloat p[25];\nS( 0,-2,-2);\nS( 1,-1,-2);\nS( 2, 0,-2);\nS( 3, 1,-2);\nS( 4, 2,-2);\nS( 5,-2,-1);\nS( 6,-1,-1);\nS( 7, 0,-1);\nS( 8, 1,-1);\nS( 9, 2,-1);\nS(10,-2, 0);\nS(11,-1, 0);\nS(12, 0, 0);\nS(13, 1, 0);\nS(14, 2, 0);\nS(15,-2, 1);\nS(16,-1, 1);\nS(17, 0, 1);\nS(18, 1, 1);\nS(19, 2, 1);\nS(20,-2, 2);\nS(21,-1, 2);\nS(22, 0, 2);\nS(23, 1, 2);\nS(24, 2, 2);\nX(0,1);X(3,4);X(2,4);X(2,3);X(6,7);X(5,7);X(5,6);X(9,10);X(8,10);X(8,9);X(12,13);X(11,13);X(11,12);X(15,16);X(14,16);X(14,15);X(18,19);X(17,19);X(17,18);X(21,22);X(20,22);X(20,21);X(23,24);X(2,5);X(3,6);X(0,6);X(0,3);X(4,7);X(1,7);X(1,4);X(11,14);X(8,14);X(8,11);X(12,15);X(9,15);X(9,12);X(13,16);X(10,16);X(10,13);X(20,23);X(17,23);X(17,20);X(21,24);X(18,24);X(18,21);X(19,22);X(8,17);X(9,18);X(0,18);X(0,9);X(10,19);X(1,19);X(1,10);X(11,20);X(2,20);X(2,11);X(12,21);X(3,21);X(3,12);X(13,22);X(4,22);X(4,13);X(14,23);X(5,23);X(5,14);X(15,24);X(6,24);X(6,15);X(7,16);X(7,19);X(13,21);X(15,23);X(7,13);X(7,15);X(1,9);X(3,11);X(5,17);X(11,17);X(9,17);X(4,10);X(6,12);X(7,14);X(4,6);X(4,7);X(12,14);X(10,14);X(6,7);X(10,12);X(6,10);X(6,17);X(12,17);X(7,17);X(7,10);X(12,18);X(7,12);X(10,18);X(12,20);X(10,20);X(10,12);\nmedian = p[12];\n#elif KERNEL_SIZE == 7\nfloat p[49];\nS( 0,-3,-3);\nS( 1,-2,-3);\nS( 2,-1,-3);\nS( 3, 0,-3);\nS( 4, 1,-3);\nS( 5, 2,-3);\nS( 6, 3,-3);\nS( 7,-3,-2);\nS( 8,-2,-2);\nS( 9,-1,-2);\nS(10, 0,-2);\nS(11, 1,-2);\nS(12, 2,-2);\nS(13, 3,-2);\nS(14,-3,-1);\nS(15,-2,-1);\nS(16,-1,-1);\nS(17, 0,-1);\nS(18, 1,-1);\nS(19, 2,-1);\nS(20, 3,-1);\nS(21,-3, 0);\nS(22,-2, 0);\nS(23,-1, 0);\nS(24, 0, 0);\nS(25, 1, 0);\nS(26, 2, 0);\nS(27, 3, 0);\nS(28,-3, 1);\nS(29,-2, 1);\nS(30,-1, 1);\nS(31, 0, 1);\nS(32, 1, 1);\nS(33, 2, 1);\nS(34, 3, 1);\nS(35,-3, 2);\nS(36,-2, 2);\nS(37,-1, 2);\nS(38, 0, 2);\nS(39, 1, 2);\nS(40, 2, 2);\nS(41, 3, 2);\nS(42,-3, 3);\nS(43,-2, 3);\nS(44,-1, 3);\nS(45, 0, 3);\nS(46, 1, 3);\nS(47, 2, 3);\nS(48, 3, 
3);\nX(0,1);X(2,3);X(0,2);X(1,3);X(1,2);X(4,5);X(6,7);X(4,6);X(5,7);X(5,6);X(0,4);X(2,6);X(2,4);X(1,5);X(3,7);X(3,5);X(1,2);X(3,4);X(5,6);X(8,9);X(10,11);X(8,10);X(9,11);X(9,10);X(12,13);X(14,15);X(12,14);X(13,15);X(13,14);X(8,12);X(10,14);X(10,12);X(9,13);X(11,15);X(11,13);X(9,10);X(11,12);X(13,14);X(0,8);X(4,12);X(4,8);X(2,10);X(6,14);X(6,10);X(2,4);X(6,8);X(10,12);X(1,9);X(5,13);X(5,9);X(3,11);X(7,15);X(7,11);X(3,5);X(7,9);X(11,13);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(16,17);X(18,19);X(16,18);X(17,19);X(17,18);X(20,21);X(22,23);X(20,22);X(21,23);X(21,22);X(16,20);X(18,22);X(18,20);X(17,21);X(19,23);X(19,21);X(17,18);X(19,20);X(21,22);X(24,25);X(26,27);X(24,26);X(25,27);X(25,26);X(28,29);X(30,31);X(28,30);X(29,31);X(29,30);X(24,28);X(26,30);X(26,28);X(25,29);X(27,31);X(27,29);X(25,26);X(27,28);X(29,30);X(16,24);X(20,28);X(20,24);X(18,26);X(22,30);X(22,26);X(18,20);X(22,24);X(26,28);X(17,25);X(21,29);X(21,25);X(19,27);X(23,31);X(23,27);X(19,21);X(23,25);X(27,29);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(0,16);X(8,24);X(8,16);X(4,20);X(12,28);X(12,20);X(4,8);X(12,16);X(20,24);X(2,18);X(10,26);X(10,18);X(6,22);X(14,30);X(14,22);X(6,10);X(14,18);X(22,26);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(1,17);X(9,25);X(9,17);X(5,21);X(13,29);X(13,21);X(5,9);X(13,17);X(21,25);X(3,19);X(11,27);X(11,19);X(7,23);X(15,31);X(15,23);X(7,11);X(15,19);X(23,27);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);X(25,26);X(27,28);X(29,30);X(32,33);X(34,35);X(32,34);X(33,35);X(33,34);X(36,37);X(38,39);X(36,38);X(37,39);X(37,38);X(32,36);X(34,38);X(34,36);X(33,37);X(35,39);X(35,37);X(33,34);X(35,36);X(37,38);X(40,41);X(42,43);X(40,42);X(41,43);X(41,42);X(44,45);X(46,47);X(44,46);X(45,47);X(45,46);X(40,44);X(42,46);X(42,44);X(41,45);X(43,47);X(43,45);X(41,42);X(43,44);X(45,46);X(32,40);X(36,44);X(36,40);X(34,42);X(38,46);X(38,42);X(34,36);X(38,40);X(42,44);X(33,41);X(37,45);X(37,41);X(35,43);X(39,47);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(32,48);X(40,48);X(36,40);X(44,48);X(38,42);X(34,36);X(38,40);X(42,44);X(46,48);X(37,41);X(39,43);X(35,37);X(39,41);X(43,45);X(33,34);X(35,36);X(37,38);X(39,40);X(41,42);X(43,44);X(45,46);X(47,48);X(0,32);X(16,48);X(16,32);X(8,40);X(24,40);X(8,16);X(24,32);X(40,48);X(4,36);X(20,36);X(12,44);X(28,44);X(12,20);X(28,36);X(4,8);X(12,16);X(20,24);X(28,32);X(36,40);X(44,48);X(2,34);X(18,34);X(10,42);X(26,42);X(10,18);X(26,34);X(6,38);X(22,38);X(14,46);X(30,46);X(14,22);X(30,38);X(6,10);X(14,18);X(22,26);X(30,34);X(38,42);X(2,4);X(6,8);X(10,12);X(14,16);X(18,20);X(22,24);X(26,28);X(30,32);X(34,36);X(38,40);X(42,44);X(46,48);X(1,33);X(17,33);X(9,41);X(25,41);X(9,17);X(25,33);X(5,37);X(21,37);X(13,45);X(29,45);X(13,21);X(29,37);X(5,9);X(13,17);X(21,25);X(29,33);X(37,41);X(3,35);X(19,35);X(11,43);X(27,43);X(11,19);X(27,35);X(7,39);X(23,39);X(15,47);X(31,47);X(15,23);X(31,39);X(7,11);X(15,19);X(23,27);X(31,35);X(39,43);X(3,5);X(7,9);X(11,13);X(15,17);X(19,21);X(23,25);X(27,29);X(31,33);X(35,37);X(39,41);X(43,45);X(1,2);X(3,4);X(5,6);X(7,8);X(9,10);X(11,12);X(13,14);X(15,16);X(17,18);X(19,20);X(21,22);X(23,24);\nmedian = p[24];\n#else\n#error Unsupported kernel size\n#endif\ncolor = vec4(median, median, median, 1.0f);\n}"
  3728. /***/ }),
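The median shader sorts the window samples with a fixed compare-exchange network (the X macro) so that the middle element ends up holding the median. A plain CPU equivalent, written with an ordinary sort instead of a sorting network for clarity (illustrative code, not from the library):

// Median filter over a greyscale image; windowSize is 3, 5 or 7 as in the shader.
function medianFilter(src, width, height, windowSize) {
  const r = (windowSize - 1) >> 1;
  const dst = new Float32Array(width * height);
  const window = new Float32Array(windowSize * windowSize);
  for (let y = 0; y < height; y++) {
    for (let x = 0; x < width; x++) {
      let n = 0;
      for (let dy = -r; dy <= r; dy++) {
        for (let dx = -r; dx <= r; dx++) {
          const sx = Math.min(width - 1, Math.max(0, x + dx));
          const sy = Math.min(height - 1, Math.max(0, y + dy));
          window[n++] = src[sy * width + sx];
        }
      }
      dst[y * width + x] = window.slice().sort((a, b) => a - b)[n >> 1];  // middle element
    }
  }
  return dst;
}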
  3729. /***/ 4438:
  3730. /***/ ((module) => {
  3731. module.exports = "uniform sampler2D image;\nuniform sampler2D illuminationMap;\nuniform float gain;\nuniform float offset;\nuniform float decay;\n#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE == 0\nconst mat3 rgb2yuv = mat3(\n0.299f, -0.14713f, 0.615f,\n0.587f, -0.28886f, -0.51499f,\n0.114f, 0.436f, -0.10001f\n);\nconst mat3 yuv2rgb = mat3(\n1.0f, 1.0f, 1.0f,\n0.0f, -0.39465f, 2.03211f,\n1.13983f, -0.58060f, 0.0f\n);\n#endif\nconst float eps = 0.0001f;\nconst float sqrt2 = 1.4142135623730951f;\nconst float magic = 20.0f;\nconst vec2 center = vec2(0.5f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nvec4 imapPixel = threadPixel(illuminationMap);\nfloat lambda = -sqrt2 * log(max(1.0f - decay, eps));\nfloat dist = length(texCoord - center);\nfloat vgain = gain * exp(-lambda * dist);\nfloat normalizedGain = 2.0f * vgain;\nfloat normalizedOffset = 2.0f * offset - 1.0f;\n#if GREYSCALE != 0\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (pixel.g - imapPixel.g)));\nluma = clamp(luma + normalizedOffset, 0.0f, 1.0f);\ncolor = vec4(luma, luma, luma, 1.0f);\n#else\nvec3 yuvPixel = rgb2yuv * pixel.rgb;\nvec3 yuvImapPixel = rgb2yuv * imapPixel.rgb;\nfloat luma = 1.0 / (1.0 + exp(-normalizedGain * magic * (yuvPixel.r - yuvImapPixel.r)));\nluma += normalizedOffset;\nvec3 rgbCorrectedPixel = yuv2rgb * vec3(luma, yuvPixel.gb);\nrgbCorrectedPixel = clamp(rgbCorrectedPixel, 0.0f, 1.0f);\ncolor = vec4(rgbCorrectedPixel, 1.0f);\n#endif\n}"
  3732. /***/ }),
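This pass raises local contrast with a sigmoid centered on the low-frequency illumination map, using a gain that decays away from the image center (gain * exp(-lambda * dist)). The per-pixel math of the greyscale branch can be sketched as follows (constants mirror the shader; the function name is illustrative):

// Corrected luma for one pixel of the greyscale branch.
// pixel, illumination: values in [0,1]; dist: distance of the pixel from the
// image center in normalized texture coordinates (0 at the center).
function enhancePixel(pixel, illumination, dist, gain, offset, decay) {
  const eps = 1e-4, sqrt2 = Math.SQRT2, magic = 20.0;
  const lambda = -sqrt2 * Math.log(Math.max(1.0 - decay, eps));
  const vgain = gain * Math.exp(-lambda * dist);      // gain decays away from the center
  const normalizedGain = 2.0 * vgain;
  const normalizedOffset = 2.0 * offset - 1.0;
  // sigmoid centered on the illumination map value
  let luma = 1.0 / (1.0 + Math.exp(-normalizedGain * magic * (pixel - illumination)));
  luma = Math.min(1.0, Math.max(0.0, luma + normalizedOffset));
  return luma;
}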
  3733. /***/ 5867:
  3734. /***/ ((module) => {
  3735. module.exports = "#ifndef GREYSCALE\n#error Must define GREYSCALE\n#endif\n#if GREYSCALE != 0\nuniform sampler2D minmax2d;\n#else\nuniform sampler2D minmax2dRGB[3];\n#endif\nuniform float minValue;\nuniform float maxValue;\nconst float eps = 1.0f / 255.0f;\nvoid main()\n{\nvec2 minmax = clamp(vec2(minValue, maxValue), 0.0f, 255.0f) / 255.0f;\nvec4 newMin = vec4(minmax.x);\nvec4 newRange = vec4(minmax.y - minmax.x);\nvec4 alpha = vec4(1.0f, newMin.x, newRange.x, 1.0f);\n#if GREYSCALE != 0\nvec4 pixel = threadPixel(minmax2d);\nmat4 channel = mat4(pixel, pixel, pixel, alpha);\n#else\nmat4 channel = mat4(\nthreadPixel(minmax2dRGB[0]),\nthreadPixel(minmax2dRGB[1]),\nthreadPixel(minmax2dRGB[2]),\nalpha\n);\n#endif\nvec4 oldMin = vec4(channel[0].g, channel[1].g, channel[2].g, channel[3].g);\nvec4 oldRange = max(vec4(channel[0].b, channel[1].b, channel[2].b, channel[3].b), eps);\nvec4 oldIntensity = vec4(channel[0].a, channel[1].a, channel[2].a, channel[3].a);\nvec4 newIntensity = (oldIntensity - oldMin) * newRange / oldRange + newMin;\ncolor = newIntensity;\n}"
  3736. /***/ }),
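Normalization is a plain linear remap of each channel from its measured [oldMin, oldMax] range to the requested [minValue, maxValue] range. The per-value formula the shader applies, as a small sketch:

// Remap one intensity value; all arguments are in [0, 1].
// oldRange is kept away from zero just like the shader's `eps`.
function normalizeIntensity(value, oldMin, oldMax, newMin, newMax) {
  const eps = 1.0 / 255.0;
  const oldRange = Math.max(oldMax - oldMin, eps);
  const newRange = newMax - newMin;
  return (value - oldMin) * newRange / oldRange + newMin;
}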
  3737. /***/ 9252:
  3738. /***/ ((module) => {
  3739. module.exports = "const vec4 grey = vec4(0.299f, 0.587f, 0.114f, 0.0f);\nuniform sampler2D image;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat g = dot(pixel, grey);\ncolor = vec4(g, g, g, 1.0f);\n}"
  3740. /***/ }),
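Greyscale conversion is the usual Rec. 601 weighted sum of the RGB channels:

// Rec. 601 luma from RGB values in [0, 1], matching the shader's dot product.
const luma601 = (r, g, b) => 0.299 * r + 0.587 * g + 0.114 * b;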
  3741. /***/ 8609:
  3742. /***/ ((module) => {
  3743. module.exports = "#ifndef _COLORS_GLSL\n#define _COLORS_GLSL\n#define PIXELCOMPONENT_RED @PIXELCOMPONENT_RED@\n#define PIXELCOMPONENT_GREEN @PIXELCOMPONENT_GREEN@\n#define PIXELCOMPONENT_BLUE @PIXELCOMPONENT_BLUE@\n#define PIXELCOMPONENT_ALPHA @PIXELCOMPONENT_ALPHA@\n#endif"
  3744. /***/ }),
  3745. /***/ 4672:
  3746. /***/ ((module) => {
  3747. module.exports = "#ifndef _FILTERS_GLSL\n#define _FILTERS_GLSL\nfloat laplacian(sampler2D pyramid, vec2 position, float lod)\n{\nfloat pot = exp2(lod);\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nconst vec3 ones = vec3(1.0f);\nconst mat3 kernel = mat3(\n0,-1, 0,\n-1, 4,-1,\n0,-1, 0\n);\n#define LPC(x,y) pyrSubpixelAtExOffset(pyramid, position, lod, pot, ivec2((x),(y)), pyrBaseSize).g\nmat3 neighborhood = mat3(\n0.0f, LPC(0,-1), 0.0f,\nLPC(-1,0), LPC(0,0), LPC(1,0),\n0.0f, LPC(0,1), 0.0f\n);\nmat3 m = matrixCompMult(neighborhood, kernel);\nreturn dot(ones, vec3(\ndot(m[0], ones),\ndot(m[1], ones),\ndot(m[2], ones)\n)) * (1.0f + lod);\n}\n#endif"
  3748. /***/ }),
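laplacian() samples a 5-point cross around the position and applies the kernel [0,-1,0; -1,4,-1; 0,-1,0], scaled by (1 + lod) to compensate for pyramid smoothing. The same stencil on a CPU-side single-channel image (illustrative helper, not part of the library):

// Discrete 5-point Laplacian at pixel (x, y), scaled by (1 + lod) as in the shader.
function laplacian(src, width, height, x, y, lod = 0) {
  const at = (px, py) =>
    src[Math.min(height - 1, Math.max(0, py)) * width +
        Math.min(width - 1, Math.max(0, px))];
  const value = 4 * at(x, y) - at(x - 1, y) - at(x + 1, y) - at(x, y - 1) - at(x, y + 1);
  return value * (1 + lod);
}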
  3749. /***/ 9778:
  3750. /***/ ((module) => {
  3751. module.exports = "#ifndef _FIXEDPOINT_GLSL\n#define _FIXEDPOINT_GLSL\n#define fixed_t int\n#define fixed2_t ivec2\nconst int FIX_BITS = int(@FIX_BITS@);\nconst float FIX_RESOLUTION = float(@FIX_RESOLUTION@);\n#define itofix(x) fixed_t((x) << FIX_BITS)\n#define fixtoi(f) int((f) >> FIX_BITS)\n#define ftofix(x) fixed_t((x) * FIX_RESOLUTION + 0.5f)\n#define fixtof(f) (float(f) / FIX_RESOLUTION)\n#define ivec2tofix(x) fixed2_t((x) << FIX_BITS)\n#define fixtoivec2(f) ivec2((f) >> FIX_BITS)\n#define vec2tofix(v) fixed2_t((v) * FIX_RESOLUTION + vec2(0.5f))\n#define fixtovec2(f) (vec2(f) / FIX_RESOLUTION)\n#endif"
  3752. /***/ }),
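Keypoint coordinates are stored as fixed-point integers with FIX_BITS fractional bits; FIX_RESOLUTION is 2^FIX_BITS, and both are substituted at build time (the @...@ placeholders). A sketch of the float/fixed conversions, assuming FIX_BITS = 3 purely for illustration:

// Fixed-point helpers mirroring fixed-point.glsl. FIX_BITS is a build-time
// constant in the library; 3 is only an assumed example value.
const FIX_BITS = 3;
const FIX_RESOLUTION = 1 << FIX_BITS;                  // 2^FIX_BITS

const ftofix = (x) => Math.round(x * FIX_RESOLUTION);  // float -> fixed
const fixtof = (f) => f / FIX_RESOLUTION;              // fixed -> float

// e.g. ftofix(12.625) === 101 and fixtof(101) === 12.625 when FIX_BITS = 3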
  3753. /***/ 8710:
  3754. /***/ ((module) => {
  3755. module.exports = "#ifndef _FLOAT16_GLSL\n#define _FLOAT16_GLSL\n#define encodeFloat16(f) (vec2(packf16(f)) / 255.0f)\n#define decodeFloat16(v) unpackf16(uvec2((v) * 255.0f))\n#define encodePairOfFloat16(f) vec4(encodeFloat16((f).x), encodeFloat16((f).y))\n#define decodePairOfFloat16(v) vec2(decodeFloat16((v).rg), decodeFloat16((v).ba))\n#define encodeNullPairOfFloat16() vec4(1.0f)\n#define isNullPairOfFloat16(v) all(equal((v), encodeNullPairOfFloat16()))\n#define encodeDiscardedPairOfFloat16() vec4(0.0f, 1.0f, 0.0f, 1.0f)\n#define isDiscardedPairOfFloat16(v) all(equal((v), encodeDiscardedPairOfFloat16()))\n#define encodeFloat16NaN() vec2(0.5f, 1.0f)\n#define isEncodedFloat16NaN(v) all(equal((v), encodeFloat16NaN()))\nuvec2 packf16( float f)\n{\nuint y = packHalf2x16(vec2(f, 0.0f));\nreturn uvec2(y, y >> 8u) & 0xFFu;\n}\nfloat unpackf16(uvec2 v)\n{\nv &= 0xFFu;\nreturn unpackHalf2x16(v.x | (v.y << 8u)).x;\n}\nbool isEncodedFloat16Zero(vec2 v)\n{\nuvec2 w = uvec2(v * 255.0f);\nreturn 0u == w.x + w.y * (0x80u - w.y);\n}\n#endif"
  3756. /***/ }),
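float16.glsl keeps a half-precision float in two color channels: packHalf2x16 yields the 16-bit half, and its low and high bytes are stored as values in [0, 1]. A JS sketch of the same packing (the float-to-half conversion below ignores rounding and subnormals for brevity):

// Simplified float32 -> float16 bit conversion (no rounding, no subnormals).
function toHalf(value) {
  const dv = new DataView(new ArrayBuffer(4));
  dv.setFloat32(0, value);
  const bits = dv.getUint32(0);
  const sign = (bits >>> 16) & 0x8000;
  const exp = ((bits >>> 23) & 0xff) - 127 + 15;
  const mant = (bits >>> 13) & 0x3ff;
  if (exp <= 0) return sign;                 // flush tiny values to signed zero
  if (exp >= 31) return sign | 0x7c00;       // overflow -> infinity
  return sign | (exp << 10) | mant;
}

// Mirrors encodeFloat16(): two bytes scaled to [0, 1], low byte first.
function encodeFloat16(value) {
  const h = toHalf(value);
  return [(h & 0xff) / 255, ((h >> 8) & 0xff) / 255];
}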
  3757. /***/ 2434:
  3758. /***/ ((module) => {
  3759. module.exports = "#ifndef _GLOBAL_GLSL\n#define _GLOBAL_GLSL\n#define threadLocation() ivec2(texCoord * texSize)\n#define outputSize() ivec2(texSize)\n#define threadPixel(img) textureLod((img), texCoord, 0.0f)\n#define pixelAt(img, pos) texelFetch((img), (pos), 0)\n#define pixelAtShortOffset(img, offset) textureLodOffset((img), texCoord, 0.0f, (offset))\n#define pixelAtLongOffset(img, offset) textureLod((img), texCoord + vec2(offset) / texSize, 0.0f)\n#endif"
  3760. /***/ }),
  3761. /***/ 439:
  3762. /***/ ((module) => {
  3763. module.exports = "#ifndef _INT32_GLSL\n#define _INT32_GLSL\n@include \"platform.glsl\"\nuint decodeUint32(vec4 rgba)\n{\nuvec4 v = uvec4(rgba * 255.0f) & 255u;\nreturn v.x | (v.y << 8u) | (v.z << 16u) | (v.w << 24u);\n}\nvec4 encodeUint32(uint value)\n{\n#if defined(APPLE_GPU) || (defined(APPLE) && defined(INTEL_GRAPHICS))\nuvec4 v = uvec4(value, value / 256u, value / 65536u, value / 16777216u) % 256u;\nreturn vec4(v) / 255.0f;\n#else\nuvec4 v = uvec4(value, value >> 8u, value >> 16u, value >> 24u) & 255u;\nreturn vec4(v) / 255.0f;\n#endif\n}\n#endif"
  3764. /***/ }),
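int32.glsl spreads a 32-bit unsigned integer over the four color channels, one byte each, divided by 255 (the Apple/Intel branch computes the same bytes with divisions instead of shifts). Equivalent JS:

// Pack a uint32 into four bytes scaled to [0, 1] and back, little-endian.
function encodeUint32(value) {
  return [value & 0xff, (value >>> 8) & 0xff, (value >>> 16) & 0xff, (value >>> 24) & 0xff]
    .map(byte => byte / 255);
}

function decodeUint32(rgba) {
  const [r, g, b, a] = rgba.map(channel => Math.round(channel * 255) & 0xff);
  return (r | (g << 8) | (b << 16) | (a << 24)) >>> 0;  // >>> 0 keeps it unsigned
}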
  3765. /***/ 8545:
  3766. /***/ ((module) => {
  3767. module.exports = "#ifndef _KEYPOINT_DESCRIPTORS_GLSL\n#define _KEYPOINT_DESCRIPTORS_GLSL\n#if !defined(DESCRIPTOR_SIZE)\n#error Must define DESCRIPTOR_SIZE\n#elif !defined(_KEYPOINTS_GLSL)\n#error Must include keypoints.glsl\n#endif\nuint[DESCRIPTOR_SIZE] readKeypointDescriptor(sampler2D encodedKeypoints, int descriptorSize, int extraSize, int encoderLength, KeypointAddress address)\n{\nint descriptorOffset = sizeofEncodedKeypoint(0, extraSize) / 4;\nKeypointAddress descriptorAddress = KeypointAddress(address.base, descriptorOffset);\nuint[DESCRIPTOR_SIZE] descriptor;\nvec4 pixel; uvec4 bytes;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npixel = readKeypointData(encodedKeypoints, encoderLength, descriptorAddress);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\ndescriptorAddress.offset++;\n}\nreturn descriptor;\n}\nuint[DESCRIPTOR_SIZE] readKeypointDescriptorFromDB(sampler2D descriptorDB, int descriptorDBStride, int index)\n{\nuint[DESCRIPTOR_SIZE] descriptor;\nint rasterIndex = index * (DESCRIPTOR_SIZE / 4) * int(index >= 0);\nvec4 pixel; uvec4 bytes; ivec2 pos;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\npos = ivec2(rasterIndex % descriptorDBStride, rasterIndex / descriptorDBStride);\npixel = (index >= 0) ? texelFetch(descriptorDB, pos, 0) : vec4(0.0f);\nbytes = uvec4(pixel * 255.0f);\ndescriptor[i] = bytes.r;\ndescriptor[i+1] = bytes.g;\ndescriptor[i+2] = bytes.b;\ndescriptor[i+3] = bytes.a;\nrasterIndex++;\n}\nreturn descriptor;\n}\nint distanceBetweenKeypointDescriptors(uint[DESCRIPTOR_SIZE] a, uint[DESCRIPTOR_SIZE] b)\n{\nconst int[256] POPCNT = int[256](0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,4,5,5,6,5,6,6,7,5,6,6,7,6,7,7,8);\nuvec4 xor, u, v;\nint dist = 0;\nivec4 bits;\n@unroll\nfor(int i = 0; i < DESCRIPTOR_SIZE; i += 4) {\nu = uvec4(a[i], a[i+1], a[i+2], a[i+3]);\nv = uvec4(b[i], b[i+1], b[i+2], b[i+3]);\nxor = (u ^ v) & 255u;\nbits = ivec4(POPCNT[xor.x], POPCNT[xor.y], POPCNT[xor.z], POPCNT[xor.w]);\ndist += bits.x + bits.y + bits.z + bits.w;\n}\nreturn dist;\n}\n#endif"
  3768. /***/ }),
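Descriptor matching uses the Hamming distance between binary descriptors, computed with a 256-entry popcount table just like the POPCNT array above. The same idea in JS, with descriptors held as byte arrays:

// 256-entry population-count table: POPCNT[i] = number of set bits in byte i.
const POPCNT = new Uint8Array(256);
for (let i = 1; i < 256; i++)
  POPCNT[i] = POPCNT[i >> 1] + (i & 1);

// Hamming distance between two binary descriptors of equal length (e.g. 32 bytes).
function hammingDistance(a, b) {
  let dist = 0;
  for (let i = 0; i < a.length; i++)
    dist += POPCNT[a[i] ^ b[i]];
  return dist;
}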
  3769. /***/ 6762:
  3770. /***/ ((module) => {
  3771. module.exports = "#ifndef _KEYPOINT_MATCHES_GLSL\n#define _KEYPOINT_MATCHES_GLSL\n@include \"int32.glsl\"\nconst int MATCH_INDEX_BITS = int(@MATCH_INDEX_BITS@);\nconst int MATCH_INDEX_MASK = int(@MATCH_INDEX_MASK@);\nconst int MATCH_MAX_INDEX = int(@MATCH_MAX_INDEX@);\nconst int MATCH_MAX_DISTANCE = int(@MATCH_MAX_DISTANCE@);\nstruct KeypointMatch\n{\nint index;\nint dist;\n};\nvec4 encodeKeypointMatch(KeypointMatch candidate)\n{\nuint index = uint(candidate.index) & uint(MATCH_INDEX_MASK);\nuint dist = uint(clamp(candidate.dist, 0, MATCH_MAX_DISTANCE));\nuint u32 = index | (dist << MATCH_INDEX_BITS);\nreturn encodeUint32(u32);\n}\nKeypointMatch decodeKeypointMatch(vec4 rgba)\n{\nuint u32 = decodeUint32(rgba);\nint dist = int(u32 >> MATCH_INDEX_BITS);\nint index = int(u32 & uint(MATCH_INDEX_MASK));\nreturn KeypointMatch(index, dist);\n}\nconst KeypointMatch MATCH_NOT_FOUND = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif"
  3772. /***/ }),
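A match is an (index, distance) pair packed into a single uint32: the low MATCH_INDEX_BITS bits hold the keypoint index and the remaining high bits hold the distance. The constants are injected at build time; the sketch below assumes MATCH_INDEX_BITS = 16 just to show the arithmetic:

// Assumed value for illustration; in the library this is a build-time constant.
const MATCH_INDEX_BITS = 16;
const MATCH_INDEX_MASK = (1 << MATCH_INDEX_BITS) - 1;

const encodeMatch = (index, distance) =>
  ((index & MATCH_INDEX_MASK) | (distance << MATCH_INDEX_BITS)) >>> 0;

const decodeMatch = (u32) => ({
  index: u32 & MATCH_INDEX_MASK,
  distance: u32 >>> MATCH_INDEX_BITS,
});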
  3773. /***/ 7639:
  3774. /***/ ((module) => {
  3775. module.exports = "#ifndef _KEYPOINTS_GLSL\n#define _KEYPOINTS_GLSL\n@include \"math.glsl\"\n@include \"fixed-point.glsl\"\n@include \"float16.glsl\"\n@include \"pyramids.glsl\"\nstruct Keypoint\n{\nvec2 position;\nfloat lod;\nfloat orientation;\nfloat score;\nuint flags;\n};\nstruct KeypointAddress\n{\nint base;\nint offset;\n};\nconst int MIN_KEYPOINT_SIZE = int(@MIN_KEYPOINT_SIZE@);\nconst int MAX_DESCRIPTOR_SIZE = int(@MAX_DESCRIPTOR_SIZE@);\nconst uint KPF_NONE = 0u;\nconst uint KPF_NULL = 1u;\nconst uint KPF_DISCARDED = 2u;\n#define encodeKeypointScore(score) encodeFloat16(score)\n#define decodeKeypointScore(encodedScore) decodeFloat16(encodedScore)\n#define encodeKeypointOrientation(angle) ((angle) * INV_PI_OVER_2 + 0.5f)\n#define decodeKeypointOrientation(value) ((value) * TWO_PI - PI)\n#define encodeNullKeypoint() (vec4(1.0f))\n#define encodeDiscardedKeypoint() (vec4(0.0f))\n#define isNullKeypoint(keypoint) ((((keypoint).flags) & KPF_NULL) != 0u)\n#define isDiscardedKeypoint(keypoint) ((((keypoint).flags) & KPF_DISCARDED) != 0u)\n#define isBadKeypoint(keypoint) ((keypoint).score < 0.0f)\n#define sizeofEncodedKeypoint(descriptorSize, extraSize) (MIN_KEYPOINT_SIZE + (descriptorSize) + (extraSize))\n#define sizeofEncodedKeypointHeader() sizeofEncodedKeypoint(0,0)\n#define findKeypointIndex(address, descriptorSize, extraSize) ((address).base / ((sizeofEncodedKeypoint((descriptorSize), (extraSize))) / 4))\nvec4 readKeypointData(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nint rasterIndex = address.base + address.offset;\nvec4 data = pixelAt(encodedKeypoints, ivec2(rasterIndex % encoderLength, rasterIndex / encoderLength));\nreturn rasterIndex < encoderLength * encoderLength ? data : encodeNullKeypoint();\n}\nKeypointAddress findKeypointAddress(ivec2 thread, int encoderLength, int descriptorSize, int extraSize)\n{\nint threadRaster = thread.y * encoderLength + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint keypointIndex = int(threadRaster / pixelsPerKeypoint);\nKeypointAddress address = KeypointAddress(\nkeypointIndex * pixelsPerKeypoint,\nthreadRaster % pixelsPerKeypoint\n);\nreturn address;\n}\nKeypoint decodeKeypoint(sampler2D encodedKeypoints, int encoderLength, KeypointAddress address)\n{\nKeypoint keypoint;\nKeypointAddress positionAddress = KeypointAddress(address.base, 0);\nKeypointAddress propertiesAddress = KeypointAddress(address.base, 1);\nvec4 rawEncodedPosition = readKeypointData(encodedKeypoints, encoderLength, positionAddress);\nivec4 encodedPosition = ivec4(rawEncodedPosition * 255.0f);\nkeypoint.position = fixtovec2(fixed2_t(\nencodedPosition.r | (encodedPosition.g << 8),\nencodedPosition.b | (encodedPosition.a << 8)\n));\nvec4 rawEncodedProperties = readKeypointData(encodedKeypoints, encoderLength, propertiesAddress);\nkeypoint.lod = decodeLod(rawEncodedProperties.r);\nkeypoint.orientation = decodeKeypointOrientation(rawEncodedProperties.g);\nkeypoint.score = decodeKeypointScore(rawEncodedProperties.ba);\nbool isNull = all(equal(rawEncodedPosition, vec4(1)));\nbool isDiscarded = all(equal(rawEncodedPosition + rawEncodedProperties, vec4(0)));\nkeypoint.score = (isNull || isDiscarded) ? 
-1.0f : keypoint.score;\nkeypoint.flags = KPF_NONE;\nkeypoint.flags |= KPF_NULL * uint(isNull);\nkeypoint.flags |= KPF_DISCARDED * uint(isDiscarded);\nreturn keypoint;\n}\nvec4 encodeKeypointPosition(vec2 position)\n{\nconst vec2 zeros = vec2(0.0f);\nfixed2_t pos = vec2tofix(max(position, zeros));\nfixed2_t lo = pos & 255;\nfixed2_t hi = (pos >> 8) & 255;\nreturn vec4(lo.x, hi.x, lo.y, hi.y) / 255.0f;\n}\n#endif"
  3776. /***/ }),
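Each encoded keypoint starts with a position pixel: x and y are converted to fixed point and split into low/high bytes, stored as (lo.x, hi.x, lo.y, hi.y) / 255. The same round trip on the CPU looks like this (fixRes stands for the build-time FIX_RESOLUTION constant from fixed-point.glsl):

// Decode a keypoint position from the 4 bytes of its first pixel.
// bytes: [lo.x, hi.x, lo.y, hi.y]; fixRes: FIX_RESOLUTION (e.g. 8 when FIX_BITS = 3).
function decodeKeypointPosition(bytes, fixRes) {
  const x = (bytes[0] | (bytes[1] << 8)) / fixRes;
  const y = (bytes[2] | (bytes[3] << 8)) / fixRes;
  return { x, y };
}

// Inverse of the above, mirroring encodeKeypointPosition() in the shader.
function encodeKeypointPosition(x, y, fixRes) {
  const fx = Math.round(Math.max(0, x) * fixRes);
  const fy = Math.round(Math.max(0, y) * fixRes);
  return [fx & 0xff, (fx >> 8) & 0xff, fy & 0xff, (fy >> 8) & 0xff];
}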
  3777. /***/ 431:
  3778. /***/ ((module) => {
  3779. module.exports = "#ifndef _MATH_GLSL\n#define _MATH_GLSL\n#define TWO_PI 6.28318530718f\n#define PI 3.14159265359f\n#define PI_OVER_2 1.57079632679f\n#define PI_OVER_4 0.78539816339f\n#define INV_PI 0.3183098861837907f\n#define INV_PI_OVER_2 0.15915494309189535f\nconst highp float INFINITY = 1.0f / 0.0f;\nfloat fastAtan(float x)\n{\nfloat w = 1.0f - abs(x);\nreturn (w >= 0.0f) ? ((PI_OVER_4 + 0.273f * w) * x) :\n(sign(x) * PI_OVER_2 - (PI_OVER_4 + 0.273f * (1.0f - abs(1.0f / x))) / x);\n}\nfloat fastAtan2(float y, float x)\n{\nreturn (x == 0.0f) ? PI_OVER_2 * sign(y) : fastAtan(y / x) + float(x < 0.0f) * PI * sign(y);\n}\n#endif"
  3780. /***/ }),
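fastAtan() is a cheap polynomial approximation of arctangent: atan(x) ≈ (π/4 + 0.273(1 − |x|))·x for |x| ≤ 1, with the identity atan(x) = sign(x)·π/2 − atan(1/x) used outside that range. Translated directly to JS so it can be compared against Math.atan2:

const PI = Math.PI, PI_OVER_2 = PI / 2, PI_OVER_4 = PI / 4;

// Approximation of atan(x) for any real x, mirroring math.glsl.
function fastAtan(x) {
  const w = 1.0 - Math.abs(x);
  return (w >= 0.0)
    ? (PI_OVER_4 + 0.273 * w) * x
    : Math.sign(x) * PI_OVER_2 - (PI_OVER_4 + 0.273 * (1.0 - Math.abs(1.0 / x))) / x;
}

function fastAtan2(y, x) {
  return (x === 0.0)
    ? PI_OVER_2 * Math.sign(y)
    : fastAtan(y / x) + (x < 0.0 ? 1 : 0) * PI * Math.sign(y);
}

// e.g. fastAtan2(1, 1) is close to Math.atan2(1, 1) === PI / 4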
  3781. /***/ 6822:
  3782. /***/ ((module) => {
  3783. module.exports = "#ifndef _PLATFORM_GLSL\n#define _PLATFORM_GLSL\n#if @APPLE@\n#define APPLE 1\n#endif\n#if @APPLE_GPU@\n#define APPLE_GPU 1\n#endif\n#if @INTEL_GRAPHICS@\n#define INTEL_GRAPHICS 1\n#endif\n#endif"
  3784. /***/ }),
  3785. /***/ 2728:
  3786. /***/ ((module) => {
  3787. module.exports = "#ifndef _PYRAMIDS_GLSL\n#define _PYRAMIDS_GLSL\n#define pyrPixel(pyr, lod) textureLod((pyr), texCoord, (lod))\n#define pyrPixelAtOffset(pyr, lod, pot, offset) textureLod((pyr), texCoord + ((pot) * vec2(offset)) / texSize, (lod))\n#define pyrPixelAt(pyr, pos, lod) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / texSize, (lod))\n#define pyrPixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), (vec2(pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtEx(pyr, pos, lod, pyrBaseSize) textureLod((pyr), ((pos) + vec2(0.5f)) / vec2(pyrBaseSize), (lod))\n#define pyrSubpixelAtExOffset(pyr, pos, lod, pot, offset, pyrBaseSize) textureLod((pyr), (((pos) + vec2(0.5f)) + ((pot) * vec2(offset))) / vec2(pyrBaseSize), (lod))\nconst int PYRAMID_MAX_LEVELS = int(@PYRAMID_MAX_LEVELS@);\nconst float F_PYRAMID_MAX_LEVELS = float(@PYRAMID_MAX_LEVELS@);\nconst float LOG2_PYRAMID_MAX_SCALE = float(@LOG2_PYRAMID_MAX_SCALE@);\n#define encodeLod(lod) ((LOG2_PYRAMID_MAX_SCALE + (lod)) / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS))\nfloat decodeLod(float encodedLod)\n{\nfloat lod = encodedLod * (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS) - LOG2_PYRAMID_MAX_SCALE;\nreturn lod - lod * step(1.0f, encodedLod);\n}\n#define LOD_EPS 0.0625f\nconst float ENCODED_LOD_EPS = (LOD_EPS / (LOG2_PYRAMID_MAX_SCALE + F_PYRAMID_MAX_LEVELS));\n#define isSameLod(lod1, lod2) (abs((lod1) - (lod2)) < LOD_EPS)\n#define isSameEncodedLod(alpha1, alpha2) (abs((alpha1) - (alpha2)) < ENCODED_LOD_EPS)\n#endif"
  3788. /***/ }),
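A pyramid level-of-detail value is squeezed into a single 8-bit channel by mapping the range [-LOG2_PYRAMID_MAX_SCALE, PYRAMID_MAX_LEVELS] onto [0, 1]. Both constants are substituted at build time; the sketch below assumes example values just to show the round trip:

// Assumed build-time constants, for illustration only.
const PYRAMID_MAX_LEVELS = 8;
const LOG2_PYRAMID_MAX_SCALE = 0;

const encodeLod = (lod) =>
  (LOG2_PYRAMID_MAX_SCALE + lod) / (LOG2_PYRAMID_MAX_SCALE + PYRAMID_MAX_LEVELS);

// An encoded value of 1.0 is reserved (null keypoints), hence the extra check.
const decodeLod = (encodedLod) => {
  const lod = encodedLod * (LOG2_PYRAMID_MAX_SCALE + PYRAMID_MAX_LEVELS) - LOG2_PYRAMID_MAX_SCALE;
  return encodedLod >= 1.0 ? 0.0 : lod;
};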
  3789. /***/ 6823:
  3790. /***/ ((module) => {
  3791. module.exports = "#ifndef _SUBPIXEL_GLSL\n#define _SUBPIXEL_GLSL\n#define subpixelAt(image, pos) textureLod((image), ((pos) + vec2(0.5f)) / texSize, 0.0f)\nvec4 subpixelAtBI(sampler2D image, vec2 pos)\n{\nvec2 frc = fract(pos);\nvec2 ifrc = vec2(1.0f) - frc;\nvec2 p = (floor(pos) + vec2(0.5f)) / vec2(textureSize(image, 0));\nvec4 pix00 = textureLod(image, p, 0.0f);\nvec4 pix10 = textureLodOffset(image, p, 0.0f, ivec2(1,0));\nvec4 pix01 = textureLodOffset(image, p, 0.0f, ivec2(0,1));\nvec4 pix11 = textureLodOffset(image, p, 0.0f, ivec2(1,1));\nmat4 pix = mat4(pix00, pix10, pix01, pix11);\nvec4 mul = vec4(ifrc.x * ifrc.y, frc.x * ifrc.y, ifrc.x * frc.y, frc.x * frc.y);\nreturn pix * mul;\n}\n#endif"
  3792. /***/ }),
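subpixelAtBI() is standard bilinear interpolation: the four neighboring texels are blended with weights given by the fractional part of the coordinate. The same computation on a CPU-side greyscale buffer (illustrative helper):

// Bilinear read at a non-integer position (x, y) in a row-major greyscale image.
function subpixelAt(src, width, height, x, y) {
  const x0 = Math.floor(x), y0 = Math.floor(y);
  const fx = x - x0, fy = y - y0;
  const at = (px, py) =>
    src[Math.min(height - 1, Math.max(0, py)) * width +
        Math.min(width - 1, Math.max(0, px))];
  return at(x0, y0)         * (1 - fx) * (1 - fy)
       + at(x0 + 1, y0)     * fx       * (1 - fy)
       + at(x0, y0 + 1)     * (1 - fx) * fy
       + at(x0 + 1, y0 + 1) * fx       * fy;
}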
  3793. /***/ 1341:
  3794. /***/ ((module) => {
  3795. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nconst vec4 EMPTY_DESCRIPTOR = vec4(0.0f);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nint addressOffset = myAddress.offset;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isDescriptor ? EMPTY_DESCRIPTOR : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
  3796. /***/ }),
  3797. /***/ 7833:
  3798. /***/ ((module) => {
  3799. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D inputEncodedKeypoints;\nuniform int inputDescriptorSize;\nuniform int inputExtraSize;\nuniform int inputEncoderLength;\nuniform int outputDescriptorSize;\nuniform int outputExtraSize;\nuniform int outputEncoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, outputEncoderLength, outputDescriptorSize, outputExtraSize);\nint myIndex = findKeypointIndex(myAddress, outputDescriptorSize, outputExtraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nbool isHead = (myAddress.offset < headerSize / 4);\nbool isDescriptor = (myAddress.offset >= (headerSize + outputExtraSize) / 4);\nbool isExtra = (!isHead && !isDescriptor);\nint numberOfExtraPixels = outputExtraSize / 4;\nint addressOffset = myAddress.offset - int(isDescriptor) * numberOfExtraPixels;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(inputDescriptorSize, inputExtraSize) / 4;\nKeypointAddress otherAddress = KeypointAddress(myIndex * pixelsPerKeypoint, addressOffset);\ncolor = isExtra ? vec4(0.0f) : readKeypointData(inputEncodedKeypoints, inputEncoderLength, otherAddress);\n}"
  3800. /***/ }),
  3801. /***/ 2352:
  3802. /***/ ((module) => {
  3803. module.exports = "@include \"keypoints.glsl\"\nuniform mat3 homography;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 pos3 = homography * vec3(keypoint.position, 1.0f);\ncolor = encodeKeypointPosition(pos3.xy / pos3.z);\n}"
  3804. /***/ }),
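Warping a keypoint by a homography is a 3x3 matrix multiply in homogeneous coordinates followed by a perspective divide (the uniform mat3 is column-major, as usual in GLSL; the sketch below takes the matrix row-major for readability):

// Apply a 3x3 homography (row-major, as an array of 9 numbers) to a 2D point.
function applyHomography(h, x, y) {
  const w = h[6] * x + h[7] * y + h[8];     // perspective divide
  return {
    x: (h[0] * x + h[1] * y + h[2]) / w,
    y: (h[3] * x + h[4] * y + h[5]) / w,
  };
}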
  3805. /***/ 7541:
  3806. /***/ ((module) => {
  3807. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\n@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedFilters;\nuniform int matcherLength;\nuniform sampler2D dbEncodedKeypoints;\nuniform int dbDescriptorSize;\nuniform int dbExtraSize;\nuniform int dbEncoderLength;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int passId;\n#ifndef NUMBER_OF_KEYPOINTS_PER_PASS\n#error Undefined NUMBER_OF_KEYPOINTS_PER_PASS\n#endif\nconst int INFINITE_DISTANCE = MATCH_MAX_DISTANCE + 1;\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch bestMatch = decodeKeypointMatch(threadPixel(encodedMatches));\nKeypointMatch filterMatch = decodeKeypointMatch(threadPixel(encodedFilters));\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nuint[DESCRIPTOR_SIZE] dbDescriptor;\nint dbPixelsPerKeypoint = sizeofEncodedKeypoint(dbDescriptorSize, dbExtraSize) / 4;\nfor(int i = 0; i < NUMBER_OF_KEYPOINTS_PER_PASS; i++) {\nint dbKeypointIndex = passId * NUMBER_OF_KEYPOINTS_PER_PASS + i;\nKeypointAddress dbAddress = KeypointAddress(dbKeypointIndex * dbPixelsPerKeypoint, 0);\nKeypoint dbKeypoint = decodeKeypoint(dbEncodedKeypoints, dbEncoderLength, dbAddress);\ndbDescriptor = readKeypointDescriptor(dbEncodedKeypoints, dbDescriptorSize, dbExtraSize, dbEncoderLength, dbAddress);\nint dist = !isBadKeypoint(dbKeypoint) ? distanceBetweenKeypointDescriptors(descriptor, dbDescriptor) : INFINITE_DISTANCE;\nbestMatch.index = all(bvec2(\ndist < bestMatch.dist || (dist == bestMatch.dist && dbKeypointIndex > bestMatch.index),\ndist > filterMatch.dist || (dist == filterMatch.dist && dbKeypointIndex < filterMatch.index)\n)) ? dbKeypointIndex : bestMatch.index;\nbestMatch.dist = dbKeypointIndex == bestMatch.index ? dist : bestMatch.dist;\n}\ncolor = encodeKeypointMatch(bestMatch);\n}"
  3808. /***/ }),
  3809. /***/ 4868:
  3810. /***/ ((module) => {
  3811. module.exports = "@include \"keypoints.glsl\"\nuniform int imageWidth;\nuniform int imageHeight;\nuniform int borderTop;\nuniform int borderRight;\nuniform int borderBottom;\nuniform int borderLeft;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nvec2 p = keypoint.position;\nbool withinBorder = any(lessThan(\nvec4(p.x, p.y, -p.x, -p.y),\nvec4(borderLeft, borderTop, borderRight - (imageWidth - 1), borderBottom - (imageHeight - 1))\n));\nvec4 pixel = threadPixel(encodedKeypoints);\nvec4 nullPixel = encodeNullKeypoint();\ncolor = withinBorder ? nullPixel : pixel;\n}"
  3812. /***/ }),
  3813. /***/ 5591:
  3814. /***/ ((module) => {
  3815. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\nvoid main()\n{\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress address = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nvec4 pixel = readKeypointData(encodedKeypoints, encoderLength, address);\ncolor = index < maxKeypoints ? pixel : encodeNullKeypoint();\n}"
  3816. /***/ }),
  3817. /***/ 191:
  3818. /***/ ((module) => {
  3819. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform float threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nvec2 delta = keypointA.position - keypointB.position;\nbool shouldKeep = (dot(delta, delta) <= threshold * threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
  3820. /***/ }),
  3821. /***/ 5467:
  3822. /***/ ((module) => {
  3823. module.exports = "@include \"float16.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\n#ifndef MAX_ITERATIONS\n#error Undefined MAX_ITERATIONS\n#endif\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\n#define encodeSkipOffset(offset) (vec2((offset) & 255, (offset) >> 8) / 255.0f)\nvoid main()\n{\nvec4 pixel = threadPixel(offsetsImage);\nivec2 thread = threadLocation();\nint rasterIndex = thread.y * imageSize.x + thread.x;\nint offset = decodeSkipOffset(pixel);\nint totalOffset = offset;\nvec2 encodedScore = pixel.rb;\nivec2 pos = thread; int allow = 1;\n@unroll\nfor(int i = 0; i < MAX_ITERATIONS; i++) {\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb));\nrasterIndex += allow * offset;\npos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = pixelAt(offsetsImage, pos);\noffset = decodeSkipOffset(pixel);\ntotalOffset += allow * offset;\n}\ntotalOffset = min(totalOffset, 65535);\ncolor.rb = encodedScore;\ncolor.ga = encodeSkipOffset(totalOffset);\n}"
  3824. /***/ }),
  3825. /***/ 336:
  3826. /***/ ((module) => {
  3827. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform ivec2 imageSize;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nivec2 pos = threadLocation();\nvec2 encodedScore = pixel.rb;\nint offset = 0, allow = 1, jumped = 0;\n#define READ(j) ; \\\nallow *= int(pos.y < imageSize.y) * int(isEncodedFloat16Zero(pixel.rb)); \\\noffset += allow; \\\npos.x = (pos.x + 1) % imageSize.x; \\\npos.y += int(pos.x == 0); \\\npixel = (0 != (jumped |= int(pos.x == 0))) ? pixelAtShortOffset(corners, ivec2((j),1)) : pixelAtShortOffset(corners, ivec2((j),0))\nREAD(1); READ(2); READ(3); READ(4); READ(5); READ(6); READ(7);\ncolor.rb = encodedScore;\ncolor.ga = vec2(offset, 0) / 255.0f;\n}"
  3828. /***/ }),
  3829. /***/ 8968:
  3830. /***/ ((module) => {
  3831. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D offsetsImage;\nuniform ivec2 imageSize;\nuniform int passId;\nuniform int numPasses;\nuniform int keypointLimit;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define decodeSkipOffset(pixel) (int((pixel).g * 255.0f) | (int((pixel).a * 255.0f) << 8))\nbool findQthKeypoint(int q, int p, inout ivec2 position, out vec4 pixel)\n{\nint notFirstPass = int(passId > 0);\nposition *= notFirstPass;\np |= -(1 - notFirstPass);\np -= notFirstPass;\nint rasterIndex = position.y * imageSize.x + position.x;\nwhile(position.y < imageSize.y && p != q) {\nposition = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\npixel = texelFetch(offsetsImage, position, 0);\np += int(!isEncodedFloat16Zero(pixel.rb));\nrasterIndex += max(1, decodeSkipOffset(pixel));\n}\nreturn (p == q);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = vec4(0.0f);\nif(address.offset != 0)\nreturn;\ncolor = threadPixel(encodedKeypoints);\nint numPixels = encoderLength * encoderLength;\nint maxKeypoints = numPixels / pixelsPerKeypoint;\nint maxKeypointsPerPass = maxKeypoints / numPasses + int(maxKeypoints % numPasses != 0);\nint targetPassId = q / maxKeypointsPerPass;\nif(passId != targetPassId)\nreturn;\nint lastIndexFromPrevPass = passId * maxKeypointsPerPass - 1;\nKeypointAddress lastAddressFromPrevPass = KeypointAddress(max(0, lastIndexFromPrevPass) * pixelsPerKeypoint, 0);\nKeypoint lastKeypointFromPrevPass = decodeKeypoint(encodedKeypoints, encoderLength, lastAddressFromPrevPass);\nivec2 position = passId > 0 ? ivec2(lastKeypointFromPrevPass.position) : ivec2(0);\nvec4 pixel;\ncolor = encodeNullKeypoint();\nif(q >= min(maxKeypoints, keypointLimit) || !findQthKeypoint(q, lastIndexFromPrevPass, position, pixel))\nreturn;\ncolor = encodeKeypointPosition(vec2(position));\n}"
  3832. /***/ }),
  3833. /***/ 1733:
  3834. /***/ ((module) => {
  3835. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(encodedKeypoints);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint q = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec4 kpix = pixelAt(corners, ivec2(keypoint.position));\nkeypoint.score = decodeFloat16(kpix.rb);\ncolor.r = kpix.a;\ncolor.g = encodeKeypointOrientation(0.0f);\ncolor.ba = encodeKeypointScore(keypoint.score);\n}"
  3836. /***/ }),
  3837. /***/ 9674:
  3838. /***/ ((module) => {
  3839. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D corners;\nuniform mediump usampler2D lookupTable;\nuniform int stride;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int encoderCapacity;\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nivec2 pos = ivec2(index % stride, index / stride);\nuvec4 entry = texelFetch(lookupTable, pos, 0);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nint rasterIndex = address.base + address.offset;\nint numberOfPixels = encoderLength * encoderLength;\nint numberOfValidPixels = numberOfPixels - (numberOfPixels % pixelsPerKeypoint);\nint maxEncoderCapacity = numberOfValidPixels / pixelsPerKeypoint;\ncolor = encodeNullKeypoint();\nif(all(equal(entry.xy, NULL_ELEMENT)) || index >= min(encoderCapacity, maxEncoderCapacity))\nreturn;\ncolor = encodeKeypointPosition(vec2(entry.xy));\nif(address.offset == 0)\nreturn;\ncolor = vec4(0.0f);\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nvec4 pixel = texelFetch(corners, ivec2(entry.xy), 0);\nvec2 encodedScore = encodeKeypointScore(decodeFloat16(pixel.rb));\nfloat encodedOrientation = encodeKeypointOrientation(0.0f);\nfloat encodedLod = pixel.a;\ncolor = vec4(encodedLod, encodedOrientation, encodedScore);\n}"
  3840. /***/ }),
  3841. /***/ 2090:
  3842. /***/ ((module) => {
  3843. module.exports = "@include \"keypoints.glsl\"\nvoid main()\n{\ncolor = encodeNullKeypoint();\n}"
  3844. /***/ }),
  3845. /***/ 1855:
  3846. /***/ ((module) => {
  3847. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lod;\nuniform int threshold;\n#define USE_VARYINGS 1\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nin vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nfloat pixel = threadPixel(pyramid).g;\nvec4 prev = threadPixel(corners);\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\nfloat ct = pixel + t, c_t = pixel - t;\ncolor = vec4(prev.r, pixel, prev.ba);\n#if FAST_TYPE == 916\nconst ivec4 margin = ivec4(3, 3, 4, 4);\nif(any(lessThan(ivec4(thread, size - thread), margin)))\nreturn;\n#if USE_VARYINGS\nfloat p0 = XIP(v_pix0), p4 = XIP(v_pix4), p8 = XIP(v_pix8), p12 = XIP(v_pix12);\n#else\nfloat p0 = PIX(0,3), p4 = PIX(3,0), p8 = PIX(0,-3), p12 = PIX(-3,0);\n#endif\nbvec4 brighter = bvec4(p0 > ct, p4 > ct, p8 > ct, p12 > ct);\nbvec4 darker = bvec4(p0 < c_t, p4 < c_t, p8 < c_t, p12 < c_t);\nbvec4 bpairs = bvec4(all(brighter.xy), all(brighter.yz), all(brighter.zw), all(brighter.wx));\nbvec4 dpairs = bvec4(all(darker.xy), all(darker.yz), all(darker.zw), all(darker.wx));\nif(!(any(bpairs) || any(dpairs)))\nreturn;\n#if USE_VARYINGS\nfloat p1 = XIP(v_pix1), p2 = XIP(v_pix2), p3 = XIP(v_pix3),\np5 = XIP(v_pix5), p6 = XIP(v_pix6), p7 = XIP(v_pix7),\np9 = XIP(v_pix9), p10 = XIP(v_pix10), p11 = XIP(v_pix11),\np13 = XIP(v_pix13), p14 = XIP(v_pix14), p15 = XIP(v_pix15);\n#else\nfloat p1 = PIX(1,3), p2 = PIX(2,2), p3 = PIX(3,1),\np5 = PIX(3,-1), p6 = PIX(2,-2), p7 = PIX(1,-3),\np9 = PIX(-1,-3), p10 = PIX(-2,-2), p11 = PIX(-3,-1),\np13 = PIX(-3,1), p14 = PIX(-2,2), p15 = PIX(-1,3);\n#endif\nbool A=(p0>ct),B=(p1>ct),C=(p2>ct),D=(p3>ct),E=(p4>ct),F=(p5>ct),G=(p6>ct),H=(p7>ct),I=(p8>ct),J=(p9>ct),K=(p10>ct),L=(p11>ct),M=(p12>ct),N=(p13>ct),O=(p14>ct),P=(p15>ct),a=(p0<c_t),b=(p1<c_t),c=(p2<c_t),d=(p3<c_t),e=(p4<c_t),f=(p5<c_t),g=(p6<c_t),h=(p7<c_t),i=(p8<c_t),j=(p9<c_t),k=(p10<c_t),l=(p11<c_t),m=(p12<c_t),n=(p13<c_t),o=(p14<c_t),p=(p15<c_t);\nbool 
isCorner=A&&(B&&(K&&L&&J&&(M&&N&&O&&P||G&&H&&I&&(M&&N&&O||F&&(M&&N||E&&(M||D))))||C&&(K&&L&&M&&(N&&O&&P||G&&H&&I&&J&&(N&&O||F&&(N||E)))||D&&(N&&(L&&M&&(K&&G&&H&&I&&J&&(O||F)||O&&P)||k&&l&&m&&e&&f&&g&&h&&i&&j)||E&&(O&&(M&&N&&(K&&L&&G&&H&&I&&J||P)||k&&l&&m&&n&&f&&g&&h&&i&&j)||F&&(P&&(N&&O||k&&l&&m&&n&&o&&g&&h&&i&&j)||G&&(O&&P||H&&(P||I)||k&&l&&m&&n&&o&&p&&h&&i&&j)||k&&l&&m&&n&&o&&h&&i&&j&&(p||g))||k&&l&&m&&n&&h&&i&&j&&(o&&(p||g)||f&&(o&&p||g)))||k&&l&&m&&h&&i&&j&&(n&&(o&&p||g&&(o||f))||e&&(n&&o&&p||g&&(n&&o||f))))||k&&l&&h&&i&&j&&(m&&(n&&o&&p||g&&(n&&o||f&&(n||e)))||d&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e)))))||k&&h&&i&&j&&(l&&(m&&n&&o&&p||g&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d))))))||K&&I&&J&&(L&&M&&N&&O&&P||G&&H&&(L&&M&&N&&O||F&&(L&&M&&N||E&&(L&&M||D&&(L||C)))))||h&&i&&j&&(b&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c)))))||k&&(l&&m&&n&&o&&p||g&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c)))))))||B&&(H&&I&&J&&(K&&L&&M&&N&&O&&P&&a||G&&(K&&L&&M&&N&&O&&a||F&&(K&&L&&M&&N&&a||E&&(K&&L&&M&&a||D&&(K&&L&&a||C)))))||a&&k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||C&&(K&&H&&I&&J&&(L&&M&&N&&O&&P&&a&&b||G&&(L&&M&&N&&O&&a&&b||F&&(L&&M&&N&&a&&b||E&&(L&&M&&a&&b||D))))||a&&b&&k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d)))))||D&&(K&&L&&H&&I&&J&&(M&&N&&O&&P&&a&&b&&c||G&&(M&&N&&O&&a&&b&&c||F&&(M&&N&&a&&b&&c||E)))||a&&b&&k&&l&&m&&c&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e))))||E&&(K&&L&&M&&H&&I&&J&&(N&&O&&P&&a&&b&&c&&d||G&&(N&&O&&a&&b&&c&&d||F))||a&&b&&l&&m&&n&&c&&d&&(k&&g&&h&&i&&j&&(o||f)||o&&p))||F&&(K&&L&&M&&N&&H&&I&&J&&(O&&P&&a&&b&&c&&d&&e||G)||a&&b&&m&&n&&o&&c&&d&&e&&(k&&l&&g&&h&&i&&j||p))||G&&(K&&L&&M&&N&&O&&H&&I&&J||a&&b&&n&&o&&p&&c&&d&&e&&f)||H&&(K&&L&&M&&N&&O&&P&&I&&J||a&&b&&o&&p&&c&&d&&e&&f&&g)||a&&(b&&(k&&l&&j&&(m&&n&&o&&p||g&&h&&i&&(m&&n&&o||f&&(m&&n||e&&(m||d))))||c&&(k&&l&&m&&(n&&o&&p||g&&h&&i&&j&&(n&&o||f&&(n||e)))||d&&(l&&m&&n&&(k&&g&&h&&i&&j&&(o||f)||o&&p)||e&&(m&&n&&o&&(k&&l&&g&&h&&i&&j||p)||f&&(n&&o&&p||g&&(o&&p||h&&(p||i)))))))||k&&i&&j&&(l&&m&&n&&o&&p||g&&h&&(l&&m&&n&&o||f&&(l&&m&&n||e&&(l&&m||d&&(l||c))))))||h&&i&&j&&(k&&l&&m&&n&&o&&p||g&&(k&&l&&m&&n&&o||f&&(k&&l&&m&&n||e&&(k&&l&&m||d&&(k&&l||c&&(b||k))))));\nif(!isCorner)\nreturn;\nmat4 mp = mat4(p0,p1,p2,p3,p4,p5,p6,p7,p8,p9,p10,p11,p12,p13,p14,p15);\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nfloat thisScore = max(dot(bs, ones), dot(ds, ones)) / 16.0f;\nfloat prevScore = decodeFloat16(prev.rb);\nvec3 thisResult = vec3(encodeFloat16(thisScore), encodeLod(lod));\ncolor.rba = thisScore > prevScore ? thisResult : color.rba;\n#endif\n}"
  3848. /***/ }),
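The huge boolean expression above is an unrolled FAST-9,16 test: a pixel is a corner if at least 9 contiguous pixels on the 16-pixel circle around it are all brighter than center + t or all darker than center − t, and its score is the average amount by which the circle pixels exceed the threshold. A compact, non-unrolled version of the same decision:

// FAST-9,16 corner test. circle: the 16 pixel values sampled around the candidate,
// in order around the circle; center: the candidate's value; t: threshold (all in [0, 1]).
function isFastCorner(circle, center, t) {
  const ct = center + t, c_t = center - t;
  for (const test of [(p) => p > ct, (p) => p < c_t]) {
    let run = 0;
    // walk the circle twice so that runs wrapping around the end are counted
    for (let i = 0; i < 32; i++) {
      run = test(circle[i % 16]) ? run + 1 : 0;
      if (run >= 9) return true;
    }
  }
  return false;
}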
  3849. /***/ 4824:
  3850. /***/ ((module) => {
  3851. module.exports = "uniform mediump float lod;\n#if !defined(FAST_TYPE)\n#error Undefined FAST_TYPE\n#elif FAST_TYPE == 916\nout vec2 v_pix0, v_pix1, v_pix2, v_pix3, v_pix4, v_pix5, v_pix6, v_pix7,\nv_pix8, v_pix9, v_pix10,v_pix11,v_pix12,v_pix13,v_pix14,v_pix15;\n#else\n#error Invalid FAST_TYPE\n#endif\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\n#if FAST_TYPE == 916\nv_pix0 = PIX(0,3); v_pix1 = PIX(1,3), v_pix2 = PIX(2,2), v_pix3 = PIX(3,1);\nv_pix4 = PIX(3,0); v_pix5 = PIX(3,-1), v_pix6 = PIX(2,-2), v_pix7 = PIX(1,-3);\nv_pix8 = PIX(0,-3); v_pix9 = PIX(-1,-3), v_pix10 = PIX(-2,-2), v_pix11 = PIX(-3,-1);\nv_pix12 = PIX(-3,0); v_pix13 = PIX(-3,1), v_pix14 = PIX(-2,2), v_pix15 = PIX(-1,3);\n#endif\n}"
  3852. /***/ }),
  3853. /***/ 2381:
  3854. /***/ ((module) => {
  3855. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D encodedKeypointsA;\nuniform int encoderLengthA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int threshold;\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nvec4 data = readKeypointData(encodedKeypointsA, encoderLengthA, address);\ncolor = data;\nif(address.offset >= sizeofEncodedKeypointHeader() / 4)\nreturn;\nKeypoint keypointA = decodeKeypoint(encodedKeypointsA, encoderLengthA, address);\nKeypoint keypointB = decodeKeypoint(encodedKeypointsB, encoderLengthB, address);\ncolor = encodeNullKeypoint();\nif(isNullKeypoint(keypointA) && isNullKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isDiscardedKeypoint(keypointA) || isDiscardedKeypoint(keypointB))\nreturn;\ncolor = encodeDiscardedKeypoint();\nif(isNullKeypoint(keypointA) || isNullKeypoint(keypointB))\nreturn;\nuint[DESCRIPTOR_SIZE] descriptorA, descriptorB;\ndescriptorA = readKeypointDescriptor(encodedKeypointsA, descriptorSize, extraSize, encoderLengthA, address);\ndescriptorB = readKeypointDescriptor(encodedKeypointsB, descriptorSize, extraSize, encoderLengthB, address);\nint dist = distanceBetweenKeypointDescriptors(descriptorA, descriptorB);\nbool shouldKeep = (dist <= threshold);\ncolor = shouldKeep ? data : encodeDiscardedKeypoint();\n}"
  3856. /***/ }),
  3857. /***/ 6060:
  3858. /***/ ((module) => {
  3859. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform sampler2D maxScore;\nuniform float quality;\nvoid main()\n{\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat maxval = decodeFloat16(threadPixel(maxScore).rb);\nfloat threshold = maxval * clamp(quality, 0.0f, 1.0f);\ncolor = pixel;\ncolor.rb = score >= threshold ? color.rb : encodeFloat16(0.0f);\n}"
  3860. /***/ }),
  3861. /***/ 9974:
  3862. /***/ ((module) => {
  3863. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(WINDOW_SIZE)\n#error Undefined WINDOW_SIZE\n#endif\n#define WINDOW_RADIUS ((WINDOW_SIZE - 1) / 2)\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform sampler2D derivatives;\nuniform float lod;\nuniform float lodStep;\nuniform float gaussian[@WINDOW_SIZE@];\n#define G(x) gaussian[(x) + WINDOW_RADIUS]\n#define W(x,y) (G(x) * G(y))\n#define H(ox,oy) dpix = pixelAtShortOffset(derivatives, ivec2((ox),(oy))); \\\ndf = (1.0f + lod) * decodePairOfFloat16(dpix); \\\nh += vec3(df.x * df.x, df.x * df.y, df.y * df.y) * W((ox),(oy))\nvoid main()\n{\nfloat intensity = 0.0f;\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nvec4 dpix = vec4(0.0f);\nvec2 df = vec2(0.0f);\nvec3 h = vec3(0.0f);\ncolor = pixel;\n#if WINDOW_SIZE == 1\nH(0,0);\n#elif WINDOW_SIZE == 3\nH(-1,-1); H(0,-1); H(1,-1);\nH(-1,0); H(0,0); H(1,0);\nH(-1,1); H(0,1); H(1,1);\n#elif WINDOW_SIZE == 5\nH(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2);\nH(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1);\nH(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0);\nH(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1);\nH(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2);\n#elif WINDOW_SIZE == 7\nH(-3,-3); H(-2,-3); H(-1,-3); H(0,-3); H(1,-3); H(2,-3); H(3,-3);\nH(-3,-2); H(-2,-2); H(-1,-2); H(0,-2); H(1,-2); H(2,-2); H(3,-2);\nH(-3,-1); H(-2,-1); H(-1,-1); H(0,-1); H(1,-1); H(2,-1); H(3,-1);\nH(-3,0); H(-2,0); H(-1,0); H(0,0); H(1,0); H(2,0); H(3,0);\nH(-3,1); H(-2,1); H(-1,1); H(0,1); H(1,1); H(2,1); H(3,1);\nH(-3,2); H(-2,2); H(-1,2); H(0,2); H(1,2); H(2,2); H(3,2);\nH(-3,3); H(-2,3); H(-1,3); H(0,3); H(1,3); H(2,3); H(3,3);\n#else\n#error Invalid WINDOW_SIZE\n#endif\nfloat response = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= float(WINDOW_SIZE * WINDOW_SIZE);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep);\nfloat currentScaleStrength = abs(laplacian(pyramid, vec2(thread), lod));\nfloat previousScaleStrength = abs(laplacian(pyramid, vec2(thread), lodPlus));\nfloat previousResponse = decodeFloat16(pixel.rb);\nvec4 result = vec4(encodeFloat16(response), encodeLod(lod), intensity);\ncolor.rbag = (currentScaleStrength >= previousScaleStrength || previousResponse == 0.0f) ? result : pixel.rbag;\n}"
  3864. /***/ }),
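The corner response here is the smaller eigenvalue of the 2x2 structure matrix [[a, b], [b, c]] built from windowed products of image derivatives (the Shi-Tomasi "good features to track" score): λmin = (a + c − sqrt((a − c)² + 4b²)) / 2. In JS:

// Smaller eigenvalue of the symmetric 2x2 matrix [[a, b], [b, c]],
// where a = sum(Ix*Ix), b = sum(Ix*Iy), c = sum(Iy*Iy) over the window.
function cornerResponse(a, b, c) {
  return 0.5 * (a + c - Math.sqrt((a - c) * (a - c) + 4 * b * b));
}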
  3865. /***/ 3047:
  3866. /***/ ((module) => {
  3867. module.exports = "@include \"keypoint-matches.glsl\"\nvoid main()\n{\n#if ENCODE_FILTERS != 0\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, 0);\n#else\nKeypointMatch initial = KeypointMatch(MATCH_MAX_INDEX, MATCH_MAX_DISTANCE);\n#endif\ncolor = encodeKeypointMatch(initial);\n}"
  3868. /***/ }),
  3869. /***/ 3266:
  3870. /***/ ((module) => {
  3871. module.exports = "@include \"keypoint-matches.glsl\"\nuniform sampler2D encodedMatches;\nuniform sampler2D encodedKthMatches;\nuniform int numberOfMatchesPerKeypoint;\nuniform int kthMatch;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 matcherSize = textureSize(encodedMatches, 0);\nivec2 kthMatcherSize = textureSize(encodedKthMatches, 0);\nint rasterIndex = thread.y * matcherSize.x + thread.x;\nint matchIndex = rasterIndex / numberOfMatchesPerKeypoint;\nint matchCell = rasterIndex % numberOfMatchesPerKeypoint;\ncolor = threadPixel(encodedMatches);\nif(matchCell != kthMatch)\nreturn;\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(matchIndex >= kthMatcherSize.x * kthMatcherSize.y)\nreturn;\nivec2 pos = ivec2(matchIndex % kthMatcherSize.x, matchIndex / kthMatcherSize.x);\ncolor = texelFetch(encodedKthMatches, pos, 0);\n}"
  3872. /***/ }),
  3873. /***/ 8018:
  3874. /***/ ((module) => {
  3875. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform float lodOffset;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat lod = decodeLod(pixel.a);\nfloat lodMinus = max(0.0f, lod - lodStep + lodOffset);\nfloat lodPlus = min(float(PYRAMID_MAX_LEVELS - 1), lod + lodStep + lodOffset);\nfloat lapMinus = laplacian(pyramid, vec2(thread), lodMinus);\nfloat lapPlus = abs(lodPlus - lodMinus) < 1e-5 ? lapMinus : laplacian(pyramid, vec2(thread), lodPlus);\ncolor = encodePairOfFloat16(vec2(lapMinus, lapPlus));\n}"
  3876. /***/ }),
  3877. /***/ 3168:
  3878. /***/ ((module) => {
  3879. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D nextPyramid;\nuniform sampler2D prevPyramid;\nuniform sampler2D encodedFlow;\nuniform sampler2D prevKeypoints;\nuniform int level;\nuniform int depth;\nuniform int numberOfIterations;\nuniform float discardThreshold;\nuniform float epsilon;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef WINDOW_SIZE\n#error Undefined WINDOW_SIZE\n#endif\n#define NEXT_IMAGE 1\n#define PREV_IMAGE 0\nconst int WINDOW_RADIUS = (WINDOW_SIZE - 1) / 2;\nconst int WINDOW_SIZE_SQUARED = (WINDOW_SIZE) * (WINDOW_SIZE);\nconst int WINDOW_SIZE_PLUS = (WINDOW_SIZE) + 2;\nconst int WINDOW_SIZE_PLUS_SQUARED = WINDOW_SIZE_PLUS * WINDOW_SIZE_PLUS;\nconst int DBL_WINDOW_SIZE_PLUS_SQUARED = 2 * WINDOW_SIZE_PLUS_SQUARED;\nconst int WINDOW_RADIUS_PLUS = (WINDOW_SIZE_PLUS - 1) / 2;\nconst highp float FLT_SCALE = 9.5367431640625e-7;\nconst highp float FLT_EPSILON = 0.00000011920929f;\nint pixelBuffer[DBL_WINDOW_SIZE_PLUS_SQUARED];\n#define prevPixel(index) pixelBuffer[(index)]\n#define nextPixel(index) pixelBuffer[WINDOW_SIZE_PLUS_SQUARED + (index)]\n#define pixelIndex(i, j) (((j) + WINDOW_RADIUS_PLUS) * WINDOW_SIZE_PLUS + ((i) + WINDOW_RADIUS_PLUS))\nivec2 derivBuffer[WINDOW_SIZE_SQUARED];\n#define derivativesAt(x, y) derivBuffer[((y) + WINDOW_RADIUS) * WINDOW_SIZE + ((x) + WINDOW_RADIUS)]\nvoid readWindow(vec2 center, float lod)\n{\nconst int r = WINDOW_RADIUS;\nivec2 pyrBaseSize = textureSize(prevPyramid, 0);\nfloat pot = exp2(lod);\nivec2 offset; int idx;\n#define readPixelsAt(ox, oy) offset = ivec2((ox), (oy)); \\\nidx = pixelIndex(offset.x, offset.y); \\\nnextPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(nextPyramid, center, lod, pot, offset, pyrBaseSize).g); \\\nprevPixel(idx) = int(255.0f * pyrSubpixelAtExOffset(prevPyramid, center, lod, pot, offset, pyrBaseSize).g)\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nreadPixelsAt(i-r, j-r);\n}\n}\nint r1 = r+1;\nfor(int k = 0; k < WINDOW_SIZE; k++) {\nreadPixelsAt(-r1, k-r);\nreadPixelsAt( r1, k-r);\nreadPixelsAt(k-r,-r1);\nreadPixelsAt(k-r, r1);\n}\nreadPixelsAt(-r1,-r1);\nreadPixelsAt( r1,-r1);\nreadPixelsAt(-r1, r1);\nreadPixelsAt( r1, r1);\n}\nivec2 computeDerivatives(int imageCode, ivec2 offset)\n{\nconst mat3 dx = mat3(\n3, 0, -3,\n10, 0, -10,\n3, 0, -3\n);\nconst mat3 dy = mat3(\n3, 10, 3,\n0, 0, 0,\n-3, -10, -3\n);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nmat3 window = mat3(\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y-1)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+0)],\n0.0f,\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+0)],\npixelBuffer[indexOffset + pixelIndex(offset.x-1, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+0, offset.y+1)],\npixelBuffer[indexOffset + pixelIndex(offset.x+1, offset.y+1)]\n);\nmat3 fx = matrixCompMult(dx, window);\nmat3 fy = matrixCompMult(dy, window);\nconst vec3 ones = vec3(1.0f);\nreturn ivec2(\ndot(fx[0], ones) + dot(fx[1], ones) + dot(fx[2], ones),\ndot(fy[0], ones) + dot(fy[1], ones) + dot(fy[2], ones)\n);\n}\nint readBufferedPixel(int imageCode, ivec2 offset)\n{\nconst int r = WINDOW_RADIUS;\noffset = clamp(offset, -r, r);\nint indexOffset = imageCode * WINDOW_SIZE_PLUS_SQUARED;\nreturn pixelBuffer[indexOffset + pixelIndex(offset.x, offset.y)];\n}\nint 
readBufferedSubpixel(int imageCode, vec2 offset)\n{\nivec2 p = ivec2(floor(offset));\nvec2 frc = fract(offset);\nvec2 ifrc = vec2(1.0f) - frc;\nvec4 pix = vec4(\nreadBufferedPixel(imageCode, p),\nreadBufferedPixel(imageCode, p + ivec2(1,0)),\nreadBufferedPixel(imageCode, p + ivec2(0,1)),\nreadBufferedPixel(imageCode, p + ivec2(1,1))\n);\nvec4 sub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\nreturn int(0.5f + dot(sub*pix, vec4(1.0f)));\n}\nvec2 computeMismatch(vec2 pyrGuess, vec2 localGuess)\n{\nconst int r = WINDOW_RADIUS;\nint timeDerivative;\nivec2 mismatch = ivec2(0);\nint x, y, _x, _y;\nvec2 d = pyrGuess + localGuess;\n#define innerLoop() \\\nfor(_x = 0; _x < WINDOW_SIZE; _x++) { \\\nx = _x - r; y = _y - r; \\\ntimeDerivative = ( \\\nreadBufferedSubpixel(NEXT_IMAGE, vec2(x, y) + d) - \\\nreadBufferedPixel(PREV_IMAGE, ivec2(x, y)) \\\n); \\\nmismatch += derivativesAt(x, y) * timeDerivative; \\\n}\n@unroll\nfor(_y = 0; _y < WINDOW_SIZE; _y++) {\ninnerLoop();\n}\nreturn vec2(mismatch) * FLT_SCALE;\n}\nbool isInsideImage(vec2 position)\n{\nvec2 imageSize = vec2(textureSize(nextPyramid, 0));\nvec2 border = vec2(WINDOW_SIZE);\nreturn all(bvec4(\ngreaterThanEqual(position, border),\nlessThan(position, imageSize - border)\n));\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedFlow);\nivec2 thread = threadLocation();\nfloat windowArea = float(WINDOW_SIZE * WINDOW_SIZE);\nconst int r = WINDOW_RADIUS;\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(prevKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nvec2 pyrGuess = (level < depth - 1) ? decodePairOfFloat16(pixel) : vec2(0.0f);\npyrGuess *= 2.0f;\nreadWindow(keypoint.position, float(level));\nivec2 derivatives;\nivec3 harris3i = ivec3(0);\nfor(int j = 0; j < WINDOW_SIZE; j++) {\nfor(int i = 0; i < WINDOW_SIZE; i++) {\nderivatives = computeDerivatives(PREV_IMAGE, ivec2(i-r, j-r));\nharris3i += ivec3(\nderivatives.x * derivatives.x,\nderivatives.x * derivatives.y,\nderivatives.y * derivatives.y\n);\nderivativesAt(i-r, j-r) = derivatives;\n}\n}\nhighp vec3 harris = vec3(harris3i) * FLT_SCALE;\nhighp mat2 invHarris = mat2(harris.z, -harris.y, -harris.y, harris.x);\nhighp float det = harris.x * harris.z - harris.y * harris.y;\nhighp float invDet = abs(det) >= FLT_EPSILON ? 1.0f / det : 0.0f;\nhighp float minEigenvalue = 0.5f * ((harris.x + harris.z) - sqrt(\n(harris.x - harris.z) * (harris.x - harris.z) + 4.0f * (harris.y * harris.y)\n));\nint niceNumbers = int(abs(det) >= FLT_EPSILON && minEigenvalue >= discardThreshold * windowArea);\nbool goodKeypoint = (level > 0) || (niceNumbers != 0);\nhighp float eps2 = epsilon * epsilon;\nhighp vec2 mismatch, delta, localGuess = vec2(0.0f);\nfor(int k = 0; k < numberOfIterations; k++) {\nmismatch = niceNumbers != 0 ? computeMismatch(pyrGuess, localGuess) : vec2(0.0f);\ndelta = mismatch * invHarris * invDet;\nniceNumbers *= int(eps2 <= dot(delta, delta));\nlocalGuess += float(niceNumbers) * delta;\n}\nvec2 opticalFlow = pyrGuess + localGuess;\nbool mustDiscard = (level == 0) && any(bvec2(\n!goodKeypoint,\n!isInsideImage(keypoint.position + opticalFlow)\n));\ncolor = !mustDiscard ? 
encodePairOfFloat16(opticalFlow) : encodeDiscardedPairOfFloat16();\n}"
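// The shader above implements pyramidal Lucas-Kanade optical flow: for each keypoint it
// reads a (WINDOW_SIZE+2)^2 window from the previous and next pyramids, builds the spatial
// gradient matrix from Scharr-like derivatives, iteratively solves for the flow increment,
// and discards keypoints whose flow leaves the image or whose gradient matrix is ill-conditioned.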
/***/ }),
/***/ 3890:
/***/ ((module) => {
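// GLSL fragment shader: builds a compact lookup table of detected corner locations.
// STAGE 1 writes one (x, y) entry per corner pixel, STAGE > 1 repeatedly merges 2x2
// groups of blockSize-sized blocks while accumulating corner counts, and STAGE < 1
// renders a debug view of the table.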
  3883. module.exports = "#if @FS_USE_CUSTOM_PRECISION@\nprecision mediump int;\nprecision mediump float;\n#endif\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\n@include \"float16.glsl\"\nuniform sampler2D corners;\n#elif STAGE < 1\nuniform mediump usampler2D lookupTable;\n#else\n#define SKIP_TEXTURE_READS 1\n#define DENSITY_FACTOR 0.10\nuniform mediump usampler2D lookupTable;\nuniform int blockSize;\nuniform int width;\nuniform int height;\nin vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\n#endif\nconst uvec2 NULL_ELEMENT = uvec2(0xFFFFu);\nvoid main()\n{\n#if STAGE == 1\nuvec2 outSize = uvec2(outputSize());\nuvec2 thread = uvec2(threadLocation());\nuvec2 size = uvec2(textureSize(corners, 0));\nuint location = thread.y * outSize.x + thread.x;\nivec2 pos = ivec2(location % size.x, location / size.x);\nvec4 pixel = location < size.x * size.y ? texelFetch(corners, pos, 0) : vec4(0.0f);\nbool isCorner = !isEncodedFloat16Zero(pixel.rb);\ncolor = isCorner ? uvec4(uvec2(pos), 1u, 0u) : uvec4(NULL_ELEMENT, 0u, 0u);\n#elif STAGE > 1\nint dblBlockSize = 2 * blockSize;\nivec2 thread = threadLocation();\nivec2 offset = thread % dblBlockSize;\nivec2 delta = thread - offset;\n#if SKIP_TEXTURE_READS\nif(blockSize >= 8) {\nuint sb = texture(lookupTable, texCoord).z;\nfloat p = max((float(sb) / float(blockSize)) / float(blockSize), DENSITY_FACTOR);\nfloat rowthr = float(dblBlockSize) * p + 3.0f * sqrt(p * (1.0f - p));\ncolor = uvec4(NULL_ELEMENT, 4u * sb, 0u);\nif(offset.y >= max(1, int(ceil(rowthr))))\nreturn;\n}\n#endif\n#define deltaCenter ivec2(0,0)\n#define deltaTop ivec2(0,-blockSize)\n#define deltaTopRight ivec2(blockSize,-blockSize)\n#define deltaRight ivec2(blockSize,0)\n#define deltaBottomRight ivec2(blockSize,blockSize)\n#define deltaBottom ivec2(0,blockSize)\n#define deltaBottomLeft ivec2(-blockSize,blockSize)\n#define deltaLeft ivec2(-blockSize,0)\n#define deltaTopLeft ivec2(-blockSize,-blockSize)\nivec2 boundary = ivec2(width - 1, height - 1) / blockSize;\nivec2 bottomRightPos = thread + deltaBottomRight;\nuvec2 valid = uvec2(\nbottomRightPos.x < width || bottomRightPos.x / blockSize == boundary.x,\nbottomRightPos.y < height || bottomRightPos.y / blockSize == boundary.y\n);\nuvec4 mask[4];\nmask[0] = uvec4(1u, valid.x, valid.y, valid.x * valid.y);\nmask[1] = uvec4(1u, 1u, valid.y, valid.y);\nmask[2] = uvec4(1u, valid.x, 1u, valid.x);\nmask[3] = uvec4(1u);\n#if SKIP_TEXTURE_READS\n#define calcSb(delta) texelFetch(lookupTable, blockSize * ((thread + (delta)) / blockSize), 0).z\nuint center = calcSb(deltaCenter);\nuint top = calcSb(deltaTop);\nuint topRight = calcSb(deltaTopRight);\nuint right = calcSb(deltaRight);\nuint bottomRight = calcSb(deltaBottomRight);\nuint bottom = calcSb(deltaBottom);\nuint bottomLeft = calcSb(deltaBottomLeft);\nuint left = calcSb(deltaLeft);\nuint topLeft = calcSb(deltaTopLeft);\n#else\n#define calcSb(pos) texture(lookupTable, (pos)).z\nuint center = calcSb(v_center);\nuint top = calcSb(v_top);\nuint topRight = calcSb(v_topRight);\nuint right = calcSb(v_right);\nuint bottomRight = calcSb(v_bottomRight);\nuint bottom = calcSb(v_bottom);\nuint bottomLeft = calcSb(v_bottomLeft);\nuint left = calcSb(v_left);\nuint topLeft = calcSb(v_topLeft);\n#endif\nuvec4 sums[4];\nsums[0] = uvec4(center, right, bottom, bottomRight);\nsums[1] = uvec4(left, center, bottomLeft, bottom);\nsums[2] = uvec4(top, topRight, center, right);\nsums[3] = uvec4(topLeft, top, left, center);\nivec2 cmp = 
ivec2(greaterThanEqual(offset, ivec2(blockSize)));\nint option = 2 * cmp.y + cmp.x;\nuvec4 cdef = sums[option] * mask[option];\nuint c2b = cdef.x, d2b = cdef.y, e2b = cdef.z, f2b = cdef.w;\nuint sb = center;\nuint s2b = c2b + d2b + e2b + f2b;\ns2b = s2b < sb ? 0xFFFFu : min(0xFFFFu, s2b);\nuint w2b = uint(min(dblBlockSize, width - delta.x));\nuvec2 uoffset = uvec2(offset);\nuint ceiling = s2b >= uoffset.x ? (s2b - uoffset.x) / w2b + uint((s2b - uoffset.x) % w2b > 0u) : 0u;\ncolor = uvec4(NULL_ELEMENT, s2b, 0u);\nif(uoffset.y >= ceiling)\nreturn;\nuint i2b = uoffset.y * w2b + uoffset.x;\nuint j2b = i2b >= c2b ? i2b - c2b : 0u;\nuint k2b = j2b >= d2b ? j2b - d2b : 0u;\nuint l2b = k2b >= e2b ? k2b - e2b : 0u;\nuint wl = uint(min(blockSize, width - delta.x));\nuint wr = uint(min(blockSize, width - delta.x - blockSize));\nivec2 magicOffset = (\n(i2b < c2b) ? ivec2(i2b % wl, i2b / wl) : (\n(j2b < d2b) ? ivec2(j2b % wr, j2b / wr) + ivec2(blockSize, 0) : (\n(k2b < e2b) ? ivec2(k2b % wl, k2b / wl) + ivec2(0, blockSize) : (\n(l2b < f2b) ? ivec2(l2b % wr, l2b / wr) + ivec2(blockSize) : ivec2(0)\n))));\nuvec2 a2b = texelFetch(lookupTable, delta + magicOffset, 0).xy;\ncolor = uvec4(a2b, s2b, 0u);\n#else\nuvec4 pix = texture(lookupTable, texCoord);\ncolor = all(equal(pix.xy, NULL_ELEMENT)) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
/***/ }),
/***/ 8647:
/***/ ((module) => {
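// Vertex shader, apparently the counterpart of the lookup-table shader above (it outputs the
// same v_* varyings): it precomputes the texture coordinates of the 8 surrounding blocks,
// offset by +/- blockSize texels, for the STAGE >= 1 passes.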
  3887. module.exports = "#if !defined(STAGE) || STAGE < 1\n#error Invalid STAGE\n#else\nuniform mediump int blockSize;\nout vec2 v_topLeft, v_top, v_topRight,\nv_left, v_center, v_right,\nv_bottomLeft, v_bottom, v_bottomRight;\nvoid vsmain()\n{\nfloat b = float(blockSize);\n#define V(x,y) (texCoord + (vec2((x),(y)) * b) / texSize)\nv_topLeft = V(-1,-1); v_top = V(0,-1); v_topRight = V(1,-1);\nv_left = V(-1,0); v_center = V(0,0); v_right = V(1,0);\nv_bottomLeft = V(-1,1); v_bottom = V(0,1); v_bottomRight = V(1,1);\n}\n#endif"
/***/ }),
/***/ 4776:
/***/ ((module) => {
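// GLSL fragment shader: LSH-based descriptor matching. It hashes each keypoint's binary
// descriptor with the configured bit sequences, probes nearby buckets of one hash table
// (the SWAP arrays enumerate bit flips up to LEVEL bits), and keeps the best candidate
// whose distance lies between the current filter and the current best match.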
  3891. module.exports = "@include \"keypoints.glsl\"\n@include \"keypoint-matches.glsl\"\n@include \"keypoint-descriptors.glsl\"\nuniform sampler2D candidates;\nuniform sampler2D filters;\nuniform int matcherLength;\nuniform sampler2D tables;\nuniform sampler2D descriptorDB;\nuniform int tableIndex;\nuniform int bucketCapacity;\nuniform int bucketsPerTable;\nuniform int tablesStride;\nuniform int descriptorDBStride;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if HASH_SIZE > SEQUENCE_MAXLEN\n#error LSH: invalid HASH_SIZE\n#elif SEQUENCE_COUNT * SEQUENCE_MAXLEN * 4 > 16384\n#error LSH: sequences are too large!\n#elif (SEQUENCE_COUNT * SEQUENCE_MAXLEN) % 4 > 0\n#error LSH: sequences of invalid size!\n#endif\nlayout(std140) uniform LSHSequences\n{\nuvec4 sequences[(SEQUENCE_COUNT * SEQUENCE_MAXLEN) / 4];\n};\n#if HASH_SIZE == 10\nconst int SWAP_COUNT[3] = int[3](1, 11, 56);\nconst int[56] SWAP = int[56](0,1,2,4,8,16,32,64,128,256,512,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768);\n#elif HASH_SIZE == 11\nconst int SWAP_COUNT[3] = int[3](1, 12, 67);\nconst int[67] SWAP = int[67](0,1,2,4,8,16,32,64,128,256,512,1024,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536);\n#elif HASH_SIZE == 12\nconst int SWAP_COUNT[3] = int[3](1, 13, 79);\nconst int[79] SWAP = int[79](0,1,2,4,8,16,32,64,128,256,512,1024,2048,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072);\n#elif HASH_SIZE == 13\nconst int SWAP_COUNT[3] = int[3](1, 14, 92);\nconst int[92] SWAP = int[92](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144);\n#elif HASH_SIZE == 14\nconst int SWAP_COUNT[3] = int[3](1, 15, 106);\nconst int[106] SWAP = int[106](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288);\n#elif HASH_SIZE == 15\nconst int SWAP_COUNT[3] = int[3](1, 16, 121);\nconst int[121] SWAP = 
int[121](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576);\n#elif HASH_SIZE == 16\nconst int SWAP_COUNT[3] = int[3](1, 17, 137);\nconst int[137] SWAP = int[137](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152);\n#elif HASH_SIZE == 17\nconst int SWAP_COUNT[3] = int[3](1, 18, 154);\nconst int[154] SWAP = int[154](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304);\n#elif HASH_SIZE == 18\nconst int SWAP_COUNT[3] = int[3](1, 19, 172);\nconst int[172] SWAP = int[172](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608);\n#elif HASH_SIZE == 19\nconst int SWAP_COUNT[3] = int[3](1, 20, 191);\nconst int[191] SWAP = 
int[191](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216);\n#elif HASH_SIZE == 20\nconst int SWAP_COUNT[3] = int[3](1, 21, 211);\nconst int[211] SWAP = int[211](0,1,2,4,8,16,32,64,128,256,512,1024,2048,4096,8192,16384,32768,65536,131072,262144,524288,3,5,6,9,10,12,17,18,20,24,33,34,36,40,48,65,66,68,72,80,96,129,130,132,136,144,160,192,257,258,260,264,272,288,320,384,513,514,516,520,528,544,576,640,768,1025,1026,1028,1032,1040,1056,1088,1152,1280,1536,2049,2050,2052,2056,2064,2080,2112,2176,2304,2560,3072,4097,4098,4100,4104,4112,4128,4160,4224,4352,4608,5120,6144,8193,8194,8196,8200,8208,8224,8256,8320,8448,8704,9216,10240,12288,16385,16386,16388,16392,16400,16416,16448,16512,16640,16896,17408,18432,20480,24576,32769,32770,32772,32776,32784,32800,32832,32896,33024,33280,33792,34816,36864,40960,49152,65537,65538,65540,65544,65552,65568,65600,65664,65792,66048,66560,67584,69632,73728,81920,98304,131073,131074,131076,131080,131088,131104,131136,131200,131328,131584,132096,133120,135168,139264,147456,163840,196608,262145,262146,262148,262152,262160,262176,262208,262272,262400,262656,263168,264192,266240,270336,278528,294912,327680,393216,524289,524290,524292,524296,524304,524320,524352,524416,524544,524800,525312,526336,528384,532480,540672,557056,589824,655360,786432);\n#else\n#error Invalid HASH_SIZE\n#endif\n#if LEVEL < 0 || LEVEL > 2\n#error Invalid LEVEL\n#endif\nconst uint END_OF_LIST = 0xFFFFFFFFu;\nconst int NUMBER_OF_HASHES = SWAP_COUNT[LEVEL];\nuint sequenceElement(int sequenceIndex, int elementIndex)\n{\nint offset = (SEQUENCE_MAXLEN) * sequenceIndex + elementIndex;\nuvec4 tuple = sequences[offset / 4];\nreturn tuple[offset & 3];\n}\nint descriptorHash(uint[DESCRIPTOR_SIZE] descriptor, int sequenceIndex)\n{\nuint bit, b, m;\nint hash = 0;\n@unroll\nfor(int i = 0; i < HASH_SIZE; i++) {\nbit = sequenceElement(sequenceIndex, i);\nb = bit >> 3u;\nm = 1u << (bit & 7u);\nhash = (hash << 1) | int((descriptor[b] & m) != 0u);\n}\nreturn hash;\n}\n#define readTableData(tables, tablesStride, rasterIndex) decodeUint32(texelFetch((tables), ivec2((rasterIndex) % (tablesStride), (rasterIndex) / (tablesStride)), 0))\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * matcherLength;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeKeypointMatch(MATCH_NOT_FOUND);\nif(isBadKeypoint(keypoint))\nreturn;\nKeypointMatch candidate = 
decodeKeypointMatch(threadPixel(candidates));\nKeypointMatch mfilter = decodeKeypointMatch(threadPixel(filters));\nuint[DESCRIPTOR_SIZE] candidateDescriptor;\nuint[DESCRIPTOR_SIZE] descriptor = readKeypointDescriptor(encodedKeypoints, descriptorSize, extraSize, encoderLength, address);\nint hash0 = descriptorHash(descriptor, tableIndex);\nfor(int h = 0; h < NUMBER_OF_HASHES; h++) {\nint hash = hash0 ^ SWAP[h];\nint tableAddress = tableIndex * bucketsPerTable * bucketCapacity;\nint bucketAddress = tableAddress + hash * bucketCapacity;\nbool validEntry = true;\nfor(int b = 0; b < bucketCapacity; b++) {\nint entryAddress = bucketAddress + b;\nuint entry = validEntry ? readTableData(tables, tablesStride, entryAddress) : END_OF_LIST;\nvalidEntry = (validEntry && entry != END_OF_LIST);\nint candidateIndex = int(entry);\ncandidateDescriptor = readKeypointDescriptorFromDB(descriptorDB, descriptorDBStride, validEntry ? candidateIndex : -1);\nint descriptorDistance = distanceBetweenKeypointDescriptors(descriptor, candidateDescriptor);\nKeypointMatch match = KeypointMatch(candidateIndex, descriptorDistance);\nbool betterThanCandidate = (match.dist < candidate.dist) || (match.dist == candidate.dist && match.index > candidate.index);\nbool worseThanFilter = (match.dist > mfilter.dist) || (match.dist == mfilter.dist && match.index < mfilter.index);\nbool nicerMatch = (validEntry && betterThanCandidate && worseThanFilter);\nivec2 v = nicerMatch ? ivec2(match.index, match.dist) : ivec2(candidate.index, candidate.dist);\ncandidate = KeypointMatch(v.x, v.y);\n}\n}\ncolor = encodeKeypointMatch(candidate);\n}"
/***/ }),
/***/ 2648:
/***/ ((module) => {
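// GLSL fragment shader used to merge and compact encoded keypoint streams: STAGE 1
// concatenates two keypoint textures, STAGE 2 tags each entry with a validity bit,
// STAGE 3 runs a block-doubling parallel scan that packs valid entries together,
// STAGE 4 gathers the keypoint data in the new order, and STAGE 5 is a debug view.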
  3895. module.exports = "@include \"keypoints.glsl\"\n@include \"int32.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypointsA;\nuniform sampler2D encodedKeypointsB;\nuniform int encoderLengthA;\nuniform int encoderLengthB;\nuniform int encoderCapacityA;\nuniform int encoderCapacityB;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxKeypoints;\n#elif STAGE == 3\nuniform sampler2D array;\nuniform int blockSize;\n#elif STAGE == 4\nuniform sampler2D array;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 5\nuniform sampler2D array;\n#else\n#error Invalid STAGE\n#endif\n#define NULL_KEYPOINT_INDEX 0xFFFF\nconst highp uint UNIT = 0x10000u;\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\nint newKeypointIndex = keypointIndex < encoderCapacityA ? keypointIndex : keypointIndex - encoderCapacityA;\ncolor = encodeNullKeypoint();\nif(newKeypointIndex >= max(encoderCapacityA, encoderCapacityB))\nreturn;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\naddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\nvec4 dataA = readKeypointData(encodedKeypointsA, encoderLengthA, addr);\nvec4 dataB = readKeypointData(encodedKeypointsB, encoderLengthB, addr);\ncolor = keypointIndex < encoderCapacityA ? dataA : dataB;\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint keypointIndex = thread.y * outputSize().x + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, addr);\nbool isValid = !isNullKeypoint(keypoint) && keypointIndex < maxKeypoints;\nkeypointIndex = isValid ? keypointIndex : NULL_KEYPOINT_INDEX;\ncolor = encodeUint32(uint(keypointIndex & 0xFFFF) | (isValid ? UNIT : 0u));\n#elif STAGE == 3\nivec2 thread = threadLocation();\nivec2 size = outputSize();\nint arrayLength = size.x * size.y;\nint arrayIndex = thread.y * size.x + thread.x;\nint arrayIndexLeft = arrayIndex - blockSize;\nint arrayIndexRight = arrayIndex + blockSize;\nint mask = int(arrayIndexRight < arrayLength || arrayIndexRight / blockSize == (arrayLength - 1) / blockSize);\narrayIndexLeft = max(0, arrayIndexLeft);\narrayIndexRight = min(arrayLength - 1, arrayIndexRight);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nuvec3 entries32 = uvec3(\ndecodeUint32(threadPixel(array)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexLeft), 0)),\ndecodeUint32(texelFetch(array, raster2pos(arrayIndexRight), 0))\n);\nivec3 sb = ivec3((entries32 >> 16u) & 0xFFFFu);\nsb.z *= mask;\nint dblBlockSize = 2 * blockSize;\nint offset = arrayIndex % dblBlockSize;\nint s2b = sb.x + (offset < blockSize ? sb.z : sb.y);\nint l2b = offset < blockSize ? 
sb.x : sb.y;\nuint keypointIndex = entries32.x & 0xFFFFu;\nuint shiftedS2b = uint(s2b) << 16u;\ncolor = encodeUint32(uint(NULL_KEYPOINT_INDEX) | shiftedS2b);\nif(offset >= s2b)\nreturn;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\nif(offset < l2b)\nreturn;\nvec4 entry = texelFetch(array, raster2pos(arrayIndex + blockSize - l2b), 0);\nkeypointIndex = decodeUint32(entry) & 0xFFFFu;\ncolor = encodeUint32(keypointIndex | shiftedS2b);\n#elif STAGE == 4\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress addr = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint keypointIndex = findKeypointIndex(addr, descriptorSize, extraSize);\n#define raster2pos(k) ivec2((k) % size.x, (k) / size.x)\nivec2 size = textureSize(array, 0);\nuint sortedPair = decodeUint32(texelFetch(array, raster2pos(keypointIndex), 0));\nint newKeypointIndex = int(sortedPair & 0xFFFFu);\ncolor = encodeNullKeypoint();\nif(newKeypointIndex == NULL_KEYPOINT_INDEX || keypointIndex >= size.x * size.y)\nreturn;\nKeypointAddress newAddr = KeypointAddress(newKeypointIndex * pixelsPerKeypoint, addr.offset);\ncolor = readKeypointData(encodedKeypoints, encoderLength, newAddr);\n#elif STAGE == 5\nuint val = decodeUint32(threadPixel(array));\ncolor = (val & 0xFFFFu) == uint(NULL_KEYPOINT_INDEX) ? vec4(0,1,1,1) : vec4(1,0,0,1);\n#endif\n}"
/***/ }),
/***/ 8825:
/***/ ((module) => {
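// GLSL fragment shader: non-maximum suppression across scales. Each corner is compared
// against its 3x3 neighborhoods at the adjacent pyramid levels (lod +/- lodStep), using
// Laplacian strengths when USE_LAPLACIAN is set, and its score is kept only if it is at
// least as strong as all such neighbors.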
  3899. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\n@include \"filters.glsl\"\n#if !defined(USE_LAPLACIAN)\n#error Undefined USE_LAPLACIAN\n#endif\nuniform sampler2D corners;\nuniform sampler2D pyramid;\nuniform float lodStep;\n#if USE_LAPLACIAN\nuniform sampler2D pyrLaplacian;\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat score = decodeFloat16(pixel.rb);\nfloat myEncodedLod = pixel.a;\nfloat lod = decodeLod(myEncodedLod);\nfloat lodPlus = lod + lodStep;\nfloat lodMinus = lod - lodStep;\nfloat pot = exp2(lod);\nfloat potPlus = exp2(lodPlus);\nfloat potMinus = exp2(lodMinus);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#define P(p,u,v) textureLod(corners, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nvec4 pix[18];\n#define D(u,v) P(potMinus,(u),(v))\npix[0] = D(-1,-1); pix[1] = D(0,-1); pix[2] = D(1,-1);\npix[3] = D(-1,0); pix[4] = D(0,0); pix[5] = D(1,0);\npix[6] = D(-1,1); pix[7] = D(0,1); pix[8] = D(1,1);\n#define U(u,v) P(potPlus,(u),(v))\npix[9] = U(-1,-1); pix[10] = U(0,-1); pix[11] = U(1,-1);\npix[12] = U(-1,0); pix[13] = U(0,0); pix[14] = U(1,0);\npix[15] = U(-1,1); pix[16] = U(0,1); pix[17] = U(1,1);\nfloat scores[18];\n#define C(j) decodeFloat16(pix[j].rb)\nscores[0] = C(0); scores[1] = C(1); scores[2] = C(2);\nscores[3] = C(3); scores[4] = C(4); scores[5] = C(5);\nscores[6] = C(6); scores[7] = C(7); scores[8] = C(8);\nscores[9] = C(9); scores[10] = C(10); scores[11] = C(11);\nscores[12] = C(12); scores[13] = C(13); scores[14] = C(14);\nscores[15] = C(15); scores[16] = C(16); scores[17] = C(17);\nfloat lods[18];\n#define E(j) decodeLod(pix[j].a)\nlods[0] = E(0); lods[1] = E(1); lods[2] = E(2);\nlods[3] = E(3); lods[4] = E(4); lods[5] = E(5);\nlods[6] = E(6); lods[7] = E(7); lods[8] = E(8);\nlods[9] = E(9); lods[10] = E(10); lods[11] = E(11);\nlods[12] = E(12); lods[13] = E(13); lods[14] = E(14);\nlods[15] = E(15); lods[16] = E(16); lods[17] = E(17);\n#if USE_LAPLACIAN\n#define L(p,u,v) textureLod(pyrLaplacian, texCoord + (p) * vec2((u),(v)) / texSize, 0.0f)\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) abs(decodeFloat16(L(potMinus,(u),(v)).xy))\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) abs(decodeFloat16(L(potPlus,(u),(v)).zw))\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = abs(laplacian(pyramid, vec2(thread), lod));\n#else\n#define L(u,v) (((v)+1)*3 + ((u)+1))\nmat3 strengths[2];\nstrengths[0] = mat3(\n#define Lm(u,v) scores[L((u),(v))]\nLm(-1,-1), Lm(0,-1), Lm(1,-1),\nLm(-1,0), Lm(0,0), Lm(1,0),\nLm(-1,1), Lm(0,1), Lm(1,1)\n);\nstrengths[1] = mat3(\n#define Lp(u,v) scores[9 + L((u),(v))]\nLp(-1,-1), Lp(0,-1), Lp(1,-1),\nLp(-1,0), Lp(0,0), Lp(1,0),\nLp(-1,1), Lp(0,1), Lp(1,1)\n);\nfloat myStrength = score;\n#endif\n#define B(j,lod) float(isSameLod(lods[j], (lod))) * float(scores[j] > 0.0f)\nmat3 nearLod[2];\nnearLod[0] = mat3(\n#define Bm(j) B((j), lodMinus)\nBm(0), Bm(1), Bm(2),\nBm(3), Bm(4), Bm(5),\nBm(6), Bm(7), Bm(8)\n);\nnearLod[1] = mat3(\n#define Bp(j) B((j), lodPlus)\nBp(9), Bp(10), Bp(11),\nBp(12), Bp(13), Bp(14),\nBp(15), Bp(16), Bp(17)\n);\nmat3 upStrengths = matrixCompMult(strengths[1], nearLod[1]);\nmat3 downStrengths = matrixCompMult(strengths[0], nearLod[0]);\nvec3 maxUpStrength3 = max(upStrengths[0], max(upStrengths[1], upStrengths[2]));\nvec3 maxDownStrength3 = max(downStrengths[0], max(downStrengths[1], 
downStrengths[2]));\nvec3 maxStrength3 = max(maxUpStrength3, maxDownStrength3);\nfloat maxStrength = max(maxStrength3.x, max(maxStrength3.y, maxStrength3.z));\ncolor.rb = encodeFloat16(score * step(maxStrength, myStrength));\n}"
/***/ }),
/***/ 5693:
/***/ ((module) => {
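// GLSL fragment shader: non-maximum suppression at the corner's own scale. It discards
// responses that are not near the center of their 2^lod x 2^lod cell, then keeps the score
// only if it is >= the scores of the 8 neighbors sampled 2^lod pixels apart at the same lod.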
  3903. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D corners;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = threadPixel(corners);\nfloat encodedLod = pixel.a;\nfloat score = decodeFloat16(pixel.rb);\nfloat lod = decodeLod(encodedLod);\nfloat pot = exp2(lod);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\n#if 1\nvec2 gridSize = vec2(pot);\nvec2 gridLocation = floor(mod(texCoord * texSize, gridSize));\nvec2 gridDelta = gridLocation / gridSize - vec2(0.5f);\nfloat gridStep = 1.0f / pot;\nconst float adjustment = 1.25f;\ncolor.rb = encodeFloat16(0.0f);\nif(max(abs(gridDelta.x), abs(gridDelta.y)) > adjustment * gridStep)\nreturn;\n#endif\n#define P(x,y) textureLod(corners, texCoord + pot * vec2((x), (y)) / texSize, 0.0f)\nvec4 pix[9];\npix[0] = P(-1,-1); pix[1] = P(0,-1); pix[2] = P(1,-1);\npix[3] = P(-1, 0); pix[4] = pixel; pix[5] = P(1, 0);\npix[6] = P(-1, 1); pix[7] = P(0, 1); pix[8] = P(1, 1);\n#define S(j) decodeFloat16(pix[j].rb)\nmat3 scores = mat3(\nS(0), S(1), S(2),\nS(3), S(4), S(5),\nS(6), S(7), S(8)\n);\n#define B(j) float(isSameLod(decodeLod(pix[j].a), lod))\nmat3 sameLod = mat3(\nB(0), B(1), B(2),\nB(3), B(4), B(5),\nB(6), B(7), B(8)\n);\nmat3 sameLodScores = matrixCompMult(scores, sameLod);\nvec3 maxScore3 = max(sameLodScores[0], max(sameLodScores[1], sameLodScores[2]));\nfloat maxScore = max(maxScore3.x, max(maxScore3.y, maxScore3.z));\ncolor.rb = encodeFloat16(score * step(maxScore, score));\n}"
/***/ }),
/***/ 9280:
/***/ ((module) => {
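// GLSL fragment shader: non-maximum suppression over rings of neighbors. The inner
// 8-neighborhood is always checked; when MULTISCALE is nonzero a 16-sample middle ring is
// added and neighbors at lod +/- lodStep also count. The score is zeroed unless it is the
// maximum among all considered samples.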
  3907. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D image;\nuniform float lodStep;\n#if !defined(MULTISCALE)\n#error Must define MULTISCALE\n#elif MULTISCALE != 0\n#define LOD_STEP (lodStep)\n#define USE_MIDDLE_RING\n#else\n#define LOD_STEP (0.0f)\n#endif\n#define PIX(x,y) pixelAtShortOffset(image, ivec2((x),(y)))\n#define L2(v,i) bvec2(isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define L3(v,i) bvec3(isSameEncodedLod(v[i].a, alpha), isSameEncodedLod(v[i].a, alphaMinus), isSameEncodedLod(v[i].a, alphaPlus))\n#define S3(v,i) decodeFloat16(v[i].rb) * float(any(L3(v,i)))\n#define S2(v,i) decodeFloat16(v[i].rb) * float(any(L2(v,i)))\n#define P(i) S3(p,i)\n#define Q(i) S2(q,i)\n#define R(i) S2(r,i)\nconst vec4 O = vec4(0.0f);\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nfloat lod = decodeLod(pixel.a);\nfloat score = decodeFloat16(pixel.rb);\ncolor = pixel;\nif(score == 0.0f)\nreturn;\nvec4 p[8];\np[0] = PIX(0,1); p[1] = PIX(1,1); p[2] = PIX(1,0); p[3] = PIX(1,-1);\np[4] = PIX(0,-1); p[5] = PIX(-1,-1); p[6] = PIX(-1,0); p[7] = PIX(-1,1);\n#ifdef USE_MIDDLE_RING\nvec4 q[16];\nq[0] = PIX(0,2); q[1] = PIX(1,2); q[2] = PIX(2,2); q[3] = PIX(2,1);\nq[4] = PIX(2,0); q[5] = PIX(2,-1); q[6] = PIX(2,-2); q[7] = PIX(1,-2);\nq[8] = PIX(0,-2); q[9] = PIX(-1,-2); q[10] = PIX(-2,-2); q[11] = PIX(-2,-1);\nq[12] = PIX(-2,0); q[13] = PIX(-2,1); q[14] = PIX(-2,2); q[15] = PIX(-1,2);\n#else\nvec4 q[16];\nq[0] = O; q[1] = O; q[2] = O; q[3] = O;\nq[4] = O; q[5] = O; q[6] = O; q[7] = O;\nq[8] = O; q[9] = O; q[10] = O; q[11] = O;\nq[12] = O; q[13] = O; q[14] = O; q[15] = O;\n#endif\n#ifdef USE_OUTER_RING\nvec4 r[16];\nr[0] = PIX(0,3); r[1] = PIX(1,3); r[2] = PIX(3,1); r[3] = PIX(3,0);\nr[4] = PIX(3,-1); r[5] = PIX(1,-3); r[6] = PIX(0,-3); r[7] = PIX(-1,-3);\nr[8] = PIX(-3,-1); r[9] = PIX(-3,0); r[10] = PIX(-3,1); r[11] = PIX(-1,3);\nr[12] = PIX(0,4); r[13] = PIX(4,0); r[14] = PIX(0,-4); r[15] = PIX(-4,0);\n#else\nvec4 r[16];\nr[0] = O; r[1] = O; r[2] = O; r[3] = O;\nr[4] = O; r[5] = O; r[6] = O; r[7] = O;\nr[8] = O; r[9] = O; r[10] = O; r[11] = O;\nr[12] = O; r[13] = O; r[14] = O; r[15] = O;\n#endif\nfloat alphaPlus = encodeLod(lod + LOD_STEP);\nfloat alphaMinus = encodeLod(lod - LOD_STEP);\nfloat alpha = encodeLod(lod);\nmat3 innerScore = mat3(\nP(0), P(1), P(2), P(3),\nP(4), P(5), P(6), P(7),\n0.0f);\nmat4 middleScore = mat4(\nQ(0), Q(1), Q(2), Q(3),\nQ(4), Q(5), Q(6), Q(7),\nQ(8), Q(9), Q(10), Q(11),\nQ(12), Q(13), Q(14), Q(15)\n);\nmat4 outerScore = mat4(\nR(0), R(1), R(2), R(3),\nR(4), R(5), R(6), R(7),\nR(8), R(9), R(10), R(11),\nR(12), R(13), R(14), R(15)\n);\nvec3 maxInnerScore3 = max(innerScore[0], max(innerScore[1], innerScore[2]));\nvec4 maxMiddleScore4 = max(max(middleScore[0], middleScore[1]), max(middleScore[2], middleScore[3]));\nvec4 maxOuterScore4 = max(max(outerScore[0], outerScore[1]), max(outerScore[2], outerScore[3]));\nfloat maxInnerScore = max(maxInnerScore3.x, max(maxInnerScore3.y, maxInnerScore3.z));\nfloat maxMiddleScore = max(max(maxMiddleScore4.x, maxMiddleScore4.y), max(maxMiddleScore4.z, maxMiddleScore4.w));\nfloat maxOuterScore = max(max(maxOuterScore4.x, maxOuterScore4.y), max(maxOuterScore4.z, maxOuterScore4.w));\nfloat maxScore = max(maxInnerScore, max(maxMiddleScore, maxOuterScore));\nfloat finalScore = step(maxScore, score) * score;\ncolor.rb = encodeFloat16(finalScore);\n}"
/***/ }),
/***/ 9108:
/***/ ((module) => {
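// GLSL fragment shader: ORB-style binary descriptors. The 256-pair BRIEF sampling pattern
// (pat31) is rotated by the keypoint orientation quantized to 12-degree steps and scaled by
// 2^lod; each output pixel packs 32 brightness-comparison bits (4 bytes) of the 32-byte
// descriptor.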
  3911. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedCorners;\nuniform int encoderLength;\nuniform sampler2D image;\nuniform int extraSize;\nconst int descriptorSize = 32;\n#define P(a,b,c,d) ivec4((a),(b),(c),(d))\nconst ivec4 pat31[256] = ivec4[256](\nP(8,-3,9,5),\nP(4,2,7,-12),\nP(-11,9,-8,2),\nP(7,-12,12,-13),\nP(2,-13,2,12),\nP(1,-7,1,6),\nP(-2,-10,-2,-4),\nP(-13,-13,-11,-8),\nP(-13,-3,-12,-9),\nP(10,4,11,9),\nP(-13,-8,-8,-9),\nP(-11,7,-9,12),\nP(7,7,12,6),\nP(-4,-5,-3,0),\nP(-13,2,-12,-3),\nP(-9,0,-7,5),\nP(12,-6,12,-1),\nP(-3,6,-2,12),\nP(-6,-13,-4,-8),\nP(11,-13,12,-8),\nP(4,7,5,1),\nP(5,-3,10,-3),\nP(3,-7,6,12),\nP(-8,-7,-6,-2),\nP(-2,11,-1,-10),\nP(-13,12,-8,10),\nP(-7,3,-5,-3),\nP(-4,2,-3,7),\nP(-10,-12,-6,11),\nP(5,-12,6,-7),\nP(5,-6,7,-1),\nP(1,0,4,-5),\nP(9,11,11,-13),\nP(4,7,4,12),\nP(2,-1,4,4),\nP(-4,-12,-2,7),\nP(-8,-5,-7,-10),\nP(4,11,9,12),\nP(0,-8,1,-13),\nP(-13,-2,-8,2),\nP(-3,-2,-2,3),\nP(-6,9,-4,-9),\nP(8,12,10,7),\nP(0,9,1,3),\nP(7,-5,11,-10),\nP(-13,-6,-11,0),\nP(10,7,12,1),\nP(-6,-3,-6,12),\nP(10,-9,12,-4),\nP(-13,8,-8,-12),\nP(-13,0,-8,-4),\nP(3,3,7,8),\nP(5,7,10,-7),\nP(-1,7,1,-12),\nP(3,-10,5,6),\nP(2,-4,3,-10),\nP(-13,0,-13,5),\nP(-13,-7,-12,12),\nP(-13,3,-11,8),\nP(-7,12,-4,7),\nP(6,-10,12,8),\nP(-9,-1,-7,-6),\nP(-2,-5,0,12),\nP(-12,5,-7,5),\nP(3,-10,8,-13),\nP(-7,-7,-4,5),\nP(-3,-2,-1,-7),\nP(2,9,5,-11),\nP(-11,-13,-5,-13),\nP(-1,6,0,-1),\nP(5,-3,5,2),\nP(-4,-13,-4,12),\nP(-9,-6,-9,6),\nP(-12,-10,-8,-4),\nP(10,2,12,-3),\nP(7,12,12,12),\nP(-7,-13,-6,5),\nP(-4,9,-3,4),\nP(7,-1,12,2),\nP(-7,6,-5,1),\nP(-13,11,-12,5),\nP(-3,7,-2,-6),\nP(7,-8,12,-7),\nP(-13,-7,-11,-12),\nP(1,-3,12,12),\nP(2,-6,3,0),\nP(-4,3,-2,-13),\nP(-1,-13,1,9),\nP(7,1,8,-6),\nP(1,-1,3,12),\nP(9,1,12,6),\nP(-1,-9,-1,3),\nP(-13,-13,-10,5),\nP(7,7,10,12),\nP(12,-5,12,9),\nP(6,3,7,11),\nP(5,-13,6,10),\nP(2,-12,2,3),\nP(3,8,4,-6),\nP(2,6,12,-13),\nP(9,-12,10,3),\nP(-8,4,-7,9),\nP(-11,12,-4,-6),\nP(1,12,2,-8),\nP(6,-9,7,-4),\nP(2,3,3,-2),\nP(6,3,11,0),\nP(3,-3,8,-8),\nP(7,8,9,3),\nP(-11,-5,-6,-4),\nP(-10,11,-5,10),\nP(-5,-8,-3,12),\nP(-10,5,-9,0),\nP(8,-1,12,-6),\nP(4,-6,6,-11),\nP(-10,12,-8,7),\nP(4,-2,6,7),\nP(-2,0,-2,12),\nP(-5,-8,-5,2),\nP(7,-6,10,12),\nP(-9,-13,-8,-8),\nP(-5,-13,-5,-2),\nP(8,-8,9,-13),\nP(-9,-11,-9,0),\nP(1,-8,1,-2),\nP(7,-4,9,1),\nP(-2,1,-1,-4),\nP(11,-6,12,-11),\nP(-12,-9,-6,4),\nP(3,7,7,12),\nP(5,5,10,8),\nP(0,-4,2,8),\nP(-9,12,-5,-13),\nP(0,7,2,12),\nP(-1,2,1,7),\nP(5,11,7,-9),\nP(3,5,6,-8),\nP(-13,-4,-8,9),\nP(-5,9,-3,-3),\nP(-4,-7,-3,-12),\nP(6,5,8,0),\nP(-7,6,-6,12),\nP(-13,6,-5,-2),\nP(1,-10,3,10),\nP(4,1,8,-4),\nP(-2,-2,2,-13),\nP(2,-12,12,12),\nP(-2,-13,0,-6),\nP(4,1,9,3),\nP(-6,-10,-3,-5),\nP(-3,-13,-1,1),\nP(7,5,12,-11),\nP(4,-2,5,-7),\nP(-13,9,-9,-5),\nP(7,1,8,6),\nP(7,-8,7,6),\nP(-7,-4,-7,1),\nP(-8,11,-7,-8),\nP(-13,6,-12,-8),\nP(2,4,3,9),\nP(10,-5,12,3),\nP(-6,-5,-6,7),\nP(8,-3,9,-8),\nP(2,-12,2,8),\nP(-11,-2,-10,3),\nP(-12,-13,-7,-9),\nP(-11,0,-10,-5),\nP(5,-3,11,8),\nP(-2,-13,-1,12),\nP(-1,-8,0,9),\nP(-13,-11,-12,-5),\nP(-10,-2,-10,11),\nP(-3,9,-2,-13),\nP(2,-3,3,2),\nP(-9,-13,-4,0),\nP(-4,6,-3,-10),\nP(-4,12,-2,-7),\nP(-6,-11,-4,9),\nP(6,-3,6,11),\nP(-13,11,-5,5),\nP(11,11,12,6),\nP(7,-5,12,-2),\nP(-1,12,0,7),\nP(-4,-8,-3,-2),\nP(-7,1,-6,7),\nP(-13,-12,-8,-13),\nP(-7,-2,-6,-8),\nP(-8,5,-6,-9),\nP(-5,-1,-4,5),\nP(-13,7,-8,10),\nP(1,5,5,-13),\nP(1,0,10,-13),\nP(9,12,10,-1),\nP(5,-8,10,-9),\nP(-1,11,1,-13),\nP(-9,-3,-6,2),\nP(-1,-10,1,12),\nP(-13,1,-8,-10),\nP(8,-11,10,-6),\nP(2,-13,3,-6),\nP(7,-13,12,-9),\nP(-10,-10,-5,-7),\nP(-10,-8,-
8,-13),\nP(4,-6,8,5),\nP(3,12,8,-13),\nP(-4,2,-3,-3),\nP(5,-13,10,-12),\nP(4,-13,5,-1),\nP(-9,9,-4,3),\nP(0,3,3,-9),\nP(-12,1,-6,1),\nP(3,2,4,-8),\nP(-10,-10,-10,9),\nP(8,-13,12,12),\nP(-8,-12,-6,-5),\nP(2,2,3,7),\nP(10,6,11,-8),\nP(6,8,8,-12),\nP(-7,10,-6,5),\nP(-3,-9,-3,9),\nP(-1,-13,-1,5),\nP(-3,-7,-3,4),\nP(-8,-2,-8,3),\nP(4,2,12,12),\nP(2,-5,3,11),\nP(6,-9,11,-13),\nP(3,-1,7,12),\nP(11,-1,12,4),\nP(-3,0,-3,6),\nP(4,-11,4,12),\nP(2,-4,2,1),\nP(-10,-6,-8,1),\nP(-13,7,-11,1),\nP(-13,12,-11,-13),\nP(6,0,11,-13),\nP(0,-1,1,4),\nP(-13,3,-9,-2),\nP(-9,8,-6,-3),\nP(-13,-6,-8,-2),\nP(5,-9,8,10),\nP(2,7,3,-9),\nP(-1,-6,-1,-1),\nP(9,5,11,-2),\nP(11,-3,12,-8),\nP(3,0,3,5),\nP(-1,4,0,10),\nP(3,-6,4,5),\nP(-13,0,-10,5),\nP(5,8,12,11),\nP(8,9,9,-6),\nP(7,-4,8,-12),\nP(-10,4,-10,9),\nP(7,3,12,4),\nP(9,-7,10,-2),\nP(7,0,12,-2),\nP(-1,-6,0,-11)\n);\nvoid getPair(int index, mat2 rot, out vec2 p, out vec2 q)\n{\nivec4 data = pat31[index];\nvec2 op = vec2(data.xy);\nvec2 oq = vec2(data.zw);\np = rot * op;\nq = rot * oq;\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedCorners);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint descriptorCell = address.offset - sizeofEncodedKeypoint(0, extraSize) / 4;\ncolor = pixel;\nif(descriptorCell < 0)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedCorners, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nfloat degreesOrientation = round(360.0f + degrees(keypoint.orientation));\nfloat orientation = radians(degreesOrientation - mod(degreesOrientation, 12.0f));\nfloat kcos = cos(orientation);\nfloat ksin = sin(orientation);\nmat2 rot = mat2(kcos, ksin, -ksin, kcos);\nfloat pot = exp2(keypoint.lod);\nint patternStart = 32 * descriptorCell;\nuint test[4] = uint[4](0u, 0u, 0u, 0u);\nfor(int t = 0; t < 4; t++) {\nuint bits = 0u;\nvec2 p, q;\nvec4 a, b;\nint i = t * 8;\n@unroll\nfor(int j = 0; j < 8; j++) {\ngetPair(patternStart + i + j, rot, p, q);\na = texelFetch(image, ivec2(round(keypoint.position + pot * p)), 0);\nb = texelFetch(image, ivec2(round(keypoint.position + pot * q)), 0);\nbits |= uint(a.g < b.g) << j;\n}\ntest[t] = bits;\n}\ncolor = vec4(test[0], test[1], test[2], test[3]) / 255.0f;\n}"
/***/ }),
/***/ 7137:
/***/ ((module) => {
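// GLSL fragment shader: keypoint orientation via the intensity centroid. Green-channel
// intensities are accumulated over a disk of radius ~15 / 2^lod (at least 2) around the
// keypoint, and the orientation is the angle of the resulting centroid vector.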
  3915. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D image;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#define P(x,y) ivec2((x),(y))\nconst int diskPointCount[16] = int[16](0, 4, 12, 28, 48, 80, 112, 148, 196, 252, 316, 376, 440, 528, 612, 708);\nconst ivec2 diskPoint[708] = ivec2[708](\nP(0,-1),P(-1,0),P(1,0),P(0,1),\nP(-1,-1),P(1,-1),P(-1,1),P(1,1),P(0,-2),P(-2,0),P(2,0),P(0,2),\nP(-1,-2),P(1,-2),P(-2,-1),P(2,-1),P(-2,1),P(2,1),P(-1,2),P(1,2),P(-2,-2),P(2,-2),P(-2,2),P(2,2),P(0,-3),P(-3,0),P(3,0),P(0,3),\nP(-1,-3),P(1,-3),P(-3,-1),P(3,-1),P(-3,1),P(3,1),P(-1,3),P(1,3),P(-2,-3),P(2,-3),P(-3,-2),P(3,-2),P(-3,2),P(3,2),P(-2,3),P(2,3),P(0,-4),P(-4,0),P(4,0),P(0,4),\nP(-1,-4),P(1,-4),P(-4,-1),P(4,-1),P(-4,1),P(4,1),P(-1,4),P(1,4),P(-3,-3),P(3,-3),P(-3,3),P(3,3),P(-2,-4),P(2,-4),P(-4,-2),P(4,-2),P(-4,2),P(4,2),P(-2,4),P(2,4),P(0,-5),P(-3,-4),P(3,-4),P(-4,-3),P(4,-3),P(-5,0),P(5,0),P(-4,3),P(4,3),P(-3,4),P(3,4),P(0,5),\nP(-1,-5),P(1,-5),P(-5,-1),P(5,-1),P(-5,1),P(5,1),P(-1,5),P(1,5),P(-2,-5),P(2,-5),P(-5,-2),P(5,-2),P(-5,2),P(5,2),P(-2,5),P(2,5),P(-4,-4),P(4,-4),P(-4,4),P(4,4),P(-3,-5),P(3,-5),P(-5,-3),P(5,-3),P(-5,3),P(5,3),P(-3,5),P(3,5),P(0,-6),P(-6,0),P(6,0),P(0,6),\nP(-1,-6),P(1,-6),P(-6,-1),P(6,-1),P(-6,1),P(6,1),P(-1,6),P(1,6),P(-2,-6),P(2,-6),P(-6,-2),P(6,-2),P(-6,2),P(6,2),P(-2,6),P(2,6),P(-4,-5),P(4,-5),P(-5,-4),P(5,-4),P(-5,4),P(5,4),P(-4,5),P(4,5),P(-3,-6),P(3,-6),P(-6,-3),P(6,-3),P(-6,3),P(6,3),P(-3,6),P(3,6),P(0,-7),P(-7,0),P(7,0),P(0,7),\nP(-1,-7),P(1,-7),P(-5,-5),P(5,-5),P(-7,-1),P(7,-1),P(-7,1),P(7,1),P(-5,5),P(5,5),P(-1,7),P(1,7),P(-4,-6),P(4,-6),P(-6,-4),P(6,-4),P(-6,4),P(6,4),P(-4,6),P(4,6),P(-2,-7),P(2,-7),P(-7,-2),P(7,-2),P(-7,2),P(7,2),P(-2,7),P(2,7),P(-3,-7),P(3,-7),P(-7,-3),P(7,-3),P(-7,3),P(7,3),P(-3,7),P(3,7),P(-5,-6),P(5,-6),P(-6,-5),P(6,-5),P(-6,5),P(6,5),P(-5,6),P(5,6),P(0,-8),P(-8,0),P(8,0),P(0,8),\nP(-1,-8),P(1,-8),P(-4,-7),P(4,-7),P(-7,-4),P(7,-4),P(-8,-1),P(8,-1),P(-8,1),P(8,1),P(-7,4),P(7,4),P(-4,7),P(4,7),P(-1,8),P(1,8),P(-2,-8),P(2,-8),P(-8,-2),P(8,-2),P(-8,2),P(8,2),P(-2,8),P(2,8),P(-6,-6),P(6,-6),P(-6,6),P(6,6),P(-3,-8),P(3,-8),P(-8,-3),P(8,-3),P(-8,3),P(8,3),P(-3,8),P(3,8),P(-5,-7),P(5,-7),P(-7,-5),P(7,-5),P(-7,5),P(7,5),P(-5,7),P(5,7),P(-4,-8),P(4,-8),P(-8,-4),P(8,-4),P(-8,4),P(8,4),P(-4,8),P(4,8),P(0,-9),P(-9,0),P(9,0),P(0,9),\nP(-1,-9),P(1,-9),P(-9,-1),P(9,-1),P(-9,1),P(9,1),P(-1,9),P(1,9),P(-2,-9),P(2,-9),P(-6,-7),P(6,-7),P(-7,-6),P(7,-6),P(-9,-2),P(9,-2),P(-9,2),P(9,2),P(-7,6),P(7,6),P(-6,7),P(6,7),P(-2,9),P(2,9),P(-5,-8),P(5,-8),P(-8,-5),P(8,-5),P(-8,5),P(8,5),P(-5,8),P(5,8),P(-3,-9),P(3,-9),P(-9,-3),P(9,-3),P(-9,3),P(9,3),P(-3,9),P(3,9),P(-4,-9),P(4,-9),P(-9,-4),P(9,-4),P(-9,4),P(9,4),P(-4,9),P(4,9),P(-7,-7),P(7,-7),P(-7,7),P(7,7),P(0,-10),P(-6,-8),P(6,-8),P(-8,-6),P(8,-6),P(-10,0),P(10,0),P(-8,6),P(8,6),P(-6,8),P(6,8),P(0,10),\nP(-1,-10),P(1,-10),P(-10,-1),P(10,-1),P(-10,1),P(10,1),P(-1,10),P(1,10),P(-2,-10),P(2,-10),P(-10,-2),P(10,-2),P(-10,2),P(10,2),P(-2,10),P(2,10),P(-5,-9),P(5,-9),P(-9,-5),P(9,-5),P(-9,5),P(9,5),P(-5,9),P(5,9),P(-3,-10),P(3,-10),P(-10,-3),P(10,-3),P(-10,3),P(10,3),P(-3,10),P(3,10),P(-7,-8),P(7,-8),P(-8,-7),P(8,-7),P(-8,7),P(8,7),P(-7,8),P(7,8),P(-4,-10),P(4,-10),P(-10,-4),P(10,-4),P(-10,4),P(10,4),P(-4,10),P(4,10),P(-6,-9),P(6,-9),P(-9,-6),P(9,-6),P(-9,6),P(9,6),P(-6,9),P(6,9),P(0,-11),P(-11,0),P(11,0),P(0,11),\nP(-1,-11),P(1,-11),P(-11,-1),P(11,-1),P(-11,1),P(11,1),P(-1,11),P(1,11),P(-2,-11),P(2,-11),P(-5,-10),P(5,-10),P(-10,-5),P(
10,-5),P(-11,-2),P(11,-2),P(-11,2),P(11,2),P(-10,5),P(10,5),P(-5,10),P(5,10),P(-2,11),P(2,11),P(-8,-8),P(8,-8),P(-8,8),P(8,8),P(-3,-11),P(3,-11),P(-7,-9),P(7,-9),P(-9,-7),P(9,-7),P(-11,-3),P(11,-3),P(-11,3),P(11,3),P(-9,7),P(9,7),P(-7,9),P(7,9),P(-3,11),P(3,11),P(-6,-10),P(6,-10),P(-10,-6),P(10,-6),P(-10,6),P(10,6),P(-6,10),P(6,10),P(-4,-11),P(4,-11),P(-11,-4),P(11,-4),P(-11,4),P(11,4),P(-4,11),P(4,11),P(0,-12),P(-12,0),P(12,0),P(0,12),\nP(-1,-12),P(1,-12),P(-8,-9),P(8,-9),P(-9,-8),P(9,-8),P(-12,-1),P(12,-1),P(-12,1),P(12,1),P(-9,8),P(9,8),P(-8,9),P(8,9),P(-1,12),P(1,12),P(-5,-11),P(5,-11),P(-11,-5),P(11,-5),P(-11,5),P(11,5),P(-5,11),P(5,11),P(-2,-12),P(2,-12),P(-12,-2),P(12,-2),P(-12,2),P(12,2),P(-2,12),P(2,12),P(-7,-10),P(7,-10),P(-10,-7),P(10,-7),P(-10,7),P(10,7),P(-7,10),P(7,10),P(-3,-12),P(3,-12),P(-12,-3),P(12,-3),P(-12,3),P(12,3),P(-3,12),P(3,12),P(-6,-11),P(6,-11),P(-11,-6),P(11,-6),P(-11,6),P(11,6),P(-6,11),P(6,11),P(-4,-12),P(4,-12),P(-12,-4),P(12,-4),P(-12,4),P(12,4),P(-4,12),P(4,12),P(-9,-9),P(9,-9),P(-9,9),P(9,9),P(-8,-10),P(8,-10),P(-10,-8),P(10,-8),P(-10,8),P(10,8),P(-8,10),P(8,10),P(0,-13),P(-5,-12),P(5,-12),P(-12,-5),P(12,-5),P(-13,0),P(13,0),P(-12,5),P(12,5),P(-5,12),P(5,12),P(0,13),\nP(-1,-13),P(1,-13),P(-7,-11),P(7,-11),P(-11,-7),P(11,-7),P(-13,-1),P(13,-1),P(-13,1),P(13,1),P(-11,7),P(11,7),P(-7,11),P(7,11),P(-1,13),P(1,13),P(-2,-13),P(2,-13),P(-13,-2),P(13,-2),P(-13,2),P(13,2),P(-2,13),P(2,13),P(-3,-13),P(3,-13),P(-13,-3),P(13,-3),P(-13,3),P(13,3),P(-3,13),P(3,13),P(-6,-12),P(6,-12),P(-12,-6),P(12,-6),P(-12,6),P(12,6),P(-6,12),P(6,12),P(-9,-10),P(9,-10),P(-10,-9),P(10,-9),P(-10,9),P(10,9),P(-9,10),P(9,10),P(-4,-13),P(4,-13),P(-8,-11),P(8,-11),P(-11,-8),P(11,-8),P(-13,-4),P(13,-4),P(-13,4),P(13,4),P(-11,8),P(11,8),P(-8,11),P(8,11),P(-4,13),P(4,13),P(-7,-12),P(7,-12),P(-12,-7),P(12,-7),P(-12,7),P(12,7),P(-7,12),P(7,12),P(-5,-13),P(5,-13),P(-13,-5),P(13,-5),P(-13,5),P(13,5),P(-5,13),P(5,13),P(0,-14),P(-14,0),P(14,0),P(0,14),\nP(-1,-14),P(1,-14),P(-14,-1),P(14,-1),P(-14,1),P(14,1),P(-1,14),P(1,14),P(-2,-14),P(2,-14),P(-10,-10),P(10,-10),P(-14,-2),P(14,-2),P(-14,2),P(14,2),P(-10,10),P(10,10),P(-2,14),P(2,14),P(-9,-11),P(9,-11),P(-11,-9),P(11,-9),P(-11,9),P(11,9),P(-9,11),P(9,11),P(-3,-14),P(3,-14),P(-6,-13),P(6,-13),P(-13,-6),P(13,-6),P(-14,-3),P(14,-3),P(-14,3),P(14,3),P(-13,6),P(13,6),P(-6,13),P(6,13),P(-3,14),P(3,14),P(-8,-12),P(8,-12),P(-12,-8),P(12,-8),P(-12,8),P(12,8),P(-8,12),P(8,12),P(-4,-14),P(4,-14),P(-14,-4),P(14,-4),P(-14,4),P(14,4),P(-4,14),P(4,14),P(-7,-13),P(7,-13),P(-13,-7),P(13,-7),P(-13,7),P(13,7),P(-7,13),P(7,13),P(-5,-14),P(5,-14),P(-10,-11),P(10,-11),P(-11,-10),P(11,-10),P(-14,-5),P(14,-5),P(-14,5),P(14,5),P(-11,10),P(11,10),P(-10,11),P(10,11),P(-5,14),P(5,14),P(0,-15),P(-9,-12),P(9,-12),P(-12,-9),P(12,-9),P(-15,0),P(15,0),P(-12,9),P(12,9),P(-9,12),P(9,12),P(0,15)\n);\nconst int DEFAULT_PATCH_RADIUS = 15;\nconst int MIN_PATCH_RADIUS = 2;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nvec2 m = vec2(0.0f);\nfloat pot = exp2(keypoint.lod);\nvec2 imageSize = vec2(textureSize(image, 0));\nint scaledRadius = int(ceil(float(DEFAULT_PATCH_RADIUS) / pot));\nint radius = max(scaledRadius, MIN_PATCH_RADIUS);\nint count = 
diskPointCount[radius];\nfor(int j = 0; j < count; j++) {\nvec2 offset = vec2(diskPoint[j]);\nvec2 position = keypoint.position + round(pot * offset);\nvec4 patchPixel = texture(image, (position + vec2(0.5f)) / imageSize);\nm += offset * patchPixel.g;\n}\nfloat angle = fastAtan2(m.y, m.x);\nfloat encodedOrientation = encodeKeypointOrientation(angle);\ncolor = vec4(0.0f, encodedOrientation, 0.0f, 0.0f);\n}"
/***/ }),
/***/ 9739:
/***/ ((module) => {
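// GLSL fragment shader: refines each keypoint's scale. The corner strength (Laplacian for
// METHOD 0, a FAST-like score for METHOD 1) is sampled at lod - lodStep, lod and
// lod + lodStep, a parabola is fitted through the three values, and the interpolated lod
// is written back into the encoded keypoint.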
  3919. module.exports = "@include \"keypoints.glsl\"\n@include \"filters.glsl\"\n#if !defined(METHOD)\n#error Undefined METHOD\n#endif\nuniform sampler2D pyramid;\nuniform float lodStep;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if METHOD == 1\nuniform int threshold;\n#endif\nconst float eps = 1e-6;\nfloat cornerStrength(vec2 position, float lod)\n{\n#if METHOD == 0\nreturn laplacian(pyramid, position, lod);\n#elif METHOD == 1\nfloat pot = exp2(lod);\nfloat t = float(clamp(threshold, 0, 255)) / 255.0f;\n#define P(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\nmat4 mp = mat4(\nP(0,3),P(3,0),P(0,-3),P(-3,0),\nP(1,3),P(2,2),P(3,1),P(3,-1),\nP(2,-2),P(1,-3),P(-1,-3),P(-2,-2),\nP(-3,-1),P(-3,1),P(-2,2),P(-1,3)\n);\nfloat c = P(0,0);\nfloat ct = c + t, c_t = c - t;\nmat4 mct = mp - mat4(ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct,ct);\nmat4 mc_t = mat4(c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t,c_t) - mp;\nconst vec4 zeros = vec4(0.0f), ones = vec4(1.0f);\nvec4 bs = max(mct[0], zeros), ds = max(mc_t[0], zeros);\nbs += max(mct[1], zeros); ds += max(mc_t[1], zeros);\nbs += max(mct[2], zeros); ds += max(mc_t[2], zeros);\nbs += max(mct[3], zeros); ds += max(mc_t[3], zeros);\nreturn max(dot(bs, ones), dot(ds, ones)) / 16.0f;\n#else\n#error Invalid method\n#endif\n}\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\ncolor = pixel;\nif(address.offset != 1)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nif(isBadKeypoint(keypoint))\nreturn;\nvec3 strength = vec3(\ncornerStrength(keypoint.position, max(0.0f, keypoint.lod - lodStep)),\ncornerStrength(keypoint.position, keypoint.lod),\ncornerStrength(keypoint.position, keypoint.lod + lodStep)\n);\nvec3 p = mat3(\n2, -3, 1,\n-4, 4, 0,\n2, -1, 0\n) * strength;\nfloat maxStrength = max(strength.x, max(strength.y, strength.z));\nvec3 diffStrength = abs(strength - vec3(maxStrength));\nvec3 strengthIndicators = vec3(lessThan(diffStrength, vec3(eps)));\nfloat maxPoint = min(1.0f, dot(vec3(0.0f, 0.5f, 1.0f), strengthIndicators));\nbool hasMax = p.x < -eps;\nfloat pmax = hasMax ? -0.5f * p.y / p.x : maxPoint;\nfloat alpha = abs(pmax - 0.5f) <= 0.5f ? pmax : maxPoint;\nfloat lodOffset = mix(-lodStep, lodStep, alpha);\nfloat lod = keypoint.lod + lodOffset;\ncolor.r = encodeLod(lod);\n}"
/***/ }),
/***/ 8231:
/***/ ((module) => {
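// GLSL fragment shader: parallel maximum reduction of encoded float16 corner scores. On each
// iteration a pixel is compared with three partners 2^iterationNumber texels away inside its
// cluster and the largest score is propagated.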
  3923. module.exports = "@include \"float16.glsl\"\nuniform sampler2D corners;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 bounds = outputSize();\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = threadPixel(corners);\nvec4 p1 = texelFetch(corners, next1 % bounds, 0);\nvec4 p2 = texelFetch(corners, next2 % bounds, 0);\nvec4 p3 = texelFetch(corners, next3 % bounds, 0);\nfloat s0 = decodeFloat16(p0.rb);\nfloat s1 = decodeFloat16(p1.rb);\nfloat s2 = decodeFloat16(p2.rb);\nfloat s3 = decodeFloat16(p3.rb);\nbool b0 = s0 >= s1 && s0 >= s2 && s0 >= s3;\nbool b1 = s1 >= s0 && s1 >= s2 && s1 >= s3;\nbool b2 = s2 >= s0 && s2 >= s1 && s2 >= s3;\ncolor = vec4(0.0f);\ncolor.rb = b0 ? p0.rb : (\nb1 ? p1.rb : (\nb2 ? p2.rb : p3.rb\n)\n);\n}"
/***/ }),
/***/ 2518:
/***/ ((module) => {
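// GLSL fragment shader: shuffles encoded keypoints. Each keypoint copies the data of the
// entry selected by a permutation stored in a std140 uniform block, applied independently
// within blocks of PERMUTATION_MAXLEN indices.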
  3927. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#if PERMUTATION_MAXLEN % 4 > 0 || PERMUTATION_MAXLEN * 4 > 16384\n#error Invalid PERMUTATION_MAXLEN\n#endif\nlayout(std140) uniform Permutation\n{\nivec4 permutation[PERMUTATION_MAXLEN / 4];\n};\nint permutationElement(int index)\n{\nint base = index - (index % PERMUTATION_MAXLEN);\nint offset = index - base;\nivec4 tuple = permutation[offset / 4];\nint newOffset = tuple[offset & 3];\nreturn base + newOffset;\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint otherIndex = permutationElement(myIndex);\nKeypointAddress otherAddress = KeypointAddress(otherIndex * pixelsPerKeypoint, myAddress.offset);\nKeypoint myKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nKeypoint otherKeypoint = decodeKeypoint(encodedKeypoints, encoderLength, otherAddress);\ncolor = readKeypointData(encodedKeypoints, encoderLength, otherAddress);\n}"
/***/ }),
/***/ 8096:
/***/ ((module) => {
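// GLSL fragment shader: sorts keypoints by score. STAGE 1 builds (index, score) permutation
// elements, STAGE 2 performs one parallel merge-sort pass by selecting the k-th best element
// of two sorted half-blocks, and STAGE 3 gathers the top maxKeypoints keypoints according to
// the final permutation.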
  3931. module.exports = "@include \"keypoints.glsl\"\n#if !defined(STAGE)\n#error Undefined STAGE\n#elif STAGE == 1\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#elif STAGE == 2\nuniform sampler2D permutation;\nuniform int blockSize;\nuniform int dblLog2BlockSize;\n#elif STAGE == 3\nuniform sampler2D permutation;\nuniform int maxKeypoints;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\n#else\n#error Invalid STAGE\n#endif\nstruct PermutationElement\n{\nint keypointIndex;\nfloat score;\nbool valid;\n};\nvec4 encodePermutationElement(PermutationElement element)\n{\nconst vec2 ONES = vec2(1.0f);\nvec2 encodedScore = element.valid ? encodeFloat16(element.score) : ONES;\nvec2 encodedIndex = vec2(element.keypointIndex & 255, (element.keypointIndex >> 8) & 255) / 255.0f;\nreturn vec4(encodedIndex, encodedScore);\n}\nPermutationElement decodePermutationElement(vec4 pixel)\n{\nconst vec2 ONES = vec2(1.0f);\nPermutationElement element;\nelement.keypointIndex = int(pixel.r * 255.0f) | (int(pixel.g * 255.0f) << 8);\nelement.valid = !all(equal(pixel.ba, ONES));\nelement.score = element.valid ? decodeFloat16(pixel.ba) : -1.0f;\nreturn element;\n}\nPermutationElement readPermutationElement(sampler2D permutation, int elementIndex, int stride, int height)\n{\nconst vec4 INVALID_PIXEL = vec4(1.0f);\nivec2 pos = ivec2(elementIndex % stride, elementIndex / stride);\nvec4 pixel = pos.y < height ? pixelAt(permutation, pos) : INVALID_PIXEL;\nreturn decodePermutationElement(pixel);\n}\n#if STAGE == 2\nPermutationElement selectKth(sampler2D permutation, int k, int la, int ra, int lb, int rb)\n{\nfloat scoreA, scoreB;\nint ha, hb, ma, mb;\nbool discard1stHalf, altb;\nbool locked = false;\nint tmp, result = 0;\nint stride = outputSize().x;\nint height = outputSize().y;\nfor(int i = 0; i < dblLog2BlockSize; i++) {\ntmp = (lb > rb && !locked) ? (la+k) : result;\nresult = (la > ra && !locked) ? 
(lb+k) : tmp;\nlocked = locked || (la > ra) || (lb > rb);\nha = (ra - la + 1) / 2;\nhb = (rb - lb + 1) / 2;\nma = la + ha;\nmb = lb + hb;\nscoreA = readPermutationElement(permutation, ma, stride, height).score;\nscoreB = readPermutationElement(permutation, mb, stride, height).score;\ndiscard1stHalf = (k > ha + hb);\naltb = (-scoreA < -scoreB);\nk -= int(discard1stHalf && altb) * (ha + 1);\nk -= int(discard1stHalf && !altb) * (hb + 1);\nla += int(discard1stHalf && altb) * (ma + 1 - la);\nlb += int(discard1stHalf && !altb) * (mb + 1 - lb);\nra += int(!discard1stHalf && !altb) * (ma - 1 - ra);\nrb += int(!discard1stHalf && altb) * (mb - 1 - rb);\n}\nreturn readPermutationElement(permutation, result, stride, height);\n}\n#endif\nvoid main()\n{\n#if STAGE == 1\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint keypointIndex = thread.y * stride + thread.x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\nPermutationElement element;\nelement.keypointIndex = keypointIndex;\nelement.score = keypoint.score;\nelement.valid = !isBadKeypoint(keypoint);\ncolor = encodePermutationElement(element);\n#elif STAGE == 2\nivec2 thread = threadLocation();\nint stride = outputSize().x;\nint elementIndex = thread.y * stride + thread.x;\nint blockIndex = elementIndex / blockSize;\nint blockOffset = elementIndex % blockSize;\nint la = blockIndex * blockSize;\nint lb = la + blockSize / 2;\nint ra = lb - 1;\nint rb = (blockIndex + 1) * blockSize - 1;\nint k = blockOffset;\nPermutationElement element = selectKth(permutation, k, la, ra, lb, rb);\ncolor = encodePermutationElement(element);\n#elif STAGE == 3\nivec2 thread = threadLocation();\nint newEncoderLength = outputSize().x;\nKeypointAddress myAddress = findKeypointAddress(thread, newEncoderLength, descriptorSize, extraSize);\nint myKeypointIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nivec2 psize = textureSize(permutation, 0);\nPermutationElement element = readPermutationElement(permutation, myKeypointIndex, psize.x, psize.y);\nint oldEncoderLength = textureSize(encodedKeypoints, 0).x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(element.keypointIndex * pixelsPerKeypoint, myAddress.offset);\nvec4 keypointData = readKeypointData(encodedKeypoints, oldEncoderLength, address);\ncolor = myKeypointIndex < maxKeypoints && element.valid ? keypointData : encodeNullKeypoint();\n#endif\n}"
/***/ }),
/***/ 5795:
/***/ ((module) => {
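// GLSL fragment shader: subpixel refinement of keypoint positions. A smoothed patch around
// each keypoint is sampled from the pyramid, Sobel derivatives and a corner-response map are
// computed, and the position offset is estimated by the selected METHOD (1D quadratic fit,
// 2D Taylor expansion, or bilinear/bicubic patch upsampling).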
  3935. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\n#if !defined(METHOD)\n#error Must define METHOD\n#endif\nuniform sampler2D pyramid;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nuniform int maxIterations;\nuniform float epsilon;\nconst int PATCH_RADIUS = 1;\nconst int PATCH_SIZE = 2 * PATCH_RADIUS + 1;\nconst int PATCH_SIZE_SQUARED = PATCH_SIZE * PATCH_SIZE;\nconst int LARGE_PATCH_RADIUS = PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE = 2 * LARGE_PATCH_RADIUS + 1;\nconst int LARGE_PATCH_SIZE_SQUARED = LARGE_PATCH_SIZE * LARGE_PATCH_SIZE;\nconst int LARGER_PATCH_RADIUS = LARGE_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE = 2 * LARGER_PATCH_RADIUS + 1;\nconst int LARGER_PATCH_SIZE_SQUARED = LARGER_PATCH_SIZE * LARGER_PATCH_SIZE;\nconst float EPS = 1e-5;\nfloat smoothPixelBuffer[LARGER_PATCH_SIZE_SQUARED];\nvec2 derivativesBuffer[LARGE_PATCH_SIZE_SQUARED];\nfloat responseBuffer[PATCH_SIZE_SQUARED];\n#define patchPixelAt(u,v) smoothPixelBuffer[((v) + LARGER_PATCH_RADIUS) * LARGER_PATCH_SIZE + ((u) + LARGER_PATCH_RADIUS)]\n#define derivativesAt(u,v) derivativesBuffer[((v) + LARGE_PATCH_RADIUS) * LARGE_PATCH_SIZE + ((u) + LARGE_PATCH_RADIUS)]\n#define responseAt(u,v) responseBuffer[((v) + PATCH_RADIUS) * PATCH_SIZE + ((u) + PATCH_RADIUS)]\nvoid readPixels(vec2 center, float lod)\n{\nivec2 pyrBaseSize = textureSize(pyramid, 0);\nfloat pot = exp2(lod);\nint u, v;\nfor(int j = 0; j < LARGER_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGER_PATCH_SIZE; i++) {\nu = i - LARGER_PATCH_RADIUS;\nv = j - LARGER_PATCH_RADIUS;\npatchPixelAt(u,v) = pyrSubpixelAtExOffset(pyramid, center, lod, pot, ivec2(u,v), pyrBaseSize).g;\n}\n}\n}\nvoid computeDerivatives()\n{\nconst mat3 dx = mat3(\n-1, 0, 1,\n-2, 0, 2,\n-1, 0, 1\n);\nconst mat3 dy = mat3(\n1, 2, 1,\n0, 0, 0,\n-1,-2,-1\n);\nint u, v;\nmat3 pix, convX, convY;\nconst vec3 ones = vec3(1.0f);\nfor(int j = 0; j < LARGE_PATCH_SIZE; j++) {\nfor(int i = 0; i < LARGE_PATCH_SIZE; i++) {\nu = i - LARGE_PATCH_RADIUS;\nv = j - LARGE_PATCH_RADIUS;\npix = mat3(\npatchPixelAt(u+1,v+1), patchPixelAt(u+0,v+1), patchPixelAt(u-1,v+1),\npatchPixelAt(u+1,v+0), patchPixelAt(u+0,v+0), patchPixelAt(u-1,v+0),\npatchPixelAt(u+1,v-1), patchPixelAt(u+0,v-1), patchPixelAt(u-1,v-1)\n);\nconvX = matrixCompMult(dx, pix);\nconvY = matrixCompMult(dy, pix);\nderivativesAt(u,v) = vec2(\ndot(ones, vec3(\ndot(convX[0], ones),\ndot(convX[1], ones),\ndot(convX[2], ones)\n)),\ndot(ones, vec3(\ndot(convY[0], ones),\ndot(convY[1], ones),\ndot(convY[2], ones)\n))\n);\n}\n}\n}\nvec2 computeResponseMap()\n{\nfloat patchArea = float(PATCH_SIZE * PATCH_SIZE);\nvec3 h; vec2 d, c = vec2(0.0f);\nconst vec3 ones = vec3(1.0f);\nfloat response, sum = 0.0f;\nint u, v;\n#define H(r,s) d = derivativesAt((r),(s)); h += vec3(d.x * d.x, d.x * d.y, d.y * d.y)\nfor(int j = 0; j < PATCH_SIZE; j++) {\nfor(int i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nh = vec3(0.0f);\nH(u-1,v-1); H(u+0,v-1); H(u+1,v-1);\nH(u-1,v+0); H(u+0,v+0); H(u+1,v+0);\nH(u-1,v+1); H(u+0,v+1); H(u+1,v+1);\nresponse = 0.5f * (h.x + h.z - sqrt((h.x - h.z) * (h.x - h.z) + 4.0f * h.y * h.y));\nresponse /= patchArea;\nresponseAt(u,v) = response;\nc += vec2(u,v) * response;\nsum += response;\n}\n}\nreturn abs(sum) > EPS ? 
c / sum : vec2(0.0f);\n}\n#if METHOD == 0\nvec2 quadratic1d()\n{\nfloat a = 0.5f * (responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0));\nfloat b = 0.5f * (responseAt(1,0) - responseAt(-1,0));\nfloat c = responseAt(0,0);\nfloat d = 0.5f * (responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1));\nfloat e = 0.5f * (responseAt(0,1) - responseAt(0,-1));\nfloat f = responseAt(0,0);\nbool hasMax = a < -EPS && d < -EPS;\nreturn hasMax ? -0.5f * vec2(b / a, e / d) : vec2(0.0f);\n}\n#endif\n#if METHOD == 1\nvec2 taylor2d()\n{\nfloat dx = (-responseAt(-1,0) + responseAt(1,0)) * 0.5f;\nfloat dy = (-responseAt(0,-1) + responseAt(0,1)) * 0.5f;\nfloat dxx = responseAt(-1,0) - 2.0f * responseAt(0,0) + responseAt(1,0);\nfloat dyy = responseAt(0,-1) - 2.0f * responseAt(0,0) + responseAt(0,1);\nfloat dxy = (responseAt(-1,-1) + responseAt(1,1) - responseAt(1,-1) - responseAt(-1,1)) * 0.25f;\nfloat det = dxx * dyy - dxy * dxy;\nmat2 inv = mat2(dyy, -dxy, -dxy, dxx);\nbool hasMax = det > EPS && dxx < 0.0f;\nreturn hasMax ? inv * vec2(dx, dy) / (-det) : vec2(0.0f);\n}\n#endif\n#if METHOD == 2\nvoid bilinearUpsample(ivec2 patchOffset, vec4 pixelsOfPatch)\n{\nint u, v, i, j;\nvec2 frc, ifrc; vec4 sub;\nconst vec4 ones = vec4(1.0f);\nfloat s = 1.0f / float(PATCH_SIZE - 1);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nfrc = vec2(i, j) * s;\nifrc = vec2(1.0f) - frc;\nsub = vec4(\nifrc.x * ifrc.y,\nfrc.x * ifrc.y,\nifrc.x * frc.y,\nfrc.x * frc.y\n);\npatchPixelAt(u+xoff,v+yoff) = dot(sub*pixelsOfPatch, ones);\n}\n}\n}\n#endif\n#if METHOD == 3\nvoid bicubicUpsample(ivec2 patchOffset, vec4 pixelsOfPatch, vec4 dx, vec4 dy, vec4 dxy)\n{\nfloat x, y, s = 1.0f / float(PATCH_SIZE - 1);\nint u, v, i, j;\nfloat f00 = pixelsOfPatch.x;\nfloat f10 = pixelsOfPatch.y;\nfloat f01 = pixelsOfPatch.z;\nfloat f11 = pixelsOfPatch.w;\nfloat fx00 = dx.x;\nfloat fx10 = dx.y;\nfloat fx01 = dx.z;\nfloat fx11 = dx.w;\nfloat fy00 = dy.x;\nfloat fy10 = dy.y;\nfloat fy01 = dy.z;\nfloat fy11 = dy.w;\nfloat fxy00 = dxy.x;\nfloat fxy10 = dxy.y;\nfloat fxy01 = dxy.z;\nfloat fxy11 = dxy.w;\nmat4 bicubic = mat4(\n1, 0, -3, 2,\n0, 0, 3, -2,\n0, 1, -2, 1,\n0, 0, -1, 1\n) * mat4(\nf00, f10, fx00, fx10,\nf01, f11, fx01, fx11,\nfy00, fy10, fxy00, fxy10,\nfy01, fy11, fxy01, fxy11\n) * mat4(\n1, 0, 0, 0,\n0, 0, 1, 0,\n-3, 3, -2, -1,\n2, -2, 1, 1\n);\nint xoff = 2 * patchOffset.x;\nint yoff = 2 * patchOffset.y;\nfor(j = 0; j < PATCH_SIZE; j++) {\nfor(i = 0; i < PATCH_SIZE; i++) {\nu = i - PATCH_RADIUS;\nv = j - PATCH_RADIUS;\nx = float(i) * s;\ny = float(j) * s;\npatchPixelAt(u+xoff,v+yoff) = dot(\nvec4(1, x, x*x, x*x*x),\nbicubic * vec4(1, y, y*y, y*y*y)\n);\n}\n}\n}\n#endif\n#if METHOD == 2 || METHOD == 3\nvoid upsamplePatch(int left, int top, int right, int bottom)\n{\nint x, y, k;\nvec4 ptch[9];\nvec2 d00, d10, d01, d11;\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\nptch[k] = vec4(\npatchPixelAt(left+x, top+y),\npatchPixelAt(right+x, top+y),\npatchPixelAt(left+x, bottom+y),\npatchPixelAt(right+x, bottom+y)\n);\n}\nfor(k = 0; k < 9; k++) {\nx = -1 + (k % 3);\ny = -1 + (k / 3);\n#if METHOD == 2\nbilinearUpsample(ivec2(x, y), ptch[k]);\n#elif METHOD == 3\nd00 = derivativesAt(left+x, top+y);\nd10 = derivativesAt(right+x, top+y);\nd01 = derivativesAt(left+x, bottom+y);\nd11 = derivativesAt(right+x, bottom+y);\nbicubicUpsample(ivec2(x, y), ptch[k],\nvec4(d00.x, d10.x, d01.x, d11.x),\nvec4(d00.y, 
d10.y, d01.y, d11.y),\n0.25f * vec4(\n(patchPixelAt(left+x + 1,top+y + 1) + patchPixelAt(left+x - 1, top+y - 1)) - (patchPixelAt(left+x + 1, top+y - 1) + patchPixelAt(left+x - 1, top+y + 1)),\n(patchPixelAt(right+x + 1,top+y + 1) + patchPixelAt(right+x - 1, top+y - 1)) - (patchPixelAt(right+x + 1, top+y - 1) + patchPixelAt(right+x - 1, top+y + 1)),\n(patchPixelAt(left+x + 1,bottom+y + 1) + patchPixelAt(left+x - 1, bottom+y - 1)) - (patchPixelAt(left+x + 1, bottom+y - 1) + patchPixelAt(left+x - 1, bottom+y + 1)),\n(patchPixelAt(right+x + 1,bottom+y + 1) + patchPixelAt(right+x - 1, bottom+y - 1)) - (patchPixelAt(right+x + 1, bottom+y - 1) + patchPixelAt(right+x - 1, bottom+y + 1))\n)\n);\n#endif\n}\n}\nvec2 upsampleResponseMap(int left, int top, int right, int bottom)\n{\nupsamplePatch(left, top, right, bottom);\ncomputeDerivatives();\nreturn computeResponseMap();\n}\nvec2 iterativeUpsample(vec2 initialGuess)\n{\nint refine = 1;\nfloat scale = 0.5f;\nfloat eps2 = epsilon * epsilon;\nvec2 guess = initialGuess, localGuess = initialGuess;\nfor(int k = 0; k < maxIterations; k++) {\nivec4 quad = ivec4(floor(localGuess.x), floor(localGuess.y), ceil(localGuess.x), ceil(localGuess.y));\nvec2 response = (refine != 0) ? upsampleResponseMap(quad.x, quad.y, quad.z, quad.w) : vec2(0.0f);\nlocalGuess = response * scale;\nguess += localGuess;\nscale *= 0.5f;\nrefine *= int(dot(localGuess, localGuess) >= eps2);\n}\nreturn guess;\n}\n#endif\nvoid main()\n{\nivec2 thread = threadLocation();\nint keypointIndex = thread.x + thread.y * outputSize().x;\nint pixelsPerKeypoint = sizeofEncodedKeypoint(descriptorSize, extraSize) / 4;\nKeypointAddress address = KeypointAddress(keypointIndex * pixelsPerKeypoint, 0);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, address);\ncolor = encodeNullPairOfFloat16();\nif(isNullKeypoint(keypoint))\nreturn;\ncolor = encodeDiscardedPairOfFloat16();\nif(isBadKeypoint(keypoint))\nreturn;\nreadPixels(keypoint.position, keypoint.lod);\ncomputeDerivatives();\nvec2 offset = computeResponseMap();\n#if METHOD == 0\noffset = quadratic1d();\n#elif METHOD == 1\noffset = taylor2d();\n#elif METHOD == 2 || METHOD == 3\noffset = iterativeUpsample(offset);\n#else\n#error Unknown METHOD\n#endif\nfloat pot = exp2(keypoint.lod);\ncolor = encodePairOfFloat16(offset * pot);\n}"
  3936. /***/ }),
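// Editor's note: the shader above refines a keypoint's position to subpixel precision by
// building a small corner-response map around it and locating its peak. METHOD 0 fits
// independent 1D parabolas along x and y, METHOD 1 takes a 2D Taylor/Newton step, and
// METHODs 2-3 iteratively upsample the patch (bilinearly or bicubically) before
// re-estimating the peak. A sketch of the 1D parabola fit, assuming a hypothetical
// R(u, v) accessor for the response map:
//
//   function parabolicPeak(rm, r0, rp) {
//     const a = 0.5 * (rm - 2 * r0 + rp);      // curvature of the fitted parabola
//     const b = 0.5 * (rp - rm);               // slope at the center sample
//     return a < -1e-5 ? -0.5 * (b / a) : 0;   // vertex offset, only if it is a maximum
//   }
//   const dx = parabolicPeak(R(-1, 0), R(0, 0), R(+1, 0));  // horizontal refinement
//   const dy = parabolicPeak(R(0, -1), R(0, 0), R(0, +1));  // vertical refinement
//
// The resulting (dx, dy) is a fractional offset at the keypoint's pyramid level; the
// shader scales it by exp2(lod) before encoding it as a pair of float16s.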
  3937. /***/ 3169:
  3938. /***/ ((module) => {
  3939. module.exports = "@include \"keypoints.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D encodedFlow;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nint len = textureSize(encodedFlow, 0).x;\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\ncolor = pixel;\nif(isBadKeypoint(keypoint))\nreturn;\nivec2 location = ivec2(myIndex % len, myIndex / len);\nvec4 encodedFlow = myIndex < len * len ? pixelAt(encodedFlow, location) : encodeDiscardedKeypoint();\nbool discardFlow = isDiscardedPairOfFloat16(encodedFlow);\nvec2 flow = !discardFlow ? decodePairOfFloat16(encodedFlow) : vec2(0.0f);\nvec4 newPosition = encodeKeypointPosition(keypoint.position + flow);\nvec4 newPixel = myAddress.offset == 0 ? newPosition : pixel;\ncolor = !discardFlow ? newPixel : encodeDiscardedKeypoint();\n}"
  3940. /***/ }),
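// Editor's note: this shader transfers optical-flow results back onto the keypoint
// stream: each keypoint looks up its flow vector (stored one pixel per keypoint, in
// raster order) and its position pixel becomes position + flow; a discarded flow
// discards the keypoint. A CPU analogue with illustrative names:
//
//   const tracked = keypoints
//     .map((kp, i) => {
//       const flow = flows[i];                  // same ordering as the keypoints
//       if (flow == null) return null;          // "discarded" flow -> drop the keypoint
//       return { ...kp, x: kp.x + flow.x, y: kp.y + flow.y };
//     })
//     .filter(kp => kp !== null);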
  3941. /***/ 1337:
  3942. /***/ ((module) => {
  3943. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedOrientations;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint orientationEncoderLength = textureSize(encodedOrientations, 0).x;\nivec2 location = ivec2(myIndex % orientationEncoderLength, myIndex / orientationEncoderLength);\nvec4 targetPixel = pixelAt(encodedOrientations, location);\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nbool isValid = !isBadKeypoint(keypoint);\nfloat encodedOrientation = targetPixel.g;\ncolor = isValid && myAddress.offset == 1 ? vec4(pixel.r, encodedOrientation, pixel.ba) : pixel;\n}"
  3944. /***/ }),
  3945. /***/ 6187:
  3946. /***/ ((module) => {
  3947. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedData;\nuniform int strideOfEncodedData;\nuniform sampler2D encodedKeypoints;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\nvec4 readEncodedData(sampler2D encodedData, int strideOfEncodedData, int elementId, int pixelsPerElement, int pixelOffset)\n{\nint rasterIndex = elementId * pixelsPerElement + pixelOffset;\nivec2 pos = ivec2(rasterIndex % strideOfEncodedData, rasterIndex / strideOfEncodedData);\nreturn texelFetch(encodedData, pos, 0);\n}\nvoid main()\n{\nivec2 thread = threadLocation();\nKeypointAddress myAddress = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint myIndex = findKeypointIndex(myAddress, descriptorSize, extraSize);\nint headerSize = sizeofEncodedKeypointHeader();\nint extraCell = myAddress.offset - headerSize / 4;\nint numberOfExtraCells = extraSize / 4;\ncolor = threadPixel(encodedKeypoints);\nif(extraCell < 0 || extraCell >= numberOfExtraCells)\nreturn;\nKeypoint keypoint = decodeKeypoint(encodedKeypoints, encoderLength, myAddress);\nif(isBadKeypoint(keypoint))\nreturn;\ncolor = readEncodedData(encodedData, strideOfEncodedData, myIndex, numberOfExtraCells, extraCell);\n}"
  3948. /***/ }),
  3949. /***/ 477:
  3950. /***/ ((module) => {
  3951. module.exports = "@include \"keypoints.glsl\"\nuniform sampler2D encodedKeypoints;\nuniform int startIndex;\nuniform int endIndex;\nuniform int descriptorSize;\nuniform int extraSize;\nuniform int encoderLength;\n#ifndef BUFFER_SIZE\n#error Undefined BUFFER_SIZE\n#endif\nlayout(std140) uniform KeypointBuffer\n{\nvec4 keypointBuffer[BUFFER_SIZE];\n};\nvoid main()\n{\nvec4 pixel = threadPixel(encodedKeypoints);\nivec2 thread = threadLocation();\nKeypointAddress address = findKeypointAddress(thread, encoderLength, descriptorSize, extraSize);\nint index = findKeypointIndex(address, descriptorSize, extraSize);\ncolor = pixel;\nif(index < startIndex)\nreturn;\ncolor = encodeNullKeypoint();\nif(index >= endIndex)\nreturn;\nvec4 data = keypointBuffer[index - startIndex];\nswitch(address.offset) {\ncase 0: {\ncolor = encodeKeypointPosition(data.xy);\nbreak;\n}\ncase 1: {\nvec2 score = encodeKeypointScore(max(data.w, 0.0f));\nfloat scale = encodeLod(data.z);\nfloat rotation = encodeKeypointOrientation(0.0f);\ncolor = vec4(scale, rotation, score);\nbreak;\n}\ndefault: {\ncolor = vec4(0.0f);\nbreak;\n}\n}\n}"
  3952. /***/ }),
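// Editor's note: this shader uploads user-supplied keypoints from a std140 uniform
// buffer. Judging by the switch above, each entry of keypointBuffer is one vec4 per
// keypoint laid out as (x, y, lod, score); pixel offset 0 of the keypoint header
// receives the encoded position and offset 1 receives (encoded lod, orientation = 0,
// 2-channel encoded score). A hypothetical packing helper for the CPU side (not this
// library's actual API):
//
//   function packKeypointsForUBO(keypoints, bufferSize) {
//     const data = new Float32Array(4 * bufferSize);       // one vec4 per slot
//     keypoints.slice(0, bufferSize).forEach((kp, i) => {
//       data.set([kp.x, kp.y, kp.lod ?? 0, kp.score ?? 0], 4 * i);
//     });
//     return data;  // e.g. gl.bufferData(gl.UNIFORM_BUFFER, data, gl.DYNAMIC_DRAW)
//   }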
  3953. /***/ 4050:
  3954. /***/ ((module) => {
  3955. module.exports = "uniform sampler2D image;\nvoid main()\n{\n#if 1\ncolor = texture(image, texCoord);\n#else\nivec2 thread = threadLocation();\nivec2 pos = min(thread * 2, textureSize(image, 0) - ivec2(1));\ncolor = pixelAt(image, pos);\n#endif\n}"
  3956. /***/ }),
  3957. /***/ 5545:
  3958. /***/ ((module) => {
  3959. module.exports = "uniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nvec4 pixel = pixelAt(image, thread / 2);\ncolor = (((thread.x + thread.y) & 1) == 0) ? pixel : vec4(0.0f, 0.0f, 0.0f, pixel.a);\n}"
  3960. /***/ }),
  3961. /***/ 7113:
  3962. /***/ ((module) => {
  3963. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image0;\nuniform sampler2D image1;\nuniform float alpha;\nuniform float beta;\nuniform float gamma;\nconst vec4 BACKGROUND = vec4(0.0f);\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size0 = textureSize(image0, 0);\nivec2 size1 = textureSize(image1, 0);\nvec4 pix0 = all(lessThan(location, size0)) ? pixelAt(image0, location) : BACKGROUND;\nvec4 pix1 = all(lessThan(location, size1)) ? pixelAt(image1, location) : BACKGROUND;\nvec4 pix = clamp(alpha * pix0 + beta * pix1 + vec4(gamma), 0.0f, 1.0f);\ncolor = vec4(pix.rgb, 1.0f);\n}"
  3964. /***/ }),
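// Editor's note: this shader computes a clamped linear combination of two images,
// out = clamp(alpha * image0 + beta * image1 + gamma, 0, 1), reading out-of-bounds
// pixels as transparent black. A per-pixel sketch, assuming RGBA values in [0, 1]:
//
//   const clamp01 = (x) => Math.min(1, Math.max(0, x));
//   function blend(p0, p1, alpha, beta, gamma) {
//     const [r, g, b] = [0, 1, 2].map(c => clamp01(alpha * p0[c] + beta * p1[c] + gamma));
//     return [r, g, b, 1];                      // alpha channel is forced to 1
//   }
//   // blend(p0, p1, 0.5, 0.5, 0) averages the two images;
//   // blend(p0, p1, 1, 0, 0.1) brightens image0 by 0.1.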
  3965. /***/ 1202:
  3966. /***/ ((module) => {
  3967. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nvoid main()\n{\nvec2 imageSize = vec2(textureSize(image, 0));\n#if !defined(INTERPOLATION_METHOD)\n#error Must define INTERPOLATION_METHOD\n#elif INTERPOLATION_METHOD == 0\nvec2 pos = texCoord * imageSize;\ncolor = textureLod(image, (round(pos) + vec2(0.5f)) / imageSize, 0.0f);\n#elif INTERPOLATION_METHOD == 1\ncolor = subpixelAtBI(image, texCoord * imageSize);\n#else\n#error Invalid INTERPOLATION_METHOD\n#endif\n}"
  3968. /***/ }),
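// Editor's note: this shader resizes an image with either nearest-neighbor
// (INTERPOLATION_METHOD 0) or bilinear (INTERPOLATION_METHOD 1) sampling. A sketch of
// bilinear sampling, assuming a hypothetical img(x, y) accessor that clamps its
// coordinates to the image bounds:
//
//   function sampleBilinear(img, x, y) {
//     const x0 = Math.floor(x), y0 = Math.floor(y);
//     const fx = x - x0, fy = y - y0;           // fractional position inside the cell
//     return (1 - fx) * (1 - fy) * img(x0, y0)
//          +      fx  * (1 - fy) * img(x0 + 1, y0)
//          + (1 - fx) *      fy  * img(x0, y0 + 1)
//          +      fx  *      fy  * img(x0 + 1, y0 + 1);
//   }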
  3969. /***/ 7971:
  3970. /***/ ((module) => {
  3971. module.exports = "@include \"subpixel.glsl\"\nuniform sampler2D image;\nuniform mat3 inverseHomography;\nconst vec4 emptyColor = vec4(0.0f, 0.0f, 0.0f, 1.0f);\nvec2 perspectiveWarp(mat3 homography, vec2 p)\n{\nvec3 q = homography * vec3(p, 1.0f);\nreturn q.xy / q.z;\n}\nvoid main()\n{\nivec2 location = threadLocation();\nivec2 size = outputSize();\nconst vec2 zero = vec2(0.0f);\nvec2 target = perspectiveWarp(inverseHomography, vec2(location));\nbool withinBounds = all(bvec4(greaterThanEqual(target, zero), lessThan(target, vec2(size))));\ncolor = withinBounds ? subpixelAtBI(image, target) : emptyColor;\n}"
  3972. /***/ }),
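// Editor's note: this shader warps an image by a homography using backward mapping:
// each output pixel is sent through the *inverse* homography and the source is sampled
// bilinearly there, so every output pixel is defined exactly once and pixels that map
// outside the image become black. Applying a 3x3 homography to a point (GLSL mat3 is
// column-major; h[c][r] is column c, row r):
//
//   function perspectiveWarp(h, x, y) {
//     const qx = h[0][0] * x + h[1][0] * y + h[2][0];
//     const qy = h[0][1] * x + h[1][1] * y + h[2][1];
//     const qz = h[0][2] * x + h[1][2] * y + h[2][2];
//     return [qx / qz, qy / qz];                // divide by the projective coordinate
//   }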
  3973. /***/ 6122:
  3974. /***/ ((module) => {
  3975. module.exports = "@include \"colors.glsl\"\nuniform sampler2D dest, src;\nuniform int destComponents;\nuniform int srcComponentId;\nvoid main()\n{\nvec4 destPixel = threadPixel(dest);\nvec4 srcPixel = threadPixel(src);\nbvec4 flags = bvec4(\n(destComponents & PIXELCOMPONENT_RED) != 0,\n(destComponents & PIXELCOMPONENT_GREEN) != 0,\n(destComponents & PIXELCOMPONENT_BLUE) != 0,\n(destComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(destPixel, vec4(srcPixel[srcComponentId]), flags);\n}"
  3976. /***/ }),
  3977. /***/ 371:
  3978. /***/ ((module) => {
  3979. module.exports = "#if !defined(TYPE)\n#error Undefined TYPE\n#elif TYPE == 1\n@include \"keypoints.glsl\"\n#define nullPixel() encodeNullKeypoint()\n#elif TYPE == 2\n@include \"float16.glsl\"\n#define nullPixel() encodeNullPairOfFloat16()\n#else\n#error Invalid TYPE\n#endif\nuniform sampler2D image;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 imageSize = textureSize(image, 0);\nint rasterIndex = thread.y * outputSize().x + thread.x;\nbool isValidPixel = rasterIndex < imageSize.x * imageSize.y;\nivec2 pos = ivec2(rasterIndex % imageSize.x, rasterIndex / imageSize.x);\nvec4 nullpix = nullPixel();\ncolor = isValidPixel ? texelFetch(image, pos, 0) : nullpix;\n}"
  3980. /***/ }),
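// Editor's note: this shader repacks a 2D texture into a (possibly differently shaped)
// target by walking the output in raster order and mapping each index back into the
// source; indices past the end of the source become null entries (null keypoints or
// null float16 pairs, depending on TYPE). A CPU sketch with illustrative names:
//
//   function reshape(src /* 2D array: src[y][x] */, dstWidth, dstHeight, nullValue) {
//     const h = src.length, w = src[0].length;
//     const out = [];
//     for (let i = 0; i < dstWidth * dstHeight; i++)
//       out.push(i < w * h ? src[Math.floor(i / w)][i % w] : nullValue);
//     return out;   // flat, raster-ordered copy padded with nullValue
//   }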
  3981. /***/ 7307:
  3982. /***/ ((module) => {
  3983. module.exports = "uniform sampler2D image;\nvoid main()\n{\ncolor = threadPixel(image);\n}"
  3984. /***/ }),
  3985. /***/ 8614:
  3986. /***/ ((module) => {
  3987. module.exports = "@include \"colors.glsl\"\nuniform sampler2D image;\nuniform int pixelComponents;\nuniform float value;\nvoid main()\n{\nvec4 pixel = threadPixel(image);\nbvec4 flags = bvec4(\n(pixelComponents & PIXELCOMPONENT_RED) != 0,\n(pixelComponents & PIXELCOMPONENT_GREEN) != 0,\n(pixelComponents & PIXELCOMPONENT_BLUE) != 0,\n(pixelComponents & PIXELCOMPONENT_ALPHA) != 0\n);\ncolor = mix(pixel, vec4(value), flags);\n}"
  3988. /***/ }),
  3989. /***/ 6271:
  3990. /***/ ((module) => {
  3991. module.exports = "uniform float value;\nvoid main()\n{\ncolor = vec4(value);\n}"
  3992. /***/ }),
  3993. /***/ 3016:
  3994. /***/ ((module) => {
  3995. module.exports = "void vsmain()\n{\ngl_Position *= vec4(1,-1,1,1);\n}"
  3996. /***/ }),
  3997. /***/ 3630:
  3998. /***/ ((module) => {
  3999. module.exports = "uniform sampler2D image;\nuniform int iterationNumber;\nvoid main()\n{\nivec2 thread = threadLocation();\nivec2 last = outputSize() - ivec2(1);\nint jump = (1 << iterationNumber);\nint clusterLength = jump << 1;\nint clusterMask = clusterLength - 1;\nivec2 clusterPos = ivec2(thread >> (1 + iterationNumber)) << (1 + iterationNumber);\nivec2 next1 = clusterPos + ((thread - clusterPos + ivec2(jump, 0)) & clusterMask);\nivec2 next2 = clusterPos + ((thread - clusterPos + ivec2(0, jump)) & clusterMask);\nivec2 next3 = clusterPos + ((thread - clusterPos + ivec2(jump, jump)) & clusterMask);\nvec4 p0 = texelFetch(image, thread, 0);\nvec4 p1 = texelFetch(image, min(next1, last), 0);\nvec4 p2 = texelFetch(image, min(next2, last), 0);\nvec4 p3 = texelFetch(image, min(next3, last), 0);\nvec4 pmax = max(max(p0, p1), max(p2, p3));\nvec4 pmin = min(min(p0, p1), min(p2, p3));\ncolor = vec4(pmax.r, pmin.g, pmax.r - pmin.g, p0.a);\n}"
  4000. /***/ }),
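// Editor's note: this shader is one pass of a log-step reduction: at iteration n every
// pixel combines itself with neighbors at distance 2^n (wrapping inside its cluster),
// accumulating the maximum of the red channel and the minimum of the green channel, so
// no atomics or scattered writes are needed. A 1D CPU analogue, assuming the length is
// a power of two so the wrap-around mirrors the shader's `& clusterMask`:
//
//   function reduceMaxMin(values) {             // values: [{ r, g }, ...]
//     let cur = values.slice();
//     for (let jump = 1; jump < cur.length; jump *= 2)
//       cur = cur.map((v, i) => {
//         const other = cur[(i + jump) % cur.length];
//         return { r: Math.max(v.r, other.r), g: Math.min(v.g, other.g) };
//       });
//     return cur[0];                            // every slot now holds { max r, min g }
//   }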
  4001. /***/ 8508:
  4002. /***/ ((module) => {
  4003. module.exports = "@include \"pyramids.glsl\"\n@include \"float16.glsl\"\nuniform sampler2D pyramid;\nuniform float lod;\n#define USE_VARYINGS 1\nin vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\nconst mat3 hkern = mat3(\n1.0f, 0.0f,-1.0f,\n2.0f, 0.0f,-2.0f,\n1.0f, 0.0f,-1.0f\n), vkern = mat3(\n1.0f, 2.0f, 1.0f,\n0.0f, 0.0f, 0.0f,\n-1.0f,-2.0f,-1.0f\n);\n#define PIX(x,y) pyrPixelAtOffset(pyramid, lod, pot, ivec2((x),(y))).g\n#define XIP(v) textureLod(pyramid, (v), lod).g\nvoid main()\n{\nconst vec3 ones = vec3(1.0f);\nfloat pot = exp2(lod);\nmat3 win = mat3(\n#if USE_VARYINGS\nXIP(v_pix0), XIP(v_pix1), XIP(v_pix2),\nXIP(v_pix3), XIP(v_pix4), XIP(v_pix5),\nXIP(v_pix6), XIP(v_pix7), XIP(v_pix8)\n#else\nPIX(-1,-1), PIX(0,-1), PIX(1,-1),\nPIX(-1,0), PIX(0,0), PIX(1,0),\nPIX(-1,1), PIX(0,1), PIX(1,1)\n#endif\n);\nmat3 dx = matrixCompMult(hkern, win);\nmat3 dy = matrixCompMult(vkern, win);\nvec2 df = vec2(\ndot(dx[0] + dx[1] + dx[2], ones),\ndot(dy[0] + dy[1] + dy[2], ones)\n);\ncolor = encodePairOfFloat16(df);\n}"
  4004. /***/ }),
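// Editor's note: this shader computes image derivatives with 3x3 Sobel kernels
// (hkern/vkern above), optionally reading its 3x3 neighborhood through varyings
// precomputed by the companion vertex shader that follows. A CPU sketch of the same
// kind of Sobel filtering, assuming a grayscale accessor img(x, y); the exact sign
// convention may differ from the shader's column-major matrices:
//
//   const SOBEL_X = [[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]];
//   const SOBEL_Y = [[ 1, 2, 1], [ 0, 0, 0], [-1, -2, -1]];
//   function sobel(img, x, y) {
//     let dx = 0, dy = 0;
//     for (let j = -1; j <= 1; j++)
//       for (let i = -1; i <= 1; i++) {
//         const p = img(x + i, y + j);
//         dx += SOBEL_X[j + 1][i + 1] * p;
//         dy += SOBEL_Y[j + 1][i + 1] * p;
//       }
//     return [dx, dy];        // the shader packs this pair as two float16s
//   }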
  4005. /***/ 8073:
  4006. /***/ ((module) => {
  4007. module.exports = "uniform mediump float lod;\nout vec2 v_pix0, v_pix1, v_pix2,\nv_pix3, v_pix4, v_pix5,\nv_pix6, v_pix7, v_pix8;\n#define PIX(x,y) (texCoord + ((pot) * vec2((x),(y))) / texSize)\nvoid vsmain()\n{\nfloat pot = exp2(lod);\nv_pix0 = PIX(-1,-1); v_pix1 = PIX(0,-1); v_pix2 = PIX(1,-1);\nv_pix3 = PIX(-1,0); v_pix4 = PIX(0,0); v_pix5 = PIX(1,0);\nv_pix6 = PIX(-1,1); v_pix7 = PIX(0,1); v_pix8 = PIX(1,1);\n}"
  4008. /***/ }),
  4009. /***/ 3575:
  4010. /***/ ((module) => {
  4011. module.exports = `AGFzbQEAAAABiwETYAABfmADf39/AX9gAX8AYAN/f38AYAF9AX9gAX8Bf2ACf38Bf2AFf39/f38B
  4012. f2AFf39/f38AYAZ/f39/f38Bf2AAAX9gAn99AX9gA39/fQF/YAJ/fwF9YAF/AX1gBH9/f38AYAR/
  4013. f39/AX9gEX98fHx8fHx8fHx8fHx8fHx8AGAHf39/f39/fQF/AjsEA2VudgZtZW1vcnkCAAIDZW52
  4014. BWZhdGFsAAIDZW52CGJ5dGVmaWxsAAMDZW52CmNvcHlXaXRoaW4AAwNAPwQFBgIGAQECBwgGAwAJ
  4015. AgYCBgYKBQUFCQsFBgEBDAEBBgYGAQEMAQ0OAwgPAxAIAwYBEQEBAQEBARIBEgEBDwQFAXABBQUG
  4016. CAF/AUHwmgQLB/QDHAZtYWxsb2MABARmcmVlAAYFc3JhbmQACgxNYXQzMl9jcmVhdGUAEA1NYXQz
  4017. Ml9kZXN0cm95ABcKTWF0MzJfZGF0YQAYDk1hdDMyX2RhdGFTaXplABkPTWF0MzJfdHJhbnNwb3Nl
  4018. AB0JTWF0MzJfYWRkAB4OTWF0MzJfc3VidHJhY3QAHwtNYXQzMl9zY2FsZQAgDk1hdDMyX2NvbXBt
  4019. dWx0ACEOTWF0MzJfbXVsdGlwbHkAIg5NYXQzMl9pbnZlcnNlMQAjDk1hdDMyX2ludmVyc2UyACQO
  4020. TWF0MzJfaW52ZXJzZTMAJQ1NYXQzMl9xcl9mdWxsACwQTWF0MzJfcXJfcmVkdWNlZAAvDE1hdDMy
  4021. X3FyX29scwAwEE1hdDMyX3FyX2ludmVyc2UAMxZNYXQzMl9ob21vZ3JhcGh5X25kbHQ0ADcVTWF0
  4022. MzJfaG9tb2dyYXBoeV9uZGx0ADgUTWF0MzJfYWZmaW5lX2RpcmVjdDMAOhNNYXQzMl9hZmZpbmVf
  4023. ZGlyZWN0ADsYTWF0MzJfcHJhbnNhY19ob21vZ3JhcGh5ADwUTWF0MzJfcHJhbnNhY19hZmZpbmUA
  4024. PhtNYXQzMl90cmFuc2Zvcm1fcGVyc3BlY3RpdmUAPxZNYXQzMl90cmFuc2Zvcm1fYWZmaW5lAEAJ
  4025. CgEAQQELBA8REz0Kh7oBPyMBAX8gALwiAUGAgID8B3FBgICA/AdGIAFB////A3FBAEdxC2kBAX9B
  4026. AEEAKALAmoCAAEEBajYCwJqAgABBAEEAKAK0moCAACIBQQdxIAFqIgEgAGo2ArSagIAAAkBB8JqE
  4027. gABBB3EgAWpB8JqEgABqIgA/AEEQdEkNAEGEiICAABCAgICAAEEADwsgAAt1AQJ/QQAhAkEAQQAo
  4028. AsCagIAAQQFqNgLAmoCAAEEAQQAoArSagIAAIgNBB3EgA2oiAyAAajYCtJqAgAACQAJAQfCahIAA
  4029. QQdxIANqQfCahIAAaiIAPwBBEHRJDQAgAUUNASABEICAgIAAQQAPCyAAIQILIAILRgECf0EAQQAo
  4030. AsCagIAAIgFBf2oiAjYCwJqAgAACQCACDQBBAEEINgK0moCAAA8LAkAgAUEASg0AQZOIgIAAEICA
  4031. gIAACwtGAQJ/QQBBACgCwJqAgAAiAkF/aiIDNgLAmoCAAAJAIAMNAEEAQQg2ArSagIAAQQAPCwJA
  4032. IAJBAEoNACABEICAgIAAC0EACxcAIAFB/wFxIAAgACACahCBgICAACAACxMAIAAgASABIAJqEIKA
  4033. gIAAIAALoQECAX8CfkEAKAK4moCAACIBIACtQiCGIABBf3OthCICQqrw0/Sv7ry3PHwiA0IeiCAD
  4034. hUK5y5Pn0e2RrL9/fiIDQhuIIAOFQuujxJmxt5LolH9+IgNCH4ggA4U3AwggASACQpX4qfqXt96b
  4035. nn98IgJCHoggAoVCucuT59Htkay/f34iAkIbiCAChULro8SZsbeS6JR/fiICQh+IIAKFNwMAC0QB
  4036. AX9B3oG33QAhBQJAIAJFDQAgAEUNACADRQ0AQQAhBSABQQJJDQAgACAAIAFBf2ogAmxqIAIgAyAE
  4037. EIyAgIAACyAFC60GAwR/AXwFfwJAAkAgASAASw0AIAEhBSAAIQYMAQtBACACayEHIAJBBEshCANA
  4038. IAEiBSAAIgZrIAJuIgFBCEkNAQJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAA
  4039. AAAAAPC/oCABQQFquKIiCUQAAAAAAADwQWMgCUQAAAAAAAAAAGZxRQ0AIAmrIQEMAQtBACEBCyAG
  4040. IAEgAmxqIQogBSEBIAYhCwNAAkAgCyAKIAQgAxGBgICAAABBf0oNAANAIAsgAmoiCyAKIAQgAxGB
  4041. gICAAABBAEgNAAsLAkAgASAKIAQgAxGBgICAAABBAUgNAANAIAEgB2oiASAKIAQgAxGBgICAAABB
  4042. AEoNAAsLAkAgCyABTw0AIAEhACALIQwgAiENAkACQCAIDQACQAJAIAIOBQMBAQEAAwsgCygCACEA
  4043. IAsgASgCADYCACABIAA2AgAMAgsgASEAIAshDCACIQ0LA0AgDC0AACEOIAwgAC0AADoAACAAIA46
  4044. AAAgAEEBaiEAIAxBAWohDCANQX9qIg0NAAsLIAEgCyAKIAogAUYbIAogC0YbIQogASAHaiEBIAsg
  4045. AmohCwwBCwsgCyACaiALIAsgAUYiABshDAJAAkAgASAHaiABIAAbIgEgBk0NACAMIAVPDQACQCAB
  4046. IAZrIAUgDGtNDQAgDCAFIAIgAyAEEIyAgIAAIAYhAAwCCyAGIAEgAiADIAQQjICAgAAgBSEBIAwh
  4047. AAwBCyAGIAwgASAGSyIKGyEAIAEgBSAKGyEBIAoNACAMIAVPDQILIAEhBSAAIQYgASAASw0ACwsC
  4048. QCAGIAVPDQAgAkEESyEHA0AgBiINIAJqIgYhASANIQACQCAGIAVLDQADQCABIAAgASAAIAQgAxGB
  4049. gICAAABBAEgbIQAgASACaiIBIAVNDQALIAAgDUYNAAJAIAcNAAJAIAIOBQIBAQEAAgsgACgCACEB
  4050. IAAgDSgCADYCACANIAE2AgAMAQtBACEBA0AgACABaiIMLQAAIQogDCANIAFqIgstAAA6AAAgCyAK
  4051. OgAAIAIgAUEBaiIBRw0ACwsgBiAFSQ0ACwsLNQECfwJAIAFBAUgNAEEAIQIgACEDA0AgAyACNgIA
  4052. IANBBGohAyABIAJBAWoiAkcNAAsLIAALvgIFAn8BfAF/AXwEfwJAIAFBf2oiA0UNACACQQRLIQRE
  4053. AAAAAAAAAAAhBUEAIQYDQAJAAkBBACgCvJqAgAARgICAgAAAQgyIQoCAgICAgID4P4S/RAAAAAAA
  4054. APC/oCABIAZruKIgBaAiB0QAAAAAAADwQWMgB0QAAAAAAAAAAGZxRQ0AIAerIQgMAQtBACEICwJA
  4055. IAYgCEYNAAJAIAQNAAJAIAIOBQIBAQEAAgsgACAGQQJ0aiIJKAIAIQogCSAAIAhBAnRqIggoAgA2
  4056. AgAgCCAKNgIADAELIAAgBiACbGohCSAAIAggAmxqIQggAiEKA0AgCS0AACELIAkgCC0AADoAACAI
  4057. IAs6AAAgCEEBaiEIIAlBAWohCSAKQX9qIgoNAAsLIAVEAAAAAAAA8D+gIQUgBkEBaiIGIANHDQAL
  4058. CwtFAQN+QQBBACkD2JqAgAAiAEEAKQPQmoCAACIBhSICQiWJNwPYmoCAAEEAIAFCGIkgAoUgAkIQ
  4059. hoU3A9CagIAAIAAgAXwLlAEBAX8CQAJAIAMgAkgNACAAQQFIDQAgAUEBSA0AIAJBAUgNACAAQX9q
  4060. IAJsIAFBf2ogA2xqQQFqIARHDQAgBQ0BC0GfiICAABCAgICAAAtBHEG+iICAABCFgICAACIGIAM2
  4061. AhQgBiACNgIQIAYgATYCDCAGIAA2AgggBiAENgIEIAZBgoCAgAA2AhggBiAFNgIAIAYLAgALkwEB
  4062. BH8CQAJAIABBAUgNACABQQBKDQELQdqIgIAAEICAgIAAC0EcQfmIgIAAEIWAgIAAIQIgASAAbCID
  4063. QQJ0IgRBlYmAgAAQhYCAgAAhBSACIAA2AhQgAkEBNgIQIAIgATYCDCACIAA2AgggAiADNgIEIAVB
  4064. ACAEEIiAgIAAIQAgAkGDgICAADYCGCACIAA2AgAgAgsRACAAQeeKgIAAEIeAgIAAGgv0AQEEfwJA
  4065. AkAgAEEBSA0AIAFBAEoNAQtB2oiAgAAQgICAgAALQRxB+YiAgAAQhYCAgAAhAiABIABsIgNBAnQi
  4066. BEGViYCAABCFgICAACEFIAIgADYCFCACQQE2AhAgAiABNgIMIAIgADYCCCACIAM2AgQgBUEAIAQQ
  4067. iICAgAAhAyACQYOAgIAANgIYIAIgAzYCAAJAIAAgASAAIAFIGyIBQQFIDQAgAyACKAIUIAIoAhBq
  4068. IgQgAUF/amxBAnRqIQAgAUEBaiEBQQAgBEECdGshAwNAIABBgICA/AM2AgAgACADaiEAIAFBf2oi
  4069. AUEBSg0ACwsgAguYAgEKfwJAAkAgACgCCCABKAIIRw0AIAAoAgwgASgCDEYNAQtBx4qAgAAQgICA
  4070. gAALAkACQCAAKAIEIgIgASgCBEYNACAAKAIMIgNBAUgNAUEAIQQgACgCCCIFQQFIIQZBACEHA0AC
  4071. QCAGDQAgACgCEEECdCEIIAEoAhBBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgCACABKAIUIARsaiEK
  4072. QQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsgBEEEaiEEIAdBAWoi
  4073. ByADSA0ADAILCwJAIAEoAgAiCiAAKAIAIgsgAkECdCICak8NACAKIAJqIAtLDQELIAsgCiACEImA
  4074. gIAAGgsgAAtVAQF/QRxBsYmAgAAQhYCAgAAiAEEYakEAKALoiYCAADYCACAAQRBqQQApAuCJgIAA
  4075. NwIAIABBCGpBACkC2ImAgAA3AgAgAEEAKQLQiYCAADcCACAACyEAIAAoAgAgACgCGBGCgICAAAAg
  4076. AEHsiYCAABCHgICAAAsHACAAKAIACwoAIAAoAgRBAnQL0AEBAn8CQCAAKAIYQYKAgIAARg0AQYeK
  4077. gIAAEICAgIAACwJAAkAgAyACSA0AIAJBAEgNACAFIARIDQAgBEEASA0AIAEoAgggA0wNACABKAIM
  4078. IAVKDQELQaeKgIAAEICAgIAACyABKAIQIQYgAEEUaiABQRRqKAIAIgc2AgAgACAGNgIQIAAgBSAE
  4079. a0EBajYCDCAAIAMgAmtBAWo2AgggACAGIANsIAcgBWxqIAcgBGwgBiACbGoiAmtBAWo2AgQgACAB
  4080. KAIAIAJBAnRqNgIAIAALgQEBCH8CQCAAKAIMIgJBAUgNAEEAIQMgACgCCCIEQQFIIQVBACEGA0AC
  4081. QCAFDQAgACgCEEECdCEHIAAoAgAgACgCFCADbGohCEEAIQkDQCAIIAE4AgAgCCAHaiEIIAlBAWoi
  4082. CSAESA0ACwsgA0EEaiEDIAZBAWoiBiACSA0ACwsgAAumAQEIfwJAIAAoAgwiASAAKAIIIgJsIgMg
  4083. ACgCBEcNACAAKAIAQQAgA0ECdBCIgICAABogAA8LAkAgAUEBSA0AIAJBAUghBEEAIQVBACEGA0AC
  4084. QCAEDQAgACgCEEECdCEHIAAoAgAgACgCFCAFbGohAyACIQgDQCADQQA2AgAgAyAHaiEDIAhBf2oi
  4085. CA0ACwsgBUEEaiEFIAZBAWoiBiABRw0ACwsgAAvcAQEKfwJAAkAgACgCCCABKAIMRw0AIAAoAgwi
  4086. AiABKAIIRg0BC0GBi4CAABCAgICAACAAKAIMIQILAkAgAkEBSA0AIAAoAgwhA0EAIQQgACgCCCIF
  4087. QQFIIQZBACEHA0ACQCAGDQAgACgCEEECdCEIIAEoAhRBAnQhCSAAKAIAIAAoAhQgBGxqIQIgASgC
  4088. ACABKAIQIARsaiEKQQAhCwNAIAIgCigCADYCACACIAhqIQIgCiAJaiEKIAtBAWoiCyAFSA0ACwsg
  4089. BEEEaiEEIAdBAWoiByADSA0ACwsgAAuZAgEMfwJAAkAgASgCCCIDIAIoAghHDQAgASgCDCIEIAIo
  4090. AgxHDQAgACgCCCADRw0AIAAoAgwgBEYNAQtBp4uAgAAQgICAgAAgACgCDCEECwJAIARBAUgNACAA
  4091. KAIMIQVBACEGIAAoAggiB0EBSCEIQQAhCQNAAkAgCA0AIAAoAhBBAnQhCiACKAIQQQJ0IQsgASgC
  4092. EEECdCEMIAAoAgAgACgCFCAGbGohBCACKAIAIAIoAhQgBmxqIQMgASgCACABKAIUIAZsaiENQQAh
  4093. DgNAIAQgDSoCACADKgIAkjgCACAEIApqIQQgAyALaiEDIA0gDGohDSAOQQFqIg4gB0gNAAsLIAZB
  4094. BGohBiAJQQFqIgkgBUgNAAsLIAALmQIBDH8CQAJAIAEoAggiAyACKAIIRw0AIAEoAgwiBCACKAIM
  4095. Rw0AIAAoAgggA0cNACAAKAIMIARGDQELQc2LgIAAEICAgIAAIAAoAgwhBAsCQCAEQQFIDQAgACgC
  4096. DCEFQQAhBiAAKAIIIgdBAUghCEEAIQkDQAJAIAgNACAAKAIQQQJ0IQogAigCEEECdCELIAEoAhBB
  4097. AnQhDCAAKAIAIAAoAhQgBmxqIQQgAigCACACKAIUIAZsaiEDIAEoAgAgASgCFCAGbGohDUEAIQ4D
  4098. QCAEIA0qAgAgAyoCAJM4AgAgBCAKaiEEIAMgC2ohAyANIAxqIQ0gDkEBaiIOIAdIDQALCyAGQQRq
  4099. IQYgCUEBaiIJIAVIDQALCyAAC98BAQp/AkACQCAAKAIIIAEoAghHDQAgACgCDCIDIAEoAgxGDQEL
  4100. QfOLgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAhBSAAKAIIIgZBAUghB0EAIQgD
  4101. QAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAFbGohAyABKAIAIAEoAhQgBWxq
  4102. IQtBACEMA0AgAyALKgIAIAKUOAIAIAMgCWohAyALIApqIQsgDEEBaiIMIAZIDQALCyAFQQRqIQUg
  4103. CEEBaiIIIARIDQALCyAAC5kCAQx/AkACQCABKAIIIgMgAigCCEcNACABKAIMIgQgAigCDEcNACAA
  4104. KAIIIANHDQAgACgCDCAERg0BC0GZjICAABCAgICAACAAKAIMIQQLAkAgBEEBSA0AIAAoAgwhBUEA
  4105. IQYgACgCCCIHQQFIIQhBACEJA0ACQCAIDQAgACgCEEECdCEKIAIoAhBBAnQhCyABKAIQQQJ0IQwg
  4106. ACgCACAAKAIUIAZsaiEEIAIoAgAgAigCFCAGbGohAyABKAIAIAEoAhQgBmxqIQ1BACEOA0AgBCAN
  4107. KgIAIAMqAgCUOAIAIAQgCmohBCADIAtqIQMgDSAMaiENIA5BAWoiDiAHSA0ACwsgBkEEaiEGIAlB
  4108. AWoiCSAFSA0ACwsgAAvOAgMLfwF9BX8CQAJAIAEoAgwgAigCCEcNACAAKAIIIAEoAghHDQAgACgC
  4109. DCACKAIMRg0BC0HAjICAABCAgICAAAsgABCcgICAABoCQCAAKAIMIgNBAUgNAEEAIQQgAigCCCIF
  4110. QQFIIQZBACEHA0ACQCAGDQAgAigCFCAHbCEIIAAoAgghCSACKAIQIQogAigCACELQQAhDEEAIQ0D
  4111. QAJAIAlBAUgNACALIAggCiANbGpBAnRqKgIAIQ4gACgCEEECdCEPIAEoAhBBAnQhECAAKAIAIAQg
  4112. ACgCFGxqIREgASgCACABKAIUIAxsaiESQQAhEwNAIBEgDiASKgIAlCARKgIAkjgCACARIA9qIREg
  4113. EiAQaiESIBNBAWoiEyAJSA0ACwsgDEEEaiEMIA1BAWoiDSAFSA0ACwsgBEEEaiEEIAdBAWoiByAD
  4114. SA0ACwsgAAuIAQICfwF9AkACQCAAKAIIIgIgASgCCEcNACACQQFHDQAgAiAAKAIMIgNHDQAgAyAB
  4115. KAIMRg0BC0HnjICAABCAgICAAAsCQAJAIAEoAgAqAgAiBIu7RI3ttaD3xrA+Y0EBcw0AQQAqAoCI
  4116. gIAAIQQMAQtDAACAPyAElSEECyAAKAIAIAQ4AgAgAAuNAgICfwV9AkACQCAAKAIIIgIgASgCCEcN
  4117. ACACQQJHDQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0GOjYCAABCAgICAAAsCQAJAIAEoAgAiAioC
  4118. ACIEIAIgAUEUaigCACIDIAEoAhAiAWpBAnRqKgIAIgWUIAIgAUECdGoqAgAiBiACIANBAnRqKgIA
  4119. IgeUkyIIi7tEje21oPfGsD5jQQFzDQBBACoCgIiAgAAhCAwBC0MAAIA/IAiVIQgLIAAoAgAiASAF
  4120. IAiUOAIAIAEgACgCECICQQJ0aiAIIAaMlDgCACABIABBFGooAgAiA0ECdGogCCAHjJQ4AgAgASAD
  4121. IAJqQQJ0aiAEIAiUOAIAIAALnAQGAn8CfQF/BX0BfwZ9AkACQCAAKAIIIgIgASgCCEcNACACQQNH
  4122. DQAgAiAAKAIMIgNHDQAgAyABKAIMRg0BC0G1jYCAABCAgICAAAsCQAJAIAEoAgAiAiABKAIQIgNB
  4123. A3RqKgIAIgQgAiABQRRqKAIAIgFBAnRqKgIAIgUgAiABQQF0IgYgA2pBAnRqKgIAIgeUIAIgASAD
  4124. akECdGoqAgAiCCACIAFBA3RqKgIAIgmUkyIKlCACKgIAIgsgCCACIAYgA0EBdCIMakECdGoqAgAi
  4125. DZQgAiAMIAFqQQJ0aioCACIOIAeUkyIPlCACIANBAnRqKgIAIhAgBSANlCAOIAmUkyIRlJOSIhKL
  4126. u0SN7bWg98awPmNBAXMNAEEAKgKAiICAACESDAELQwAAgD8gEpUhEgsgACgCACICIA8gEpQ4AgAg
  4127. AiAAKAIQIgFBAnRqIBIgECANlCAEIAeUk4yUOAIAIAIgAUEDdGogECAOlCAEIAiUkyASlDgCACAC
  4128. IABBFGooAgAiA0ECdGogEiARjJQ4AgAgAiADIAFqIgZBAnRqIAsgDZQgBCAJlJMgEpQ4AgAgAiAD
  4129. IAFBAXRqQQJ0aiASIAsgDpQgBCAFlJOMlDgCACACIANBA3RqIAogEpQ4AgAgAiABIANBAXRqQQJ0
  4130. aiASIAsgB5QgECAJlJOMlDgCACACIAZBA3RqIAsgCJQgECAFlJMgEpQ4AgAgAAvZAgIRfwF9AkAC
  4131. QCABKAIIIAIoAghHDQAgACgCCCABKAIMRw0AIAAoAgwiAyACKAIMRg0BC0HcjYCAABCAgICAACAA
  4132. KAIMIQMLAkAgA0EBSA0AIAAoAgwhBCAAKAIIIgVBAUghBkEAIQdBACEIA0ACQCAGDQAgACgCFCAI
  4133. bCEJIAIoAgghCiAAKAIQIQsgACgCACEMQQAhDUEAIQ4DQCAMIAkgCyAObGpBAnRqIg9BADYCAAJA
  4134. IApBAUgNACACKAIQQQJ0IRAgASgCEEECdCERIAIoAgAgByACKAIUbGohAyABKAIAIAEoAhQgDWxq
  4135. IRJBACETQwAAAAAhFANAIA8gFCASKgIAIAMqAgCUkiIUOAIAIAMgEGohAyASIBFqIRIgE0EBaiIT
  4136. IApIDQALCyANQQRqIQ0gDkEBaiIOIAVIDQALCyAHQQRqIQcgCEEBaiIIIARIDQALCyAAC5sFBAR/
  4137. An0DfxB9AkACQCAAKAIIIgMgACgCDEcNACABKAIIIgQgASgCDEcNACACKAIIIgVBA0cNACAEQQNH
  4138. DQAgA0EDRw0AIAUgAigCDEYNAQtBg46AgAAQgICAgAALIAIoAgAiAyACQRRqKAIAIgRBAXQiBiAC
  4139. KAIQIgVBAXQiAmpBAnRqKgIAIQcgAyACIARqQQJ0aioCACEIIAEoAgAiAiABKAIQIglBAXQiCiAB
  4140. QRRqKAIAIgtqQQJ0aioCACEMIAIgC0EBdCIBIApqQQJ0aioCACENIAMgBEEDdGoqAgAhDiADIAYg
  4141. BWpBAnRqKgIAIQ8gAyAEQQJ0aioCACEQIAMgBCAFakECdGoqAgAhESACIAlBA3RqKgIAIRIgAiAJ
  4142. QQJ0aioCACETIAIgCyAJakECdGoqAgAhFCACIAEgCWpBAnRqKgIAIRUgACgCACIBIAIqAgAiFiAD
  4143. KgIAIheUIAIgC0ECdGoqAgAiGCADIAVBAnRqKgIAIhmUkiACIAtBA3RqKgIAIhogAyAFQQN0aioC
  4144. ACIblJI4AgAgASAAKAIQIgNBAnRqIBMgF5QgFCAZlJIgFSAblJI4AgAgASADQQN0aiASIBeUIAwg
  4145. GZSSIA0gG5SSOAIAIAEgAEEUaigCACICQQJ0aiAWIBCUIBggEZSSIBogCJSSOAIAIAEgAiADaiIE
  4146. QQJ0aiATIBCUIBQgEZSSIBUgCJSSOAIAIAEgAiADQQF0akECdGogEiAQlCAMIBGUkiANIAiUkjgC
  4147. ACABIAJBA3RqIBYgDpQgGCAPlJIgGiAHlJI4AgAgASADIAJBAXRqQQJ0aiATIA6UIBQgD5SSIBUg
  4148. B5SSOAIAIAEgBEEDdGogEiAOlCAMIA+UkiANIAeUkjgCACAAC+UBAQp/AkACQCAAKAIIIAEoAghH
  4149. DQAgACgCDCIDIAEoAgxGDQELQaqOgIAAEICAgIAAIAAoAgwhAwsCQCADQQFIDQAgACgCDCEEQQAh
  4150. BSAAKAIIIgZBAUghB0EAIQgDQAJAIAcNACAAKAIQQQJ0IQkgASgCEEECdCEKIAAoAgAgACgCFCAF
  4151. bGohAyABKAIAIAEoAhQgBWxqIQtBACEMA0AgAyALKgIAIAKUIAMqAgCSOAIAIAMgCWohAyALIApq
  4152. IQsgDEEBaiIMIAZIDQALCyAFQQRqIQUgCEEBaiIIIARIDQALCyAAC48CAwh/AX0DfwJAAkAgASgC
  4153. DEEBRw0AIAIoAghBAUcNACAAKAIIIAEoAghHDQAgACgCDCIDIAIoAgxGDQELQdGOgIAAEICAgIAA
  4154. IAAoAgwhAwsCQCADQQFIDQAgAkEUaigCACEEIAAoAgwhBSACKAIAIQZBACEHIAAoAggiCEEBSCEJ
  4155. QQAhCgNAAkAgCQ0AIAYgBCAKbEECdGoqAgAhCyAAKAIQQQJ0IQwgASgCEEECdCENIAAoAgAgACgC
  4156. FCAHbGohAiABKAIAIQNBACEOA0AgAiALIAMqAgCUOAIAIAIgDGohAiADIA1qIQMgDkEBaiIOIAhI
  4157. DQALCyAHQQRqIQcgCkEBaiIKIAVIDQALCyAAC70BAwF/AX0DfwJAAkAgACgCDEEBRw0AIAEoAgxB
  4158. AUcNACAAKAIIIgIgASgCCEYNAQtB+I6AgAAQgICAgAAgASgCCCECCwJAAkAgAkEBTg0AQwAAAAAh
  4159. AwwBCyABKAIQQQJ0IQQgACgCEEECdCEFIAEoAgghBiABKAIAIQEgACgCACEAQwAAAAAhA0EAIQID
  4160. QCADIAAqAgAgASoCAJSSIQMgASAEaiEBIAAgBWohACACQQFqIgIgBkgNAAsLIAMLggEEAX8BfQJ/
  4161. AX0CQCAAKAIMQQFGDQBBn4+AgAAQgICAgAALAkACQCAAKAIIIgFBAU4NAEMAAAAAIQIMAQsgACgC
  4162. EEECdCEDIAAoAgAhAEEAIQRDAAAAACECA0AgAiAAKgIAIgUgBZSSIQIgACADaiEAIARBAWoiBCAB
  4163. SA0ACwsgApELsQIBBX8CQCACKAIIIgMgAigCDCIETg0AQcaPgIAAEICAgIAACwJAAkAgACgCCCAD
  4164. Rw0AIAAoAgwgA0cNACABKAIIIANHDQAgASgCDCAERg0BC0Hlj4CAABCAgICAAAsgBEECdEGfkYCA
  4165. ABCFgICAACEFAkACQCAEQQFIDQBBACEGIAUhBwNAIAcgAyAGakEBEJKAgIAANgIAIAdBBGohByAE
  4166. IAZBf2oiBmoNAAsgAyAEIAUgASACEK2AgIAAIAMgBCAFIAAQroCAgAAgBEEBaiEHIARBAnQgBWpB
  4167. fGohBgNAIAYoAgAQl4CAgAAaIAZBfGohBiAHQX9qIgdBAUoNAAwCCwsgAyAEIAUgASACEK2AgIAA
  4168. IAMgBCAFIAAQroCAgAALIAVBlZKAgAAQh4CAgAAaC5AEAgl/An0CQCAAIAFODQBBupGAgAAQgICA
  4169. gAALAkACQCAEKAIIIABHDQAgBCgCDCABRw0AIAMoAgggAEcNACADKAIMIAFGDQELQdiRgIAAEICA
  4170. gIAACxCWgICAACEFEJaAgIAAIQYQloCAgAAhBxCWgICAACEIIABBAWoiCSABQQFqIgoQkoCAgAAh
  4171. CyAJIAoQkoCAgAAhDCADIAQQlYCAgAAaAkAgAUEBSA0AIAFBf2ohDSAAQX9qIQpBACEAA0AgBSAD
  4172. IAAgCiAAIAAQmoCAgAAiBCgCACoCACEOIAIoAgAgBBCVgICAABogBBCrgICAACEPIAIoAgAiBCgC
  4173. ACIJIA8gDkMAAAAAYCAOQwAAAABda7KUIAkqAgCSOAIAAkAgBBCrgICAACIOi7tEje21oPfGsD5j
  4174. DQAgAigCACIEIARDAACAPyAOlRCggICAABogBiADIAAgCiAAIA0QmoCAgAAhBCAHIAtBASACKAIA
  4175. KAIMQQEgBCgCDBCagICAACACKAIAIAQQpoCAgAAhCSAEIAggDEEBIAIoAgAoAghBASAEKAIMEJqA
  4176. gIAAIAIoAgAgCRCpgICAAEMAAADAEKiAgIAAGgsgAkEEaiECIAEgAEEBaiIARw0ACwsgDBCXgICA
  4177. ABogCxCXgICAABogCBCXgICAABogBxCXgICAABogBhCXgICAABogBRCXgICAABoL8gICCH8BfQJA
  4178. AkAgAygCCCAARw0AIAMoAgwiBCAARg0BIAQgAUYNAQtB9pGAgAAQgICAgAALEJaAgIAAIQUQloCA
  4179. gAAhBiADEJyAgIAAGgJAIAMoAgwiB0EBSA0AIAMoAgAgA0EUaigCACADKAIQaiIIIAdBf2psQQJ0
  4180. aiEEIAdBAWohCUEAIAhBAnRrIQgDQCAEQYCAgPwDNgIAIAQgCGohBCAJQX9qIglBAUoNAAsgB0EB
  4181. SA0AIAFBAWohCiAAQX9qIQAgAUECdCACakF8aiELQQAhAgNAIAUgA0EAIAAgAiACEJqAgIAAIQcg
  4182. CyEEIAohCQJAIAFBAUgNAANAIAYgByAJQX5qIABBAEEAEJqAgIAAIQggBCgCACAIEKqAgIAAIQwg
  4183. CCAEKAIAIAxDAAAAwJQQqICAgAAaIARBfGohBCAJQX9qIglBAUoNAAsLIAJBAWoiAiADKAIMSA0A
  4184. CwsgBhCXgICAABogBRCXgICAABoLlwMBB38CQCACKAIIIgMgAigCDCIETg0AQYSQgIAAEICAgIAA
  4185. CwJAAkAgACgCCCADRw0AIAAoAgwgBEcNACABKAIIIARHDQAgASgCDCAERg0BC0GjkICAABCAgICA
  4186. AAsQloCAgAAhBSADIAQQkoCAgAAhBiAEQQJ0QZ+RgIAAEIWAgIAAIQcCQAJAIARBAUgNAEEAIQgg
  4187. ByEJA0AgCSADIAhqQQEQkoCAgAA2AgAgCUEEaiEJIAQgCEF/aiIIag0ACyADIAQgByAGIAIQrYCA
  4188. gAAgAyAEIAcgABCugICAACABIAUgBkEAIARBf2oiCEEAIAgQmoCAgAAQlYCAgAAaIARBAWohCSAE
  4189. QQJ0IAdqQXxqIQgDQCAIKAIAEJeAgIAAGiAIQXxqIQggCUF/aiIJQQFKDQAMAgsLIAMgBCAHIAYg
  4190. AhCtgICAACADIAQgByAAEK6AgIAAIAEgBSAGQQAgBEF/aiIIQQAgCBCagICAABCVgICAABoLIAdB
  4191. lZKAgAAQh4CAgAAaIAYQl4CAgAAaIAUQl4CAgAAaC+QDAQp/AkAgASgCCCIEIAEoAgwiBU4NAEHC
  4192. kICAABCAgICAAAsCQAJAIAIoAgggBEcNACACKAIMQQFHDQAgACgCCCAFRw0AIAAoAgxBAUYNAQtB
  4193. 4ZCAgAAQgICAgAALIAQgBRCSgICAACEGIARBARCSgICAACEHIARBARCSgICAACEIIAVBARCSgICA
  4194. ACEJIAVBAnRBn5GAgAAQhYCAgAAhCgJAIAVBAUgNACAEIQsgCiEMIAUhDQNAIAwgC0EBEJKAgIAA
  4195. NgIAIAtBf2ohCyAMQQRqIQwgDUF/aiINDQALCyAEIAUgCiAGIAEQrYCAgAAgBCAFIAogByACELGA
  4196. gIAAIAAgBiAHELKAgIAAAkAgA0EBSA0AIANBAWohCwNAIAggAiAHIAEgABCigICAABCfgICAABog
  4197. BCAFIAogByAIELGAgIAAIAkgBiAHELKAgIAAIAAgCUMAAIA/EKiAgIAAGiALQX9qIgtBAUoNAAsL
  4198. AkAgBUEBSA0AIAVBAWohDCAFQQJ0IApqQXxqIQsDQCALKAIAEJeAgIAAGiALQXxqIQsgDEF/aiIM
  4199. QQFKDQALCyAKQZWSgIAAEIeAgIAAGiAJEJeAgIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAA
  4200. GiAAC+MCAwh/AX0BfwJAAkAgAygCCCAARw0AIAMoAgxBAUcNACAEKAIIIABHDQAgBCgCDEEBRg0B
  4201. C0GukoCAABCAgICAAAsgAyAEEJWAgIAAGgJAIAFBAUgNAEEAIQUgACEGQQAhBwNAAkAgByAATiII
  4202. DQAgAygCECIEQQJ0IQkgAygCACAEIAVsaiEEIAIgB0ECdGoiCigCACILKAIQQQJ0IQwgCygCACEL
  4203. QwAAAAAhDSAGIQ4DQCANIAsqAgAgBCoCAJSSIQ0gBCAJaiEEIAsgDGohCyAOQX9qIg4NAAsgCA0A
  4204. IA0gDZIhDSADKAIQIgRBAnQhCSADKAIAIAQgBWxqIQQgCigCACILKAIQQQJ0IQwgCygCACELIAYh
  4205. DgNAIAQgBCoCACANIAsqAgCUkzgCACAEIAlqIQQgCyAMaiELIA5Bf2oiDg0ACwsgBUEEaiEFIAZB
  4206. f2ohBiAHQQFqIgcgAUcNAAsLC7IDAwx/An0DfwJAIAEoAggiAyABKAIMIgRODQBBzZKAgAAQgICA
  4207. gAALAkACQCAAKAIIIARHDQAgACgCDEEBRw0AIAIoAgggA0cNACACKAIMQQFGDQELQeySgIAAEICA
  4208. gIAACwJAIARBAUgNAEEAIQVBACABQRRqKAIAIgNBAnQiBiABKAIQIgdBAnRqayEIIAEoAgAiCSAD
  4209. IARsIAcgBEF/amxqQQJ0aiEKIARBAnQhCyADIAdqIQwgBCENA0ACQCAJIAwgDUF/aiIObEECdGoq
  4210. AgAiD4u7RI3ttaD3xrA+Y0EBcw0AIABBACoCgIiAgAAQm4CAgAAaDwsgAigCACACKAIQIA5sQQJ0
  4211. aioCACEQAkACQCANIARIDQAgACgCECERIAAoAgAhEgwBCyAAKAIQIhFBAnQhEyAAKAIAIhIgESAL
  4212. bGohASAKIQMgBSEHA0AgECADKgIAIAEqAgCUkyEQIAEgE2ohASADIAZqIQMgB0F/aiIHDQALCyAS
  4213. IBEgDmxBAnRqIBAgD5U4AgAgC0F8aiELIAogCGohCiAFQQFqIQUgDUEBSiEBIA4hDSABDQALCwvC
  4214. AwEKfwJAAkAgACgCCCICIAAoAgxHDQAgAiABKAIIIgNHDQAgAyABKAIMRg0BC0GAkYCAABCAgICA
  4215. ACAAKAIMIQILIAIgAhCUgICAACEEIAIgAhCSgICAACEFIAJBARCSgICAACEGEJaAgIAAIQcQloCA
  4216. gAAhCCACQQJ0QZ+RgIAAEIWAgIAAIQkCQAJAIAJBAUgNACAJIQMgAiEKA0AgAyAKQQEQkoCAgAA2
  4217. AgAgA0EEaiEDIApBf2oiCg0ACyACIAIgCSAFIAEQrYCAgAAgAkEBSA0BIAJBf2ohCkEAIQMDQCAH
  4218. IARBACAKIAMgAxCagICAACEBIAggAEEAIAogAyADEJqAgIAAIQsgAiACIAkgBiABELGAgIAAIAsg
  4219. BSAGELKAgIAAIAIgA0EBaiIDRw0ACyACQQFIDQEgAkEBaiEKIAJBAnQgCWpBfGohAwNAIAMoAgAQ
  4220. l4CAgAAaIANBfGohAyAKQX9qIgpBAUoNAAwCCwsgAiACIAkgBSABEK2AgIAACyAJQZWSgIAAEIeA
  4221. gIAAGiAIEJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC9YCAQJ/
  4222. AkACQCAAKAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMQQRHDQAgAigCCEECRw0AIAIo
  4223. AgxBBEYNAQtBi5OAgAAQgICAgAALIAAgASgCACIDKgIAuyADIAEoAhAiBEECdGoqAgC7IAMgAUEU
  4224. aigCACIBQQJ0aioCALsgAyABIARqQQJ0aioCALsgAyABQQN0aioCALsgAyABQQF0IARqQQJ0aioC
  4225. ALsgAyABQQNsIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyACKAIAIgMqAgC7IAMgAigCECIEQQJ0
  4226. aioCALsgAyACQRRqKAIAIgFBAnRqKgIAuyADIAEgBGpBAnRqKgIAuyADIAFBA3RqKgIAuyADIAFB
  4227. AXQgBGpBAnRqKgIAuyADIAFBA2wiAUECdGoqAgC7IAMgASAEakECdGoqAgC7ELWAgIAAIAAL9QoC
  4228. FnwDf0EAKgKAiICAALshEQJAAkAgAiAEoSISIAWiIAQgBqEiEyABoiAGIAKhIhQgA6KgoCAKIAyh
  4229. IhUgDaIgDCAOoSIWIAmiIA4gCqEgC6KgoKJEAAAAAAAAAABjDQAgEyAHoiAGIAihIhcgA6IgCCAE
  4230. oSIYIAWioKAgFiAPoiAOIBChIhkgC6IgECAMoSANoqCgokQAAAAAAAAAAGMNACASIAeiIAQgCKEg
  4231. AaIgCCACoSITIAOioKAgFSAPoiAMIBChIAmiIBAgCqEiEiALoqCgokQAAAAAAAAAAGMNACACIAah
  4232. IAeiIBcgAaIgEyAFoqCgIAogDqEgD6IgGSAJoiASIA2ioKCiRAAAAAAAAAAAYw0AIAQgAqEiGiAH
  4233. IAGhIheiIAMgAaEiGyAToqEiHJkiHUSN7bWg98awPmMNACAUIBeiIAUgAaEiHiAToqEiH5kiIESN
  4234. 7bWg98awPmMNACAbIBSiIBogHqKhIhSZIiFEje21oPfGsD5jDQAgBiAEoSAHIAOhoiAFIAOhIBii
  4235. oZlEje21oPfGsD5jDQAgHCAFoiIYIB8gA6KhIiIgFCAIoiAcIAaiIh6gIiOiIB4gHyAEoqEiHiAU
  4236. IAeiIBigIhiioSIkmUSN7bWg98awPmMNACAcmiIlIBShIiYgIqIgHyAcoSIiIBiioUQAAAAAAADw
  4237. PyAkoyIkoiEYICIgI6IgJiAeoqEgJKIhHgJAAkAgHSAgZEEBcw0AIBMgGCAEoiAeIAOiRAAAAAAA
  4238. APA/oKAiBKIgJaMhHSAcIR8MAQsgEyAYIAaiIB4gBaJEAAAAAAAA8D+goCIEoiAfmqMhHQsgFyAE
  4239. oiAfoyETAkACQCAhICWZZEEBcw0AIBogGCAGoiAeIAWiRAAAAAAAAPA/oKAiBKIgFJqjIQcMAQsg
  4240. GiAYIAiiIB4gB6JEAAAAAAAA8D+goCIEoiAcoyEHICUhFAsgGCAdmiABoiATIAKioSIXIAeioiAd
  4241. IBsgBKIgFKMiFKIgHiATIAeaIAGiIBQgAqKhIhyioqCgIBMgB6KhIBggHSAcoqKhIB4gFyAUoqKh
  4242. mUSN7bWg98awPmMNACALIA2hIhsgECAOoSIaoiAWIA8gDaEiH6KhIiCZRI3ttaD3xrA+Yw0AIBEh
  4243. BCARIQIgESEGIBEhDiARIQEgESEDIBEhBSARIQggGyAVIBmgIhWiIBYgCSALoSANIA+hoCIZoqFE
  4244. AAAAAAAA8D8gIKMiFqIiDSAMIAqhIBogGaIgHyAVoqEgFqIiFiAMoqAiDCAJoqIgCyAJoSAWIAui
  4245. oCILIBIgDSAQoqAiEKIgFiAPIAmhIA0gD6KgIg8gCqKioKAgDyAMoqEgDSALIAqioqEgFiAQIAmi
  4246. oqGZRI3ttaD3xrA+Yw0BIBYgF6IgDSAcoqBEAAAAAAAA8D+gIQUgGCAWIBOiIA0gFKKgoCEDIB4g
  4247. FiAdoiANIAeioKAhASAMIBeiIBAgHKKgIAqgIQ4gGCAKoiAMIBOiIBAgFKKgoCEGIB4gCqIgDCAd
  4248. oiAQIAeioKAhAiALIBeiIA8gHKKgIAmgIQQgGCAJoiALIBOiIA8gFKKgoCERIB4gCaIgCyAdoiAP
  4249. IAeioKAhCAwBCyARIQQgESECIBEhBiARIQ4gESEBIBEhAyARIQUgESEICyAAKAIAIicgCLY4AgAg
  4250. JyAAQRRqKAIAIihBAnRqIBG2OAIAICcgKEEDdGogBLY4AgAgJyAAKAIQIgBBAnRqIAK2OAIAICcg
  4251. ACAoaiIpQQJ0aiAGtjgCACAnIAAgKEEBdGpBAnRqIA62OAIAICcgAEEDdGogAbY4AgAgJyAoIABB
  4252. AXRqQQJ0aiADtjgCACAnIClBA3RqIAW2OAIAC7oHAhZ/Cn0CQAJAIAAoAghBA0cNACAAKAIMQQNH
  4253. DQAgASgCCEECRw0AIAEoAgwiA0EESA0AIAIoAghBAkcNACACKAIMIANGDQELQbKTgIAAEICAgIAA
  4254. IAEoAgwhAwsgA0EBdCIEQQgQkoCAgAAhBSAEQQEQkoCAgAAhBkEIQQEQkoCAgAAhBwJAIANBAUgN
  4255. ACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiELIARBFGwgCWohDCAEQRhsIg0g
  4256. CWohDiAEQRxsIg8gCWohECACKAIQQQJ0IREgASgCEEECdCESIAhBA3QhCCAGKAIQIglBA3QhEyAJ
  4257. QQJ0IRQgAkEUaigCAEECdCEVIAFBFGooAgBBAnQhFiAEQQN0IRcgBEECdCEYIAYoAgAhCSAFKAIA
  4258. IQQgAigCACECIAEoAgAhAQNAIAIgEWoqAgAhGSABIBJqKgIAIRogAioCACEbIAQgASoCACIcOAIA
  4259. IAQgGGogGjgCACAEIBdqQYCAgPwDNgIAIAQgCmogHDgCACAEIAtqIBo4AgAgBCAMakGAgID8AzYC
  4260. ACAEIA1qIBsgHIwiHJQ4AgAgBCAOaiAZIByUOAIAIAQgD2ogGyAajCIalDgCACAEIBBqIBkgGpQ4
  4261. AgAgCSAbOAIAIAkgFGogGTgCACACIBVqIQIgASAWaiEBIAQgCGohBCAJIBNqIQkgA0F/aiIDDQAL
  4262. CyAHIAUgBkEDELCAgIAAGgJAAkAgBygCACIEKgIAIhkgBCAHKAIQIglBBHRqKgIAIhqUIAQgCUEC
  4263. dGoqAgAiGyAEIAlBFGxqKgIAIhyUIAQgCUEYbGoqAgAiHZSSIAQgCUEDdGoqAgAiHiAEIAlBDGxq
  4264. KgIAIh+UIAQgCUEcbGoqAgAiIJSSIBsgH5STIBkgHJQgIJSTIB4gGpQgHZSTIiEQg4CAgAANAEMA
  4265. AIA/ISIgIYu7RI3ttaD3xrA+Y0EBcw0BC0EAKgKAiICAACIZIRsgGSEeIBkhHyAZIRogGSEcIBkh
  4266. HSAZISAgGSEiCyAAKAIAIgQgGTgCACAEIABBFGooAgAiCUECdGogGzgCACAEIAlBA3RqIB44AgAg
  4267. BCAAKAIQIgJBAnRqIB84AgAgBCACIAlqIgFBAnRqIBo4AgAgBCACIAlBAXRqQQJ0aiAcOAIAIAQg
  4268. AkEDdGogHTgCACAEIAkgAkEBdGpBAnRqICA4AgAgBCABQQN0aiAiOAIAIAcQl4CAgAAaIAYQl4CA
  4269. gAAaIAUQl4CAgAAaIAALnwgKAX8BfQF/An0Bfwp9AX8BfQN/AX0CQAJAIAAoAghBA0cNACAAKAIM
  4270. QQNHDQAgASgCCEECRw0AIAEoAgxBBEcNACACKAIIQQJHDQAgAigCDEEERg0BC0HZk4CAABCAgICA
  4271. AAsgACABKAIAIgMqAgAiBCAEIAMgAUEUaigCACIFQQJ0aioCACIGkiADIAVBA3RqKgIAIgeSIAMg
  4272. BUEDbCIIQQJ0aioCACIJkkMAAIA+lCIKkyIEQwAAAEEgAyAIIAEoAhAiAWpBAnRqKgIAIgsgCyAD
  4273. IAFBAnRqKgIAIgwgAyAFIAFqQQJ0aioCACINkiADIAVBAXQgAWpBAnRqKgIAIg6SkkMAAIA+lCIP
  4274. kyILIAuUIAkgCpMiCSAJlCAOIA+TIg4gDpQgByAKkyIHIAeUIA0gD5MiDSANlCAGIAqTIgYgBpQg
  4275. BCAElCAMIA+TIgwgDJSSkpKSkpKSlZEiBJS7IAwgBJS7IAYgBJS7IA0gBJS7IAcgBJS7IA4gBJS7
  4276. IAkgBJS7IAsgBJS7IAIoAgAiAyoCACILIAsgAyACQRRqKAIAIgVBAnRqKgIAIhCSIAMgBUEDdGoq
  4277. AgAiDJIgAyAFQQNsIghBAnRqKgIAIg2SQwAAgD6UIgmTIgtDAAAAQSADIAggAigCECIBakECdGoq
  4278. AgAiDiAOIAMgAUECdGoqAgAiESADIAUgAWpBAnRqKgIAIhKSIAMgBUEBdCABakECdGoqAgAiBpKS
  4279. QwAAgD6UIg6TIgcgB5QgDSAJkyINIA2UIAYgDpMiBiAGlCAMIAmTIgwgDJQgEiAOkyISIBKUIBAg
  4280. CZMiECAQlCALIAuUIBEgDpMiESARlJKSkpKSkpKVkSILlLsgESALlLsgECALlLsgEiALlLsgDCAL
  4281. lLsgBiALlLsgDSALlLsgByALlLsQtYCAgAAgACgCACIDIABBFGooAgAiBUEBdCICIAAoAhAiAUEB
  4282. dCIIakECdGoqAgAhECADIAggBWpBAnRqIggqAgAhByADIAIgAWpBAnRqIgIqAgAhESADIAVBA3Rq
  4283. IhMqAgAhFCADIAUgAWoiFUECdGoiFioCACEGIAMgBUECdGoiBSoCACEMIAMgAUECdGoiFyoCACES
  4284. IAMgBCAJIAMgAUEDdGoiASoCACINlCADKgIAIhhDAACAPyALlSILlJKUOAIAIBcgBCAOIA2UIBIg
  4285. C5SSlDgCACABIAQgDZQ4AgAgBSAEIAkgB5QgDCALlJKUOAIAIBYgBCAOIAeUIAYgC5SSlDgCACAI
  4286. IAQgB5Q4AgAgEyAUIAQgCiAYlCAPIAyUkpSTIAuUIAkgECAEIAogDZQgDyAHlJKUkyIHlJI4AgAg
  4287. AiARIAQgCiASlCAPIAaUkpSTIAuUIA4gB5SSOAIAIAMgFUEDdGogBzgCACAAC5sCAQZ/AkACQCAA
  4288. KAIIQQNHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBBEgNACACKAIIQQJHDQAgAigCDCAD
  4289. Rg0BC0GAlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
  4290. ACEGQQNBAxCSgICAACEHQQNBAxCSgICAACEIIAQgASAGQQNBAxCSgICAACIDEMGAgIAAIAUgAiAD
  4291. IAcQwYCAgAAgAyAIIAQgBRC2gICAACIBIAYQp4CAgAAaIAAgByADEKeAgIAAGiADEJeAgIAAGiAB
  4292. EJeAgIAAGiAHEJeAgIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC/kFAhZ/Bn0CQAJA
  4293. IAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEoAgwiA0EDSA0AIAIoAghBAkcNACACKAIM
  4294. IANGDQELQaeUgIAAEICAgIAAIAEoAgwhAwsgA0EBdCIEQQYQkoCAgAAhBSAEQQEQkoCAgAAhBkEG
  4295. QQEQkoCAgAAhBwJAIANBAUgNACAFQRRqKAIAIgRBDGwgBSgCECIIQQJ0IglqIQogBEEEdCAJaiEL
  4296. IARBFGwgCWohDCACKAIQQQJ0IQ0gASgCEEECdCEOIAhBA3QhDyAGKAIQIglBA3QhECAJQQJ0IREg
  4297. AkEUaigCAEECdCESIAFBFGooAgBBAnQhEyAEQQN0IRQgBEECdCEVIAYoAgAhCSAFKAIAIQQgAigC
  4298. ACECIAEoAgAhAQNAIAIgDWooAgAhFiABIA5qKAIAIQggAigCACEXIAQgASgCACIYNgIAIAQgFWog
  4299. CDYCACAEIBRqQYCAgPwDNgIAIAQgCmogGDYCACAEIAtqIAg2AgAgBCAMakGAgID8AzYCACAJIBc2
  4300. AgAgCSARaiAWNgIAIAIgEmohAiABIBNqIQEgBCAPaiEEIAkgEGohCSADQX9qIgMNAAsLIAcgBSAG
  4301. QQMQsICAgAAaAkACQCAHKAIAIgQqAgAiGSAEIAcoAhAiCUECdGoqAgAiGpIgBCAJQQN0aioCACIb
  4302. kiAEIAlBDGxqKgIAIhySIAQgCUEEdGoqAgAiHZIgBCAJQRRsaioCACIekhCDgICAAA0AIBkgHZQg
  4303. GiAclJOLu0SN7bWg98awPmNBAXMNAQtBACoCgIiAgAAiGSEaIBkhGyAZIRwgGSEdIBkhHgsgACgC
  4304. ACIEIBk4AgAgBCAAQRRqKAIAIglBAnRqIBo4AgAgBCAJQQN0aiAbOAIAIAQgACgCECICQQJ0aiAc
  4305. OAIAIAQgAiAJakECdGogHTgCACAEIAIgCUEBdGpBAnRqIB44AgAgBxCXgICAABogBhCXgICAABog
  4306. BRCXgICAABogAAvNBQMBfAJ/FXwCQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgASgCCEECRw0AIAEo
  4307. AgxBA0cNACACKAIIQQJHDQAgAigCDEEDRg0BC0HKlICAABCAgICAAAtBACoCgIiAgAC7IQMCQAJA
  4308. IAEoAgAiBCABKAIQIgVBAnRqKgIAuyIGIAQgAUEUaigCACIBIAVqQQJ0aioCALsiB6EiCCAEIAFB
  4309. A3RqKgIAuyIJoiAHIAQgAUEBdCAFakECdGoqAgC7IgqhIgsgBCoCALsiDKIgCiAGoSINIAQgAUEC
  4310. dGoqAgC7Ig6ioKAiD5lEje21oPfGsD5jDQAgAigCACIEIAIoAhAiBUECdGoqAgC7IhAgBCACQRRq
  4311. KAIAIgEgBWpBAnRqKgIAuyIRoSAEIAFBA3RqKgIAuyISoiARIAQgAUEBdCAFakECdGoqAgC7IhOh
  4312. IAQqAgC7IhSiIBMgEKEgBCABQQJ0aioCALsiFaKgoJlEje21oPfGsD5jDQBEAAAAAAAA8D8gD6Mi
  4313. FiALIBSiIA0gFaKgIAggEqKgoiIPIBYgCSAOoSIXIBCiIAwgCaEiGCARoqAgDiAMoSIZIBOioKIi
  4314. GqIgFiAXIBSiIBggFaKgIBkgEqKgoiIXIBYgCyAQoiANIBGioCAIIBOioKIiCKKhmUSN7bWg98aw
  4315. PmNBAXNFDQAgFiAOIAqiIAcgCaKhIgMgEKIgBiAJoiAMIAqioSIKIBGioCAMIAeiIAYgDqKhIgcg
  4316. E6KgoiEGIBYgAyAUoiAKIBWioCAHIBKioKIhAwwBCyADIQ8gAyEXIAMhCCADIRogAyEGCyAAKAIA
  4317. IgQgD7Y4AgAgBCAAQRRqKAIAIgFBAnRqIBe2OAIAIAQgAUEDdGogA7Y4AgAgBCAAKAIQIgVBAnRq
  4318. IAi2OAIAIAQgBSABakECdGogGrY4AgAgBCAFIAFBAXRqQQJ0aiAGtjgCACAAC4EDAQl/AkACQCAA
  4319. KAIIQQJHDQAgACgCDEEDRw0AIAEoAghBAkcNACABKAIMIgNBA0gNACACKAIIQQJHDQAgAigCDCAD
  4320. Rg0BC0HtlICAABCAgICAACABKAIMIQMLQQIgAxCSgICAACEEQQIgAxCSgICAACEFQQNBAxCSgICA
  4321. ACEGQQNBAxCSgICAACEHQQNBAxCUgICAACEIEJaAgIAAIAhBAEEBQQBBAhCagICAACEJQQNBAxCS
  4322. gICAACEDQQNBAxCSgICAACEKEJaAgIAAIApBAEEBQQBBAhCagICAACELIAQgASAGIAMQwYCAgAAg
  4323. BSACIAMgBxDBgICAACAJIAQgBRC5gICAACEBIAMgCCAGEKeAgIAAGiAKIAcgAxCngICAABogACAL
  4324. EJWAgIAAGiALEJeAgIAAGiAKEJeAgIAAGiADEJeAgIAAGiABEJeAgIAAGiAIEJeAgIAAGiAHEJeA
  4325. gIAAGiAGEJeAgIAAGiAFEJeAgIAAGiAEEJeAgIAAGiAAC5kUAhx/DX0jgICAgABBEGsiBySAgICA
  4326. AAJAAkAgACgCCEEDRw0AIAAoAgxBA0cNACACKAIIQQJHDQAgAigCDCIIQQRIDQAgAygCCEECRw0A
  4327. IAMoAgwgCEcNAAJAIAFFDQAgASgCCEEBRw0BIAEoAgwgCEcNAQsgBEEBSA0AIAVBAUgNACAGQwAA
  4328. AABgDQELQZCVgIAAEICAgIAAIAIoAgwhCAsCQCABRQ0AIAFDAAAAABCbgICAABoLIAhBAnQiCUGy
  4329. lYCAABCFgICAACEKIAlB0ZWAgAAQhYCAgAAgCBCNgICAACILIAhBBBCOgICAACAIIARBAnQiDCAI
  4330. b2sgDGoiDUECdEHwlYCAABCFgICAACEOAkAgDUEBSA0AQQAhDyAIQQFIIRAgDiERA0ACQCAQDQBB
  4331. ACEMIBEhEgNAIBIgDDYCACASQQRqIRIgCCAMQQFqIgxHDQALCyAOIA9BAnRqIAhBBBCOgICAACAR
  4332. IAlqIREgDyAIaiIPIA1IDQALC0ECQQQQkoCAgAAhE0ECQQQQkoCAgAAhFCAEQQN0QY+WgIAAEIWA
  4333. gIAAIRUgBCEWAkAgBEEBSA0AIBUhFyAOIQkgBCEYIAQhFgNAIAcgCSgCACIZNgIAIAcgCUEEaigC
  4334. ACIaNgIEIAcgCUEIaigCACIbNgIIIAcgCUEMaigCADYCDCAUKAIUIQ0gEygCFCEQIAMoAhAhHCAU
  4335. KAIQIR0gFCgCACEMIAMoAgAhEiADKAIUIR4gAigCECEfIBMoAhAhICATKAIAIg8gAigCACIRIBkg
  4336. AigCFCIhbCIiQQJ0aigCADYCACAPICBBAnRqIBEgHyAiakECdGooAgA2AgAgDCASIB4gGWwiGUEC
  4337. dGooAgA2AgAgDCAdQQJ0aiASIBwgGWpBAnRqKAIANgIAIA8gEEECdGogESAhIBpsIhlBAnRqKAIA
  4338. NgIAIA8gICAQakECdGogESAfIBlqQQJ0aigCADYCACAMIA1BAnRqIBIgHiAabCIZQQJ0aigCADYC
  4339. ACAMIB0gDWpBAnRqIBIgHCAZakECdGooAgA2AgAgDyAQQQN0aiARICEgG2wiGUECdGooAgA2AgAg
  4340. DyAgIBBBAXRqQQJ0aiARIB8gGWpBAnRqKAIANgIAIAwgDUEDdGogEiAeIBtsIhlBAnRqKAIANgIA
  4341. IAwgHSANQQF0akECdGogEiAcIBlqQQJ0aigCADYCACAPIBBBA2wiEEECdGogESAhIAcoAgwiGWwi
  4342. IUECdGooAgA2AgAgDyAgIBBqQQJ0aiARIB8gIWpBAnRqKAIANgIAIAwgDUEDbCIPQQJ0aiASIB4g
  4343. GWwiEUECdGooAgA2AgAgDCAdIA9qQQJ0aiASIBwgEWpBAnRqKAIANgIAQQNBAxCSgICAACEMIBdB
  4344. BGoiEkEANgIAIBcgDDYCACAMIBMgFBC0gICAABoCQCAXKAIAKAIAKgIAEIOAgIAARQ0AIBJBfzYC
  4345. ACAWQX9qIRYLIBdBCGohFyAJQRBqIQkgGEF/aiIYDQALCwJAAkAgFg0AIABBACoCgIiAgAAQm4CA
  4346. gAAaDAELIAYgBpQhI0EAIRcgFSAEQQhBhICAgABBABCLgICAABoCQAJAIAhBAUgNAEEAIRwDQCAc
  4347. IhJBAWoiHCAFbyEMAkAgFkECSA0AIAwNACAVIBZBCEGEgICAAEEAEIuAgIAAGiAWQQF2IRYLAkAg
  4348. FkEBRw0AQQAhFwwDCwJAIBZBAUgNACADKAIAIgwgAygCFCALIBJBAnRqKAIAIhJsIg9BAnRqKgIA
  4349. ISQgAigCACIRIAIoAhQgEmwiEkECdGoqAgAhBiAMIA8gAygCEGpBAnRqKgIAISUgESASIAIoAhBq
  4350. QQJ0aioCACEmIBUhESAWIQkDQCARQQRqIgwgDCgCACARKAIAIg8oAgAiDCAPQRRqKAIAIhJBAXQi
  4351. DSAPKAIQIg9qQQJ0aioCACAGIAwgD0ECdGoqAgCUICYgDCASIA9qQQJ0aioCAJSSkiAMIA0gD0EB
  4352. dCIQakECdGoqAgAgBiAMIA9BA3RqKgIAlCAmIAwgECASakECdGoqAgCUkpIiJ5UgJZMiKCAolCAM
  4353. IBJBA3RqKgIAIAYgDCoCAJQgJiAMIBJBAnRqKgIAlJKSICeVICSTIicgJ5SSICNfajYCACARQQhq
  4354. IREgCUF/aiIJDQALCyAcIAhHDQALCyAWQQJIDQAgFUEMaiEMQQAhF0EBIRIDQCASIBcgDCgCACAV
  4355. IBdBA3RqKAIEShshFyAMQQhqIQwgFiASQQFqIhJHDQALCwJAIAhBAUgNACAVIBdBA3RqKAIAIg8o
  4356. AgAiDCAPKAIQIhJBA3RqKgIAISQgDCASQQJ0aioCACElIAwgD0EUaigCACIPQQN0aioCACEpIAwg
  4357. D0ECdGoqAgAhKiAMIBJBAXQiESAPakECdGoqAgAhKyAMIA8gEmpBAnRqKgIAISwgDCAPQQF0Ig8g
  4358. EWpBAnRqKgIAIS0gDCAPIBJqQQJ0aioCACEuIAwqAgAhLyADKAIAIQ8gAigCACERQQAhEkEAIQwD
  4359. QAJAICkgLyARIAIoAhQgDGwiCUECdGoqAgAiBpQgKiARIAkgAigCEGpBAnRqKgIAIiaUkpIgLSAk
  4360. IAaUICsgJpSSkiInlSAPIAMoAhQgDGwiCUECdGoqAgCTIiggKJQgLiAlIAaUICwgJpSSkiAnlSAP
  4361. IAkgAygCEGpBAnRqKgIAkyIGIAaUkiAjX0EBcw0AIAogEkECdGogDDYCACASQQFqIRIgAUUNACAB
  4362. KAIAIAEoAhQgDGxBAnRqQYCAgPwDNgIACyAIIAxBAWoiDEcNAAsgEkEDTA0AQQIgEhCSgICAACEW
  4363. QQIgEhCSgICAACIZKAIQQQJ0IRcgFkEUaigCAEECdCEcIBYoAhBBAnQhHSAZQRRqKAIAQQJ0IR4g
  4364. GSgCACEMIANBFGooAgAhHyAWKAIAIQ8gAkEUaigCACEgIAMoAhAhISADKAIAIQggAigCECEDIAIo
  4365. AgAhCSAKIREDQCAPIAkgICARKAIAIg1sIhBBAnRqKAIANgIAIA8gHWogCSADIBBqQQJ0aigCADYC
  4366. ACAMIAggHyANbCINQQJ0aigCADYCACAMIBdqIAggISANakECdGooAgA2AgAgDCAeaiEMIA8gHGoh
  4367. DyARQQRqIREgEkF/aiISDQALIAAgFiAZELiAgIAAGiAZEJeAgIAAGiAWEJeAgIAAGgwBCyAAQQAq
  4368. AoCIgIAAEJuAgIAAGgsCQCAEQQFIDQAgBEEBaiESIARBA3QgFWpBeGohDANAIAwoAgAQl4CAgAAa
  4369. IAxBeGohDCASQX9qIhJBAUoNAAsLIBVBr5aAgAAQh4CAgAAaIBQQl4CAgAAaIBMQl4CAgAAaIA5B
  4370. zZaAgAAQh4CAgAAaIAtB65aAgAAQh4CAgAAaIApBiZeAgAAQh4CAgAAaIAdBEGokgICAgAAgAAsN
  4371. ACABKAIEIAAoAgRrC8gRAhh/CX0CQAJAIAAoAghBAkcNACAAKAIMQQNHDQAgAigCCEECRw0AIAIo
  4372. AgwiB0EDSA0AIAMoAghBAkcNACADKAIMIAdHDQACQCABRQ0AIAEoAghBAUcNASABKAIMIAdHDQEL
  4373. IARBAUgNACAFQQFIDQAgBkMAAAAAYA0BC0Gnl4CAABCAgICAACACKAIMIQcLAkAgAUUNACABQwAA
  4374. AAAQm4CAgAAaCyAHQQJ0IghBypeAgAAQhYCAgAAhCSAIQeqXgIAAEIWAgIAAIAcQjYCAgAAiCiAH
  4375. QQQQjoCAgAAgByAEQQNsIgsgB29rIAtqIgxBAnRBipiAgAAQhYCAgAAhDQJAIAxBAUgNAEEAIQ4g
  4376. B0EBSCEPIA0hEANAAkAgDw0AQQAhCyAQIREDQCARIAs2AgAgEUEEaiERIAcgC0EBaiILRw0ACwsg
  4377. DSAOQQJ0aiAHQQQQjoCAgAAgECAIaiEQIA4gB2oiDiAMSA0ACwtBAkEDEJKAgIAAIQ9BAkEDEJKA
  4378. gIAAIRIgBEEDdEGqmICAABCFgICAACETIAQhFAJAIARBAUgNACATIQggDSEMIAQhFSAEIRQDQCAP
  4379. KAIAIgsgAigCACIRIAIoAhQiFiAMKAIAIhdsIg5BAnRqKAIANgIAIAsgDygCECIYQQJ0aiARIAIo
  4380. AhAiGSAOakECdGooAgA2AgAgEigCACIOIAMoAgAiECAXIAMoAhQiGmwiF0ECdGooAgA2AgAgDiAS
  4381. KAIQIhtBAnRqIBAgAygCECIcIBdqQQJ0aigCADYCACALIA8oAhQiF0ECdGogESAWIAxBBGooAgAi
  4382. HWwiHkECdGooAgA2AgAgCyAYIBdqQQJ0aiARIBkgHmpBAnRqKAIANgIAIA4gEigCFCIeQQJ0aiAQ
  4383. IBogHWwiHUECdGooAgA2AgAgDiAbIB5qQQJ0aiAQIBwgHWpBAnRqKAIANgIAIAsgF0EDdGogESAW
  4384. IAxBCGooAgAiHWwiFkECdGooAgA2AgAgCyAYIBdBAXRqQQJ0aiARIBkgFmpBAnRqKAIANgIAIA4g
  4385. HkEDdGogECAaIB1sIgtBAnRqKAIANgIAIA4gGyAeQQF0akECdGogECAcIAtqQQJ0aigCADYCAEEC
  4386. QQMQkoCAgAAhCyAIQQRqIhFBADYCACAIIAs2AgAgCyAPIBIQuoCAgAAaAkAgCCgCACgCACoCABCD
  4387. gICAAEUNACARQX82AgAgFEF/aiEUCyAIQQhqIQggDEEMaiEMIBVBf2oiFQ0ACwsCQAJAIBQNACAA
  4388. QQAqAoCIgIAAEJuAgIAAGgwBCyAGIAaUIR9BACEMIBMgBEEIQYSAgIAAQQAQi4CAgAAaAkACQCAH
  4389. QQFIDQBBACEXA0AgFyIRQQFqIhcgBW8hCwJAIBRBAkgNACALDQAgEyAUQQhBhICAgABBABCLgICA
  4390. ABogFEEBdiEUCwJAIBRBAUcNAEEAIQwMAwsCQCAUQQFIDQAgAygCACILIAMoAhQgCiARQQJ0aigC
  4391. ACIRbCIOQQJ0aioCACEgIAIoAgAiECACKAIUIBFsIhFBAnRqKgIAIQYgCyAOIAMoAhBqQQJ0aioC
  4392. ACEhIBAgESACKAIQakECdGoqAgAhIiATIREgFCEIA0AgEUEEaiILIAsoAgAgESgCACIQKAIAIgsg
  4393. EEEUaigCACIOQQN0aioCACAGIAsqAgCUICIgCyAOQQJ0aioCAJSSkiAgkyIjICOUIAsgDkEBdCAQ
  4394. KAIQIhBqQQJ0aioCACAGIAsgEEECdGoqAgCUICIgCyAOIBBqQQJ0aioCAJSSkiAhkyIjICOUkiAf
  4395. X2o2AgAgEUEIaiERIAhBf2oiCA0ACwsgFyAHRw0ACwsgFEECSA0AIBNBDGohC0EAIQxBASERA0Ag
  4396. ESAMIAsoAgAgEyAMQQN0aigCBEobIQwgC0EIaiELIBQgEUEBaiIRRw0ACwsCQCAHQQFIDQAgEyAM
  4397. QQN0aigCACIRKAIAIgsgESgCECIOQQJ0aioCACEgIAsgEUEUaigCACIRQQN0aioCACEhIAsgEUEC
  4398. dGoqAgAhJCALIBEgDmpBAnRqKgIAISUgCyARQQF0IA5qQQJ0aioCACEmIAsqAgAhJyADKAIAIQ4g
  4399. AigCACEQQQAhEUEAIQsDQAJAICEgJyAQIAIoAhQgC2wiCEECdGoqAgAiBpQgJCAQIAggAigCEGpB
  4400. AnRqKgIAIiKUkpIgDiADKAIUIAtsIghBAnRqKgIAkyIjICOUICYgICAGlCAlICKUkpIgDiAIIAMo
  4401. AhBqQQJ0aioCAJMiBiAGlJIgH19BAXMNACAJIBFBAnRqIAs2AgAgEUEBaiERIAFFDQAgASgCACAB
  4402. KAIUIAtsQQJ0akGAgID8AzYCAAsgByALQQFqIgtHDQALIBFBAkwNAEECIBEQkoCAgAAhG0ECIBEQ
  4403. koCAgAAiHCgCEEECdCEXIBtBFGooAgBBAnQhHiAbKAIQQQJ0IRQgHEEUaigCAEECdCEWIBwoAgAh
  4404. CyADQRRqKAIAIRggGygCACEOIAJBFGooAgAhGSADKAIQIRogAygCACEQIAIoAhAhAyACKAIAIQgg
  4405. CSEHA0AgDiAIIBkgBygCACIMbCICQQJ0aigCADYCACAOIBRqIAggAyACakECdGooAgA2AgAgCyAQ
  4406. IBggDGwiDEECdGooAgA2AgAgCyAXaiAQIBogDGpBAnRqKAIANgIAIAsgFmohCyAOIB5qIQ4gB0EE
  4407. aiEHIBFBf2oiEQ0ACyAAIBsgHBC7gICAABogHBCXgICAABogGxCXgICAABoMAQsgAEEAKgKAiICA
  4408. ABCbgICAABoLAkAgBEEBSA0AIARBAWohESAEQQN0IBNqQXhqIQsDQCALKAIAEJeAgIAAGiALQXhq
  4409. IQsgEUF/aiIRQQFKDQALCyATQcqYgIAAEIeAgIAAGiASEJeAgIAAGiAPEJeAgIAAGiANQeiYgIAA
  4410. EIeAgIAAGiAKQYaZgIAAEIeAgIAAGiAJQaSZgIAAEIeAgIAAGiAAC+IDCAN/An0BfwN9AX8EfQF/
  4411. A30CQAJAIAAoAghBAkcNACABKAIIQQJHDQAgACgCDCIDIAEoAgxHDQAgAigCCEEDRw0AIAIoAgxB
  4412. A0YNAQtBwpmAgAAQgICAgAAgASgCDCEDCwJAIAIoAgAiBCACKAIQIgVBA3RqKgIAIgYgBCACQRRq
  4413. KAIAIgJBAnRqKgIAIgcgBCACQQF0IgggBWpBAnRqKgIAIgmUIAQgAkEDdGoqAgAiCiAEIAIgBWpB
  4414. AnRqKgIAIguUk5QgBCAFQQF0IgwgAmpBAnRqKgIAIg0gCiAEIAVBAnRqKgIAIg6UIAQqAgAiDyAJ
  4415. lJOUkiAPIAuUIAcgDpSTIAQgCCAMakECdGoqAgAiEJSSi7tEje21oPfGsD5jDQACQCADQQFIDQAg
  4416. ACgCEEECdCECIAEoAhBBAnQhCCAAQRRqKAIAQQJ0IQwgAUEUaigCAEECdCERIAAoAgAhBCABKAIA
  4417. IQUDQCAEIAogDyAFKgIAIhKUIAcgBSAIaioCACITlJKSIBAgBiASlCANIBOUkpIiFJU4AgAgBCAC
  4418. aiAJIA4gEpQgCyATlJKSIBSVOAIAIAQgDGohBCAFIBFqIQUgA0F/aiIDDQALCyAADwsgAEEAKgKA
  4419. iICAABCbgICAAAvVAgQDfwZ9An8CfQJAAkAgACgCCEECRw0AIAEoAghBAkcNACAAKAIMIgMgASgC
  4420. DEcNACACKAIIQQJHDQAgAigCDEEDRg0BC0HnmYCAABCAgICAACABKAIMIQMLAkAgA0EBSA0AIAIo
  4421. AgAiBCACKAIQIgVBAnRqKgIAIQYgBCACQRRqKAIAIgJBA3RqKgIAIQcgBCACQQJ0aioCACEIIAQg
  4422. AiAFakECdGoqAgAhCSAEIAJBAXQgBWpBAnRqKgIAIQogBCoCACELIAAoAhBBAnQhAiABKAIQQQJ0
  4423. IQUgAEEUaigCAEECdCEMIAFBFGooAgBBAnQhDSAAKAIAIQQgASgCACEBA0AgBCAHIAsgASoCACIO
  4424. lCAIIAEgBWoqAgAiD5SSkjgCACAEIAJqIAogBiAOlCAJIA+UkpI4AgAgBCAMaiEEIAEgDWohASAD
  4425. QX9qIgMNAAsLIAAL+AcHAX8BfQF/A30DfwF9An8CQAJAAkAgASgCCEECRw0AIAEoAgwiBEEBSA0A
  4426. IAAoAghBAkcNACAAKAIMIARHDQAgAigCCEEDRw0AIAIoAgxBA0cNACADKAIIQQNHDQAgAygCDEED
  4427. Rw0AIASyIQUMAQtBjJqAgAAQgICAgABBACEGIAEoAgwiBLIhBSAEQQBKDQBDAAAAACEHQwAAAAAg
  4428. BZUiCCEJDAELIAEoAhBBAnQhCiABQRRqKAIAQQJ0IQsgASgCACEGQwAAAAAhByAEIQxDAAAAACEN
  4429. A0AgByAGKgIAkiEHIA0gBiAKaioCAJIhDSAGIAtqIQYgDEF/aiIMDQALIA0gBZUhCCAHIAWVIQkg
  4430. ASgCEEECdCEKIAFBFGooAgBBAnQhCyABKAIAIQZDAAAAACEHIAQhDANAIAcgBioCACAJkyINIA2U
  4431. IAYgCmoqAgAgCJMiDSANlJKSIQcgBiALaiEGIAxBf2oiDA0AC0EBIQYLAkAgByAFlZEiB4u7RI3t
  4432. taD3xrA+Y0UNACACEJyAgIAAGiADEJyAgIAAGiADKAIAIgZBgICA/AM2AgAgAigCACIMQYCAgPwD
  4433. NgIAIAYgA0EUaigCACADKAIQaiIKQQJ0akGAgID8AzYCACAMIAJBFGooAgAgAigCEGoiC0ECdGpB
  4434. gICA/AM2AgAgBiAKQQN0akGAgID8AzYCACAMIAtBA3RqQYCAgPwDNgIAIAAgARCVgICAABoPCyAH
  4435. Q/MEtT+VIQ1D8wS1PyAHlSEHAkAgBkUNACAAKAIQQQJ0IQogASgCEEECdCELIABBFGooAgBBAnQh
  4436. DiABQRRqKAIAQQJ0IQ8gACgCACEGIAEoAgAhDANAIAYgByAMKgIAIAmTlDgCACAGIApqIAcgDCAL
  4437. aioCACAIk5Q4AgAgBiAOaiEGIAwgD2ohDCAEQX9qIgQNAAsLIAIoAgAiBiAHOAIAIAYgAkEUaigC
  4438. ACIMQQJ0akEANgIAIAYgDEEDdGogCSAHjCIFlDgCACAGIAIoAhAiCkECdGpBADYCACAGIAogDGoi
  4439. C0ECdGogBzgCACAGIAogDEEBdGpBAnRqIAggBZQ4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akEC
  4440. dGpBADYCACAGIAtBA3RqQYCAgPwDNgIAIAMoAgAiBiANOAIAIAYgA0EUaigCACIMQQJ0akEANgIA
  4441. IAYgDEEDdGogCTgCACAGIAMoAhAiCkECdGpBADYCACAGIAogDGoiC0ECdGogDTgCACAGIAogDEEB
  4442. dGpBAnRqIAg4AgAgBiAKQQN0akEANgIAIAYgDCAKQQF0akECdGpBADYCACAGIAtBA3RqQYCAgPwD
  4443. NgIACwv2EgMAQYAIC7ISAAD4f091dCBvZiBtZW1vcnkhAERvdWJsZSBmcmVlAEFzc2VydGlvbiBm
  4444. YWlsZWQgYXQgbWF0MzIuYzo2MQBPdXQgb2YgbWVtb3J5IGF0IG1hdDMyLmM6NjMAQXNzZXJ0aW9u
  4445. IGZhaWxlZCBhdCBtYXQzMi5jOjg0AE91dCBvZiBtZW1vcnkgYXQgbWF0MzIuYzo4NgBPdXQgb2Yg
  4446. bWVtb3J5IGF0IG1hdDMyLmM6ODkAT3V0IG9mIG1lbW9yeSBhdCBtYXQzMi5jOjEzNgAAAGANAAAB
  4447. AAAAAAAAAAAAAAABAAAAAQAAAAIAAABEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjE0OQBBc3NlcnRp
  4448. b24gZmFpbGVkIGF0IG1hdDMyLmM6MTg0AEFzc2VydGlvbiBmYWlsZWQgYXQgbWF0MzIuYzoxODgA
  4449. QXNzZXJ0aW9uIGZhaWxlZCBhdCBtYXQzMi5jOjI3NQBEb3VibGUgZnJlZSBhdCBtYXQzMi5jOjI5
  4450. AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6MzYAQXNzZXJ0aW9uIGZhaWxlZCBh
  4451. dCBhcml0aG1ldGljMzIuYzo1OABBc3NlcnRpb24gZmFpbGVkIGF0IGFyaXRobWV0aWMzMi5jOjgw
  4452. AEFzc2VydGlvbiBmYWlsZWQgYXQgYXJpdGhtZXRpYzMyLmM6OTkAQXNzZXJ0aW9uIGZhaWxlZCBh
  4453. dCBhcml0aG1ldGljMzIuYzoxMjEAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzox
  4454. NDMAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoxNjgAQXNzZXJ0aW9uIGZhaWxl
  4455. ZCBhdCBhcml0aG1ldGljMzIuYzoxODkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIu
  4456. YzoyMTgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzoyNzEAQXNzZXJ0aW9uIGZh
  4457. aWxlZCBhdCBhcml0aG1ldGljMzIuYzozMjIAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGlj
  4458. MzIuYzozNTYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzozNzgAQXNzZXJ0aW9u
  4459. IGZhaWxlZCBhdCBhcml0aG1ldGljMzIuYzo0MjAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBhcml0aG1l
  4460. dGljMzIuYzo0MzYAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6MjYxAEFzc2VydGlvbiBmYWls
  4461. ZWQgYXQgcXIzMi5jOjI2NQBBc3NlcnRpb24gZmFpbGVkIGF0IHFyMzIuYzoyODYAQXNzZXJ0aW9u
  4462. IGZhaWxlZCBhdCBxcjMyLmM6MjkwAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjMyMQBBc3Nl
  4463. cnRpb24gZmFpbGVkIGF0IHFyMzIuYzozMjUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6Mzc5
  4464. AE91dCBvZiBtZW1vcnkgYXQgcXIzMi5jOjM2AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjY5
  4465. AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjczAEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5j
  4466. OjE4NABEb3VibGUgZnJlZSBhdCBxcjMyLmM6NTUAQXNzZXJ0aW9uIGZhaWxlZCBhdCBxcjMyLmM6
  4467. MTQ4AEFzc2VydGlvbiBmYWlsZWQgYXQgcXIzMi5jOjIyNABBc3NlcnRpb24gZmFpbGVkIGF0IHFy
  4468. MzIuYzoyMjgAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyNDQAQXNzZXJ0aW9u
  4469. IGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzoyODAAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3Jh
  4470. cGh5MzIuYzozNTkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBob21vZ3JhcGh5MzIuYzo0NDQAQXNzZXJ0
  4471. aW9uIGZhaWxlZCBhdCBhZmZpbmUzMi5jOjExOQBBc3NlcnRpb24gZmFpbGVkIGF0IGFmZmluZTMy
  4472. LmM6MTk2AEFzc2VydGlvbiBmYWlsZWQgYXQgYWZmaW5lMzIuYzoyMjkAQXNzZXJ0aW9uIGZhaWxl
  4473. ZCBhdCByYW5zYWMzMi5jOjcxAE91dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzo4NABPdXQgb2Yg
  4474. bWVtb3J5IGF0IHJhbnNhYzMyLmM6ODgAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjkzAE91
  4475. dCBvZiBtZW1vcnkgYXQgcmFuc2FjMzIuYzoxMDcARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoy
  4476. MzYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDMARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIu
  4477. YzoyNDYARG91YmxlIGZyZWUgYXQgcmFuc2FjMzIuYzoyNDkAQXNzZXJ0aW9uIGZhaWxlZCBhdCBy
  4478. YW5zYWMzMi5jOjI3NQBPdXQgb2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6Mjg4AE91dCBvZiBtZW1v
  4479. cnkgYXQgcmFuc2FjMzIuYzoyOTIAT3V0IG9mIG1lbW9yeSBhdCByYW5zYWMzMi5jOjI5NwBPdXQg
  4480. b2YgbWVtb3J5IGF0IHJhbnNhYzMyLmM6MzExAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDM2
  4481. AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQzAERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6
  4482. NDQ2AERvdWJsZSBmcmVlIGF0IHJhbnNhYzMyLmM6NDQ5AEFzc2VydGlvbiBmYWlsZWQgYXQgdHJh
  4483. bnNmb3JtMzIuYzozOQBBc3NlcnRpb24gZmFpbGVkIGF0IHRyYW5zZm9ybTMyLmM6NzcAQXNzZXJ0
  4484. aW9uIGZhaWxlZCBhdCB0cmFuc2Zvcm0zMi5jOjExNAAAQbQaCwwIAAAAUA0AAAEAAAAAQcAaCyQA
  4485. AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
  4486. `
  4487. /***/ })
  4488. /******/ });
  4489. /************************************************************************/
  4490. /******/ // The module cache
  4491. /******/ var __webpack_module_cache__ = {};
  4492. /******/
  4493. /******/ // The require function
  4494. /******/ function __nested_webpack_require_314174__(moduleId) {
  4495. /******/ // Check if module is in cache
  4496. /******/ var cachedModule = __webpack_module_cache__[moduleId];
  4497. /******/ if (cachedModule !== undefined) {
  4498. /******/ return cachedModule.exports;
  4499. /******/ }
  4500. /******/ // Create a new module (and put it into the cache)
  4501. /******/ var module = __webpack_module_cache__[moduleId] = {
  4502. /******/ // no module.id needed
  4503. /******/ // no module.loaded needed
  4504. /******/ exports: {}
  4505. /******/ };
  4506. /******/
  4507. /******/ // Execute the module function
  4508. /******/ __webpack_modules__[moduleId](module, module.exports, __nested_webpack_require_314174__);
  4509. /******/
  4510. /******/ // Return the exports of the module
  4511. /******/ return module.exports;
  4512. /******/ }
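/******/ // Illustrative note (not part of the generated runtime): the cache above makes
/******/ // repeated requires cheap. For instance, module 9037 (./src/utils/utils.js, used
/******/ // further down) is executed only on the first call:
/******/ //
/******/ //   const a = __nested_webpack_require_314174__(9037); // runs the module, caches its exports
/******/ //   const b = __nested_webpack_require_314174__(9037); // cache hit: returns the same exports object
/******/ //   // a === b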
  4513. /******/
  4514. /************************************************************************/
  4515. /******/ /* webpack/runtime/define property getters */
  4516. /******/ (() => {
  4517. /******/ // define getter functions for harmony exports
  4518. /******/ __nested_webpack_require_314174__.d = (exports, definition) => {
  4519. /******/ for(var key in definition) {
  4520. /******/ if(__nested_webpack_require_314174__.o(definition, key) && !__nested_webpack_require_314174__.o(exports, key)) {
  4521. /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
  4522. /******/ }
  4523. /******/ }
  4524. /******/ };
  4525. /******/ })();
  4526. /******/
  4527. /******/ /* webpack/runtime/hasOwnProperty shorthand */
  4528. /******/ (() => {
  4529. /******/ __nested_webpack_require_314174__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
  4530. /******/ })();
  4531. /******/
  4532. /******/ /* webpack/runtime/make namespace object */
  4533. /******/ (() => {
  4534. /******/ // define __esModule on exports
  4535. /******/ __nested_webpack_require_314174__.r = (exports) => {
  4536. /******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
  4537. /******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
  4538. /******/ }
  4539. /******/ Object.defineProperty(exports, '__esModule', { value: true });
  4540. /******/ };
  4541. /******/ })();
  4542. /******/
  4543. /************************************************************************/
  4544. var __nested_webpack_exports__ = {};
4545. // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
  4546. (() => {
  4547. "use strict";
  4548. // EXPORTS
  4549. __nested_webpack_require_314174__.d(__nested_webpack_exports__, {
  4550. "default": () => (/* binding */ Speedy)
  4551. });
  4552. // EXTERNAL MODULE: ./src/gpu/speedy-gl.js
  4553. var speedy_gl = __nested_webpack_require_314174__(1001);
  4554. // EXTERNAL MODULE: ./src/utils/utils.js
  4555. var utils = __nested_webpack_require_314174__(9037);
  4556. // EXTERNAL MODULE: ./src/core/settings.js
  4557. var settings = __nested_webpack_require_314174__(2199);
  4558. // EXTERNAL MODULE: ./src/core/speedy-promise.js
  4559. var speedy_promise = __nested_webpack_require_314174__(9192);
  4560. ;// CONCATENATED MODULE: ./src/utils/asap.js
  4561. /*
  4562. * speedy-vision.js
  4563. * GPU-accelerated Computer Vision for JavaScript
  4564. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4565. *
  4566. * Licensed under the Apache License, Version 2.0 (the "License");
  4567. * you may not use this file except in compliance with the License.
  4568. * You may obtain a copy of the License at
  4569. *
  4570. * http://www.apache.org/licenses/LICENSE-2.0
  4571. *
  4572. * Unless required by applicable law or agreed to in writing, software
  4573. * distributed under the License is distributed on an "AS IS" BASIS,
  4574. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4575. * See the License for the specific language governing permissions and
  4576. * limitations under the License.
  4577. *
  4578. * asap.js
  4579. * Schedule a function to run "as soon as possible"
  4580. */
  4581. /** callbacks */
  4582. const callbacks = /** @type {Function[]} */[];
  4583. /** arguments to be passed to the callbacks */
  4584. const args = /** @type {any[][]} */[];
  4585. /** asap key */
  4586. const ASAP_KEY = 'asap' + Math.random().toString(36).substr(1);
  4587. // Register an event listener
  4588. window.addEventListener('message', event => {
  4589. if (event.source !== window || event.data !== ASAP_KEY) return;
  4590. event.stopPropagation();
  4591. if (callbacks.length == 0) return;
  4592. const fn = callbacks.pop();
  4593. const argArray = args.pop();
  4594. fn.apply(undefined, argArray);
  4595. }, true);
  4596. /**
  4597. * Schedule a function to run "as soon as possible"
  4598. * @param {Function} fn callback
  4599. * @param {any[]} params optional parameters
  4600. */
  4601. function asap(fn, ...params) {
  4602. callbacks.unshift(fn);
  4603. args.unshift(params);
  4604. window.postMessage(ASAP_KEY, '*');
  4605. }
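/*
 * Illustrative usage sketch (not executed by this bundle): asap() queues the
 * callback, posts a message tagged with ASAP_KEY to the window, and the
 * listener registered above pops the queue and invokes the callback on a
 * subsequent turn of the event loop.
 *
 *   asap((a, b) => console.log('sum:', a + b), 2, 3); // logs "sum: 5" asynchronously
 *   console.log('scheduled');                         // logs first, synchronously
 */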
  4606. // EXTERNAL MODULE: ./src/utils/errors.js
  4607. var utils_errors = __nested_webpack_require_314174__(8581);
  4608. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-reader.js
  4609. /*
  4610. * speedy-vision.js
  4611. * GPU-accelerated Computer Vision for JavaScript
  4612. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4613. *
  4614. * Licensed under the Apache License, Version 2.0 (the "License");
  4615. * you may not use this file except in compliance with the License.
  4616. * You may obtain a copy of the License at
  4617. *
  4618. * http://www.apache.org/licenses/LICENSE-2.0
  4619. *
  4620. * Unless required by applicable law or agreed to in writing, software
  4621. * distributed under the License is distributed on an "AS IS" BASIS,
  4622. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4623. * See the License for the specific language governing permissions and
  4624. * limitations under the License.
  4625. *
  4626. * speedy-texture-reader.js
  4627. * Reads data from textures
  4628. */
  4629. /** @type {number} number of PBOs; used to get a performance boost in gl.readPixels() */
  4630. const DEFAULT_NUMBER_OF_BUFFERS = 2;
  4631. /** @type {(fn: Function, ...args: any[]) => number} Run function fn on the "next frame" */
4632. const runOnNextFrame = navigator.userAgent.includes('Firefox') ?
4633. (fn, ...args) => setTimeout(fn, 10, ...args) : // RAF produces a warning on Firefox, so use setTimeout there
4634. (fn, ...args) => requestAnimationFrame(() => fn.apply(undefined, args)); // RAF reduces battery usage
  4635. /**
  4636. * Reads data from textures
  4637. */
  4638. class SpeedyTextureReader {
  4639. /**
  4640. * Constructor
  4641. * @param {number} [numberOfBuffers]
  4642. */
  4643. constructor(numberOfBuffers = DEFAULT_NUMBER_OF_BUFFERS) {
  4644. utils/* Utils */.A.assert(numberOfBuffers > 0);
  4645. /** @type {boolean} is this object initialized? */
  4646. this._initialized = false;
  4647. /** @type {Uint8Array[]} pixel buffers for data transfers (each stores RGBA data) */
  4648. this._pixelBuffer = new Array(numberOfBuffers).fill(null).map(() => new Uint8Array(0));
  4649. /** @type {WebGLBuffer[]} Pixel Buffer Objects (PBOs) */
  4650. this._pbo = new Array(numberOfBuffers).fill(null);
  4651. /** @type {number} the index of the buffer that will be consumed in this frame */
  4652. this._consumerIndex = 0;
  4653. /** @type {number} the index of the buffer that will be produced next */
  4654. this._producerIndex = numberOfBuffers - 1;
  4655. /** @type {SpeedyPromise<void>[]} producer-consumer promises */
  4656. this._promise = Array.from({
  4657. length: numberOfBuffers
  4658. }, () => speedy_promise/* SpeedyPromise */.i.resolve());
  4659. /** @type {boolean[]} are the contents of the ith buffer being produced? */
  4660. this._busy = new Array(numberOfBuffers).fill(false);
  4661. /** @type {boolean[]} can the ith buffer be consumed? */
  4662. this._ready = new Array(numberOfBuffers).fill(true);
  4663. }
  4664. /**
  4665. * Initialize this object
  4666. * @param {SpeedyGPU} gpu
  4667. */
  4668. init(gpu) {
  4669. this._allocatePBOs(gpu);
  4670. gpu.subscribe(this._allocatePBOs, this, gpu);
  4671. this._initialized = true;
  4672. }
  4673. /**
  4674. * Release resources
  4675. * @param {SpeedyGPU} gpu
  4676. * @returns {null}
  4677. */
  4678. release(gpu) {
  4679. gpu.unsubscribe(this._allocatePBOs, this);
  4680. this._deallocatePBOs(gpu);
  4681. this._initialized = false;
  4682. return null;
  4683. }
  4684. /**
  4685. * Read pixels from a texture, synchronously.
4686. * You may optionally specify an (x,y,width,height) sub-rectangle.
  4687. * @param {SpeedyDrawableTexture} texture a texture with a FBO
  4688. * @param {number} [x]
  4689. * @param {number} [y]
  4690. * @param {number} [width]
  4691. * @param {number} [height]
  4692. * @returns {Uint8Array} pixels in the RGBA format
  4693. */
  4694. readPixelsSync(texture, x = 0, y = 0, width = texture.width, height = texture.height) {
  4695. utils/* Utils */.A.assert(this._initialized);
  4696. const gl = texture.gl;
  4697. const fbo = texture.glFbo;
  4698. // clamp values
  4699. width = Math.max(0, Math.min(width, texture.width));
  4700. height = Math.max(0, Math.min(height, texture.height));
  4701. x = Math.max(0, Math.min(x, texture.width - width));
  4702. y = Math.max(0, Math.min(y, texture.height - height));
  4703. // buffer allocation
  4704. const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
  4705. this._reallocate(sizeofBuffer);
  4706. // lost context?
  4707. if (gl.isContextLost()) return this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4708. // read pixels
  4709. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  4710. gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, this._pixelBuffer[0]);
  4711. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  4712. // done!
  4713. return this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4714. }
  4715. /**
  4716. * Read pixels from a texture, asynchronously, with PBOs.
4717. * You may optionally specify an (x,y,width,height) sub-rectangle.
  4718. * @param {SpeedyDrawableTexture} texture a texture with a FBO
  4719. * @param {number} [x]
  4720. * @param {number} [y]
  4721. * @param {number} [width]
  4722. * @param {number} [height]
  4723. * @param {boolean} [useBufferedDownloads] accelerate downloads by returning pixels from the texture of the previous call (useful for streaming)
  4724. * @returns {SpeedyPromise<Uint8Array>} resolves to an array of pixels in the RGBA format
  4725. */
  4726. readPixelsAsync(texture, x = 0, y = 0, width = texture.width, height = texture.height, useBufferedDownloads = false) {
  4727. utils/* Utils */.A.assert(this._initialized);
  4728. const gl = texture.gl;
  4729. const fbo = texture.glFbo;
  4730. // clamp values
  4731. width = Math.max(0, Math.min(width, texture.width));
  4732. height = Math.max(0, Math.min(height, texture.height));
  4733. x = Math.max(0, Math.min(x, texture.width - width));
  4734. y = Math.max(0, Math.min(y, texture.height - height));
  4735. // buffer allocation
  4736. const sizeofBuffer = width * height * 4; // 4 bytes per pixel (RGBA)
  4737. this._reallocate(sizeofBuffer);
  4738. // lost context?
  4739. if (gl.isContextLost()) return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[0].subarray(0, sizeofBuffer));
  4740. // do not optimize?
  4741. if (!useBufferedDownloads) {
  4742. const pixelBuffer = this._pixelBuffer[0].subarray(0, sizeofBuffer);
  4743. return SpeedyTextureReader._readPixelsViaPBO(gl, this._pbo[0], pixelBuffer, fbo, x, y, width, height).then(() => pixelBuffer);
  4744. }
  4745. // Hide latency with a Producer-Consumer mechanism
  4746. const numberOfBuffers = this._pixelBuffer.length;
  4747. // GPU needs to produce data
  4748. const producerIndex = this._producerIndex;
  4749. if (!this._busy[producerIndex]) {
  4750. const pbo = this._pbo[producerIndex];
  4751. const pixelBuffer = this._pixelBuffer[producerIndex].subarray(0, sizeofBuffer);
  4752. this._producerIndex = (producerIndex + 1) % numberOfBuffers;
  4753. this._ready[producerIndex] = false;
  4754. this._busy[producerIndex] = true;
  4755. //console.time("produce "+producerIndex);
  4756. this._promise[producerIndex] = SpeedyTextureReader._readPixelsViaPBO(gl, pbo, pixelBuffer, fbo, x, y, width, height).then(() => {
  4757. //console.timeEnd("produce "+producerIndex);
  4758. this._busy[producerIndex] = false;
  4759. this._ready[producerIndex] = true;
  4760. });
  4761. }
  4762. //else console.log("skip",producerIndex);
  4763. else /* skip frame */;
  4764. // CPU needs to consume data
  4765. const consumerIndex = this._consumerIndex;
  4766. this._consumerIndex = (consumerIndex + 1) % numberOfBuffers;
  4767. if (!this._ready[consumerIndex]) {
  4768. //console.time("consume "+consumerIndex);
  4769. return this._promise[consumerIndex].then(() => {
  4770. //console.timeEnd("consume "+consumerIndex);
  4771. this._ready[consumerIndex] = false;
  4772. return this._pixelBuffer[consumerIndex];
  4773. });
  4774. }
  4775. //console.log("NO WAIT "+consumerIndex);
  4776. this._ready[consumerIndex] = false;
  4777. return speedy_promise/* SpeedyPromise */.i.resolve(this._pixelBuffer[consumerIndex]);
  4778. }
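/*
 * Worked example of the buffer rotation above, assuming the default of two
 * buffers and useBufferedDownloads = true: _consumerIndex starts at 0 and
 * _producerIndex at 1, so successive calls behave as follows.
 *
 *   call #1: start producing into buffer 1, return buffer 0 (stale or zero-filled on the very first call)
 *   call #2: start producing into buffer 0, return buffer 1 (the pixels requested on call #1)
 *   call #3: start producing into buffer 1, return buffer 0 (the pixels requested on call #2)
 *
 * The returned pixels therefore lag one call behind the current texture
 * contents, which is what hides the GPU-to-CPU transfer latency.
 */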
  4779. /**
  4780. * Reallocate the pixel buffers, so that they can hold the required number of bytes
  4781. * If the pixel buffers already have the required capacity, then nothing is done
  4782. * @param {number} size in bytes
  4783. */
  4784. _reallocate(size) {
  4785. // no need to reallocate
  4786. if (size <= this._pixelBuffer[0].byteLength) return;
  4787. // reallocate
  4788. for (let i = 0; i < this._pixelBuffer.length; i++) {
  4789. const newBuffer = new Uint8Array(size);
  4790. //newBuffer.set(this._pixelBuffer[i]); // make this optional?
  4791. this._pixelBuffer[i] = newBuffer;
  4792. }
  4793. }
  4794. /**
  4795. * Allocate PBOs
  4796. * @param {SpeedyGPU} gpu
  4797. */
  4798. _allocatePBOs(gpu) {
  4799. const gl = gpu.gl;
  4800. for (let i = 0; i < this._pbo.length; i++) this._pbo[i] = gl.createBuffer();
  4801. }
  4802. /**
  4803. * Deallocate PBOs
  4804. * @param {SpeedyGPU} gpu
  4805. */
  4806. _deallocatePBOs(gpu) {
  4807. const gl = gpu.gl;
  4808. for (let i = this._pbo.length - 1; i >= 0; i--) {
  4809. gl.deleteBuffer(this._pbo[i]);
  4810. this._pbo[i] = null;
  4811. }
  4812. }
  4813. /**
  4814. * Read pixels to a Uint8Array, asynchronously, using a Pixel Buffer Object (PBO)
  4815. * It's assumed that the target texture is in the RGBA8 format
  4816. * @param {WebGL2RenderingContext} gl
  4817. * @param {WebGLBuffer} pbo
  4818. * @param {Uint8Array} outputBuffer with size >= width * height * 4
  4819. * @param {WebGLFramebuffer} fbo
  4820. * @param {GLint} x
  4821. * @param {GLint} y
  4822. * @param {GLsizei} width
  4823. * @param {GLsizei} height
  4824. * @returns {SpeedyPromise<void>}
  4825. */
  4826. static _readPixelsViaPBO(gl, pbo, outputBuffer, fbo, x, y, width, height) {
  4827. /*
  4828. When testing Speedy on Chrome (mobile) using about:tracing with the
  4829. --enable-gpu-service-tracing flag, I found that A LOT of time is spent
  4830. in TraceGLAPI::glMapBufferRange, which takes place just after
  4831. GLES2DecoderImpl::HandleReadPixels and GLES2DecoderImpl::glReadPixels.
  4832. Using multiple PBOs doesn't seem to impact Chrome too much. Performance
  4833. is much better on Firefox. This suggests there is room for improvement.
4834. I do not yet clearly understand the cause of this lag on Chrome. It
  4835. may be a CPU-GPU synchronization issue.
  4836. EDIT: I have found that using gl.flush() aggressively greatly improves
  4837. things. WebGL commands will be pushed frequently!
  4838. See also:
  4839. https://www.khronos.org/registry/webgl/specs/latest/2.0/#3.7.3 (Buffer objects)
  4840. https://github.com/chromium/chromium/blob/master/docs/gpu/debugging_gpu_related_code.md
  4841. */
  4842. const size = width * height * 4;
  4843. // validate outputBuffer
  4844. utils/* Utils */.A.assert(outputBuffer.byteLength >= size, `Invalid buffer size`);
  4845. // read pixels into the PBO
  4846. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  4847. gl.bufferData(gl.PIXEL_PACK_BUFFER, size, gl.DYNAMIC_READ);
  4848. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  4849. gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, 0);
  4850. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  4851. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  4852. // create a fence
  4853. const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
  4854. gl.flush(); // make sure the sync command is read
  4855. // wait for the commands to be processed by the GPU
  4856. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  4857. // according to the WebGL2 spec sec 3.7.14 Sync objects,
  4858. // "sync objects may only transition to the signaled state
  4859. // when the user agent's event loop is not executing a task"
  4860. // in other words, it won't be signaled in the same frame
  4861. if (settings/* Settings */.w.gpuPollingMode != 'asap') runOnNextFrame(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);else asap(SpeedyTextureReader._clientWaitAsync, gl, sync, 0, resolve, reject);
  4862. }).then(() => {
  4863. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  4864. gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, outputBuffer);
  4865. gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  4866. }).catch(err => {
  4867. throw new utils_errors/* IllegalOperationError */.Er(`Can't getBufferSubDataAsync(): error in clientWaitAsync()`, err);
  4868. }).finally(() => {
  4869. gl.deleteSync(sync);
  4870. });
  4871. }
  4872. /**
  4873. * Waits for a sync object to become signaled
  4874. * @param {WebGL2RenderingContext} gl
  4875. * @param {WebGLSync} sync
  4876. * @param {GLbitfield} flags may be gl.SYNC_FLUSH_COMMANDS_BIT or 0
  4877. * @param {Function} resolve
  4878. * @param {Function} reject
  4879. * @param {number} [pollInterval] in milliseconds
  4880. * @param {number} [remainingAttempts] for timeout
  4881. */
  4882. static _clientWaitAsync(gl, sync, flags, resolve, reject, pollInterval = 10, remainingAttempts = 1000) {
  4883. (function poll() {
  4884. const status = gl.clientWaitSync(sync, flags, 0);
  4885. if (remainingAttempts-- <= 0) {
  4886. reject(new utils_errors/* TimeoutError */.MU(`GPU polling timeout`, utils_errors/* GLError */.wB.from(gl)));
  4887. } else if (status === gl.CONDITION_SATISFIED || status === gl.ALREADY_SIGNALED) {
  4888. resolve();
  4889. } else {
  4890. //setTimeout(poll, pollInterval);
  4891. if (settings/* Settings */.w.gpuPollingMode != 'asap') requestAnimationFrame(poll); // RAF is a rather unusual way to do polling at ~60 fps. Does it reduce CPU usage?
  4892. else asap(poll);
  4893. }
  4894. })();
  4895. }
  4896. }
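/*
 * Minimal usage sketch (assumptions: `gpu` is an initialized SpeedyGPU and
 * `texture` is a SpeedyDrawableTexture produced elsewhere in this bundle;
 * error handling omitted):
 *
 *   const reader = new SpeedyTextureReader();
 *   reader.init(gpu);
 *
 *   // synchronous readback of the full texture (RGBA, 4 bytes per pixel)
 *   const rgba = reader.readPixelsSync(texture);
 *
 *   // asynchronous readback via a PBO; resolves after the GPU fence is signaled
 *   reader.readPixelsAsync(texture).then(pixels => {
 *       // pixels is a Uint8Array of RGBA data
 *   });
 *
 *   reader.release(gpu);
 */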
  4897. // EXTERNAL MODULE: ./src/utils/globals.js
  4898. var globals = __nested_webpack_require_314174__(3816);
  4899. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture.js
  4900. /*
  4901. * speedy-vision.js
  4902. * GPU-accelerated Computer Vision for JavaScript
  4903. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  4904. *
  4905. * Licensed under the Apache License, Version 2.0 (the "License");
  4906. * you may not use this file except in compliance with the License.
  4907. * You may obtain a copy of the License at
  4908. *
  4909. * http://www.apache.org/licenses/LICENSE-2.0
  4910. *
  4911. * Unless required by applicable law or agreed to in writing, software
  4912. * distributed under the License is distributed on an "AS IS" BASIS,
  4913. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  4914. * See the License for the specific language governing permissions and
  4915. * limitations under the License.
  4916. *
  4917. * speedy-texture.js
  4918. * A wrapper around WebGLTexture
  4919. */
  4920. /**
  4921. * Get a buffer filled with zeros
  4922. * @param {number} size number of bytes
  4923. * @returns {Uint8Array}
  4924. */
  4925. /*
  4926. const zeros = (function() {
  4927. let buffer = new Uint8Array(4);
  4928. return function(size) {
  4929. if(size > buffer.length)
  4930. buffer = new Uint8Array(size);
  4931. return buffer.subarray(0, size);
  4932. }
  4933. })();
  4934. */
  4935. /**
  4936. * A wrapper around WebGLTexture
  4937. */
  4938. class SpeedyTexture {
  4939. /**
  4940. * Constructor
  4941. * @param {WebGL2RenderingContext} gl
  4942. * @param {number} width texture width in pixels
  4943. * @param {number} height texture height in pixels
  4944. * @param {number} [format]
  4945. * @param {number} [internalFormat]
  4946. * @param {number} [dataType]
  4947. * @param {number} [filter]
  4948. * @param {number} [wrap]
  4949. */
  4950. constructor(gl, width, height, format = gl.RGBA, internalFormat = gl.RGBA8, dataType = gl.UNSIGNED_BYTE, filter = gl.NEAREST, wrap = gl.MIRRORED_REPEAT) {
  4951. /** @type {WebGL2RenderingContext} rendering context */
  4952. this._gl = gl;
  4953. /** @type {number} width of the texture */
  4954. this._width = Math.max(1, width | 0);
  4955. /** @type {number} height of the texture */
  4956. this._height = Math.max(1, height | 0);
  4957. /** @type {boolean} have we generated mipmaps for this texture? */
  4958. this._hasMipmaps = false;
  4959. /** @type {number} texture format */
  4960. this._format = format;
  4961. /** @type {number} internal format (usually a sized format) */
  4962. this._internalFormat = internalFormat;
  4963. /** @type {number} data type */
  4964. this._dataType = dataType;
  4965. /** @type {number} texture filtering (min & mag) */
  4966. this._filter = filter;
  4967. /** @type {number} texture wrapping */
  4968. this._wrap = wrap;
  4969. /** @type {WebGLTexture} internal texture object */
  4970. this._glTexture = SpeedyTexture._createTexture(this._gl, this._width, this._height, this._format, this._internalFormat, this._dataType, this._filter, this._wrap);
  4971. }
  4972. /**
  4973. * Releases the texture
  4974. * @returns {null}
  4975. */
  4976. release() {
  4977. const gl = this._gl;
  4978. // already released?
  4979. if (this._glTexture == null) throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyTexture has already been released`);
  4980. // release resources
  4981. this.discardMipmaps();
  4982. gl.deleteTexture(this._glTexture);
  4983. this._glTexture = null;
  4984. this._width = this._height = 0;
  4985. // done!
  4986. return null;
  4987. }
  4988. /**
  4989. * Upload pixel data to the texture. The texture will be resized if needed.
  4990. * @param {TexImageSource} data
  4991. * @param {number} [width] in pixels
  4992. * @param {number} [height] in pixels
  4993. * @return {SpeedyTexture} this
  4994. */
  4995. upload(data, width = this._width, height = this._height) {
  4996. const gl = this._gl;
  4997. // bugfix: if the media is a video, we can't really
  4998. // upload it to the GPU unless it's ready
  4999. if (data instanceof HTMLVideoElement) {
  5000. if (data.readyState < 2) {
  5001. // this may happen when the video loops (Firefox)
  5002. // keep the previously uploaded texture
  5003. //Utils.warning(`Trying to process a video that isn't ready yet`);
  5004. return this;
  5005. }
  5006. }
  5007. utils/* Utils */.A.assert(width > 0 && height > 0);
  5008. this.discardMipmaps();
  5009. this._width = width;
  5010. this._height = height;
  5011. this._internalFormat = gl.RGBA8;
  5012. this._format = gl.RGBA;
  5013. this._dataType = gl.UNSIGNED_BYTE;
  5014. SpeedyTexture._upload(gl, this._glTexture, this._width, this._height, data, 0, this._format, this._internalFormat, this._dataType);
  5015. return this;
  5016. }
  5017. /**
  5018. * Clear the texture
  5019. * @returns {this}
  5020. */
  5021. clear() {
  5022. const gl = this._gl;
  5023. // context loss?
  5024. if (gl.isContextLost()) return this;
  5025. // clear texture data
  5026. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5027. gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
  5028. gl.bindTexture(gl.TEXTURE_2D, null);
  5029. // no mipmaps
  5030. this.discardMipmaps();
  5031. // done!
  5032. return this;
  5033. }
  5034. /**
  5035. * Resize this texture. Its content will be lost!
  5036. * @param {number} width new width, in pixels
  5037. * @param {number} height new height, in pixels
  5038. * @returns {this}
  5039. */
  5040. resize(width, height) {
  5041. const gl = this._gl;
  5042. // no need to resize?
  5043. if (this._width === width && this._height === height) return this;
  5044. // validate size
  5045. width |= 0;
  5046. height |= 0;
  5047. if (width > globals.MAX_TEXTURE_LENGTH || height > globals.MAX_TEXTURE_LENGTH) throw new utils_errors/* NotSupportedError */.EM(`Maximum texture size exceeded. Using ${width} x ${height}, expected up to ${globals.MAX_TEXTURE_LENGTH} x ${globals.MAX_TEXTURE_LENGTH}.`);else if (width < 1 || height < 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid texture size: ${width} x ${height}`);
  5048. // context loss?
  5049. if (gl.isContextLost()) return this;
  5050. // update dimensions
  5051. this._width = width;
  5052. this._height = height;
  5053. // resize
  5054. // Note: this is fast on Chrome, but seems slow on Firefox
  5055. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5056. gl.texImage2D(gl.TEXTURE_2D, 0, this._internalFormat, this._width, this._height, 0, this._format, this._dataType, null);
  5057. gl.bindTexture(gl.TEXTURE_2D, null);
  5058. // no mipmaps
  5059. this.discardMipmaps();
  5060. // done!
  5061. return this;
  5062. }
  5063. /**
  5064. * Generate mipmap
  5065. * @param {SpeedyDrawableTexture[]} [mipmap] custom texture for each mip level
  5066. * @returns {SpeedyTexture} this
  5067. */
  5068. generateMipmaps(mipmap = []) {
  5069. const gl = this._gl;
  5070. // nothing to do
  5071. if (this._hasMipmaps) return this;
5072. // let the hardware compute all the levels of the pyramid, up to 1x1
  5073. // we also specify the TEXTURE_MIN_FILTER to be used from now on
  5074. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5075. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_LINEAR);
  5076. gl.generateMipmap(gl.TEXTURE_2D);
  5077. gl.bindTexture(gl.TEXTURE_2D, null);
  5078. // accept custom textures
  5079. if (mipmap.length > 0) {
  5080. // expected number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
  5081. const width = this.width,
  5082. height = this.height;
  5083. const numMipmaps = 1 + Math.floor(Math.log2(Math.max(width, height)));
  5084. utils/* Utils */.A.assert(mipmap.length <= numMipmaps);
  5085. // verify the dimensions of each level
  5086. for (let level = 1; level < mipmap.length; level++) {
5087. // use max(1, floor(size / 2^lod)), in accordance with
  5088. // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
  5089. const w = Math.max(1, width >>> level);
  5090. const h = Math.max(1, height >>> level);
  5091. // verify the dimensions of this level
  5092. utils/* Utils */.A.assert(mipmap[level].width === w && mipmap[level].height === h);
  5093. // copy to mipmap
  5094. mipmap[level].copyTo(this, level);
  5095. }
  5096. }
  5097. // done!
  5098. this._hasMipmaps = true;
  5099. return this;
  5100. }
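/*
 * Worked example of the level count used above (illustration only): for a
 * 640x480 base texture, numMipmaps = 1 + floor(log2(max(640, 480))) =
 * 1 + floor(log2(640)) = 1 + 9 = 10 levels. Using max(1, size >>> level),
 * the level sizes are 640x480, 320x240, 160x120, 80x60, 40x30, 20x15,
 * 10x7, 5x3, 2x1 and 1x1.
 */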
  5101. /**
  5102. * Invalidates previously generated mipmap, if any
  5103. */
  5104. discardMipmaps() {
  5105. const gl = this._gl;
  5106. // nothing to do
  5107. if (!this._hasMipmaps) return;
  5108. // reset the min filter
  5109. gl.bindTexture(gl.TEXTURE_2D, this._glTexture);
  5110. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, this._filter);
  5111. gl.bindTexture(gl.TEXTURE_2D, null);
  5112. // done!
  5113. this._hasMipmaps = false;
  5114. }
  5115. /**
  5116. * Does this texture have a mipmap?
  5117. * @returns {boolean}
  5118. */
  5119. hasMipmaps() {
  5120. return this._hasMipmaps;
  5121. }
  5122. /**
  5123. * Has this texture been released?
  5124. * @returns {boolean}
  5125. */
  5126. isReleased() {
  5127. return this._glTexture == null;
  5128. }
  5129. /**
  5130. * The internal WebGLTexture
  5131. * @returns {WebGLTexture}
  5132. */
  5133. get glTexture() {
  5134. return this._glTexture;
  5135. }
  5136. /**
  5137. * The width of the texture, in pixels
  5138. * @returns {number}
  5139. */
  5140. get width() {
  5141. return this._width;
  5142. }
  5143. /**
  5144. * The height of the texture, in pixels
  5145. * @returns {number}
  5146. */
  5147. get height() {
  5148. return this._height;
  5149. }
  5150. /**
  5151. * The WebGL Context
  5152. * @returns {WebGL2RenderingContext}
  5153. */
  5154. get gl() {
  5155. return this._gl;
  5156. }
  5157. /**
  5158. * Create a WebGL texture
  5159. * @param {WebGL2RenderingContext} gl
  5160. * @param {number} width in pixels
  5161. * @param {number} height in pixels
  5162. * @param {number} format usually gl.RGBA
  5163. * @param {number} internalFormat usually gl.RGBA8
  5164. * @param {number} dataType usually gl.UNSIGNED_BYTE
  5165. * @param {number} filter usually gl.NEAREST or gl.LINEAR
  5166. * @param {number} wrap gl.REPEAT, gl.MIRRORED_REPEAT or gl.CLAMP_TO_EDGE
  5167. * @returns {WebGLTexture}
  5168. */
  5169. static _createTexture(gl, width, height, format, internalFormat, dataType, filter, wrap) {
  5170. utils/* Utils */.A.assert(width > 0 && height > 0);
  5171. // create & bind texture
  5172. const texture = gl.createTexture();
  5173. gl.bindTexture(gl.TEXTURE_2D, texture);
  5174. // setup
  5175. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, filter);
  5176. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, filter);
  5177. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, wrap);
  5178. gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, wrap);
  5179. //gl.texStorage2D(gl.TEXTURE_2D, 1, internalFormat, width, height);
  5180. gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, format, dataType, null);
  5181. // unbind & return
  5182. gl.bindTexture(gl.TEXTURE_2D, null);
  5183. return texture;
  5184. }
  5185. /**
  5186. * Upload pixel data to a WebGL texture
  5187. * @param {WebGL2RenderingContext} gl
  5188. * @param {WebGLTexture} texture
  5189. * @param {GLsizei} width texture width
  5190. * @param {GLsizei} height texture height
  5191. * @param {TexImageSource} pixels
  5192. * @param {GLint} lod mipmap level-of-detail
  5193. * @param {number} format
  5194. * @param {number} internalFormat
  5195. * @param {number} dataType
  5196. * @returns {WebGLTexture} texture
  5197. */
  5198. static _upload(gl, texture, width, height, pixels, lod, format, internalFormat, dataType) {
  5199. // Prefer calling _upload() before gl.useProgram() to avoid the
  5200. // needless switching of GL programs internally. See also:
  5201. // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  5202. gl.bindTexture(gl.TEXTURE_2D, texture);
  5203. /*
  5204. // slower than texImage2D, unlike the spec?
  5205. gl.texSubImage2D(gl.TEXTURE_2D, // target
  5206. lod, // mip level
  5207. 0, // x-offset
  5208. 0, // y-offset
  5209. width, // texture width
  5210. height, // texture height
  5211. gl.RGBA, // source format
  5212. gl.UNSIGNED_BYTE, // source type
  5213. pixels); // source data
  5214. */
  5215. gl.texImage2D(gl.TEXTURE_2D,
  5216. // target
  5217. lod,
  5218. // mip level
  5219. internalFormat,
  5220. // internal format
  5221. width,
  5222. // texture width
  5223. height,
  5224. // texture height
  5225. 0,
  5226. // border
  5227. format,
  5228. // source format
  5229. dataType,
  5230. // source type
  5231. pixels); // source data
  5232. gl.bindTexture(gl.TEXTURE_2D, null);
  5233. return texture;
  5234. }
  5235. }
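/*
 * Minimal usage sketch (assumptions: `gl` is a WebGL2RenderingContext and
 * `image` is a fully loaded HTMLImageElement; error handling omitted):
 *
 *   const tex = new SpeedyTexture(gl, image.width, image.height);
 *   tex.upload(image);         // copy the pixels into the RGBA8 texture
 *   tex.generateMipmaps();     // optional: let the GPU build the mip pyramid
 *   console.log(tex.width, tex.height, tex.hasMipmaps());
 *   tex.release();             // deletes the underlying WebGLTexture
 */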
  5236. /**
  5237. * A SpeedyTexture with a framebuffer
  5238. */
  5239. class SpeedyDrawableTexture extends SpeedyTexture {
  5240. /**
  5241. * Constructor
  5242. * @param {WebGL2RenderingContext} gl
  5243. * @param {number} width texture width in pixels
  5244. * @param {number} height texture height in pixels
  5245. * @param {number} [format]
  5246. * @param {number} [internalFormat]
  5247. * @param {number} [dataType]
  5248. * @param {number} [filter]
  5249. * @param {number} [wrap]
  5250. */
  5251. constructor(gl, width, height, format = undefined, internalFormat = undefined, dataType = undefined, filter = undefined, wrap = undefined) {
  5252. super(gl, width, height, format, internalFormat, dataType, filter, wrap);
  5253. /** @type {WebGLFramebuffer} framebuffer */
  5254. this._glFbo = SpeedyDrawableTexture._createFramebuffer(gl, this._glTexture);
  5255. }
  5256. /**
  5257. * Releases the texture
  5258. * @returns {null}
  5259. */
  5260. release() {
  5261. const gl = this._gl;
  5262. // already released?
  5263. if (this._glFbo == null) throw new utils_errors/* IllegalOperationError */.Er(`The SpeedyDrawableTexture has already been released`);
  5264. // release the framebuffer
  5265. gl.deleteFramebuffer(this._glFbo);
  5266. this._glFbo = null;
  5267. // release the SpeedyTexture
  5268. return super.release();
  5269. }
  5270. /**
  5271. * The internal WebGLFramebuffer
  5272. * @returns {WebGLFramebuffer}
  5273. */
  5274. get glFbo() {
  5275. return this._glFbo;
  5276. }
  5277. /**
  5278. * Copy this texture into another
  5279. * (you may have to discard the mipmaps after calling this function)
  5280. * @param {SpeedyTexture} texture target texture
  5281. * @param {number} [lod] level-of-detail of the target texture
  5282. */
  5283. copyTo(texture, lod = 0) {
  5284. const gl = this._gl;
  5285. // context loss?
  5286. if (gl.isContextLost()) return;
  5287. // compute texture size as max(1, floor(size / 2^lod)),
5288. // in accordance with the OpenGL ES 3.0 spec sec 3.8.10.4
  5289. // (Mipmapping)
  5290. const pot = 1 << (lod |= 0);
  5291. const expectedWidth = Math.max(1, Math.floor(texture.width / pot));
  5292. const expectedHeight = Math.max(1, Math.floor(texture.height / pot));
  5293. // validate
  5294. utils/* Utils */.A.assert(this._width === expectedWidth && this._height === expectedHeight);
  5295. // copy to texture
  5296. SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, texture.glTexture, 0, 0, this._width, this._height, lod);
  5297. }
  5298. /*
  5299. * Resize this texture
  5300. * @param {number} width new width, in pixels
  5301. * @param {number} height new height, in pixels
  5302. * @param {boolean} [preserveContent] should we preserve the content of the texture? EXPENSIVE!
  5303. * @returns {this}
  5304. */
  5305. /*resize(width, height, preserveContent = false)
  5306. {
  5307. const gl = this._gl;
  5308. // no need to preserve the content?
  5309. if(!preserveContent)
  5310. return super.resize(width, height);
  5311. // no need to resize?
  5312. if(this._width === width && this._height === height)
  5313. return this;
  5314. // validate size
  5315. width |= 0; height |= 0;
  5316. Utils.assert(width > 0 && height > 0);
  5317. // context loss?
  5318. if(gl.isContextLost())
  5319. return this;
  5320. // allocate new texture
  5321. const newTexture = SpeedyTexture._createTexture(gl, width, height);
  5322. // initialize the new texture with zeros to avoid a
  5323. // warning when calling copyTexSubImage2D() on Firefox
  5324. // this may not be very efficient?
  5325. SpeedyTexture._upload(gl, newTexture, width, height, zeros(width * height * 4)); // RGBA: 4 bytes per pixel
  5326. // copy the old texture to the new one
  5327. const oldWidth = this._width, oldHeight = this._height;
  5328. SpeedyDrawableTexture._copyToTexture(gl, this._glFbo, newTexture, 0, 0, Math.min(width, oldWidth), Math.min(height, oldHeight), 0);
  5329. // bind FBO
  5330. gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
  5331. // invalidate old data (is this needed?)
  5332. gl.invalidateFramebuffer(gl.FRAMEBUFFER, [gl.COLOR_ATTACHMENT0]);
  5333. // attach the new texture to the existing framebuffer
  5334. gl.framebufferTexture2D(gl.FRAMEBUFFER, // target
  5335. gl.COLOR_ATTACHMENT0, // color buffer
  5336. gl.TEXTURE_2D, // tex target
  5337. newTexture, // texture
  5338. 0); // mipmap level
  5339. // unbind FBO
  5340. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5341. // release the old texture and replace it
  5342. gl.deleteTexture(this._glTexture);
  5343. this._glTexture = newTexture;
  5344. // update dimensions & discard mipmaps
  5345. this.discardMipmaps();
  5346. this._width = width;
  5347. this._height = height;
  5348. // done!
  5349. return this;
  5350. }
  5351. */
  5352. /**
  5353. * Clear the texture
  5354. * @returns {this}
  5355. */
  5356. clear() {
  5357. //
  5358. // When we pass null to texImage2D(), it seems that Firefox
  5359. // doesn't clear the texture. Instead, it displays this warning:
  5360. //
  5361. // "WebGL warning: drawArraysInstanced:
  5362. // Tex image TEXTURE_2D level 0 is incurring lazy initialization."
  5363. //
  5364. // Here is a workaround:
  5365. //
  5366. return this.clearToColor(0, 0, 0, 0);
  5367. }
  5368. /**
  5369. * Clear the texture to a color
  5370. * @param {number} r red component, a value in [0,1]
  5371. * @param {number} g green component, a value in [0,1]
  5372. * @param {number} b blue component, a value in [0,1]
  5373. * @param {number} a alpha component, a value in [0,1]
  5374. * @returns {this}
  5375. */
  5376. clearToColor(r, g, b, a) {
  5377. const gl = this._gl;
  5378. // context loss?
  5379. if (gl.isContextLost()) return this;
  5380. // clamp parameters
  5381. r = Math.max(0.0, Math.min(+r, 1.0));
  5382. g = Math.max(0.0, Math.min(+g, 1.0));
  5383. b = Math.max(0.0, Math.min(+b, 1.0));
  5384. a = Math.max(0.0, Math.min(+a, 1.0));
  5385. // discard mipmaps, if any
  5386. this.discardMipmaps();
  5387. // clear the texture
  5388. gl.bindFramebuffer(gl.FRAMEBUFFER, this._glFbo);
  5389. gl.viewport(0, 0, this._width, this._height);
  5390. gl.clearColor(r, g, b, a);
  5391. gl.clear(gl.COLOR_BUFFER_BIT);
  5392. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5393. // done!
  5394. return this;
  5395. }
  5396. /**
  5397. * Inspect the pixels of the texture for debugging purposes
  5398. * @param {SpeedyGPU} gpu
  5399. * @param {SpeedyTextureReader} [textureReader] optional texture reader
  5400. * @returns {Uint8Array}
  5401. */
  5402. inspect(gpu, textureReader) {
  5403. if (textureReader === undefined) {
  5404. textureReader = new SpeedyTextureReader();
  5405. textureReader.init(gpu);
  5406. const pixels = textureReader.readPixelsSync(this);
  5407. textureReader.release(gpu);
  5408. return new Uint8Array(pixels); // copy the array
  5409. } else {
  5410. const pixels = textureReader.readPixelsSync(this);
  5411. return new Uint8Array(pixels);
  5412. }
  5413. }
  5414. /**
  5415. * Inspect the pixels of the texture as unsigned 32-bit integers
  5416. * @param {SpeedyGPU} gpu
  5417. * @param {SpeedyTextureReader} [textureReader] optional texture reader
  5418. * @returns {Uint32Array}
  5419. */
  5420. inspect32(gpu, textureReader) {
  5421. utils/* Utils */.A.assert(globals.LITTLE_ENDIAN); // make sure we use little-endian
  5422. return new Uint32Array(this.inspect(gpu, textureReader).buffer);
  5423. }
  5424. /**
  5425. * Create a FBO associated with an existing texture
  5426. * @param {WebGL2RenderingContext} gl
  5427. * @param {WebGLTexture} texture
  5428. * @returns {WebGLFramebuffer}
  5429. */
  5430. static _createFramebuffer(gl, texture) {
  5431. const fbo = gl.createFramebuffer();
  5432. // setup framebuffer
  5433. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5434. gl.framebufferTexture2D(gl.FRAMEBUFFER,
  5435. // target
  5436. gl.COLOR_ATTACHMENT0,
  5437. // color buffer
  5438. gl.TEXTURE_2D,
  5439. // tex target
  5440. texture,
  5441. // texture
  5442. 0); // mipmap level
  5443. // check for errors
  5444. const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
  5445. if (status != gl.FRAMEBUFFER_COMPLETE) {
  5446. const error = (() => ['FRAMEBUFFER_UNSUPPORTED', 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT', 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS', 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT', 'FRAMEBUFFER_INCOMPLETE_MULTISAMPLE'].filter(err => gl[err] === status)[0] || 'unknown error')();
  5447. throw new utils_errors/* GLError */.wB(`Can't create framebuffer: ${error} (${status})`);
  5448. }
  5449. // unbind & return
  5450. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5451. return fbo;
  5452. }
  5453. /**
  5454. * Copy data from a framebuffer to a texture
  5455. * @param {WebGL2RenderingContext} gl
  5456. * @param {WebGLFramebuffer} fbo we'll read the data from this
  5457. * @param {WebGLTexture} texture destination texture
  5458. * @param {GLint} x xpos (where to start copying)
  5459. * @param {GLint} y ypos (where to start copying)
  5460. * @param {GLsizei} width width of the texture
  5461. * @param {GLsizei} height height of the texture
  5462. * @param {GLint} [lod] mipmap level-of-detail
  5463. * @returns {WebGLTexture} texture
  5464. */
  5465. static _copyToTexture(gl, fbo, texture, x, y, width, height, lod = 0) {
  5466. //gl.activeTexture(gl.TEXTURE0);
  5467. gl.bindTexture(gl.TEXTURE_2D, texture);
  5468. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5469. gl.copyTexSubImage2D(gl.TEXTURE_2D,
  5470. // target
  5471. lod,
  5472. // mipmap level
  5473. 0,
  5474. // xoffset
  5475. 0,
  5476. // yoffset
  5477. x,
  5478. // xpos (where to start copying)
  5479. y,
  5480. // ypos (where to start copying)
  5481. width,
  5482. // width of the texture
  5483. height // height of the texture
  5484. );
  5485. /*
  5486. gl.copyTexImage2D(
  5487. gl.TEXTURE_2D, // target
  5488. lod, // mipmap level
  5489. gl.RGBA, // internal format
  5490. x, // xpos (where to start copying)
  5491. y, // ypos (where to start copying)
  5492. width, // width of the texture
  5493. height, // height of the texture
  5494. 0 // border
  5495. );
  5496. */
  5497. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5498. gl.bindTexture(gl.TEXTURE_2D, null);
  5499. return texture;
  5500. }
  5501. }
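/*
 * Minimal usage sketch for the drawable variant (assumptions: `gl` is a
 * WebGL2RenderingContext and `gpu` is a SpeedyGPU; error handling omitted):
 *
 *   const target = new SpeedyDrawableTexture(gl, 256, 256);
 *   target.clearToColor(1, 0, 0, 1);     // fill the attached framebuffer with opaque red
 *   const pixels = target.inspect(gpu);  // synchronous RGBA readback for debugging
 *   console.log(pixels[0], pixels[3]);   // 255 255 (red and alpha of the first pixel)
 *   target.release();                    // deletes the FBO, then the texture
 */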
  5502. // EXTERNAL MODULE: ./src/gpu/shader-declaration.js + 1 modules
  5503. var shader_declaration = __nested_webpack_require_314174__(9420);
  5504. ;// CONCATENATED MODULE: ./src/gpu/speedy-program.js
  5505. /*
  5506. * speedy-vision.js
  5507. * GPU-accelerated Computer Vision for JavaScript
  5508. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  5509. *
  5510. * Licensed under the Apache License, Version 2.0 (the "License");
  5511. * you may not use this file except in compliance with the License.
  5512. * You may obtain a copy of the License at
  5513. *
  5514. * http://www.apache.org/licenses/LICENSE-2.0
  5515. *
  5516. * Unless required by applicable law or agreed to in writing, software
  5517. * distributed under the License is distributed on an "AS IS" BASIS,
  5518. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  5519. * See the License for the specific language governing permissions and
  5520. * limitations under the License.
  5521. *
  5522. * speedy-program.js
  5523. * SpeedyProgram class
  5524. */
  5525. /** @const {Object<string,string>} Map uniform type to a gl function */
  5526. const UNIFORM_SETTERS = Object.freeze({
  5527. 'sampler2D': 'uniform1i',
  5528. 'isampler2D': 'uniform1i',
  5529. 'usampler2D': 'uniform1i',
  5530. 'float': 'uniform1f',
  5531. 'int': 'uniform1i',
  5532. 'uint': 'uniform1ui',
  5533. 'bool': 'uniform1i',
  5534. 'vec2': 'uniform2f',
  5535. 'vec3': 'uniform3f',
  5536. 'vec4': 'uniform4f',
  5537. 'ivec2': 'uniform2i',
  5538. 'ivec3': 'uniform3i',
  5539. 'ivec4': 'uniform4i',
  5540. 'uvec2': 'uniform2ui',
  5541. 'uvec3': 'uniform3ui',
  5542. 'uvec4': 'uniform4ui',
  5543. 'bvec2': 'uniform2i',
  5544. 'bvec3': 'uniform3i',
  5545. 'bvec4': 'uniform4i',
  5546. 'mat2': 'uniformMatrix2fv',
  5547. 'mat3': 'uniformMatrix3fv',
  5548. 'mat4': 'uniformMatrix4fv'
  5549. });
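/*
 * Illustrative note: each entry above names the WebGL2 method used to upload a
 * uniform of that GLSL type. For example, a 'vec2' uniform is set roughly as
 *
 *   gl.uniform2f(location, value[0], value[1]);
 *
 * and a 'sampler2D' is bound to a texture unit and set with
 *
 *   gl.uniform1i(location, textureUnitIndex);
 *
 * (sketch only; the actual dispatch happens in the UniformVariable helper
 * defined elsewhere in this module)
 */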
  5550. /**
  5551. * @typedef {object} SpeedyProgramOptions
  5552. * @property {boolean} [renderToTexture] render results to a texture?
  5553. * @property {boolean} [pingpong] alternate output texture between calls
  5554. */
  5555. /** @typedef {number|number[]|boolean|boolean[]|SpeedyTexture} SpeedyProgramUniformValue */
  5556. /**
  5557. * A SpeedyProgram is a Function that runs GLSL code
  5558. */
  5559. class SpeedyProgram extends Function {
  5560. /**
  5561. * Creates a new SpeedyProgram
  5562. * @param {WebGL2RenderingContext} gl WebGL context
  5563. * @param {ShaderDeclaration} shaderdecl Shader declaration
  5564. * @param {SpeedyProgramOptions} [options] user options
  5565. */
  5566. constructor(gl, shaderdecl, options = {}) {
  5567. super('...args', 'return this._self._call(...args)');
  5568. /** @type {SpeedyProgram} this function bound to this function! */
  5569. this._self = this.bind(this);
  5570. this._self._init(gl, shaderdecl, options);
  5571. return this._self;
  5572. }
  5573. /**
  5574. * Initialize the SpeedyProgram
  5575. * @param {WebGL2RenderingContext} gl WebGL context
  5576. * @param {ShaderDeclaration} shaderdecl Shader declaration
  5577. * @param {SpeedyProgramOptions} options user options
  5578. */
  5579. _init(gl, shaderdecl, options) {
  5580. // not a valid context?
  5581. if (gl.isContextLost()) throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize SpeedyProgram: lost context`);
  5582. // options object
  5583. options = Object.assign({
  5584. // default options
  5585. renderToTexture: true,
  5586. pingpong: false
  5587. }, options);
  5588. /** @type {WebGL2RenderingContext} */
  5589. this._gl = gl;
  5590. /** @type {WebGLProgram} vertex shader + fragment shader */
  5591. this._program = SpeedyProgram._compile(gl, shaderdecl.vertexSource, shaderdecl.fragmentSource);
  5592. /** @type {ProgramGeometry} this is a quad */
  5593. this._geometry = new ProgramGeometry(gl, {
  5594. position: shaderdecl.locationOfAttributes.position,
  5595. texCoord: shaderdecl.locationOfAttributes.texCoord
  5596. });
  5597. /** @type {string[]} names of the arguments of the SpeedyProgram */
  5598. this._argnames = shaderdecl.arguments;
  5599. /** @type {boolean[]} tells whether the i-th argument of the SpeedyProgram is an array or not */
  5600. this._argIsArray = new Array(this._argnames.length).fill(false);
  5601. /** @type {UBOHelper} UBO helper (lazy instantiation) */
  5602. this._ubo = null;
  5603. /** @type {boolean} should we render to a texture? If false, we render to the canvas */
  5604. this._renderToTexture = Boolean(options.renderToTexture);
  5605. /** @type {number} width of the output */
  5606. this._width = 1;
  5607. /** @type {number} height of the output */
  5608. this._height = 1;
  5609. /** @type {[number,number]} cached object that stores the size of the output */
  5610. this._size = [1, 1];
  5611. /** @type {SpeedyDrawableTexture[]} output texture(s) */
  5612. this._texture = new Array(options.pingpong ? 2 : 1).fill(null);
  5613. /** @type {number} used for pingpong rendering */
  5614. this._textureIndex = 0;
  5615. /** @type {Map<string,UniformVariable>} uniform variables */
  5616. this._uniform = new Map();
  5617. /** @type {ShaderDeclaration} shader declaration */
  5618. this._shaderdecl = shaderdecl;
  5619. // autodetect uniforms
  5620. gl.useProgram(this._program);
  5621. for (const name of shaderdecl.uniforms) {
  5622. const type = shaderdecl.uniformType(name);
  5623. const location = gl.getUniformLocation(this._program, name);
  5624. this._uniform.set(name, new UniformVariable(type, location));
  5625. }
  5626. // match arguments & uniforms
  5627. for (let j = 0; j < this._argnames.length; j++) {
  5628. const argname = this._argnames[j];
  5629. if (!this._uniform.has(argname)) {
  5630. this._argIsArray[j] = this._uniform.has(indexedVariable(argname, 0));
  5631. if (!this._argIsArray[j]) throw new utils_errors/* IllegalOperationError */.Er(`Expected uniform "${argname}", as declared in the argument list`);
  5632. }
  5633. }
  5634. }
  5635. /**
  5636. * Run the SpeedyProgram
  5637. * @param {...SpeedyProgramUniformValue} args
  5638. * @returns {SpeedyDrawableTexture}
  5639. */
  5640. _call(...args) {
  5641. const gl = this._gl;
  5642. const argnames = this._argnames;
  5643. const texture = this._texture[this._textureIndex];
  5644. // matching arguments?
  5645. if (args.length != argnames.length) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of arguments (expected ${argnames.length}, got ${args.length})`);
  5646. // can't use the output texture as an input
  5647. /*
  5648. // slower method
  5649. const flatArgs = Utils.flatten(args);
  5650. for(let j = flatArgs.length - 1; j >= 0; j--) {
  5651. if(flatArgs[j] === this._texture[this._textureIndex])
  5652. throw new NotSupportedError(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
  5653. }
  5654. */
  5655. for (let j = args.length - 1; j >= 0; j--) {
  5656. if (args[j] === texture) throw new utils_errors/* NotSupportedError */.EM(`Can't run shader: don't use its output texture as an input to itself. Consider using pingpong rendering!`);
  5657. // else if(Array.isArray(args[j])) ...
  5658. // we don't support passing arrays of textures at the time of this writing
  5659. }
  5660. // context loss?
  5661. if (gl.isContextLost()) return texture;
  5662. // use program
  5663. gl.useProgram(this._program);
  5664. // bind the VAO
  5665. gl.bindVertexArray(this._geometry.vao);
  5666. // select the render target
  5667. const fbo = this._renderToTexture ? texture.glFbo : null;
  5668. // update texSize uniform (available in all fragment shaders)
  5669. const texSize = this._uniform.get('texSize');
  5670. this._size[0] = this._width;
  5671. this._size[1] = this._height;
  5672. texSize.setValue(gl, this._size);
  5673. // set uniforms[i] to args[i]
  5674. for (let i = 0, texNo = 0; i < args.length; i++) {
  5675. const argname = argnames[i];
  5676. if (!this._argIsArray[i]) {
  5677. // uniform variable matches argument name
  5678. const uniform = this._uniform.get(argname);
  5679. texNo = uniform.setValue(gl, args[i], texNo);
  5680. } else {
  5681. // uniform array matches argument name
  5682. const array = args[i];
  5683. if (Array.isArray(array)) {
  5684. if (this._uniform.has(indexedVariable(argname, array.length))) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: too few elements in the "${argname}" array`);
  5685. for (let j = 0, uniform = undefined; (uniform = this._uniform.get(indexedVariable(argname, j))) !== undefined; j++) texNo = uniform.setValue(gl, array[j], texNo);
  5686. } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: expected an array for "${argname}"`);
  5687. }
  5688. }
  5689. // set Uniform Buffer Objects (if any)
  5690. if (this._ubo !== null) this._ubo.update();
  5691. // bind the FBO
  5692. gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  5693. // draw call
  5694. gl.viewport(0, 0, this._width, this._height);
  5695. gl.drawArrays(gl.TRIANGLES, 0, 6); // mode, offset, count
  5696. // unbind the FBO
  5697. gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  5698. // unbind the VAO
  5699. gl.bindVertexArray(null);
  5700. // we've just changed the texture! discard the pyramid, if any
  5701. if (texture != null) texture.discardMipmaps();
  5702. // ping-pong rendering
  5703. this._pingpong();
  5704. // done!
  5705. return texture;
  5706. }
  5707. /**
  5708. * Set the output texture(s) and its (their) shape(s)
  5709. * @param {number} width new width, in pixels
  5710. * @param {number} height new height, in pixels
  5711. * @param {...SpeedyDrawableTexture|null} texture output texture(s)
  5712. * @returns {SpeedyProgram} this
  5713. */
  5714. outputs(width, height, ...texture) {
  5715. this._setOutputTexture(...texture);
  5716. this._setOutputSize(width, height);
  5717. return this;
  5718. }
  5719. /**
  5720. * Set the size of the output
  5721. * @param {number} width new width, in pixels
  5722. * @param {number} height new height, in pixels
  5723. * @returns {SpeedyProgram} this
  5724. */
  5725. _setOutputSize(width, height) {
  5726. utils/* Utils */.A.assert(width > 0 && height > 0);
  5727. // update output size
  5728. this._width = width | 0;
  5729. this._height = height | 0;
  5730. // resize the output texture(s)
  5731. for (let i = 0; i < this._texture.length; i++) {
  5732. if (this._texture[i] != null) this._texture[i].resize(this._width, this._height);
  5733. }
  5734. // done!
  5735. return this;
  5736. }
  5737. /**
  5738. * Use the provided texture(s) as output
  5739. * @param {...SpeedyDrawableTexture} texture set to null to use the internal texture(s)
  5740. * @returns {SpeedyProgram} this
  5741. */
  5742. _setOutputTexture(...texture) {
  5743. utils/* Utils */.A.assert(texture.length === this._texture.length, `Incorrect number of textures (expected ${this._texture.length})`);
  5744. // update output texture(s)
  5745. for (let i = 0; i < this._texture.length; i++) this._texture[i] = texture[i];
  5746. this._textureIndex = 0;
  5747. // done!
  5748. return this;
  5749. }
  5750. /**
  5751. * Clear the internal textures
  5752. * @returns {SpeedyDrawableTexture}
  5753. */
  5754. clear() {
  5755. const texture = this._texture[this._textureIndex];
  5756. // clear internal textures
  5757. for (let i = 0; i < this._texture.length; i++) this._texture[i].clear();
  5758. // ping-pong rendering
  5759. this._pingpong();
  5760. // done!
  5761. return texture;
  5762. }
  5763. /**
  5764. * Set data using a Uniform Buffer Object
  5765. * @param {string} blockName uniform block name
  5766. * @param {ArrayBufferView} data
  5767. * @returns {SpeedyProgram} this
  5768. */
  5769. setUBO(blockName, data) {
  5770. if (this._ubo === null) this._ubo = new UBOHelper(this._gl, this._program);
  5771. this._ubo.set(blockName, data);
  5772. return this;
  5773. }
  5774. /**
  5775. * Release the resources associated with this SpeedyProgram
  5776. * @returns {null}
  5777. */
  5778. release() {
  5779. const gl = this._gl;
  5780. // Release UBOs (if any)
  5781. if (this._ubo != null) this._ubo = this._ubo.release();
  5782. // Unlink textures
  5783. this._texture.fill(null);
  5784. // Release geometry
  5785. this._geometry = this._geometry.release();
  5786. // Release program
  5787. gl.deleteProgram(this._program);
  5788. this._program = null;
5789. // Do we need to delete the shaders as well? Section 5.14.9 (Programs and shaders)
5790. // of the WebGL 1.0 spec states that the underlying GL object is automatically
5791. // marked for deletion when the JS object is destroyed (i.e., garbage collected),
5792. // so an explicit gl.deleteShader() is not required here
  5793. // done!
  5794. return null;
  5795. }
  5796. /**
  5797. * A constant #defined in the shader declaration
  5798. * @param {string} name
  5799. * @returns {number}
  5800. */
  5801. definedConstant(name) {
  5802. return this._shaderdecl.definedConstant(name);
  5803. }
  5804. /**
  5805. * Helper method for pingpong rendering: alternates
  5806. * the texture index from 0 to 1 and vice-versa
  5807. */
  5808. _pingpong() {
  5809. if (this._texture.length > 1) this._textureIndex = 1 - this._textureIndex;
  5810. }
  5811. /**
  5812. * Compile and link GLSL shaders
  5813. * @param {WebGL2RenderingContext} gl
  5814. * @param {string} vertexShaderSource GLSL code of the vertex shader
  5815. * @param {string} fragmentShaderSource GLSL code of the fragment shader
  5816. * @returns {WebGLProgram}
  5817. */
  5818. static _compile(gl, vertexShaderSource, fragmentShaderSource) {
  5819. const program = gl.createProgram();
  5820. const vertexShader = gl.createShader(gl.VERTEX_SHADER);
  5821. const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
  5822. // compile vertex shader
  5823. gl.shaderSource(vertexShader, vertexShaderSource);
  5824. gl.compileShader(vertexShader);
  5825. gl.attachShader(program, vertexShader);
  5826. // compile fragment shader
  5827. gl.shaderSource(fragmentShader, fragmentShaderSource);
  5828. gl.compileShader(fragmentShader);
  5829. gl.attachShader(program, fragmentShader);
  5830. // link program
  5831. gl.linkProgram(program);
  5832. gl.validateProgram(program);
  5833. // return on success
  5834. if (gl.getProgramParameter(program, gl.LINK_STATUS)) return program;
5835. // collect the error logs and release the GL objects
  5836. const errors = [gl.getShaderInfoLog(fragmentShader), gl.getShaderInfoLog(vertexShader), gl.getProgramInfoLog(program)];
  5837. gl.deleteProgram(program);
  5838. gl.deleteShader(fragmentShader);
  5839. gl.deleteShader(vertexShader);
5840. // format an error message with numbered source lines and throw
  5841. const spaces = i => Math.max(0, 2 - Math.floor(Math.log10(i)));
  5842. const col = k => new Array(spaces(k)).fill(' ').join('') + k + '. ';
  5843. const source = errors[0] ? fragmentShaderSource : vertexShaderSource;
  5844. const formattedSource = source.split('\n').map((line, no) => col(1 + no) + line).join('\n');
  5845. throw new utils_errors/* GLError */.wB(`\n\n---------- ERROR ----------\n\n` + errors.filter(err => err).join('\n') + `\n\n---------- SOURCE CODE ----------\n\n` + formattedSource + '\n');
  5846. }
  5847. }
  5848. // ============================================================================
  5849. // HELPERS
  5850. // ============================================================================
  5851. /**
  5852. * Configure and store the VAO and the VBOs
  5853. * @param {WebGL2RenderingContext} gl
  5854. * @param {LocationOfAttributes} location
  5855. * @returns {ProgramGeometry}
  5856. *
  5857. * @typedef {Object} LocationOfAttributes
  5858. * @property {number} position
  5859. * @property {number} texCoord
  5860. *
  5861. * @typedef {Object} BufferOfAttributes
  5862. * @property {WebGLBuffer} position
  5863. * @property {WebGLBuffer} texCoord
  5864. */
  5865. function ProgramGeometry(gl, location) {
  5866. /** @type {WebGLVertexArrayObject} Vertex Array Object */
  5867. this.vao = gl.createVertexArray();
  5868. /** @type {BufferOfAttributes} Vertex Buffer Objects */
  5869. this.vbo = Object.freeze({
  5870. position: gl.createBuffer(),
  5871. texCoord: gl.createBuffer()
  5872. });
  5873. /** @type {WebGL2RenderingContext} */
  5874. this._gl = gl;
  5875. // bind the VAO
  5876. gl.bindVertexArray(this.vao);
  5877. // set the position attribute
  5878. gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.position);
  5879. gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  5880. // clip coordinates (CCW)
  5881. -1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, 1]), gl.STATIC_DRAW);
  5882. gl.enableVertexAttribArray(location.position);
  5883. gl.vertexAttribPointer(location.position,
  5884. // attribute location
  5885. 2,
  5886. // 2 components per vertex (x,y)
  5887. gl.FLOAT,
  5888. // type
  5889. false,
  5890. // don't normalize
  5891. 0,
  5892. // default stride (tightly packed)
  5893. 0); // offset
  5894. // set the texCoord attribute
  5895. gl.bindBuffer(gl.ARRAY_BUFFER, this.vbo.texCoord);
  5896. gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  5897. // texture coordinates (CCW)
  5898. 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1]), gl.STATIC_DRAW);
  5899. gl.enableVertexAttribArray(location.texCoord);
  5900. gl.vertexAttribPointer(location.texCoord,
  5901. // attribute location
  5902. 2,
  5903. // 2 components per vertex (x,y)
  5904. gl.FLOAT,
  5905. // type
  5906. false,
  5907. // don't normalize
  5908. 0,
  5909. // default stride (tightly packed)
  5910. 0); // offset
  5911. // unbind
  5912. gl.bindBuffer(gl.ARRAY_BUFFER, null);
  5913. gl.bindVertexArray(null);
  5914. // done!
  5915. return Object.freeze(this);
  5916. }
  5917. /**
  5918. * Releases the internal resources
  5919. * @returns {null}
  5920. */
  5921. ProgramGeometry.prototype.release = function () {
  5922. const gl = this._gl;
  5923. gl.deleteVertexArray(this.vao);
  5924. gl.deleteBuffer(this.vbo.position);
  5925. gl.deleteBuffer(this.vbo.texCoord);
  5926. return null;
  5927. };
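// Note on the geometry above: the position VBO holds 6 vertices (12 floats) forming
// two counter-clockwise triangles that cover all of clip space:
//   triangle 1: (-1,-1) -> ( 1,-1) -> (-1, 1)
//   triangle 2: (-1, 1) -> ( 1,-1) -> ( 1, 1)
// The texCoord VBO mirrors this layout in [0,1]x[0,1]. This is what lets
// SpeedyProgram.run() render a full-screen quad with gl.drawArrays(gl.TRIANGLES, 0, 6).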
  5928. /**
  5929. * Helper class for storing data in GLSL uniform variables
  5930. * @param {string} type
  5931. * @param {WebGLUniformLocation} location
  5932. */
  5933. function UniformVariable(type, location) {
  5934. /** @type {string} GLSL data type */
  5935. this.type = String(type);
  5936. if (!Object.prototype.hasOwnProperty.call(UNIFORM_SETTERS, this.type)) throw new utils_errors/* NotSupportedError */.EM(`Unsupported uniform type: ${this.type}`);
  5937. /** @type {WebGLUniformLocation} uniform location in a WebGL program */
  5938. this.location = location;
  5939. /** @type {string} setter function */
  5940. this.setter = UNIFORM_SETTERS[this.type];
  5941. const n = Number(this.setter.match(/^uniform(Matrix)?(\d)/)[2]) | 0;
  5942. /** @type {number} is the uniform a scalar (0), a vector (1) or a matrix (2)? */
  5943. this.dim = this.type.startsWith('mat') ? 2 : this.type.indexOf('vec') >= 0 ? 1 : 0;
  5944. /** @type {number} required number of scalars */
  5945. this.length = this.dim == 2 ? n * n : n;
  5946. /** @type {SpeedyProgramUniformValue|null} cached value */
  5947. this._value = null;
  5948. }
  5949. /**
  5950. * Set the value of a uniform variable
  5951. * @param {WebGL2RenderingContext} gl
  5952. * @param {SpeedyProgramUniformValue} value use column-major format for matrices
  5953. * @param {number} [texNo] current texture index
  5954. * @returns {number} new texture index
  5955. */
  5956. UniformVariable.prototype.setValue = function (gl, value, texNo = -1) {
  5957. const setValue = /** @type {Function} */gl[this.setter];
  5958. // check uniform type
  5959. if (typeof value === 'object' && this.type.endsWith('sampler2D')) {
  5960. // set texture
  5961. if (texNo >= gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS) throw new utils_errors/* NotSupportedError */.EM(`Can't activate texture unit ${texNo}: max is ${gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS}`);else if (Array.isArray(value)) throw new utils_errors/* NotSupportedError */.EM(`Can't pass arrays of textures to shaders`);else if (value == null) throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: cannot use ${value} as an input texture`);else if (texNo < 0) throw new utils_errors/* IllegalArgumentError */.qw(`Missing texNo`);
  5962. const tex = value;
  5963. gl.activeTexture(gl.TEXTURE0 + texNo);
  5964. gl.bindTexture(gl.TEXTURE_2D, tex.glTexture);
  5965. gl.uniform1i(this.location, texNo);
  5966. texNo++;
  5967. } else if (value === this._value && typeof value !== 'object') {
  5968. // do not update the uniform if it hasn't changed
  5969. // note that value may be an array whose entries may have been updated
  5970. void 0;
  5971. } else if (typeof value === 'number' || typeof value === 'boolean') {
  5972. // set scalar value
  5973. setValue.call(gl, this.location, value);
  5974. } else if (Array.isArray(value)) {
  5975. // set vector or matrix
  5976. if (value.length === this.length) {
  5977. if (this.dim == 2) setValue.call(gl, this.location, false, value); // matrix
  5978. else setValue.call(gl, this.location, ...value); // vector
  5979. } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: incorrect number of values for ${this.type}: "${value}"`);
  5980. } else throw new utils_errors/* IllegalArgumentError */.qw(`Can't run shader: unrecognized argument "${value}"`);
  5981. // cache the value
  5982. this._value = value;
  5983. // done
  5984. return texNo;
  5985. };
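// A minimal sketch (not used by the library) of the dispatch performed by setValue() above,
// assuming the standard WebGL2 setter names that UNIFORM_SETTERS (defined earlier in this
// file) resolves to, e.g. 'vec2' -> 'uniform2f' and 'mat3' -> 'uniformMatrix3fv'.
// The names exampleSetVec2 / exampleSetMat3 are illustrative only.
function exampleSetVec2(gl, location, values) {
    // vectors are passed as spread scalars: gl.uniform2f(location, x, y)
    gl['uniform2f'](location, ...values);
}
function exampleSetMat3(gl, location, values) {
    // matrices take a transpose flag (false) and their 9 values in
    // column-major order, as documented for setValue() above
    gl['uniformMatrix3fv'](location, false, values);
}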
  5986. /**
  5987. * @typedef {object} UBOStuff
  5988. * @property {WebGLBuffer} buffer
  5989. * @property {number} blockBindingIndex "global" binding index
  5990. * @property {number} blockIndex UBO "location" in the program
  5991. * @property {ArrayBufferView|null} data user-data
  5992. */
  5993. /**
  5994. * A helper class for handling Uniform Buffer Objects (UBOs)
  5995. * @param {WebGL2RenderingContext} gl
  5996. * @param {WebGLProgram} program
  5997. */
  5998. function UBOHelper(gl, program) {
  5999. /** @type {WebGL2RenderingContext} */
  6000. this._gl = gl;
  6001. /** @type {WebGLProgram} */
  6002. this._program = program;
  6003. /** @type {number} auto-increment counter */
  6004. this._nextIndex = 0;
  6005. /** @type {Object<string,UBOStuff>} UBO dictionary indexed by uniform block names */
  6006. this._ubo = Object.create(null);
  6007. }
  6008. /**
  6009. * Set Uniform Buffer Object data
  6010. * (the buffer will be uploaded when the program is executed)
  6011. * @param {string} name uniform block name
  6012. * @param {ArrayBufferView} data
  6013. */
  6014. UBOHelper.prototype.set = function (name, data) {
  6015. const gl = this._gl;
  6016. // create UBO entry
  6017. if (this._ubo[name] === undefined) {
  6018. this._ubo[name] = {
  6019. buffer: gl.createBuffer(),
  6020. blockBindingIndex: this._nextIndex++,
  6021. blockIndex: -1,
  6022. data: null
  6023. };
  6024. }
  6025. // get UBO entry for the given block name
  6026. const ubo = this._ubo[name];
  6027. // read block index & assign binding point
  6028. if (ubo.blockIndex < 0) {
  6029. const blockIndex = gl.getUniformBlockIndex(this._program, name); // GLuint
  6030. gl.uniformBlockBinding(this._program, blockIndex, ubo.blockBindingIndex);
  6031. ubo.blockIndex = blockIndex;
  6032. }
  6033. // store the data - we'll upload it later
  6034. ubo.data = data;
  6035. };
  6036. /**
  6037. * Update UBO data
  6038. * Called when we're using the appropriate WebGLProgram
  6039. */
  6040. UBOHelper.prototype.update = function () {
  6041. const gl = this._gl;
  6042. for (const name in this._ubo) {
  6043. const ubo = this._ubo[name];
  6044. gl.bindBuffer(gl.UNIFORM_BUFFER, ubo.buffer);
  6045. gl.bufferData(gl.UNIFORM_BUFFER, ubo.data, gl.DYNAMIC_DRAW);
  6046. gl.bindBufferBase(gl.UNIFORM_BUFFER, ubo.blockBindingIndex, ubo.buffer);
  6047. gl.bindBuffer(gl.UNIFORM_BUFFER, null);
  6048. }
  6049. };
  6050. /**
  6051. * Release allocated buffers
  6052. * @returns {null}
  6053. */
  6054. UBOHelper.prototype.release = function () {
  6055. const gl = this._gl;
  6056. for (const name in this._ubo) {
  6057. const ubo = this._ubo[name];
  6058. gl.deleteBuffer(ubo.buffer);
  6059. ubo.data = null;
  6060. }
  6061. return null;
  6062. };
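// A minimal usage sketch of the UBO path above (illustrative only; 'ExampleBlock' and
// exampleUploadUBO are not part of the library). Assuming `program` is a SpeedyProgram
// whose shader declares a uniform block named 'ExampleBlock', the caller packs the data
// to match the block's memory layout (e.g. std140 padding) and the helper uploads it
// the next time the program runs:
function exampleUploadUBO(program) {
    const data = new Float32Array([1, 2, 3, 4]); // must match the uniform block's layout
    program.setUBO('ExampleBlock', data);        // stored by UBOHelper.set()
    // the buffer is uploaded by UBOHelper.update() when the program is executed
}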
  6063. /**
  6064. * Generates an indexed variable name, as in variable[index]
  6065. * @param {string} variable
  6066. * @param {number} index
  6067. * @returns {string} variable[index]
  6068. */
  6069. function indexedVariable(variable, index) {
  6070. //return `${variable}[${index}]`; // no caching
  6071. // is this cache lookup really faster than string concatenation?
  6072. // what about memory consumption?
  6073. const cache = indexedVariable.cache;
  6074. let nameList = cache.get(variable);
  6075. if (nameList === undefined) cache.set(variable, nameList = []);
  6076. if (nameList[index] === undefined) nameList[index] = `${variable}[${index}]`;
  6077. return nameList[index];
  6078. }
  6079. /** @type {Map<string,string[]>} cached argument names */
  6080. indexedVariable.cache = new Map(); // Object.create(null)
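// Example of the caching behavior above (illustrative only): repeated calls with the same
// arguments return the same cached string, so the Map lookup in SpeedyProgram.run()
// avoids rebuilding "name[i]" strings for array arguments on every frame.
function exampleIndexedVariable() {
    const a = indexedVariable('keypoints', 2); // builds and caches "keypoints[2]"
    const b = indexedVariable('keypoints', 2); // served from indexedVariable.cache
    return a === b; // true
}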
  6081. ;// CONCATENATED MODULE: ./src/gpu/speedy-program-group.js
  6082. /*
  6083. * speedy-vision.js
  6084. * GPU-accelerated Computer Vision for JavaScript
  6085. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6086. *
  6087. * Licensed under the Apache License, Version 2.0 (the "License");
  6088. * you may not use this file except in compliance with the License.
  6089. * You may obtain a copy of the License at
  6090. *
  6091. * http://www.apache.org/licenses/LICENSE-2.0
  6092. *
  6093. * Unless required by applicable law or agreed to in writing, software
  6094. * distributed under the License is distributed on an "AS IS" BASIS,
  6095. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6096. * See the License for the specific language governing permissions and
  6097. * limitations under the License.
  6098. *
  6099. * speedy-program-group.js
  6100. * An abstract group of programs that run on the GPU
  6101. */
  6102. /** @typedef {import('./speedy-program').SpeedyProgramOptions} SpeedyProgramOptions */
  6103. /**
  6104. * @typedef {object} SpeedyProgramHelpers
  6105. * @property {function(): SpeedyProgramOptions} usesPingpongRendering
  6106. * @property {function(): SpeedyProgramOptions} rendersToCanvas
  6107. */
  6108. /** @const {SpeedyProgramHelpers} Program settings generator */
  6109. const PROGRAM_HELPERS = Object.freeze({
  6110. /**
  6111. * Pingpong Rendering: the output texture of a
  6112. * program cannot be used as an input to itself.
  6113. * This is a convenient helper in these situations
  6114. * @returns {SpeedyProgramOptions}
  6115. */
  6116. usesPingpongRendering() {
  6117. return {
  6118. pingpong: true
  6119. };
  6120. },
  6121. /**
  6122. * Render to canvas
  6123. * Use it when we're supposed to see the texture
  6124. * @returns {SpeedyProgramOptions}
  6125. */
  6126. rendersToCanvas() {
  6127. return {
  6128. renderToTexture: false
  6129. };
  6130. }
  6131. });
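// A minimal sketch of why pingpong rendering exists (illustrative; exampleRunIteratively
// and someProgram are not part of the library). A WebGL program cannot sample from the
// texture it is currently rendering to, so a pingpong program keeps two internal textures
// and SpeedyProgram._pingpong() swaps them after each call. Assuming, as elsewhere in this
// bundle, that a SpeedyProgram is invoked as a function with its shader arguments, an
// iterative algorithm (e.g. scanMinMax2D below) simply feeds each output back as input:
function exampleRunIteratively(someProgram, initialTexture, numPasses) {
    let image = initialTexture;
    for (let i = 0; i < numPasses; i++)
        image = someProgram(image, i); // output of pass i becomes the input of pass i+1
    return image;
}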
  6132. /**
  6133. * SpeedyProgramGroup
6134. * A semantically related group
  6135. * of programs that run on the GPU
  6136. * @abstract
  6137. */
  6138. class SpeedyProgramGroup {
  6139. /**
  6140. * Class constructor
  6141. * @protected
  6142. * @param {SpeedyGPU} gpu
  6143. */
  6144. constructor(gpu) {
  6145. /** @type {SpeedyGPU} GPU-accelerated routines */
  6146. this._gpu = gpu;
  6147. /** @type {SpeedyProgram[]} the list of all programs that belong to this group */
  6148. this._programs = [];
  6149. }
  6150. /**
  6151. * Declare a program
  6152. * @protected
  6153. * @param {string} name Program name
  6154. * @param {ShaderDeclarationBuilder} builder Builder of a ShaderDeclaration
  6155. * @param {SpeedyProgramOptions} [options] Program settings
  6156. * @returns {this}
  6157. */
  6158. declare(name, builder, options = {}) {
  6159. // lazy instantiation of kernels
  6160. Object.defineProperty(this, name, {
  6161. get: (() => {
  6162. // Why cast a symbol to symbol?
  6163. // Suppress error TS9005: Declaration emit for this file requires using private name 'key'.
  6164. const key = /** @type {symbol} */Symbol(name);
  6165. return () => this[key] || (this[key] = this._createProgram(builder.build(), options));
  6166. })()
  6167. });
  6168. return this;
  6169. }
  6170. /**
  6171. * Neat helpers to be used when declaring programs
  6172. * @returns {SpeedyProgramHelpers}
  6173. */
  6174. get program() {
  6175. return PROGRAM_HELPERS;
  6176. }
  6177. /**
  6178. * Releases all programs from this group
  6179. * @returns {null}
  6180. */
  6181. release() {
  6182. for (let i = 0; i < this._programs.length; i++) this._programs[i].release();
  6183. return null;
  6184. }
  6185. /**
  6186. * Spawn a SpeedyProgram
  6187. * @param {ShaderDeclaration} shaderdecl Shader declaration
  6188. * @param {SpeedyProgramOptions} [options] Program settings
  6189. * @returns {SpeedyProgram}
  6190. */
  6191. _createProgram(shaderdecl, options = {}) {
  6192. const program = new SpeedyProgram(this._gpu.gl, shaderdecl, options);
  6193. this._programs.push(program);
  6194. return program;
  6195. }
  6196. }
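// A standalone sketch of the lazy-instantiation pattern used by declare() above
// (illustrative only, not part of the library): the property getter builds the value
// on first access and caches it under a private Symbol key, so declaring many
// programs costs nothing until they are actually used.
function exampleLazyProperty(object, name, factory) {
    Object.defineProperty(object, name, {
        get: (() => {
            const key = Symbol(name); // private storage key
            return () => object[key] || (object[key] = factory()); // build once, then reuse
        })()
    });
    return object;
}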
  6197. ;// CONCATENATED MODULE: ./src/gpu/programs/utils.js
  6198. /*
  6199. * speedy-vision.js
  6200. * GPU-accelerated Computer Vision for JavaScript
  6201. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6202. *
  6203. * Licensed under the Apache License, Version 2.0 (the "License");
  6204. * you may not use this file except in compliance with the License.
  6205. * You may obtain a copy of the License at
  6206. *
  6207. * http://www.apache.org/licenses/LICENSE-2.0
  6208. *
  6209. * Unless required by applicable law or agreed to in writing, software
  6210. * distributed under the License is distributed on an "AS IS" BASIS,
  6211. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6212. * See the License for the specific language governing permissions and
  6213. * limitations under the License.
  6214. *
  6215. * utils.js
  6216. * GPU utilities
  6217. */
  6218. //
  6219. // Shaders
  6220. //
  6221. // Copy image
  6222. const copy = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl').withArguments('image');
  6223. // Copy keypoints
  6224. const copyKeypoints = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({
  6225. 'TYPE': 1
  6226. }).withArguments('image');
  6227. // Copy 2D vectors
  6228. const copy2DVectors = (0,shader_declaration/* importShader */.bf)('utils/copy-raster.glsl').withDefines({
  6229. 'TYPE': 2
  6230. }).withArguments('image');
  6231. // Flip y-axis for output
  6232. const flipY = (0,shader_declaration/* importShader */.bf)('utils/copy.glsl', 'utils/flip-y.vs.glsl').withArguments('image');
  6233. // Fill image with a constant
  6234. const fill = (0,shader_declaration/* importShader */.bf)('utils/fill.glsl').withArguments('value');
  6235. // Fill zero or more color components of the input image with a constant value
  6236. const fillComponents = (0,shader_declaration/* importShader */.bf)('utils/fill-components.glsl').withArguments('image', 'pixelComponents', 'value');
6237. // Copy one color component of src (selected by srcComponentId) to zero or more color components of a copy of dest
  6238. const copyComponents = (0,shader_declaration/* importShader */.bf)('utils/copy-components.glsl').withArguments('dest', 'src', 'destComponents', 'srcComponentId');
  6239. // Scan the entire image and find the minimum & maximum pixel intensity
  6240. const scanMinMax2D = (0,shader_declaration/* importShader */.bf)('utils/scan-minmax2d.glsl').withArguments('image', 'iterationNumber');
  6241. // Compute the partial derivatives of an image
  6242. const sobelDerivatives = (0,shader_declaration/* importShader */.bf)('utils/sobel-derivatives.glsl', 'utils/sobel-derivatives.vs.glsl').withArguments('pyramid', 'lod');
  6243. /**
  6244. * SpeedyProgramGroupUtils
  6245. * Utility operations
  6246. */
  6247. class SpeedyProgramGroupUtils extends SpeedyProgramGroup {
  6248. /**
  6249. * Class constructor
  6250. * @param {SpeedyGPU} gpu
  6251. */
  6252. constructor(gpu) {
  6253. super(gpu);
  6254. this
  6255. // render to the canvas
  6256. .declare('renderToCanvas', flipY, Object.assign({}, this.program.rendersToCanvas()))
  6257. // copy image
  6258. .declare('copy', copy)
  6259. // copy keypoints
  6260. .declare('copyKeypoints', copyKeypoints)
  6261. // copy 2D vectors
  6262. .declare('copy2DVectors', copy2DVectors)
  6263. // Fill image with a constant
  6264. .declare('fill', fill)
  6265. // Fill zero or more color components of the input image with a constant value
  6266. .declare('fillComponents', fillComponents)
6267. // Copy one color component of src (selected by srcComponentId) to zero or more color components of a copy of dest
  6268. .declare('copyComponents', copyComponents)
  6269. // find minimum & maximum pixel intensity
  6270. .declare('scanMinMax2D', scanMinMax2D, Object.assign({}, this.program.usesPingpongRendering()))
  6271. // Compute the partial derivatives of an image
  6272. .declare('sobelDerivatives', sobelDerivatives);
  6273. }
  6274. }
  6275. // EXTERNAL MODULE: ./src/gpu/shaders/filters/convolution.js
  6276. var convolution = __nested_webpack_require_314174__(1672);
  6277. ;// CONCATENATED MODULE: ./src/gpu/programs/filters.js
  6278. /*
  6279. * speedy-vision.js
  6280. * GPU-accelerated Computer Vision for JavaScript
  6281. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6282. *
  6283. * Licensed under the Apache License, Version 2.0 (the "License");
  6284. * you may not use this file except in compliance with the License.
  6285. * You may obtain a copy of the License at
  6286. *
  6287. * http://www.apache.org/licenses/LICENSE-2.0
  6288. *
  6289. * Unless required by applicable law or agreed to in writing, software
  6290. * distributed under the License is distributed on an "AS IS" BASIS,
  6291. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6292. * See the License for the specific language governing permissions and
  6293. * limitations under the License.
  6294. *
  6295. * filters.js
  6296. * Image filtering on the GPU
  6297. */
  6298. //
  6299. // Shaders
  6300. //
  6301. // Convert to greyscale
  6302. const rgb2grey = (0,shader_declaration/* importShader */.bf)('filters/rgb2grey.glsl').withArguments('image');
  6303. // Convolution
  6304. const filters_convolution = [3, 5, 7].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution2d.glsl').withDefines({
  6305. 'KERNEL_SIZE_SQUARED': ksize * ksize
  6306. }).withArguments('image', 'kernel'), obj), {});
  6307. // Separable convolution
  6308. const convolutionX = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl').withDefines({
  6309. 'KERNEL_SIZE': ksize,
  6310. 'AXIS': 0
  6311. }).withArguments('image', 'kernel'), obj), {});
  6312. const convolutionY = [3, 5, 7, 9, 11, 13, 15].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/convolution1d.glsl').withDefines({
  6313. 'KERNEL_SIZE': ksize,
  6314. 'AXIS': 1
  6315. }).withArguments('image', 'kernel'), obj), {});
  6316. // Median filter
  6317. const median = [3, 5, 7].reduce((obj, ksize) => (obj[ksize] = (0,shader_declaration/* importShader */.bf)('filters/fast-median.glsl').withDefines({
  6318. 'KERNEL_SIZE': ksize
  6319. }).withArguments('image'), obj), {});
  6320. // Normalize image
  6321. const normalizeGreyscale = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl').withDefines({
  6322. 'GREYSCALE': 1
  6323. }).withArguments('minmax2d', 'minValue', 'maxValue');
  6324. const normalizeColored = (0,shader_declaration/* importShader */.bf)('filters/normalize-image.glsl').withDefines({
  6325. 'GREYSCALE': 0
  6326. }).withArguments('minmax2dRGB', 'minValue', 'maxValue');
  6327. // Nightvision
  6328. const nightvision = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl').withDefines({
  6329. 'GREYSCALE': 0
  6330. }).withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
  6331. const nightvisionGreyscale = (0,shader_declaration/* importShader */.bf)('filters/nightvision.glsl').withDefines({
  6332. 'GREYSCALE': 1
  6333. }).withArguments('image', 'illuminationMap', 'gain', 'offset', 'decay');
  6334. //
  6335. // Utilities
  6336. //
  6337. // Handy conversion for Gaussian filters
  6338. // (symmetric kernel, approx. zero after 3*sigma)
  6339. const ksize2sigma = ksize => Math.max(1.0, ksize / 6.0);
  6340. // Generate a 1D Gaussian kernel
  6341. const gaussian = ksize => utils/* Utils */.A.gaussianKernel(ksize2sigma(ksize), ksize);
  6342. // Generate a 1D Box filter
  6343. const box = ksize => new Array(ksize).fill(1.0 / ksize);
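// Worked example of the helpers above (illustrative only): a 9-tap Gaussian gets
// sigma = max(1, 9/6) = 1.5, so the kernel is effectively zero beyond ~3*sigma,
// which matches the 4-pixel half-width of a 9-tap kernel; box(3) is the averaging
// kernel [1/3, 1/3, 1/3].
function exampleKernels() {
    return {
        sigma9: ksize2sigma(9), // 1.5
        box3: box(3)            // [1/3, 1/3, 1/3] (up to floating-point rounding)
    };
}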
  6344. /**
  6345. * SpeedyProgramGroupFilters
  6346. * Image filtering
  6347. */
  6348. class SpeedyProgramGroupFilters extends SpeedyProgramGroup {
  6349. /**
  6350. * Class constructor
  6351. * @param {SpeedyGPU} gpu
  6352. */
  6353. constructor(gpu) {
  6354. super(gpu);
  6355. this
  6356. // convert to greyscale
  6357. .declare('rgb2grey', rgb2grey)
  6358. // median filters
  6359. .declare('median3', median[3]) // 3x3 window
  6360. .declare('median5', median[5]) // 5x5 window
  6361. .declare('median7', median[7]) // 7x7 window
  6362. // 2D convolution
  6363. .declare('convolution3', filters_convolution[3]) // 3x3 kernel
  6364. .declare('convolution5', filters_convolution[5]) // 5x5 kernel
  6365. .declare('convolution7', filters_convolution[7]) // 7x7 kernel
  6366. // 1D separable convolution
  6367. .declare('convolution3x', convolutionX[3]) // 1x3 kernel
  6368. .declare('convolution3y', convolutionY[3]) // 3x1 kernel
  6369. .declare('convolution5x', convolutionX[5]) // 1x5 kernel
  6370. .declare('convolution5y', convolutionY[5]) // 5x1 kernel
  6371. .declare('convolution7x', convolutionX[7]).declare('convolution7y', convolutionY[7]).declare('convolution9x', convolutionX[9]).declare('convolution9y', convolutionY[9]).declare('convolution11x', convolutionX[11]).declare('convolution11y', convolutionY[11]).declare('convolution13x', convolutionX[13]).declare('convolution13y', convolutionY[13]).declare('convolution15x', convolutionX[15]).declare('convolution15y', convolutionY[15])
  6372. // normalize image
  6373. .declare('normalizeGreyscale', normalizeGreyscale).declare('normalizeColored', normalizeColored)
  6374. // nightvision
  6375. .declare('nightvision', nightvision).declare('nightvisionGreyscale', nightvisionGreyscale).declare('illuminationMapLoX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 31))).declare('illuminationMapLoY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 31))).declare('illuminationMapX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 63))).declare('illuminationMapY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 63))).declare('illuminationMapHiX', (0,convolution.convX)(utils/* Utils */.A.gaussianKernel(80, 255))).declare('illuminationMapHiY', (0,convolution.convY)(utils/* Utils */.A.gaussianKernel(80, 255)))
  6376. // gaussian: separable kernels
  6377. // see also: http://dev.theomader.com/gaussian-kernel-calculator/
  6378. .declare('gaussian3x', (0,convolution.convX)([0.25, 0.5, 0.25])) // sigma ~ 1.0
  6379. .declare('gaussian3y', (0,convolution.convY)([0.25, 0.5, 0.25])).declare('gaussian5x', (0,convolution.convX)([0.05, 0.25, 0.4, 0.25, 0.05])) // sigma ~ 1.0
  6380. .declare('gaussian5y', (0,convolution.convY)([0.05, 0.25, 0.4, 0.25, 0.05])).declare('gaussian7x', (0,convolution.convX)(gaussian(7))).declare('gaussian7y', (0,convolution.convY)(gaussian(7))).declare('gaussian9x', (0,convolution.convX)(gaussian(9))).declare('gaussian9y', (0,convolution.convY)(gaussian(9))).declare('gaussian11x', (0,convolution.convX)(gaussian(11))).declare('gaussian11y', (0,convolution.convY)(gaussian(11)))
  6381. // box filter: separable kernels
  6382. .declare('box3x', (0,convolution.convX)(box(3))).declare('box3y', (0,convolution.convY)(box(3))).declare('box5x', (0,convolution.convX)(box(5))).declare('box5y', (0,convolution.convY)(box(5))).declare('box7x', (0,convolution.convX)(box(7))).declare('box7y', (0,convolution.convY)(box(7))).declare('box9x', (0,convolution.convX)(box(9))).declare('box9y', (0,convolution.convY)(box(9))).declare('box11x', (0,convolution.convX)(box(11))).declare('box11y', (0,convolution.convY)(box(11)));
  6383. }
  6384. }
  6385. // EXTERNAL MODULE: ./src/core/speedy-namespace.js
  6386. var speedy_namespace = __nested_webpack_require_314174__(6634);
  6387. ;// CONCATENATED MODULE: ./src/gpu/speedy-descriptordb.js
  6388. /*
  6389. * speedy-vision.js
  6390. * GPU-accelerated Computer Vision for JavaScript
  6391. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6392. *
  6393. * Licensed under the Apache License, Version 2.0 (the "License");
  6394. * you may not use this file except in compliance with the License.
  6395. * You may obtain a copy of the License at
  6396. *
  6397. * http://www.apache.org/licenses/LICENSE-2.0
  6398. *
  6399. * Unless required by applicable law or agreed to in writing, software
  6400. * distributed under the License is distributed on an "AS IS" BASIS,
  6401. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6402. * See the License for the specific language governing permissions and
  6403. * limitations under the License.
  6404. *
  6405. * speedy-descriptordb.js
  6406. * A database of binary descriptors in video memory
  6407. */
  6408. //
  6409. // A database of binary descriptors is a texture that stores
  6410. // a set of (descriptor: uint8_t[]) entries.
  6411. //
  6412. /** @type {number} we use RGBA8 textures to store the descriptors */
  6413. const DESCRIPTORDB_BYTESPERPIXEL = 4;
  6414. /** @type {number} texture size goes up to 16 MB */
6415. const DESCRIPTORDB_MAXLOG2STRIDE = 11; // 2048x2048 RGBA8 textures are guaranteed to be available in WebGL2 (the WebGL 2.0 / OpenGL ES 3.0 spec requires MAX_TEXTURE_SIZE to be at least 2048)
  6416. /**
  6417. * Utility for generating a database of binary descriptors in video memory
  6418. */
  6419. class SpeedyDescriptorDB extends speedy_namespace/* SpeedyNamespace */.Q {
  6420. /**
  6421. * Create a database of binary descriptors
  6422. * @param {SpeedyTexture} texture output texture
  6423. * @param {Uint8Array[]} descriptors binary descriptors
  6424. * @param {number} descriptorSize in bytes, a multiple of 4
  6425. * @returns {SpeedyTexture} texture
  6426. */
  6427. static create(texture, descriptors, descriptorSize) {
  6428. utils/* Utils */.A.assert(descriptorSize % DESCRIPTORDB_BYTESPERPIXEL == 0, `Invalid descriptorSize: ${descriptorSize}`);
  6429. const numberOfDescriptors = descriptors.length;
  6430. const pixelsPerDescriptor = descriptorSize / DESCRIPTORDB_BYTESPERPIXEL;
  6431. // find an appropriate texture size
  6432. const n = Math.log2(pixelsPerDescriptor * Math.max(numberOfDescriptors, 1)) / 2;
  6433. const log2stride = Math.min(DESCRIPTORDB_MAXLOG2STRIDE, Math.ceil(n));
  6434. // setup texture parameters
  6435. const stride = 1 << log2stride;
  6436. const width = stride,
  6437. height = stride; // we use powers-of-two
  6438. // are we within storage capacity?
  6439. const capacity = width * height / pixelsPerDescriptor;
  6440. if (numberOfDescriptors > capacity) throw new utils_errors/* NotSupportedError */.EM(`The capacity of the descriptorDB (${capacity} for ${descriptorSize * 8}-bit descriptors) has been exceeded`);
  6441. // create texture data
  6442. const data = new Uint8Array(width * height * DESCRIPTORDB_BYTESPERPIXEL);
  6443. for (let i = 0; i < numberOfDescriptors; i++) {
  6444. const byteOffset = i * descriptorSize;
  6445. const descriptor = descriptors[i];
  6446. // validate input
  6447. utils/* Utils */.A.assert(descriptor.byteLength === descriptorSize);
  6448. utils/* Utils */.A.assert(byteOffset + descriptorSize <= data.byteLength);
  6449. // write data
  6450. data.set(descriptor, byteOffset);
  6451. }
  6452. // log data for further study
  6453. const MEGABYTE = 1048576;
  6454. const totalSize = numberOfDescriptors * descriptorSize;
  6455. utils/* Utils */.A.log(`Creating a ${width}x${height} database of ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors ` + `(total size: ${(totalSize / MEGABYTE).toFixed(2)} MB)`);
  6456. // upload to the texture
  6457. texture.resize(width, height);
  6458. texture.upload(data);
  6459. return texture;
  6460. }
  6461. }
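// Worked example of the sizing logic in create() above (arithmetic only; the function name
// is illustrative): 1000 descriptors of 32 bytes occupy 8 RGBA8 pixels each, i.e. 8000
// pixels in total. n = log2(8000) / 2 ~= 6.48, so log2stride = min(11, ceil(6.48)) = 7 and
// the texture is 128x128 = 16384 pixels, enough for 16384 / 8 = 2048 descriptors.
function exampleDescriptorDBStride(numberOfDescriptors, descriptorSize) {
    const pixelsPerDescriptor = descriptorSize / DESCRIPTORDB_BYTESPERPIXEL;
    const n = Math.log2(pixelsPerDescriptor * Math.max(numberOfDescriptors, 1)) / 2;
    return Math.min(DESCRIPTORDB_MAXLOG2STRIDE, Math.ceil(n)); // 7 for (1000, 32)
}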
  6462. ;// CONCATENATED MODULE: ./src/gpu/speedy-lsh.js
  6463. /*
  6464. * speedy-vision.js
  6465. * GPU-accelerated Computer Vision for JavaScript
  6466. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6467. *
  6468. * Licensed under the Apache License, Version 2.0 (the "License");
  6469. * you may not use this file except in compliance with the License.
  6470. * You may obtain a copy of the License at
  6471. *
  6472. * http://www.apache.org/licenses/LICENSE-2.0
  6473. *
  6474. * Unless required by applicable law or agreed to in writing, software
  6475. * distributed under the License is distributed on an "AS IS" BASIS,
  6476. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6477. * See the License for the specific language governing permissions and
  6478. * limitations under the License.
  6479. *
  6480. * speedy-lsh.js
  6481. * GPU-based LSH tables for fast matching of binary descriptors
  6482. */
  6483. /*
  6484. * ALE'S GPU-BASED LSH FOR APPROXIMATE KNN MATCHING
  6485. * ------------------------------------------------
  6486. *
  6487. * Here is my variant of Locality Sensitive Hashing for GPU-based KNN matching!
  6488. * Indices of keypoint descriptors are stored in several tables, each with many
  6489. * buckets of fixed capacity. In a nutshell, I create a data structure of fixed
  6490. * size to match the keypoints.
  6491. *
  6492. * Buckets in video memory may get full. Wouldn't it be cool if we could use a
  6493. * probabilistic approach to let us work within their storage capacity?
  6494. *
  6495. * Let there be n buckets in a table, each with storage capacity c (holding
  6496. * up to c elements). Buckets are numbered from 0 to n-1.
  6497. *
  6498. * We pick uniformly a random bucket to store a new element in the table. Let
  6499. * X be the chosen bucket. The probability that we'll store the new element in
  6500. * any particular bucket k is:
  6501. *
  6502. * P(X = k) = 1/n (k = 0, 1, 2, ... n-1)
  6503. *
  6504. * On average, each new element stored in the table inserts 1/n of an element
  6505. * in each bucket. If we add m new elements to the table, each bucket receives
  6506. * m/n elements, on average(*).
  6507. *
  6508. * (*) for all k, define the Ik random variable as 1 if X = k and 0 otherwise.
  6509. * It follows that the expected value of Ik, E(Ik), is 1/n for all k. In
  6510. * addition, the expected value of (m Ik) is m * E(ik) = m/n.
  6511. *
  6512. * Now let Yi be the number of elements inserted in bucket i in m additions to
  6513. * the table. We model Yi as Poisson(m/n), since on average, m additions to
  6514. * the table result in m/n new elements being inserted in bucket i. Buckets
  6515. * are picked independently. Hence, for all i, the probability that we insert
  6516. * q elements in bucket i in m additions to the table is:
  6517. *
  6518. * P(Yi = q) = (m/n)^q * exp(-m/n) / q! (q = 0, 1, 2...)
  6519. *
  6520. * Given that each bucket has storage capacity c, we require Yi <= c with a
  6521. * high probability p (say, p = 0.99). This means that, in m additions, we
  6522. * don't want to exceed the capacity c with high probability. So, let us find
  6523. * a (large) value of m such that:
  6524. *
  6525. * P(Yi <= c) >= p
  6526. *
  6527. * Sounds good! We can find the largest matching m using binary search.
  6528. *
  6529. * I don't think we need to enforce a high probability that ALL buckets stay
  6530. * within their capacity - n is large, we need to use the available space, and
  6531. * we have multiple tables anyway.
  6532. *
  6533. * In practice, the assumption that buckets are picked uniformly doesn't hold:
  6534. * keypoints that are nearby tend to have similar descriptors and buckets are
  6535. * picked according to those descriptors. Still, this model works well enough
  6536. * in practice and it is simple! That's what I like about it!
  6537. *
  6538. * ... now, how I actually do the matching is the theme of the next episode!
  6539. */
  6540. /** @type {number} Default number of tables in a LSH data structure */
  6541. const LSH_DEFAULT_NUMBER_OF_TABLES = 8;
  6542. /** @type {number} Default number of bits of a hash */
  6543. const LSH_DEFAULT_HASH_SIZE = 15;
  6544. /** @type {number[]} Acceptable number of tables for a LSH data structure */
  6545. const LSH_ACCEPTABLE_NUMBER_OF_TABLES = [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32];
  6546. /** @type {number[]} Acceptable values for hashSize, in bits */
  6547. const LSH_ACCEPTABLE_HASH_SIZES = [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20];
  6548. /** @type {number[]} Acceptable sizes for keypoint descriptors, in bytes */
  6549. const LSH_ACCEPTABLE_DESCRIPTOR_SIZES = [32, 64];
  6550. /**
  6551. * @typedef {Object} LSHProfile LSH profile
  6552. * @property {string} name name of the profile
  6553. * @property {number} capacity maximum number of keypoints that can be stored in such a table
  6554. * @property {number} hashSize number of bits in a keypoint descriptor hash (at most 16)
  6555. * @property {number} tableCount number of tables, preferably a power of 2 (at most 16)
  6556. * @property {number} bucketCapacity maximum number of entries of a bucket of a table
  6557. */
  6558. /** @type {function(number,number,number):LSHProfile[]|null} generate LSH profiles sorted by increasing capacity */
  6559. const generateLSHProfiles = (t, h, p) => !LSH_ACCEPTABLE_HASH_SIZES.includes(h) || !LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(t) ? null : [{
  6560. name: 'x-small',
  6561. bucketCapacity: 1,
  6562. tableCount: t,
  6563. hashSize: h,
  6564. capacity: findTableCapacity(h, 1, p)
  6565. }, {
  6566. name: 'small',
  6567. bucketCapacity: 2,
  6568. tableCount: t,
  6569. hashSize: h,
  6570. capacity: findTableCapacity(h, 2, p)
  6571. }, {
  6572. name: 'small-plus',
  6573. bucketCapacity: 3,
  6574. tableCount: t,
  6575. hashSize: h,
  6576. capacity: findTableCapacity(h, 3, p)
  6577. }, {
  6578. name: 'medium',
  6579. bucketCapacity: 4,
  6580. tableCount: t,
  6581. hashSize: h,
  6582. capacity: findTableCapacity(h, 4, p)
  6583. }, {
  6584. name: 'medium-plus',
  6585. bucketCapacity: 5,
  6586. tableCount: t,
  6587. hashSize: h,
  6588. capacity: findTableCapacity(h, 5, p)
  6589. }, {
  6590. name: 'large',
  6591. bucketCapacity: 6,
  6592. tableCount: t,
  6593. hashSize: h,
  6594. capacity: findTableCapacity(h, 6, p)
  6595. }, {
  6596. name: 'x-large',
  6597. bucketCapacity: 8,
  6598. tableCount: t,
  6599. hashSize: h,
  6600. capacity: findTableCapacity(h, 8, p)
  6601. }];
  6602. //
  6603. // LSH hash sequences: random bits in increasing order
  6604. // We generate a few sequences (one for each table) supporting up to 16 hash bits
  6605. // We pad each sequence with invalid values at the end - we want to pick any bit with equal probability
  6606. //
  6607. /** @typedef {Uint32Array} BitSequences flattened array of LSH_SEQUENCE_COUNT sequences of LSH_SEQUENCE_MAXLEN elements each - each entry represents a bit index */
  6608. /** @typedef {Object<number,BitSequences>} BitSequencesIndexedByDescriptorSize */
  6609. /** @typedef {Object<number,BitSequencesIndexedByDescriptorSize>} LSHSequences */
  6610. /** @type {number} maximum number of elements of a sequence */
  6611. const LSH_SEQUENCE_MAXLEN = Math.max(...LSH_ACCEPTABLE_HASH_SIZES);
  6612. /** @type {number} number of sequences in a BitSequences object */
  6613. const LSH_SEQUENCE_COUNT = Math.max(...LSH_ACCEPTABLE_NUMBER_OF_TABLES);
  6614. /** @type {function(BitSequences): BitSequences} Sort subsequences of random bits in ascending order */
  6615. const partitionedSort = seq => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT).forEach(i => seq.subarray(i * LSH_SEQUENCE_MAXLEN, (i + 1) * LSH_SEQUENCE_MAXLEN).sort()), seq);
  6616. /** @type {function(number, BitSequences): BitSequences} Set the last p entries of the input subsequences to an invalid value */
  6617. const padSequences = (p, seq) => (utils/* Utils */.A.range(LSH_SEQUENCE_COUNT).forEach(i => seq.subarray((i + 1) * LSH_SEQUENCE_MAXLEN - p, (i + 1) * LSH_SEQUENCE_MAXLEN).fill(0xBADCAFE)), seq);
  6618. /** @type {LSHSequences} the bits we pick to form the hashes, laid out in ascending order and indexed by descriptorSize and hashSize */
  6619. const LSH_SEQUENCES = (f => LSH_ACCEPTABLE_HASH_SIZES.reduce((p, o) => (p[o] = f(o), p), {}))(h => ({
  6620. // for 256-bit descriptors
  6621. 32: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(256))].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN)))),
  6622. // for 512-bit descriptors
  6623. 64: partitionedSort(padSequences(LSH_SEQUENCE_MAXLEN - h, new Uint32Array([...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512)), ...utils/* Utils */.A.shuffle(utils/* Utils */.A.range(512))].slice(0, LSH_SEQUENCE_COUNT * LSH_SEQUENCE_MAXLEN))))
  6624. }));
  6625. //
  6626. // Misc
  6627. //
  6628. /** @type {number} we use RGBA8 textures (32 bits per pixel) as storage */
  6629. const LSH_BYTESPERPIXEL = 4;
  6630. /** @type {function(number): number} next power of 2 */
  6631. const nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  6632. /**
  6633. * GPU-based LSH tables for fast matching of binary descriptors
  6634. */
  6635. class SpeedyLSH {
  6636. /**
  6637. * Constructor
  6638. * @param {SpeedyTexture} lshTables texture to be used as the set of LSH tables
  6639. * @param {SpeedyTexture} descriptorDB texture to be used as the descriptor database
  6640. * @param {Uint8Array[]} descriptors the binary descriptors you'll store (make sure you don't repeat them, otherwise they will just waste space)
  6641. * @param {number} [tableCount] number of LSH tables, preferably a power of two
  6642. * @param {number} [hashSize] number of bits of a hash of a descriptor
  6643. * @param {number} [probability] probability of no discard events happening in the theoretical model
  6644. */
  6645. constructor(lshTables, descriptorDB, descriptors, tableCount = LSH_DEFAULT_NUMBER_OF_TABLES, hashSize = LSH_DEFAULT_HASH_SIZE, probability = 0.95) {
  6646. const descriptorCount = descriptors.length;
  6647. const descriptorSize = descriptorCount > 0 ? descriptors[0].byteLength : 0;
  6648. const lshProfiles = generateLSHProfiles(tableCount, hashSize, probability);
  6649. // validate input
  6650. utils/* Utils */.A.assert(descriptorCount > 0, `Can't build LSH tables without descriptors!`);
  6651. utils/* Utils */.A.assert(LSH_ACCEPTABLE_DESCRIPTOR_SIZES.includes(descriptorSize), `Can't build LSH tables: unacceptable descriptor size of ${descriptorSize} bytes`);
  6652. utils/* Utils */.A.assert(descriptors.findIndex(d => d.byteLength !== descriptorSize) < 0, `Can't build LSH tables: incorrectly sized descriptors. Expected ${descriptorSize} bytes for each`);
  6653. utils/* Utils */.A.assert(descriptorCount < globals.MATCH_MAX_INDEX, `Can't build LSH tables: too many descriptors (${descriptors.length})`);
  6654. utils/* Utils */.A.assert(lshProfiles != null, `Can't build LSH tables: unacceptable number of tables (${tableCount}) x hash size (${hashSize})`);
  6655. /** @type {LSHProfile} LSH profile */
  6656. this._profile = lshProfiles.find(profile => descriptorCount <= profile.capacity) || lshProfiles[lshProfiles.length - 1];
  6657. /** @type {number} descriptor size, in bytes */
  6658. this._descriptorSize = descriptorSize;
  6659. /** @type {number} number of descriptors */
  6660. this._descriptorCount = descriptorCount;
  6661. /** @type {BitSequences} bit sequences */
  6662. this._sequences = this._pickSequences(this._descriptorSize);
  6663. /** @type {SpeedyTexture} LSH tables storing indices of descriptors */
  6664. this._tables = this._createStaticTables(lshTables, this._sequences, descriptors, descriptorSize);
  6665. /** @type {SpeedyTexture} a storage of descriptors */
  6666. this._descriptorDB = SpeedyDescriptorDB.create(descriptorDB, descriptors, descriptorSize);
  6667. }
  6668. /**
  6669. * Descriptor size, in bytes
  6670. * @returns {number}
  6671. */
  6672. get descriptorSize() {
  6673. return this._descriptorSize;
  6674. }
  6675. /**
  6676. * Number of descriptors stored in this LSH data structure
  6677. * @returns {number}
  6678. */
  6679. get descriptorCount() {
  6680. return this._descriptorCount;
  6681. }
  6682. /**
  6683. * LSH bit sequences
  6684. * @returns {BitSequences}
  6685. */
  6686. get sequences() {
  6687. return this._sequences;
  6688. }
  6689. /**
  6690. * Number of bits that make a hash
  6691. * @returns {number}
  6692. */
  6693. get hashSize() {
  6694. return this._profile.hashSize;
  6695. }
  6696. /**
  6697. * Maximum number of descriptors that can be stored in a bucket of a table
  6698. * @returns {number}
  6699. */
  6700. get bucketCapacity() {
  6701. return this._profile.bucketCapacity;
  6702. }
  6703. /**
  6704. * How many buckets per table do we have?
  6705. * @returns {number}
  6706. */
  6707. get bucketsPerTable() {
  6708. return 1 << this._profile.hashSize;
  6709. }
  6710. /**
  6711. * Number of LSH tables
  6712. * @returns {number}
  6713. */
  6714. get tableCount() {
  6715. return this._profile.tableCount;
  6716. }
  6717. /**
  6718. * Size of one LSH table, in bytes
  6719. * @returns {number}
  6720. */
  6721. get tableSize() {
  6722. return this.bucketsPerTable * this.bucketCapacity * LSH_BYTESPERPIXEL;
  6723. }
  6724. /**
  6725. * Size of all LSH tables combined, in bytes
  6726. * @returns {number}
  6727. */
  6728. get totalSize() {
  6729. // actually, the total memory in VRAM may be a bit larger than
  6730. // this value, depending on the actual size of the texture
  6731. return this.tableCount * this.tableSize;
  6732. }
  6733. /**
  6734. * LSH tables texture
  6735. * @returns {SpeedyDrawableTexture}
  6736. */
  6737. get tables() {
  6738. return this._tables;
  6739. }
  6740. /**
  6741. * A collection of descriptors
  6742. * @returns {SpeedyDrawableTexture}
  6743. */
  6744. get descriptorDB() {
  6745. return this._descriptorDB;
  6746. }
  6747. /**
  6748. * Pick the appropriate LSH sequences for a particular descriptor size
  6749. * @param {number} descriptorSize in bytes
  6750. * @returns {BitSequences}
  6751. */
  6752. _pickSequences(descriptorSize) {
  6753. utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES, this.hashSize));
  6754. utils/* Utils */.A.assert(Object.prototype.hasOwnProperty.call(LSH_SEQUENCES[this.hashSize], descriptorSize));
  6755. return LSH_SEQUENCES[this.hashSize][descriptorSize];
  6756. }
  6757. /**
  6758. * Create LSH tables
  6759. * @param {SpeedyTexture} texture output texture
  6760. * @param {BitSequences} sequences bit sequences
  6761. * @param {Uint8Array[]} descriptors non-empty array of binary descriptors, ALL HAVING THE SAME SIZE
  6762. * @param {number} descriptorSize in bytes
  6763. * @returns {SpeedyTexture} texture
  6764. */
  6765. _createStaticTables(texture, sequences, descriptors, descriptorSize) {
  6766. const END_OF_LIST = 0xFFFFFFFF;
  6767. const profileName = this._profile.name;
  6768. const tableCapacity = this._profile.capacity;
  6769. const tableCount = this.tableCount;
  6770. const bucketsPerTable = this.bucketsPerTable;
  6771. const bucketSize = this.bucketCapacity * LSH_BYTESPERPIXEL;
  6772. const hashSize = this.hashSize;
  6773. const numberOfPixels = this.tableCount * this.bucketsPerTable * this.bucketCapacity; // watch for overflow?
  6774. const textureWidth = Math.min(nextPot(Math.sqrt(numberOfPixels)), 4096); // 4096 is compatible with most devices according to MDN
  6775. const textureHeight = Math.ceil(numberOfPixels / textureWidth);
  6776. const numberOfDescriptors = descriptors.length;
  6777. // validate input
  6778. utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN);
  6779. utils/* Utils */.A.assert(tableCount <= LSH_SEQUENCE_COUNT);
  6780. utils/* Utils */.A.assert(numberOfPixels <= textureWidth * textureHeight);
  6781. // log
  6782. const MEGABYTE = 1048576;
  6783. utils/* Utils */.A.log(`Building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits ` + `(${textureWidth}x${textureHeight}, with ${(this.tableSize / MEGABYTE).toFixed(2)} ` + `MB per table and total size = ${(this.totalSize / MEGABYTE).toFixed(2)} MB), `);
  6784. // warn the user if there are too many descriptors
  6785. if (numberOfDescriptors > tableCapacity) {
  6786. const exceedingPercentage = 100 * numberOfDescriptors / tableCapacity;
  6787. utils/* Utils */.A.warning(`There are too many descriptors (${numberOfDescriptors}) for a ${profileName} LSH table. That's ${exceedingPercentage.toFixed(2)}% of its theoretical capacity. Consider increasing the hashSize (currently set to ${hashSize}) or reducing the number of descriptors to avoid degradation.`);
  6788. }
  6789. // create empty LSH tables
  6790. const buffer = new ArrayBuffer(textureWidth * textureHeight * LSH_BYTESPERPIXEL);
  6791. const bytes = new Uint8Array(buffer).fill(0xFF);
  6792. const data = new DataView(buffer);
6793. // shuffle the descriptors...
6794. // shuffling spreads out collisions of similar descriptors,
6795. // which tend to be located next to each other in the input array
  6796. const permutation = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(numberOfDescriptors));
  6797. // for each descriptor
  6798. // do everything in little-endian format!
  6799. const numberOfDiscardedDescriptorsPerTable = new Array(tableCount).fill(0);
  6800. for (let i = 0; i < numberOfDescriptors; i++) {
  6801. const descriptorIndex = permutation[i]; //i;
  6802. const hashes = this._hashCodes(descriptors[descriptorIndex], sequences);
  6803. // for each table
  6804. for (let table = 0; table < tableCount; table++) {
  6805. // compute hash & memory addresses
  6806. const hash = hashes[table];
  6807. const tableByteOffset = table * bucketsPerTable * bucketSize;
  6808. const bucketByteOffset = tableByteOffset + hash * bucketSize;
  6809. // find the end of the list
  6810. let index = END_OF_LIST;
  6811. for (let entryByteOffset = 0; entryByteOffset < bucketSize; entryByteOffset += LSH_BYTESPERPIXEL) {
  6812. const byteOffset = bucketByteOffset + entryByteOffset;
  6813. index = data.getUint32(byteOffset, true);
  6814. // add the keypoint
  6815. if (index == END_OF_LIST) {
  6816. data.setUint32(byteOffset, descriptorIndex, true);
  6817. break;
  6818. }
  6819. }
  6820. // note: if the bucket is full, we just discard the entry :\
  6821. // we give this event a probabilistic treatment (see above),
  6822. // so it happens with low probability
  6823. if (index != END_OF_LIST) numberOfDiscardedDescriptorsPerTable[table]++;
  6824. }
  6825. }
  6826. // log data for further study
  6827. const numberOfDiscardedDescriptors = numberOfDiscardedDescriptorsPerTable.reduce((sum, val) => sum + val, 0);
  6828. const profile = numberOfDiscardedDescriptorsPerTable.map(d => 100 * d / numberOfDescriptors);
  6829. utils/* Utils */.A.log(`When building ${tableCount} ${profileName} LSH tables with ${numberOfDescriptors} ` + `${descriptorSize * 8}-bit descriptors each and hashSize = ${hashSize} bits, ` + `I got the following discard profile: ` + profile.map(x => x.toFixed(2) + '%').join(', ') + `. ` + `Average: ${(100 * numberOfDiscardedDescriptors / (tableCount * numberOfDescriptors)).toFixed(2)}%. ` + `Minimum: ${Math.min(...profile).toFixed(2)}%. ` + `Table capacity: ${tableCapacity}.`);
  6830. // upload the LSH tables to the GPU
  6831. texture.resize(textureWidth, textureHeight);
  6832. texture.upload(bytes);
  6833. return texture;
  6834. }
  6835. /**
  6836. * Pick bits from a binary descriptor
  6837. * @param {Uint8Array} descriptor a single descriptor
  6838. * @param {BitSequences} sequences flattened array of tableCount sequences of LSH_SEQUENCE_MAXLEN elements each
  6839. * @returns {number[]} hash code for each table
  6840. */
  6841. _hashCodes(descriptor, sequences) {
  6842. const tableCount = this.tableCount;
  6843. const hashSize = this.hashSize;
  6844. const bucketsPerTable = this.bucketsPerTable;
  6845. const hashes = new Array(tableCount);
  6846. //const descriptorSize = descriptor.length;
  6847. // just to be sure...
  6848. utils/* Utils */.A.assert(hashSize <= LSH_SEQUENCE_MAXLEN && sequences.length >= LSH_SEQUENCE_MAXLEN * tableCount);
  6849. // for each table
  6850. for (let table = 0; table < tableCount; table++) {
  6851. const offset = LSH_SEQUENCE_MAXLEN * table;
  6852. // pick bits [ sequences[offset] .. sequences[offset + hashSize-1] ]
  6853. let hash = 0;
  6854. for (let i = 0; i < hashSize; i++) {
  6855. let bit = sequences[offset + i];
  6856. let b = bit >>> 3;
  6857. let m = 1 << (bit & 7);
  6858. //Utils.assert(b < descriptorSize);
  6859. hash = hash << 1 | (descriptor[b] & m) != 0;
  6860. }
  6861. // validate & store
  6862. utils/* Utils */.A.assert(hash >= 0 && hash < bucketsPerTable);
  6863. hashes[table] = hash;
  6864. }
  6865. // done!
  6866. return hashes;
  6867. }
  6868. }
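// Worked example of the storage math and the bit picking above (illustrative only):
// with hashSize = 15 and bucketCapacity = 4, a table has 2^15 = 32768 buckets of
// 4 * LSH_BYTESPERPIXEL = 16 bytes, i.e. 512 KB per table and 4 MB for 8 tables.
// Picking bit #10 of a descriptor reads byte 10 >>> 3 = 1 and tests mask 1 << (10 & 7) = 4:
function exampleDescriptorBit(descriptor, bit) {
    const b = bit >>> 3;      // which byte holds the bit
    const m = 1 << (bit & 7); // mask of the bit within that byte
    return (descriptor[b] & m) != 0 ? 1 : 0;
}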
  6869. /**
  6870. * Compute P(X <= k), where X ~ Poisson(lambda)
  6871. * @param {number} lambda positive number
  6872. * @param {number} k non-negative integer
  6873. * @returns {number}
  6874. */
  6875. function cumulativePoisson(lambda, k) {
  6876. const exp = Math.exp(-lambda);
  6877. let sum = 1,
  6878. fat = 1,
  6879. pow = 1;
  6880. // k should be small!!!
  6881. for (let i = 1; i <= k; i++) sum += (pow *= lambda) / (fat *= i);
  6882. return sum * exp;
  6883. }
  6884. /**
  6885. * Find the maximum number of keypoint descriptors that a table can hold
  6886. * @param {number} hashSize positive integer
  6887. * @param {number} bucketCapacity positive integer
  6888. * @param {number} [probability] probability of no discard events happening in the theoretical model
  6889. * @return {number} optimal table capacity
  6890. */
  6891. function findTableCapacity(hashSize, bucketCapacity, probability = 0.99) {
  6892. const n = 1 << hashSize; // number of buckets
  6893. const c = bucketCapacity;
  6894. const p = probability;
  6895. let l = 1,
  6896. r = n * c; // watch for overflow!
  6897. let m = 0,
  6898. pm = 0;
  6899. // binary search
  6900. while (l < r) {
  6901. m = Math.floor((l + r) / 2);
  6902. pm = cumulativePoisson(m / n, c);
6903. if (pm > p) // equivalently: if (1 - pm < 1 - p)
6904. l = m + 1;
6905. else r = m;
  6906. }
  6907. return m;
  6908. }
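// Usage sketch of the probabilistic model implemented above (illustrative only):
// with an average of one element per bucket (lambda = m/n = 1) and bucket capacity c = 4,
// cumulativePoisson(1, 4) = e^-1 * (1 + 1 + 1/2 + 1/6 + 1/24) ~= 0.996, i.e. a bucket
// overflows with probability of roughly 0.4% in this regime. findTableCapacity()
// binary-searches the largest m for which this tail probability stays at or above the
// requested threshold:
function exampleTableCapacity() {
    return {
        noOverflowAtOneElementPerBucket: cumulativePoisson(1, 4),   // ~0.996
        capacity: findTableCapacity(LSH_DEFAULT_HASH_SIZE, 4, 0.99) // what a 'medium' profile would get with p = 0.99
    };
}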
  6909. ;// CONCATENATED MODULE: ./src/gpu/programs/keypoints.js
  6910. /*
  6911. * speedy-vision.js
  6912. * GPU-accelerated Computer Vision for JavaScript
  6913. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  6914. *
  6915. * Licensed under the Apache License, Version 2.0 (the "License");
  6916. * you may not use this file except in compliance with the License.
  6917. * You may obtain a copy of the License at
  6918. *
  6919. * http://www.apache.org/licenses/LICENSE-2.0
  6920. *
  6921. * Unless required by applicable law or agreed to in writing, software
  6922. * distributed under the License is distributed on an "AS IS" BASIS,
  6923. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  6924. * See the License for the specific language governing permissions and
  6925. * limitations under the License.
  6926. *
  6927. * keypoints.js
  6928. * Facade for various keypoint detection algorithms
  6929. */
  6930. // FAST corner detector
  6931. const fast9_16 = (0,shader_declaration/* importShader */.bf)('keypoints/fast.glsl', 'keypoints/fast.vs.glsl').withDefines({
  6932. 'FAST_TYPE': 916
  6933. }).withArguments('corners', 'pyramid', 'lod', 'threshold');
  6934. // Harris corner detector
  6935. const harris = [1, 3, 5, 7].reduce((obj, win) => (obj[win] = (0,shader_declaration/* importShader */.bf)('keypoints/harris.glsl').withDefines({
  6936. 'WINDOW_SIZE': win
  6937. }).withArguments('corners', 'pyramid', 'derivatives', 'lod', 'lodStep', 'gaussian'), obj), {});
  6938. const harrisScoreFindMax = (0,shader_declaration/* importShader */.bf)('keypoints/score-findmax.glsl').withArguments('corners', 'iterationNumber');
  6939. const harrisScoreCutoff = (0,shader_declaration/* importShader */.bf)('keypoints/harris-cutoff.glsl').withArguments('corners', 'maxScore', 'quality');
  6940. // Subpixel refinement
  6941. const subpixelQuadratic1d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6942. 'METHOD': 0
  6943. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6944. const subpixelTaylor2d = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6945. 'METHOD': 1
  6946. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6947. const subpixelBilinear = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6948. 'METHOD': 2
  6949. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6950. const subpixelBicubic = (0,shader_declaration/* importShader */.bf)('keypoints/subpixel-refinement.glsl').withDefines({
  6951. 'METHOD': 3
  6952. }).withArguments('pyramid', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxIterations', 'epsilon');
  6953. // Scale refinement
  6954. const refineScaleLoG = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl').withDefines({
  6955. 'METHOD': 0
  6956. }).withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6957. const refineScaleFAST916 = (0,shader_declaration/* importShader */.bf)('keypoints/refine-scale.glsl').withDefines({
  6958. 'METHOD': 1
  6959. }).withArguments('pyramid', 'lodStep', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  6960. // Pixel allocation
  6961. const allocateDescriptors = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-descriptors.glsl').withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
  6962. const allocateExtra = (0,shader_declaration/* importShader */.bf)('keypoints/allocate-extra.glsl').withArguments('inputEncodedKeypoints', 'inputDescriptorSize', 'inputExtraSize', 'inputEncoderLength', 'outputDescriptorSize', 'outputExtraSize', 'outputEncoderLength');
  6963. const transferToExtra = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-to-extra.glsl').withArguments('encodedData', 'strideOfEncodedData', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6964. // ORB descriptors
  6965. const orbDescriptor = (0,shader_declaration/* importShader */.bf)('keypoints/orb-descriptor.glsl').withArguments('image', 'encodedCorners', 'extraSize', 'encoderLength');
  6966. const orbOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/orb-orientation.glsl').withArguments('image', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6967. // Non-maximum suppression
  6968. const nonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl').withDefines({
  6969. 'MULTISCALE': 0
  6970. }).withArguments('image', 'lodStep');
  6971. const multiscaleNonMaxSuppression = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-suppression.glsl').withDefines({
  6972. 'MULTISCALE': 1
  6973. }).withArguments('image', 'lodStep');
  6974. const nonmaxSpace = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-space.glsl').withArguments('corners');
  6975. const nonmaxScale = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl').withDefines({
  6976. 'USE_LAPLACIAN': 1
  6977. }).withArguments('corners', 'pyramid', 'pyrLaplacian', 'lodStep');
  6978. const nonmaxScaleSimple = (0,shader_declaration/* importShader */.bf)('keypoints/nonmax-scale.glsl').withDefines({
  6979. 'USE_LAPLACIAN': 0
  6980. }).withArguments('corners', 'pyramid', 'lodStep');
  6981. const laplacian = (0,shader_declaration/* importShader */.bf)('keypoints/laplacian.glsl').withArguments('corners', 'pyramid', 'lodStep', 'lodOffset');
  6982. // Keypoint tracking & optical-flow
  6983. const lk = [3, 5, 7, 9, 11, 13, 15, 17, 19, 21].reduce((obj, win) => (obj[win] = (0,shader_declaration/* importShader */.bf)('keypoints/lk.glsl').withDefines({
  6984. 'WINDOW_SIZE': win
  6985. }).withArguments('encodedFlow', 'prevKeypoints', 'nextPyramid', 'prevPyramid', 'level', 'depth', 'numberOfIterations', 'discardThreshold', 'epsilon', 'descriptorSize', 'extraSize', 'encoderLength'), obj), {});
  6986. const transferFlow = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-flow.glsl').withArguments('encodedFlow', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  6987. // Brute-force matching
  6988. const bfMatcherInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  6989. 'ENCODE_FILTERS': 0
  6990. });
  6991. const bfMatcherInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  6992. 'ENCODE_FILTERS': 1
  6993. });
  6994. const bfMatcherTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl').withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
  6995. const bfMatcher32 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl').withDefines({
  6996. 'DESCRIPTOR_SIZE': 32,
  6997. 'NUMBER_OF_KEYPOINTS_PER_PASS': 16
  6998. }).withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
  6999. const bfMatcher64 = (0,shader_declaration/* importShader */.bf)('keypoints/bf-knn.glsl').withDefines({
  7000. 'DESCRIPTOR_SIZE': 64,
  7001. 'NUMBER_OF_KEYPOINTS_PER_PASS': 8
  7002. }).withArguments('encodedMatches', 'encodedFilters', 'matcherLength', 'dbEncodedKeypoints', 'dbDescriptorSize', 'dbExtraSize', 'dbEncoderLength', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'passId');
  7003. // LSH-based KNN matching
  7004. const lshKnnInitCandidates = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  7005. 'ENCODE_FILTERS': 0
  7006. });
  7007. const lshKnnInitFilters = (0,shader_declaration/* importShader */.bf)('keypoints/knn-init.glsl').withDefines({
  7008. 'ENCODE_FILTERS': 1
  7009. });
  7010. const lshKnn = LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((obj, descriptorSize) => (obj[descriptorSize] = LSH_ACCEPTABLE_HASH_SIZES.reduce((obj, hashSize) => (obj[hashSize] = [0, 1, 2].reduce((obj, level) => (obj[level] = (0,shader_declaration/* importShader */.bf)('keypoints/lsh-knn.glsl').withDefines({
  7011. 'DESCRIPTOR_SIZE': descriptorSize,
  7012. 'HASH_SIZE': hashSize,
  7013. 'LEVEL': level,
  7014. 'SEQUENCE_MAXLEN': LSH_SEQUENCE_MAXLEN,
  7015. 'SEQUENCE_COUNT': LSH_SEQUENCE_COUNT
  7016. }).withArguments('candidates', 'filters', 'matcherLength', 'tables', 'descriptorDB', 'tableIndex', 'bucketCapacity', 'bucketsPerTable', 'tablesStride', 'descriptorDBStride', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength'), obj), {}), obj), {}), obj), {});
  7017. const lshKnnTransfer = (0,shader_declaration/* importShader */.bf)('keypoints/knn-transfer.glsl').withArguments('encodedMatches', 'encodedKthMatches', 'numberOfMatchesPerKeypoint', 'kthMatch');
  7018. // Keypoint sorting
  7019. const sortCreatePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
  7020. 'STAGE': 1
  7021. }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7022. const sortMergePermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
  7023. 'STAGE': 2
  7024. }).withArguments('permutation', 'blockSize', 'dblLog2BlockSize');
  7025. const sortApplyPermutation = (0,shader_declaration/* importShader */.bf)('keypoints/sort-keypoints.glsl').withDefines({
  7026. 'STAGE': 3
  7027. }).withArguments('permutation', 'maxKeypoints', 'encodedKeypoints', 'descriptorSize', 'extraSize');
  7028. // Keypoint mixing
  7029. const mixKeypointsPreInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7030. 'STAGE': 1
  7031. }).withArguments('encodedKeypointsA', 'encodedKeypointsB', 'encoderLengthA', 'encoderLengthB', 'encoderCapacityA', 'encoderCapacityB', 'descriptorSize', 'extraSize', 'encoderLength');
  7032. const mixKeypointsInit = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7033. 'STAGE': 2
  7034. }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
  7035. const mixKeypointsSort = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7036. 'STAGE': 3
  7037. }).withArguments('array', 'blockSize');
  7038. const mixKeypointsView = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7039. 'STAGE': 5
  7040. }).withArguments('array');
  7041. const mixKeypointsApply = (0,shader_declaration/* importShader */.bf)('keypoints/mix-keypoints.glsl').withDefines({
  7042. 'STAGE': 4
  7043. }).withArguments('array', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7044. // Keypoint encoding
  7045. const initLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl').withDefines({
  7046. 'FS_OUTPUT_TYPE': 2,
  7047. 'STAGE': 1
  7048. }).withArguments('corners');
  7049. const sortLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl', 'keypoints/lookup-of-locations.vs.glsl').withDefines({
  7050. 'FS_OUTPUT_TYPE': 2,
  7051. 'FS_USE_CUSTOM_PRECISION': 1,
  7052. 'STAGE': 2
  7053. }).withArguments('lookupTable', 'blockSize', 'width', 'height');
  7054. const viewLookupTable = (0,shader_declaration/* importShader */.bf)('keypoints/lookup-of-locations.glsl').withDefines({
  7055. 'STAGE': -1
  7056. }).withArguments('lookupTable');
  7057. const encodeKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoints.glsl').withArguments('corners', 'lookupTable', 'stride', 'descriptorSize', 'extraSize', 'encoderLength', 'encoderCapacity');
  7058. const encodeKeypointSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-offsets.glsl').withArguments('corners', 'imageSize');
  7059. const encodeKeypointLongSkipOffsets = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-long-offsets.glsl').withDefines({
  7060. 'MAX_ITERATIONS': 6
  7061. }) // dependent texture reads :(
  7062. .withArguments('offsetsImage', 'imageSize');
  7063. const encodeKeypointPositions = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-positions.glsl').withArguments('offsetsImage', 'imageSize', 'passId', 'numPasses', 'keypointLimit', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7064. const encodeKeypointProperties = (0,shader_declaration/* importShader */.bf)('keypoints/encode-keypoint-properties.glsl').withArguments('corners', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7065. const encodeNullKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/encode-null-keypoints.glsl').withArguments();
  7066. const transferOrientation = (0,shader_declaration/* importShader */.bf)('keypoints/transfer-orientation.glsl').withArguments('encodedOrientations', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7067. const uploadKeypoints = (0,shader_declaration/* importShader */.bf)('keypoints/upload-keypoints.glsl').withDefines({
  7068. // UBOs can hold at least 16KB of data;
  7069. // gl.MAX_UNIFORM_BLOCK_SIZE >= 16384
  7070. // according to the GL ES 3 reference.
  7071. // Each keypoint uses 16 bytes (vec4)
  7072. 'BUFFER_SIZE': 1024 //16384 / 16
  7073. }).withArguments('encodedKeypoints', 'startIndex', 'endIndex', 'descriptorSize', 'extraSize', 'encoderLength');
  7074. // Geometric transformations
  7075. const applyHomography = (0,shader_declaration/* importShader */.bf)('keypoints/apply-homography.glsl').withArguments('homography', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7076. // Keypoint filters
  7077. const clipBorder = (0,shader_declaration/* importShader */.bf)('keypoints/clip-border.glsl').withArguments('imageWidth', 'imageHeight', 'borderTop', 'borderRight', 'borderBottom', 'borderLeft', 'encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7078. const distanceFilter = (0,shader_declaration/* importShader */.bf)('keypoints/distance-filter.glsl').withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7079. const hammingDistanceFilter32 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl').withDefines({
  7080. 'DESCRIPTOR_SIZE': 32
  7081. }).withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7082. const hammingDistanceFilter64 = (0,shader_declaration/* importShader */.bf)('keypoints/hamming-distance-filter.glsl').withDefines({
  7083. 'DESCRIPTOR_SIZE': 64
  7084. }).withArguments('encodedKeypointsA', 'encoderLengthA', 'encodedKeypointsB', 'encoderLengthB', 'descriptorSize', 'extraSize', 'encoderLength', 'threshold');
  7085. // Other utilities
  7086. const shuffle = (0,shader_declaration/* importShader */.bf)('keypoints/shuffle.glsl').withDefines({
  7087. 'PERMUTATION_MAXLEN': 2048
  7088. }).withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength');
  7089. const clip = (0,shader_declaration/* importShader */.bf)('keypoints/clip.glsl').withArguments('encodedKeypoints', 'descriptorSize', 'extraSize', 'encoderLength', 'maxKeypoints');
  7090. /**
  7091. * SpeedyProgramGroupKeypoints
7092. * Keypoint detection, description, matching & tracking
  7093. */
  7094. class SpeedyProgramGroupKeypoints extends SpeedyProgramGroup {
  7095. /**
  7096. * Class constructor
  7097. * @param {SpeedyGPU} gpu
  7098. */
  7099. constructor(gpu) {
  7100. super(gpu);
  7101. this
  7102. //
  7103. // FAST corner detector
  7104. //
  7105. .declare('fast9_16', fast9_16, Object.assign({}, this.program.usesPingpongRendering()))
  7106. //
  7107. // Harris corner detector
  7108. //
  7109. .declare('harris1', harris[1], Object.assign({}, this.program.usesPingpongRendering())).declare('harris3', harris[3], Object.assign({}, this.program.usesPingpongRendering())).declare('harris5', harris[5], Object.assign({}, this.program.usesPingpongRendering())).declare('harris7', harris[7], Object.assign({}, this.program.usesPingpongRendering())).declare('harrisScoreFindMax', harrisScoreFindMax, Object.assign({}, this.program.usesPingpongRendering())).declare('harrisScoreCutoff', harrisScoreCutoff)
  7110. //
  7111. // Subpixel refinement
  7112. //
  7113. .declare('subpixelQuadratic1d', subpixelQuadratic1d).declare('subpixelTaylor2d', subpixelTaylor2d).declare('subpixelBicubic', subpixelBicubic).declare('subpixelBilinear', subpixelBilinear)
  7114. //
  7115. // Scale refinement
  7116. //
  7117. .declare('refineScaleLoG', refineScaleLoG).declare('refineScaleFAST916', refineScaleFAST916)
  7118. //
  7119. // Pixel allocation
  7120. //
  7121. .declare('allocateDescriptors', allocateDescriptors).declare('allocateExtra', allocateExtra).declare('transferToExtra', transferToExtra)
  7122. //
  7123. // ORB descriptors
  7124. //
  7125. .declare('orbDescriptor', orbDescriptor).declare('orbOrientation', orbOrientation)
  7126. //
  7127. // Non-maximum suppression
  7128. //
  7129. .declare('nonmax', nonMaxSuppression).declare('pyrnonmax', multiscaleNonMaxSuppression).declare('nonmaxSpace', nonmaxSpace).declare('nonmaxScale', nonmaxScale).declare('nonmaxScaleSimple', nonmaxScaleSimple).declare('laplacian', laplacian)
  7130. //
  7131. // LK optical-flow
  7132. //
  7133. .declare('lk21', lk[21], Object.assign({}, this.program.usesPingpongRendering())).declare('lk19', lk[19], Object.assign({}, this.program.usesPingpongRendering())).declare('lk17', lk[17], Object.assign({}, this.program.usesPingpongRendering())).declare('lk15', lk[15], Object.assign({}, this.program.usesPingpongRendering())).declare('lk13', lk[13], Object.assign({}, this.program.usesPingpongRendering())).declare('lk11', lk[11], Object.assign({}, this.program.usesPingpongRendering())).declare('lk9', lk[9], Object.assign({}, this.program.usesPingpongRendering())).declare('lk7', lk[7], Object.assign({}, this.program.usesPingpongRendering())).declare('lk5', lk[5], Object.assign({}, this.program.usesPingpongRendering())).declare('lk3', lk[3], Object.assign({}, this.program.usesPingpongRendering())).declare('transferFlow', transferFlow)
  7134. //
  7135. // Brute-force KNN matching
  7136. //
  7137. .declare('bfMatcherInitCandidates', bfMatcherInitCandidates).declare('bfMatcherInitFilters', bfMatcherInitFilters).declare('bfMatcherTransfer', bfMatcherTransfer, Object.assign({}, this.program.usesPingpongRendering())).declare('bfMatcher32', bfMatcher32, Object.assign({}, this.program.usesPingpongRendering())).declare('bfMatcher64', bfMatcher64, Object.assign({}, this.program.usesPingpongRendering()))
  7138. //
  7139. // LSH-based KNN matching
  7140. //
  7141. .declare('lshKnnInitCandidates', lshKnnInitCandidates).declare('lshKnnInitFilters', lshKnnInitFilters).declare('lshKnnTransfer', lshKnnTransfer, Object.assign({}, this.program.usesPingpongRendering()))
  7142. //
  7143. // Keypoint sorting
  7144. //
  7145. .declare('sortCreatePermutation', sortCreatePermutation).declare('sortMergePermutation', sortMergePermutation, Object.assign({}, this.program.usesPingpongRendering())).declare('sortApplyPermutation', sortApplyPermutation)
  7146. //
  7147. // Keypoint mixing
  7148. //
  7149. .declare('mixKeypointsPreInit', mixKeypointsPreInit).declare('mixKeypointsInit', mixKeypointsInit).declare('mixKeypointsSort', mixKeypointsSort, Object.assign({}, this.program.usesPingpongRendering())).declare('mixKeypointsView', mixKeypointsView).declare('mixKeypointsApply', mixKeypointsApply)
  7150. //
  7151. // Keypoint encoders
  7152. //
  7153. .declare('encodeNullKeypoints', encodeNullKeypoints).declare('encodeKeypoints', encodeKeypoints).declare('initLookupTable', initLookupTable).declare('sortLookupTable', sortLookupTable, Object.assign({}, this.program.usesPingpongRendering())).declare('viewLookupTable', viewLookupTable).declare('encodeKeypointSkipOffsets', encodeKeypointSkipOffsets).declare('encodeKeypointLongSkipOffsets', encodeKeypointLongSkipOffsets, Object.assign({}, this.program.usesPingpongRendering())).declare('encodeKeypointPositions', encodeKeypointPositions, Object.assign({}, this.program.usesPingpongRendering())).declare('encodeKeypointProperties', encodeKeypointProperties).declare('transferOrientation', transferOrientation).declare('uploadKeypoints', uploadKeypoints, Object.assign({}, this.program.usesPingpongRendering()))
  7154. //
  7155. // Geometric transformations
  7156. //
  7157. .declare('applyHomography', applyHomography)
  7158. //
  7159. // Keypoint filters
  7160. //
  7161. .declare('clipBorder', clipBorder).declare('distanceFilter', distanceFilter).declare('hammingDistanceFilter32', hammingDistanceFilter32).declare('hammingDistanceFilter64', hammingDistanceFilter64)
  7162. //
  7163. // Other utilities
  7164. //
  7165. .declare('shuffle', shuffle).declare('clip', clip);
  7166. //
  7167. // LSH-based KNN matching
  7168. //
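// The loop below declares one program per (descriptorSize, hashSize, level) triple, named
// `lshKnn${descriptorSize}h${hashSize}lv${level}` — e.g. descriptor size 32 with hash size 16
// at level 0 would be registered as 'lshKnn32h16lv0' (these specific sizes are illustrative;
// the accepted values come from LSH_ACCEPTABLE_DESCRIPTOR_SIZES and LSH_ACCEPTABLE_HASH_SIZES).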
  7169. for (const descriptorSize of Object.keys(lshKnn)) {
  7170. for (const hashSize of Object.keys(lshKnn[descriptorSize])) {
  7171. for (const level of Object.keys(lshKnn[descriptorSize][hashSize])) {
  7172. const name = `lshKnn${descriptorSize}h${hashSize}lv${level}`;
  7173. this.declare(name, lshKnn[descriptorSize][hashSize][level], Object.assign({}, this.program.usesPingpongRendering()));
  7174. }
  7175. }
  7176. }
  7177. }
  7178. }
  7179. ;// CONCATENATED MODULE: ./src/gpu/programs/pyramids.js
  7180. /*
  7181. * speedy-vision.js
  7182. * GPU-accelerated Computer Vision for JavaScript
  7183. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7184. *
  7185. * Licensed under the Apache License, Version 2.0 (the "License");
  7186. * you may not use this file except in compliance with the License.
  7187. * You may obtain a copy of the License at
  7188. *
  7189. * http://www.apache.org/licenses/LICENSE-2.0
  7190. *
  7191. * Unless required by applicable law or agreed to in writing, software
  7192. * distributed under the License is distributed on an "AS IS" BASIS,
  7193. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7194. * See the License for the specific language governing permissions and
  7195. * limitations under the License.
  7196. *
  7197. * pyramids.js
  7198. * Image pyramids
  7199. */
  7200. //
  7201. // Shaders
  7202. //
  7203. const upsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/upsample2.glsl').withArguments('image');
  7204. const downsample2 = (0,shader_declaration/* importShader */.bf)('pyramids/downsample2.glsl').withArguments('image');
  7205. /**
  7206. * SpeedyProgramGroupPyramids
  7207. * Image pyramids
  7208. */
  7209. class SpeedyProgramGroupPyramids extends SpeedyProgramGroup {
  7210. /**
  7211. * Class constructor
  7212. * @param {SpeedyGPU} gpu
  7213. */
  7214. constructor(gpu) {
  7215. super(gpu);
  7216. this
  7217. // upsampling & downsampling
  7218. .declare('upsample2', upsample2).declare('downsample2', downsample2)
  7219. // separable kernels for gaussian smoothing
  7220. // use [c, b, a, b, c] where a+2c = 2b and a+2b+2c = 1
  7221. // pick a = 0.4 for gaussian approximation (sigma = 1)
  7222. .declare('smoothX', (0,convolution.convX)([0.05, 0.25, 0.4, 0.25, 0.05])).declare('smoothY', (0,convolution.convY)([0.05, 0.25, 0.4, 0.25, 0.05]))
  7223. /*
  7224. .declare('reduce', conv2D([
  7225. 0.00250, 0.01250, 0.02000, 0.01250, 0.00250,
  7226. 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
  7227. 0.02000, 0.10000, 0.16000, 0.10000, 0.02000,
  7228. 0.01250, 0.06250, 0.10000, 0.06250, 0.01250,
  7229. 0.00250, 0.01250, 0.02000, 0.01250, 0.00250
  7230. ]))
  7231. */
  7232. // smoothing for 2x image
  7233. // same rules as above with sum(k) = 2
  7234. .declare('smoothX2', (0,convolution.convX)([0.1, 0.5, 0.8, 0.5, 0.1
  7235. // NOTE: this would saturate the image, but we apply it
  7236. // on a 2x upsampled version with lots of zero pixels
  7237. ])).declare('smoothY2', (0,convolution.convY)([0.1, 0.5, 0.8, 0.5, 0.1], 1.0 / 2.0));
  7238. }
  7239. }
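// Sanity check of the kernel constraints stated above (illustrative): for
// [c, b, a, b, c] = [0.05, 0.25, 0.4, 0.25, 0.05] we have a + 2c = 0.5 = 2b and
// a + 2b + 2c = 1, as required; the 2x kernel [0.1, 0.5, 0.8, 0.5, 0.1] sums to 2
// and is rescaled by 1/2 in smoothY2.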
  7240. ;// CONCATENATED MODULE: ./src/gpu/programs/transforms.js
  7241. /*
  7242. * speedy-vision.js
  7243. * GPU-accelerated Computer Vision for JavaScript
  7244. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7245. *
  7246. * Licensed under the Apache License, Version 2.0 (the "License");
  7247. * you may not use this file except in compliance with the License.
  7248. * You may obtain a copy of the License at
  7249. *
  7250. * http://www.apache.org/licenses/LICENSE-2.0
  7251. *
  7252. * Unless required by applicable law or agreed to in writing, software
  7253. * distributed under the License is distributed on an "AS IS" BASIS,
  7254. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7255. * See the License for the specific language governing permissions and
  7256. * limitations under the License.
  7257. *
  7258. * transforms.js
  7259. * Geometric transformations
  7260. */
  7261. //
  7262. // Shaders
  7263. //
  7264. // Perspective warp
  7265. const warpPerspective = (0,shader_declaration/* importShader */.bf)('transforms/warp-perspective.glsl').withArguments('image', 'inverseHomography');
  7266. // Resize image
  7267. const resizeNearest = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl').withDefines({
  7268. 'INTERPOLATION_METHOD': 0 // Nearest neighbors
  7269. }).withArguments('image');
  7270. const resizeBilinear = (0,shader_declaration/* importShader */.bf)('transforms/resize.glsl').withDefines({
  7271. 'INTERPOLATION_METHOD': 1 // Bilinear interpolation
  7272. }).withArguments('image');
  7273. // Additive mix (TODO create a new program group?)
  7274. const additiveMix = (0,shader_declaration/* importShader */.bf)('transforms/additive-mix.glsl').withArguments('image0', 'image1', 'alpha', 'beta', 'gamma');
  7275. /**
  7276. * SpeedyProgramGroupTransforms
  7277. * Geometric transformations
  7278. */
  7279. class SpeedyProgramGroupTransforms extends SpeedyProgramGroup {
  7280. /**
  7281. * Class constructor
  7282. * @param {SpeedyGPU} gpu
  7283. */
  7284. constructor(gpu) {
  7285. super(gpu);
  7286. this.declare('warpPerspective', warpPerspective).declare('resizeNearest', resizeNearest).declare('resizeBilinear', resizeBilinear).declare('additiveMix', additiveMix);
  7287. }
  7288. }
  7289. ;// CONCATENATED MODULE: ./src/gpu/speedy-program-center.js
  7290. /*
  7291. * speedy-vision.js
  7292. * GPU-accelerated Computer Vision for JavaScript
  7293. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7294. *
  7295. * Licensed under the Apache License, Version 2.0 (the "License");
  7296. * you may not use this file except in compliance with the License.
  7297. * You may obtain a copy of the License at
  7298. *
  7299. * http://www.apache.org/licenses/LICENSE-2.0
  7300. *
  7301. * Unless required by applicable law or agreed to in writing, software
  7302. * distributed under the License is distributed on an "AS IS" BASIS,
  7303. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7304. * See the License for the specific language governing permissions and
  7305. * limitations under the License.
  7306. *
  7307. * speedy-program-center.js
  7308. * An access point to all programs that run on the GPU
  7309. */
  7310. /**
7311. * An access point to all programs that run on the GPU
  7312. * All program groups can be accessed via this class
  7313. */
  7314. class SpeedyProgramCenter {
  7315. /**
  7316. * Class constructor
  7317. * @param {SpeedyGPU} gpu reference to SpeedyGPU
  7318. */
  7319. constructor(gpu) {
  7320. // Note: we instantiate the program groups lazily
  7321. /** @type {SpeedyGPU} reference to SpeedyGPU */
  7322. this._gpu = gpu;
  7323. /** @type {SpeedyProgramGroupFilters} image filters */
  7324. this._filters = null;
  7325. /** @type {SpeedyProgramGroupTransforms} geometric transformations */
  7326. this._transforms = null;
  7327. /** @type {SpeedyProgramGroupPyramids} pyramids & scale-space */
  7328. this._pyramids = null;
  7329. /** @type {SpeedyProgramGroupKeypoints} keypoint routines */
  7330. this._keypoints = null;
  7331. /** @type {SpeedyProgramGroupUtils} utility programs */
  7332. this._utils = null;
  7333. }
  7334. /**
  7335. * Image filters & convolutions
  7336. * @returns {SpeedyProgramGroupFilters}
  7337. */
  7338. get filters() {
  7339. return this._filters || (this._filters = new SpeedyProgramGroupFilters(this._gpu));
  7340. }
  7341. /**
  7342. * Geometric transformations
  7343. * @returns {SpeedyProgramGroupTransforms}
  7344. */
  7345. get transforms() {
  7346. return this._transforms || (this._transforms = new SpeedyProgramGroupTransforms(this._gpu));
  7347. }
  7348. /**
  7349. * Image pyramids & scale-space
  7350. * @returns {SpeedyProgramGroupPyramids}
  7351. */
  7352. get pyramids() {
  7353. return this._pyramids || (this._pyramids = new SpeedyProgramGroupPyramids(this._gpu));
  7354. }
  7355. /**
  7356. * Keypoint detection & description
  7357. * @returns {SpeedyProgramGroupKeypoints}
  7358. */
  7359. get keypoints() {
  7360. return this._keypoints || (this._keypoints = new SpeedyProgramGroupKeypoints(this._gpu));
  7361. }
  7362. /**
  7363. * Utility programs
  7364. * @returns {SpeedyProgramGroupUtils}
  7365. */
  7366. get utils() {
  7367. return this._utils || (this._utils = new SpeedyProgramGroupUtils(this._gpu));
  7368. }
  7369. /**
  7370. * Release all programs from all groups. You'll
  7371. * no longer be able to use any of them.
  7372. * @returns {null}
  7373. */
  7374. release() {
  7375. for (const key in this) {
  7376. if (Object.prototype.hasOwnProperty.call(this, key) && this[key] != null) {
  7377. const group = this[key];
  7378. if (group instanceof SpeedyProgramGroup) group.release();
  7379. }
  7380. }
  7381. return null;
  7382. }
  7383. }
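// Usage sketch (assuming a SpeedyProgramCenter instance named `center`; illustrative only):
// program groups are created lazily on first access, e.g. `center.keypoints` instantiates
// SpeedyProgramGroupKeypoints the first time it is read and returns the cached instance
// afterwards; `center.release()` releases every group that has been instantiated.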
  7384. ;// CONCATENATED MODULE: ./src/gpu/speedy-texture-pool.js
  7385. /*
  7386. * speedy-vision.js
  7387. * GPU-accelerated Computer Vision for JavaScript
  7388. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7389. *
  7390. * Licensed under the Apache License, Version 2.0 (the "License");
  7391. * you may not use this file except in compliance with the License.
  7392. * You may obtain a copy of the License at
  7393. *
  7394. * http://www.apache.org/licenses/LICENSE-2.0
  7395. *
  7396. * Unless required by applicable law or agreed to in writing, software
  7397. * distributed under the License is distributed on an "AS IS" BASIS,
  7398. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7399. * See the License for the specific language governing permissions and
  7400. * limitations under the License.
  7401. *
  7402. * speedy-texture-pool.js
  7403. * Texture pool
  7404. */
  7405. // Constants
  7406. const DEFAULT_CAPACITY = 1024;
  7407. const BUCKET = Symbol('Bucket');
  7408. /*
  7409. === Heuristics to figure out the capacity of a texture pool ===
  7410. 1. Decide the maximum amount of VRAM you'd like to use in a pool (say, 64 MB).
  7411. 2. Figure out the average texture size in your application (say, 640x360 pixels).
  7412. 3. Figure out the average texture size in bytes (say, 921600 bytes). Each pixel
  7413. uses 4 bytes (RGBA format).
  7414. 4. Divide the maximum amount of VRAM by the average texture size in bytes
  7415. (say, 72). That's the capacity of the pool.
  7416. Note that textures are allocated lazily, so VRAM usage is kept to a minimum.
  7417. Adapted from: https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices
  7418. */
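// Worked example of the heuristic above (illustrative only): with a 64 MB budget and
// 640x360 RGBA textures, each texture takes 640 * 360 * 4 = 921600 bytes, so the pool
// capacity would be floor(64 * 1024 * 1024 / 921600) = 72 textures.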
  7419. /**
  7420. * @typedef {number} TextureBucketIndex index of a bucket in a pool
  7421. */
  7422. /**
  7423. * A bucket
  7424. */
  7425. class TextureBucket {
  7426. /**
  7427. * Constructor
  7428. * @param {SpeedyDrawableTexture} texture managed texture
  7429. * @param {TextureBucketIndex} index index of this bucket
  7430. * @param {TextureBucketIndex} next index of the next bucket
  7431. */
  7432. constructor(texture, index, next) {
  7433. /** @type {SpeedyDrawableTexture} managed texture */
  7434. this.texture = texture;
  7435. /** @type {TextureBucketIndex} index of this bucket */
  7436. this.index = index;
  7437. /** @type {TextureBucketIndex} index of the next bucket */
  7438. this.next = next;
  7439. /** @type {boolean} whether the texture is available or not */
  7440. this.free = true;
  7441. }
  7442. }
  7443. /**
  7444. * Texture pool
  7445. */
  7446. class SpeedyTexturePool {
  7447. /**
  7448. * Constructor
  7449. * @param {SpeedyGPU} gpu
  7450. * @param {number} [capacity] number of textures in the pool
  7451. */
  7452. constructor(gpu, capacity = DEFAULT_CAPACITY) {
  7453. utils/* Utils */.A.assert(capacity > 0);
  7454. /** @type {TextureBucket[]} buckets */
  7455. this._bucket = Array.from({
  7456. length: capacity
  7457. }, (_, i) => new TextureBucket(null, i, i - 1));
  7458. /** @type {TextureBucketIndex} index of an available bucket */
  7459. this._head = capacity - 1;
  7460. /** @type {SpeedyGPU} GPU instance */
  7461. this._gpu = gpu;
  7462. }
  7463. /**
  7464. * Get a texture from the pool
  7465. * @returns {SpeedyDrawableTexture}
  7466. */
  7467. allocate() {
  7468. if (this._head < 0) throw new utils_errors/* OutOfMemoryError */.l(`Exhausted pool (capacity: ${this._bucket.length})`);
  7469. const bucket = this._bucket[this._head];
  7470. bucket.free = false;
  7471. this._head = bucket.next;
  7472. if (bucket.texture == null)
  7473. // lazy instantiation
  7474. bucket.texture = SpeedyTexturePool._createManagedTexture(this._gpu.gl, bucket);
  7475. return bucket.texture;
  7476. }
  7477. /**
  7478. * Put a texture back in the pool
  7479. * @param {SpeedyDrawableTexture} texture
  7480. * @returns {null}
  7481. */
  7482. free(texture) {
  7483. const bucket = texture[BUCKET];
  7484. utils/* Utils */.A.assert(bucket !== undefined && !bucket.free, `Unmanaged texture or double free`);
  7485. bucket.next = this._head;
  7486. bucket.free = true;
  7487. this._head = bucket.index;
  7488. return null;
  7489. }
  7490. /**
  7491. * Release the texture pool
  7492. * @returns {null}
  7493. */
  7494. release() {
  7495. for (let i = 0; i < this._bucket.length; i++) {
  7496. if (this._bucket[i].texture != null) this._bucket[i].texture = this._bucket[i].texture.release();
  7497. }
  7498. return null;
  7499. }
  7500. /**
  7501. * Create a texture with a reference to a bucket
  7502. * @param {WebGL2RenderingContext} gl
  7503. * @param {TextureBucket} bucket
  7504. * @returns {SpeedyDrawableTexture}
  7505. */
  7506. static _createManagedTexture(gl, bucket) {
  7507. const texture = new SpeedyDrawableTexture(gl, 1, 1);
  7508. return Object.defineProperty(texture, BUCKET, {
  7509. configurable: false,
  7510. enumerable: false,
  7511. writable: false,
  7512. value: bucket
  7513. });
  7514. }
  7515. }
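// Usage sketch (assuming a SpeedyGPU instance named `gpu`; illustrative only):
//
//   const pool = new SpeedyTexturePool(gpu, 16); // up to 16 managed textures
//   const tex = pool.allocate();                 // lazily creates a 1x1 drawable texture
//   /* ... render into tex ... */
//   pool.free(tex);                              // returns it to the free list
//
// allocate() throws an OutOfMemoryError once every bucket is in use.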
  7516. // EXTERNAL MODULE: ./src/utils/types.js
  7517. var types = __nested_webpack_require_314174__(6049);
  7518. ;// CONCATENATED MODULE: ./src/core/speedy-media-source.js
  7519. /*
  7520. * speedy-vision.js
  7521. * GPU-accelerated Computer Vision for JavaScript
  7522. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  7523. *
  7524. * Licensed under the Apache License, Version 2.0 (the "License");
  7525. * you may not use this file except in compliance with the License.
  7526. * You may obtain a copy of the License at
  7527. *
  7528. * http://www.apache.org/licenses/LICENSE-2.0
  7529. *
  7530. * Unless required by applicable law or agreed to in writing, software
  7531. * distributed under the License is distributed on an "AS IS" BASIS,
  7532. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  7533. * See the License for the specific language governing permissions and
  7534. * limitations under the License.
  7535. *
  7536. * speedy-media-source.js
  7537. * Wrappers around <img>, <video>, <canvas>, etc.
  7538. */
  7539. /** @typedef {HTMLImageElement|HTMLVideoElement|HTMLCanvasElement|OffscreenCanvas|ImageBitmap|ImageData} SpeedyMediaSourceNativeElement */
  7540. /** Internal token for protected constructors */
  7541. const PRIVATE_TOKEN = Symbol();
  7542. /**
  7543. * An abstract media source: a wrapper around native
7544. * elements such as HTMLImageElement, HTMLVideoElement,
  7545. * and so on
  7546. * @abstract
  7547. */
  7548. class SpeedyMediaSource {
  7549. /**
  7550. * @protected Constructor
  7551. * @param {symbol} token
  7552. */
  7553. constructor(token) {
  7554. // the constructor is not public
  7555. if (token !== PRIVATE_TOKEN) throw new utils_errors/* IllegalOperationError */.Er();
  7556. /** @type {SpeedyMediaSourceNativeElement} underlying media object */
  7557. this._data = null;
  7558. }
  7559. /**
  7560. * Load a media source
  7561. * @param {SpeedyMediaSourceNativeElement} wrappedObject
  7562. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7563. */
  7564. static load(wrappedObject) {
  7565. if (wrappedObject instanceof HTMLImageElement) return SpeedyImageMediaSource.load(wrappedObject);else if (wrappedObject instanceof HTMLVideoElement) return SpeedyVideoMediaSource.load(wrappedObject);else if (wrappedObject instanceof HTMLCanvasElement) return SpeedyCanvasMediaSource.load(wrappedObject);else if (typeof OffscreenCanvas !== 'undefined' && wrappedObject instanceof OffscreenCanvas) return SpeedyOffscreenCanvasMediaSource.load(wrappedObject);else if (wrappedObject instanceof ImageBitmap) return SpeedyBitmapMediaSource.load(wrappedObject);else if (wrappedObject instanceof ImageData) return SpeedyDataMediaSource.load(wrappedObject);else throw new utils_errors/* IllegalArgumentError */.qw(`Unsupported media type: ${wrappedObject}`);
  7566. }
  7567. /**
  7568. * The underlying wrapped object
  7569. * @returns {SpeedyMediaSourceNativeElement}
  7570. */
  7571. get data() {
  7572. return this._data;
  7573. }
  7574. /**
  7575. * Is the underlying media loaded?
  7576. * @returns {boolean}
  7577. */
  7578. isLoaded() {
  7579. return this._data !== null;
  7580. }
  7581. /**
  7582. * The type of the underlying media source
  7583. * @abstract
  7584. * @returns {MediaType}
  7585. */
  7586. get type() {
  7587. throw new utils_errors/* AbstractMethodError */.aQ();
  7588. }
  7589. /**
  7590. * Media width, in pixels
  7591. * @abstract
  7592. * @returns {number}
  7593. */
  7594. get width() {
  7595. throw new utils_errors/* AbstractMethodError */.aQ();
  7596. }
  7597. /**
  7598. * Media height, in pixels
  7599. * @abstract
  7600. * @returns {number}
  7601. */
  7602. get height() {
  7603. throw new utils_errors/* AbstractMethodError */.aQ();
  7604. }
  7605. /**
  7606. * Clone this media source
  7607. * @abstract
  7608. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7609. */
  7610. clone() {
  7611. throw new utils_errors/* AbstractMethodError */.aQ();
  7612. }
  7613. /**
  7614. * Release resources associated with this object
  7615. * @returns {null}
  7616. */
  7617. release() {
  7618. return this._data = null;
  7619. }
  7620. /**
  7621. * Load the underlying media
  7622. * @abstract
  7623. * @param {SpeedyMediaSourceNativeElement} element
  7624. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7625. */
  7626. _load(element) {
  7627. throw new utils_errors/* AbstractMethodError */.aQ();
  7628. }
  7629. /**
  7630. * Wait for an event to be triggered in an element
  7631. * @param {Element} element
  7632. * @param {string} eventName
  7633. * @param {number} [timeout] in ms
  7634. * @returns {SpeedyPromise<Element>}
  7635. */
  7636. static _waitUntil(element, eventName, timeout = 30000) {
  7637. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  7638. utils/* Utils */.A.log(`Waiting for ${eventName} to be triggered in ${element}...`);
  7639. const timer = setTimeout(() => {
  7640. clear();
  7641. reject(new utils_errors/* TimeoutError */.MU(`${eventName} has not been triggered in ${element}: timeout (${timeout}ms)`));
  7642. }, timeout);
  7643. function clear() {
  7644. clearTimeout(timer);
  7645. element.removeEventListener('error', handleError, false);
  7646. element.removeEventListener(eventName, handleSuccess, false);
  7647. }
  7648. function handleError() {
  7649. const hasError = element.error !== null && typeof element.error === 'object';
  7650. const error = hasError ? element.error : {
  7651. code: -1,
  7652. message: ''
  7653. };
  7654. const info = `${error.message} (error code ${error.code})`;
  7655. clear();
  7656. reject(new utils_errors/* ResourceNotLoadedError */.FJ(`Can't load ${element}. ${info}`));
  7657. }
  7658. function handleSuccess() {
  7659. clear();
  7660. resolve(element);
  7661. }
  7662. element.addEventListener('error', handleError, false);
  7663. element.addEventListener(eventName, handleSuccess, false);
  7664. });
  7665. }
  7666. }
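// Usage sketch (the element lookup is an assumption for illustration):
//
//   SpeedyMediaSource.load(document.querySelector('video')).then(source => {
//       console.log(source.type, source.width, source.height);
//   });
//
// load() dispatches to the appropriate wrapper (image, video, canvas, offscreen canvas,
// ImageBitmap or ImageData) and resolves once the underlying media is ready.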
  7667. /**
  7668. * Image media source:
  7669. * a wrapper around HTMLImageElement
  7670. */
  7671. class SpeedyImageMediaSource extends SpeedyMediaSource {
  7672. /**
  7673. * @private Constructor
  7674. * @param {symbol} token
  7675. */
  7676. constructor(token) {
  7677. super(token);
  7678. /** @type {HTMLImageElement} image element */
  7679. this._data = null;
  7680. }
  7681. /**
  7682. * The underlying wrapped object
  7683. * @returns {HTMLImageElement}
  7684. */
  7685. get data() {
  7686. return this._data;
  7687. }
  7688. /**
  7689. * The type of the underlying media source
  7690. * @returns {MediaType}
  7691. */
  7692. get type() {
  7693. return types/* MediaType */.zu.Image;
  7694. }
  7695. /**
  7696. * Media width, in pixels
  7697. * @returns {number}
  7698. */
  7699. get width() {
  7700. return this._data ? this._data.naturalWidth : 0;
  7701. }
  7702. /**
  7703. * Media height, in pixels
  7704. * @returns {number}
  7705. */
  7706. get height() {
  7707. return this._data ? this._data.naturalHeight : 0;
  7708. }
  7709. /**
  7710. * Clone this media source
  7711. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7712. */
  7713. clone() {
  7714. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7715. const newNode = /** @type {HTMLImageElement} */this._data.cloneNode(true);
  7716. return SpeedyImageMediaSource.load(newNode);
  7717. }
  7718. /**
  7719. * Load the underlying media
  7720. * @param {HTMLImageElement} image
  7721. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7722. */
  7723. _load(image) {
  7724. if (this.isLoaded()) this.release();
  7725. if (image.complete && image.naturalWidth !== 0) {
  7726. // already loaded?
  7727. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  7728. this._data = image;
  7729. resolve(this);
  7730. });
  7731. } else {
  7732. return SpeedyMediaSource._waitUntil(image, 'load').then(() => {
  7733. this._data = image;
  7734. return this;
  7735. });
  7736. }
  7737. }
  7738. /**
  7739. * Load the underlying media
  7740. * @param {HTMLImageElement} image
  7741. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7742. */
  7743. static load(image) {
  7744. return new SpeedyImageMediaSource(PRIVATE_TOKEN)._load(image);
  7745. }
  7746. }
  7747. /**
  7748. * Video media source:
  7749. * a wrapper around HTMLVideoElement
  7750. */
  7751. class SpeedyVideoMediaSource extends SpeedyMediaSource {
  7752. /**
  7753. * @private Constructor
  7754. * @param {symbol} token
  7755. */
  7756. constructor(token) {
  7757. super(token);
  7758. /** @type {HTMLVideoElement} video element */
  7759. this._data = null;
  7760. }
  7761. /**
  7762. * The underlying wrapped object
  7763. * @returns {HTMLVideoElement}
  7764. */
  7765. get data() {
  7766. return this._data;
  7767. }
  7768. /**
  7769. * The type of the underlying media source
  7770. * @returns {MediaType}
  7771. */
  7772. get type() {
  7773. return types/* MediaType */.zu.Video;
  7774. }
  7775. /**
  7776. * Media width, in pixels
  7777. * @returns {number}
  7778. */
  7779. get width() {
  7780. // Warning: videoWidth & videoHeight may change at any time !!!
  7781. // so you can't cache these dimensions
  7782. return this._data ? this._data.videoWidth : 0;
  7783. }
  7784. /**
  7785. * Media height, in pixels
  7786. * @returns {number}
  7787. */
  7788. get height() {
  7789. return this._data ? this._data.videoHeight : 0;
  7790. }
  7791. /**
  7792. * Clone this media source
  7793. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7794. */
  7795. clone() {
  7796. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7797. const newNode = /** @type {HTMLVideoElement} */this._data.cloneNode(true);
  7798. return SpeedyVideoMediaSource.load(newNode);
  7799. }
  7800. /**
  7801. * Load the underlying media
  7802. * @param {HTMLVideoElement} video
  7803. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7804. */
  7805. _load(video) {
  7806. if (this.isLoaded()) this.release();
  7807. utils/* Utils */.A.log('Loading a video...');
  7808. video.load();
  7809. return SpeedyVideoMediaSource._waitUntilPlayable(video).then(() => {
  7810. return SpeedyVideoMediaSource._handleAutoplay(video).then(() => {
  7811. this._data = video;
  7812. return this;
  7813. });
  7814. });
  7815. }
  7816. /**
  7817. * Load the underlying media
  7818. * @param {HTMLVideoElement} video
  7819. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7820. */
  7821. static load(video) {
  7822. return new SpeedyVideoMediaSource(PRIVATE_TOKEN)._load(video);
  7823. }
  7824. /**
  7825. * Handle browser quirks concerning autoplay
  7826. * @param {HTMLVideoElement} video
  7827. * @returns {SpeedyPromise<void>} gets rejected if we can't autoplay
  7828. */
  7829. static _handleAutoplay(video) {
  7830. // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
  7831. // Chrome policy: https://developer.chrome.com/blog/autoplay/
  7832. // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
  7833. // videos marked as autoplay may not play if not visible on-screen
  7834. // videos marked as autoplay should be muted
  7835. if (video.autoplay /*&& video.muted*/) {
  7836. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  7837. const promise = video.play();
  7838. // handle older browsers
  7839. if (promise === undefined) {
  7840. resolve();
  7841. return;
  7842. }
  7843. // wrap promise
  7844. promise.then(resolve, reject);
  7845. });
  7846. }
  7847. // nothing to do
  7848. return speedy_promise/* SpeedyPromise */.i.resolve();
  7849. }
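// Note (following the autoplay policies referenced above): a <video autoplay muted> element
// that is visible on-screen is the setup most likely to satisfy _handleAutoplay(), since
// browsers generally block unmuted autoplay; the returned promise is rejected if play()
// is blocked.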
  7850. /**
  7851. * Wait for the input video to be playable
  7852. * @param {HTMLVideoElement} video
  7853. * @returns {SpeedyPromise<HTMLVideoElement>} resolves to the input video when it can be played
  7854. */
  7855. static _waitUntilPlayable(video) {
  7856. const TIMEOUT = 30000,
  7857. INTERVAL = 500;
  7858. if (video.readyState >= 3) return speedy_promise/* SpeedyPromise */.i.resolve(video);
  7859. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  7860. let ms = 0,
  7861. t = setInterval(() => {
  7862. //if(video.readyState >= 4) { // canplaythrough (may timeout on slow connections)
  7863. if (video.readyState >= 3) {
  7864. clearInterval(t);
  7865. resolve(video);
  7866. } else if ((ms += INTERVAL) >= TIMEOUT) {
  7867. clearInterval(t);
  7868. reject(new utils_errors/* TimeoutError */.MU('The video took too long to load'));
  7869. }
  7870. }, INTERVAL);
  7871. });
  7872. }
  7873. }
  7874. /**
  7875. * Canvas media source:
  7876. * a wrapper around HTMLCanvasElement
  7877. */
  7878. class SpeedyCanvasMediaSource extends SpeedyMediaSource {
  7879. /**
  7880. * @private Constructor
  7881. * @param {symbol} token
  7882. */
  7883. constructor(token) {
  7884. super(token);
  7885. /** @type {HTMLCanvasElement} canvas element */
  7886. this._data = null;
  7887. }
  7888. /**
  7889. * The underlying wrapped object
  7890. * @returns {HTMLCanvasElement}
  7891. */
  7892. get data() {
  7893. return this._data;
  7894. }
  7895. /**
  7896. * The type of the underlying media source
  7897. * @returns {MediaType}
  7898. */
  7899. get type() {
  7900. return types/* MediaType */.zu.Canvas;
  7901. }
  7902. /**
  7903. * Media width, in pixels
  7904. * @returns {number}
  7905. */
  7906. get width() {
  7907. return this._data ? this._data.width : 0;
  7908. }
  7909. /**
  7910. * Media height, in pixels
  7911. * @returns {number}
  7912. */
  7913. get height() {
  7914. return this._data ? this._data.height : 0;
  7915. }
  7916. /**
  7917. * Clone this media source
  7918. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7919. */
  7920. clone() {
  7921. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7922. const newCanvas = utils/* Utils */.A.createCanvas(this.width, this.height);
  7923. const newContext = newCanvas.getContext('2d');
  7924. newContext.drawImage(this._data, 0, 0);
  7925. return SpeedyCanvasMediaSource.load(newCanvas);
  7926. }
  7927. /**
  7928. * Load the underlying media
  7929. * @param {HTMLCanvasElement} canvas
  7930. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7931. */
  7932. _load(canvas) {
  7933. if (this.isLoaded()) this.release();
  7934. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  7935. this._data = canvas;
  7936. resolve(this);
  7937. });
  7938. }
  7939. /**
  7940. * Load the underlying media
  7941. * @param {HTMLCanvasElement} canvas
  7942. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7943. */
  7944. static load(canvas) {
  7945. return new SpeedyCanvasMediaSource(PRIVATE_TOKEN)._load(canvas);
  7946. }
  7947. }
  7948. /**
  7949. * OffscreenCanvas media source:
  7950. * a wrapper around OffscreenCanvas
  7951. */
  7952. class SpeedyOffscreenCanvasMediaSource extends SpeedyMediaSource {
  7953. /**
  7954. * @private Constructor
  7955. * @param {symbol} token
  7956. */
  7957. constructor(token) {
  7958. super(token);
  7959. /** @type {OffscreenCanvas} offscreen canvas element */
  7960. this._data = null;
  7961. }
  7962. /**
  7963. * The underlying wrapped object
  7964. * @returns {OffscreenCanvas}
  7965. */
  7966. get data() {
  7967. return this._data;
  7968. }
  7969. /**
  7970. * The type of the underlying media source
  7971. * @returns {MediaType}
  7972. */
  7973. get type() {
  7974. return types/* MediaType */.zu.OffscreenCanvas;
  7975. }
  7976. /**
  7977. * Media width, in pixels
  7978. * @returns {number}
  7979. */
  7980. get width() {
  7981. return this._data ? this._data.width : 0;
  7982. }
  7983. /**
  7984. * Media height, in pixels
  7985. * @returns {number}
  7986. */
  7987. get height() {
  7988. return this._data ? this._data.height : 0;
  7989. }
  7990. /**
  7991. * Clone this media source
  7992. * @returns {SpeedyPromise<SpeedyMediaSource>}
  7993. */
  7994. clone() {
  7995. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  7996. const newCanvas = new OffscreenCanvas(this.width, this.height);
  7997. const newContext = newCanvas.getContext('2d');
  7998. newContext.drawImage(this._data, 0, 0);
  7999. return SpeedyOffscreenCanvasMediaSource.load(newCanvas);
  8000. }
  8001. /**
  8002. * Load the underlying media
  8003. * @param {OffscreenCanvas} offscreenCanvas
  8004. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8005. */
  8006. _load(offscreenCanvas) {
  8007. if (this.isLoaded()) this.release();
  8008. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8009. this._data = offscreenCanvas;
  8010. resolve(this);
  8011. });
  8012. }
  8013. /**
  8014. * Load the underlying media
  8015. * @param {OffscreenCanvas} offscreenCanvas
  8016. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8017. */
  8018. static load(offscreenCanvas) {
  8019. return new SpeedyOffscreenCanvasMediaSource(PRIVATE_TOKEN)._load(offscreenCanvas);
  8020. }
  8021. }
  8022. /**
  8023. * Bitmap media source:
  8024. * a wrapper around ImageBitmap
  8025. */
  8026. class SpeedyBitmapMediaSource extends SpeedyMediaSource {
  8027. /**
  8028. * @private Constructor
  8029. * @param {symbol} token
  8030. */
  8031. constructor(token) {
  8032. super(token);
  8033. /** @type {ImageBitmap} image bitmap */
  8034. this._data = null;
  8035. }
  8036. /**
  8037. * The underlying wrapped object
  8038. * @returns {ImageBitmap}
  8039. */
  8040. get data() {
  8041. return this._data;
  8042. }
  8043. /**
  8044. * The type of the underlying media source
  8045. * @returns {MediaType}
  8046. */
  8047. get type() {
  8048. return types/* MediaType */.zu.Bitmap;
  8049. }
  8050. /**
  8051. * Media width, in pixels
  8052. * @returns {number}
  8053. */
  8054. get width() {
  8055. return this._data ? this._data.width : 0;
  8056. }
  8057. /**
  8058. * Media height, in pixels
  8059. * @returns {number}
  8060. */
  8061. get height() {
  8062. return this._data ? this._data.height : 0;
  8063. }
  8064. /**
  8065. * Clone this media source
  8066. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8067. */
  8068. clone() {
  8069. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8070. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  8071. createImageBitmap(this._data).then(newBitmap => {
  8072. const newSource = new SpeedyBitmapMediaSource(PRIVATE_TOKEN);
  8073. newSource._load(newBitmap).then(resolve, reject);
  8074. }, reject);
  8075. });
  8076. }
  8077. /**
  8078. * Release resources associated with this object
  8079. * @returns {null}
  8080. */
  8081. release() {
  8082. if (this._data != null) this._data.close();
  8083. return super.release();
  8084. }
  8085. /**
  8086. * Load the underlying media
  8087. * @param {ImageBitmap} bitmap
  8088. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8089. */
  8090. _load(bitmap) {
  8091. if (this.isLoaded()) this.release();
  8092. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8093. this._data = bitmap;
  8094. resolve(this);
  8095. });
  8096. }
  8097. /**
  8098. * Load the underlying media
  8099. * @param {ImageBitmap} bitmap
  8100. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8101. */
  8102. static load(bitmap) {
  8103. return new SpeedyBitmapMediaSource(PRIVATE_TOKEN)._load(bitmap);
  8104. }
  8105. }
  8106. /**
  8107. * Data media source:
  8108. * a wrapper around ImageData
  8109. */
  8110. class SpeedyDataMediaSource extends SpeedyMediaSource {
  8111. /**
  8112. * @private Constructor
  8113. * @param {symbol} token
  8114. */
  8115. constructor(token) {
  8116. super(token);
  8117. /** @type {ImageData} image data */
  8118. this._data = null;
  8119. }
  8120. /**
  8121. * The underlying wrapped object
  8122. * @returns {ImageData}
  8123. */
  8124. get data() {
  8125. return this._data;
  8126. }
  8127. /**
  8128. * The type of the underlying media source
  8129. * @returns {MediaType}
  8130. */
  8131. get type() {
  8132. return types/* MediaType */.zu.Data;
  8133. }
  8134. /**
  8135. * Media width, in pixels
  8136. * @returns {number}
  8137. */
  8138. get width() {
  8139. return this._data ? this._data.width : 0;
  8140. }
  8141. /**
  8142. * Media height, in pixels
  8143. * @returns {number}
  8144. */
  8145. get height() {
  8146. return this._data ? this._data.height : 0;
  8147. }
  8148. /**
  8149. * Clone this media source
  8150. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8151. */
  8152. clone() {
  8153. if (this._data == null) throw new utils_errors/* IllegalOperationError */.Er(`Media not loaded`);
  8154. const imageDataCopy = new ImageData(new Uint8ClampedArray(this._data.data), this._data.width, this._data.height);
  8155. return SpeedyDataMediaSource.load(imageDataCopy);
  8156. }
  8157. /**
  8158. * Load the underlying media
  8159. * @param {ImageData} imageData
  8160. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8161. */
  8162. _load(imageData) {
  8163. if (this.isLoaded()) this.release();
  8164. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  8165. this._data = imageData;
  8166. resolve(this);
  8167. });
  8168. }
  8169. /**
  8170. * Load the underlying media
  8171. * @param {ImageData} imageData
  8172. * @returns {SpeedyPromise<SpeedyMediaSource>}
  8173. */
  8174. static load(imageData) {
  8175. return new SpeedyDataMediaSource(PRIVATE_TOKEN)._load(imageData);
  8176. }
  8177. }
  8178. // EXTERNAL MODULE: ./src/utils/observable.js
  8179. var observable = __nested_webpack_require_314174__(3211);
  8180. ;// CONCATENATED MODULE: ./src/gpu/speedy-gpu.js
  8181. /*
  8182. * speedy-vision.js
  8183. * GPU-accelerated Computer Vision for JavaScript
  8184. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8185. *
  8186. * Licensed under the Apache License, Version 2.0 (the "License");
  8187. * you may not use this file except in compliance with the License.
  8188. * You may obtain a copy of the License at
  8189. *
  8190. * http://www.apache.org/licenses/LICENSE-2.0
  8191. *
  8192. * Unless required by applicable law or agreed to in writing, software
  8193. * distributed under the License is distributed on an "AS IS" BASIS,
  8194. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8195. * See the License for the specific language governing permissions and
  8196. * limitations under the License.
  8197. *
  8198. * speedy-gpu.js
  8199. * GPU-accelerated routines for Computer Vision
  8200. */
  8201. /**
  8202. * GPU-accelerated routines for Computer Vision
  8203. */
  8204. class SpeedyGPU extends observable/* Observable */.c {
  8205. /**
  8206. * Constructor
  8207. */
  8208. constructor() {
  8209. super();
  8210. /** @type {SpeedyGL} cached reference */
  8211. this._speedyGL = speedy_gl/* SpeedyGL */.c.instance;
  8212. /** @type {SpeedyProgramCenter} GPU-based programs */
  8213. this._programs = new SpeedyProgramCenter(this);
  8214. /** @type {SpeedyTexturePool} texture pool */
  8215. this._texturePool = new SpeedyTexturePool(this);
  8216. // recreate the state if necessary
  8217. this._speedyGL.subscribe(this._reset, this);
  8218. }
  8219. /**
  8220. * Access point to all GPU programs
  8221. * @returns {SpeedyProgramCenter}
  8222. */
  8223. get programs() {
  8224. return this._programs;
  8225. }
  8226. /**
  8227. * The WebGL Rendering Context
  8228. * Be careful not to cache this, as the WebGL Rendering Context may be lost!
  8229. * @returns {WebGL2RenderingContext}
  8230. */
  8231. get gl() {
  8232. return this._speedyGL.gl;
  8233. }
  8234. /**
  8235. * Internal canvas
  8236. * @returns {HTMLCanvasElement}
  8237. */
  8238. get canvas() {
  8239. return this._speedyGL.canvas;
  8240. }
  8241. /**
  8242. * Texture pool
  8243. * @returns {SpeedyTexturePool}
  8244. */
  8245. get texturePool() {
  8246. return this._texturePool;
  8247. }
  8248. /**
  8249. * Renders a texture to the canvas
  8250. * @param {SpeedyTexture} texture
  8251. * @returns {HTMLCanvasElement} returned for convenience
  8252. */
  8253. renderToCanvas(texture) {
  8254. const width = texture.width;
  8255. const height = texture.height;
  8256. const canvas = this.canvas;
  8257. // do we need to resize the canvas?
  8258. if (width > canvas.width || height > canvas.height) {
  8259. utils/* Utils */.A.warning(`Resizing the canvas to ${width} x ${height}`);
  8260. canvas.width = width;
  8261. canvas.height = height;
  8262. }
  8263. // render
  8264. this.programs.utils.renderToCanvas.outputs(width, height, null);
  8265. this.programs.utils.renderToCanvas(texture);
  8266. // done!
  8267. return canvas;
  8268. }
  8269. /**
  8270. * Upload an image to the GPU
  8271. * @param {SpeedyMediaSource} source
  8272. * @param {SpeedyTexture} outputTexture
  8273. * @returns {SpeedyTexture} outputTexture
  8274. */
  8275. upload(source, outputTexture) {
  8276. return outputTexture.upload(source.data, source.width, source.height);
  8277. }
  8278. /**
  8279. * Releases resources
  8280. * @returns {null}
  8281. */
  8282. release() {
  8283. utils/* Utils */.A.assert(!this.isReleased());
  8284. // release internal components
  8285. this._programs = this._programs.release();
  8286. this._texturePool = this._texturePool.release();
  8287. // unsubscribe
  8288. this._speedyGL.unsubscribe(this._reset);
  8289. return null;
  8290. }
  8291. /**
  8292. * Has this SpeedyGPU been released?
  8293. * @returns {boolean}
  8294. */
  8295. isReleased() {
  8296. return this._programs == null;
  8297. }
  8298. /**
  8299. * Lose & restore the WebGL context (useful for testing purposes)
  8300. * @return {SpeedyPromise<void>} resolves as soon as the context is restored
  8301. */
  8302. loseAndRestoreWebGLContext() {
  8303. return this._speedyGL.loseAndRestoreContext().then(() => void 0);
  8304. }
  8305. /**
  8306. * Reset the internal state
  8307. * (called on context reset)
  8308. */
  8309. _reset() {
  8310. if (this.isReleased()) return;
  8311. this._programs = new SpeedyProgramCenter(this);
  8312. this._texturePool = new SpeedyTexturePool(this);
  8313. this._notify();
  8314. }
  8315. }
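/*
 * Usage sketch (internal API): uploading a loaded SpeedyMediaSource into a
 * texture and drawing it onto the internal canvas. `source` and `texture` are
 * assumed externals here (a loaded SpeedyMediaSource and a SpeedyTexture,
 * e.g., obtained from the texture pool); this is a sketch, not public API.
 *
 *   const gpu = new SpeedyGPU();
 *   gpu.upload(source, texture);     // copies the media into the texture
 *   gpu.renderToCanvas(texture);     // resizes gpu.canvas if needed
 *   document.body.appendChild(gpu.canvas);
 */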
  8316. ;// CONCATENATED MODULE: ./src/core/speedy-size.js
  8317. /*
  8318. * speedy-vision.js
  8319. * GPU-accelerated Computer Vision for JavaScript
  8320. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8321. *
  8322. * Licensed under the Apache License, Version 2.0 (the "License");
  8323. * you may not use this file except in compliance with the License.
  8324. * You may obtain a copy of the License at
  8325. *
  8326. * http://www.apache.org/licenses/LICENSE-2.0
  8327. *
  8328. * Unless required by applicable law or agreed to in writing, software
  8329. * distributed under the License is distributed on an "AS IS" BASIS,
  8330. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8331. * See the License for the specific language governing permissions and
  8332. * limitations under the License.
  8333. *
  8334. * speedy-size.js
  8335. * Size of a rectangle
  8336. */
  8337. /**
  8338. * Size of a rectangle
  8339. */
  8340. class SpeedySize {
  8341. /**
  8342. * Constructor
  8343. * @param {number} width non-negative number
  8344. * @param {number} height non-negative number
  8345. */
  8346. constructor(width, height) {
  8347. /** @type {number} width */
  8348. this._width = Math.max(0, +width);
  8349. /** @type {number} height */
  8350. this._height = Math.max(0, +height);
  8351. }
  8352. //
  8353. // ===== METHODS =====
  8354. //
  8355. /**
  8356. * Width
  8357. * @returns {number}
  8358. */
  8359. get width() {
  8360. return this._width;
  8361. }
  8362. /**
  8363. * Width
  8364. * @param {number} value
  8365. */
  8366. set width(value) {
  8367. this._width = Math.max(0, +value);
  8368. }
  8369. /**
  8370. * Height
  8371. * @returns {number}
  8372. */
  8373. get height() {
  8374. return this._height;
  8375. }
  8376. /**
  8377. * Height
  8378. * @param {number} value
  8379. */
  8380. set height(value) {
  8381. this._height = Math.max(0, +value);
  8382. }
  8383. /**
  8384. * Convert to string
  8385. * @returns {string}
  8386. */
  8387. toString() {
  8388. return `SpeedySize(${this.width}, ${this.height})`;
  8389. }
  8390. /**
  8391. * Is this size equal to anotherSize?
  8392. * @param {SpeedySize} anotherSize
  8393. * @returns {boolean}
  8394. */
  8395. equals(anotherSize) {
  8396. return this.width === anotherSize.width && this.height === anotherSize.height;
  8397. }
  8398. /**
  8399. * The area of the rectangle
  8400. * @returns {number}
  8401. */
  8402. area() {
  8403. return this.width * this.height;
  8404. }
  8405. }
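/*
 * Usage sketch: SpeedySize is a simple value object. Widths and heights are
 * clamped to non-negative numbers by the constructor and the setters.
 *
 *   const size = new SpeedySize(640, 480);
 *   size.area();                            // 307200
 *   size.equals(new SpeedySize(640, 480));  // true
 *   size.width = -10;                       // clamped to 0
 */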
  8406. ;// CONCATENATED MODULE: ./src/core/speedy-media.js
  8407. /*
  8408. * speedy-vision.js
  8409. * GPU-accelerated Computer Vision for JavaScript
  8410. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8411. *
  8412. * Licensed under the Apache License, Version 2.0 (the "License");
  8413. * you may not use this file except in compliance with the License.
  8414. * You may obtain a copy of the License at
  8415. *
  8416. * http://www.apache.org/licenses/LICENSE-2.0
  8417. *
  8418. * Unless required by applicable law or agreed to in writing, software
  8419. * distributed under the License is distributed on an "AS IS" BASIS,
  8420. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8421. * See the License for the specific language governing permissions and
  8422. * limitations under the License.
  8423. *
  8424. * speedy-media.js
  8425. * SpeedyMedia implementation
  8426. */
  8427. /** @typedef {import('./speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
  8428. /**
  8429. * @typedef {object} SpeedyMediaOptions
  8430. * @property {ImageFormat} [format] default is RGBA
  8431. */
  8432. /** A helper used to keep the constructor of SpeedyMedia private */
  8433. const speedy_media_PRIVATE_TOKEN = Symbol();
  8434. /**
  8435. * SpeedyMedia encapsulates a media element
  8436. * (e.g., image, video, canvas)
  8437. */
  8438. class SpeedyMedia {
  8439. /**
  8440. * @private Constructor. It receives a VALID media source that is ALREADY LOADED.
  8441. * @param {symbol} token
  8442. * @param {SpeedyMediaSource} source
  8443. * @param {SpeedyMediaOptions} [options] options object
  8444. */
  8445. constructor(token, source, options = {}) {
  8446. // private constructor
  8447. if (token !== speedy_media_PRIVATE_TOKEN) throw new utils_errors/* IllegalOperationError */.Er();
  8448. /** @type {SpeedyMediaSource} media source */
  8449. this._source = source;
  8450. /** @type {ImageFormat} format */
  8451. this._format = options.format !== undefined ? options.format : types/* ImageFormat */.f5.RGBA;
  8452. /** @type {SpeedyMediaOptions} options */
  8453. this._options = Object.freeze(Object.assign(Object.assign({}, options), {}, {
  8454. format: this._format
  8455. }));
  8456. // validate
  8457. if (!source.isLoaded()) throw new utils_errors/* IllegalOperationError */.Er(`Source not loaded: ${source}`);else if (this._format !== types/* ImageFormat */.f5.RGBA && this._format !== types/* ImageFormat */.f5.GREY) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid format: ${this._format}`);
  8458. }
  8459. /**
  8460. * Load a media source
  8461. * Will wait until the HTML media source is loaded
  8462. * @param {SpeedyMediaSourceNativeElement} mediaSource An image, video or canvas
  8463. * @param {SpeedyMediaOptions} [options] options object
  8464. * @param {boolean} [log] show log message?
  8465. * @returns {SpeedyPromise<SpeedyMedia>}
  8466. */
  8467. static load(mediaSource, options = {}, log = true) {
  8468. return SpeedyMediaSource.load(mediaSource).then(source => {
  8469. utils/* Utils */.A.assert(source.width !== 0 && source.height !== 0);
  8470. // FIXME user could pass an invalid format in options if ImageFormat is made public
  8471. const media = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, source, options);
  8472. // show log message
  8473. if (log) utils/* Utils */.A.log(`Loaded SpeedyMedia with a ${mediaSource}.`);
  8474. // done!
  8475. return media;
  8476. });
  8477. }
  8478. /**
  8479. * The media element (image, video, canvas) encapsulated by this SpeedyMedia object
  8480. * @returns {SpeedyMediaSourceNativeElement} the media element
  8481. */
  8482. get source() {
  8483. return this._source ? this._source.data : null;
  8484. }
  8485. /**
  8486. * The type of the media attached to this SpeedyMedia object
  8487. * @returns {"image" | "video" | "canvas" | "offscreen-canvas" | "bitmap" | "data" | "unknown"}
  8488. */
  8489. get type() {
  8490. if (this.isReleased()) return 'unknown';
  8491. switch (this._source.type) {
  8492. case types/* MediaType */.zu.Image:
  8493. return 'image';
  8494. case types/* MediaType */.zu.Video:
  8495. return 'video';
  8496. case types/* MediaType */.zu.Canvas:
  8497. return 'canvas';
  8498. case types/* MediaType */.zu.OffscreenCanvas:
  8499. return 'offscreen-canvas';
  8500. case types/* MediaType */.zu.Bitmap:
  8501. return 'bitmap';
  8502. case types/* MediaType */.zu.Data:
  8503. return 'data';
  8504. default:
  8505. // this shouldn't happen
  8506. return 'unknown';
  8507. }
  8508. }
  8509. /**
  8510. * Gets the width of the media
  8511. * @returns {number} media width
  8512. */
  8513. get width() {
  8514. return this._source ? this._source.width : 0;
  8515. }
  8516. /**
  8517. * Gets the height of the media
  8518. * @returns {number} media height
  8519. */
  8520. get height() {
  8521. return this._source ? this._source.height : 0;
  8522. }
  8523. /**
  8524. * The size of this media, in pixels
  8525. * @returns {SpeedySize}
  8526. */
  8527. get size() {
  8528. return this._source ? new SpeedySize(this._source.width, this._source.height) : new SpeedySize(0, 0);
  8529. }
  8530. /**
  8531. * Returns a read-only object featuring advanced options
  8532. * related to this SpeedyMedia object
  8533. * @returns {SpeedyMediaOptions}
  8534. */
  8535. get options() {
  8536. return this._options;
  8537. }
  8538. /**
  8539. * Releases resources associated with this media
  8540. * @returns {null}
  8541. */
  8542. release() {
  8543. if (!this.isReleased()) {
  8544. utils/* Utils */.A.log('Releasing SpeedyMedia object...');
  8545. this._source = this._source.release();
  8546. }
  8547. return null;
  8548. }
  8549. /**
  8550. * Has this media been released?
  8551. * @returns {boolean}
  8552. */
  8553. isReleased() {
  8554. return this._source == null;
  8555. }
  8556. /**
  8557. * Clones the SpeedyMedia object
  8558. * @returns {SpeedyPromise<SpeedyMedia>} a clone object
  8559. */
  8560. clone() {
  8561. // has the media been released?
  8562. if (this.isReleased()) throw new utils_errors/* IllegalOperationError */.Er(`Can't clone a SpeedyMedia that has been released`);
  8563. // clone the object
  8564. const clone = new SpeedyMedia(speedy_media_PRIVATE_TOKEN, this._source, this._options);
  8565. // done!
  8566. return speedy_promise/* SpeedyPromise */.i.resolve(clone);
  8567. }
  8568. /**
  8569. * Converts the media to an ImageBitmap
  8570. * @returns {SpeedyPromise<ImageBitmap>}
  8571. */
  8572. toBitmap() {
  8573. if (this.isReleased()) throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to ImageBitmap: the media has been released');else if (!this._source.isLoaded()) throw new utils_errors/* IllegalOperationError */.Er('Can\'t convert SpeedyMedia to bitmap: the media hasn\'t been loaded');else if (this._source.type == types/* MediaType */.zu.Bitmap) return speedy_promise/* SpeedyPromise */.i.resolve(this._source.data);else return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => createImageBitmap(this._source.data).then(resolve, reject));
  8574. }
  8575. }
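/*
 * Usage sketch: loading a SpeedyMedia from an HTML element. The public
 * Speedy.load() entry point is assumed to delegate to SpeedyMedia.load();
 * within this module the static method can be called directly:
 *
 *   const video = document.querySelector('video');
 *   SpeedyMedia.load(video).then(media => {
 *       console.log(media.type, media.width, media.height); // e.g. "video" 1280 720
 *       return media.toBitmap();                            // SpeedyPromise<ImageBitmap>
 *   }).then(bitmap => {
 *       // ... use the bitmap, then release the media when done ...
 *   });
 */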
  8576. ;// CONCATENATED MODULE: ./src/core/speedy-platform.js
  8577. /*
  8578. * speedy-vision.js
  8579. * GPU-accelerated Computer Vision for JavaScript
  8580. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8581. *
  8582. * Licensed under the Apache License, Version 2.0 (the "License");
  8583. * you may not use this file except in compliance with the License.
  8584. * You may obtain a copy of the License at
  8585. *
  8586. * http://www.apache.org/licenses/LICENSE-2.0
  8587. *
  8588. * Unless required by applicable law or agreed to in writing, software
  8589. * distributed under the License is distributed on an "AS IS" BASIS,
  8590. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8591. * See the License for the specific language governing permissions and
  8592. * limitations under the License.
  8593. *
  8594. * speedy-platform.js
  8595. * Utilities to query information about the graphics driver
  8596. */
  8597. /**
  8598. * Utilities to query information about the graphics driver. This information
  8599. * may or may not be available, depending on the privacy settings of the web
  8600. * browser. In addition, it may be more or less accurate in different browsers.
  8601. */
  8602. class SpeedyPlatform extends speedy_namespace/* SpeedyNamespace */.Q {
  8603. /**
  8604. * Renderer string of the graphics driver
  8605. * @returns {string}
  8606. */
  8607. static get renderer() {
  8608. return speedy_gl/* SpeedyGL */.c.instance.renderer;
  8609. }
  8610. /**
  8611. * Vendor string of the graphics driver
  8612. * @returns {string}
  8613. */
  8614. static get vendor() {
  8615. return speedy_gl/* SpeedyGL */.c.instance.vendor;
  8616. }
  8617. }
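/*
 * Usage sketch: querying the graphics driver. Depending on the browser's
 * privacy settings, these strings may be generic or masked.
 *
 *   console.log(SpeedyPlatform.renderer);  // e.g. "ANGLE (...)" or a masked value
 *   console.log(SpeedyPlatform.vendor);
 */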
  8618. ;// CONCATENATED MODULE: ./src/core/speedy-vector.js
  8619. /*
  8620. * speedy-vision.js
  8621. * GPU-accelerated Computer Vision for JavaScript
  8622. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8623. *
  8624. * Licensed under the Apache License, Version 2.0 (the "License");
  8625. * you may not use this file except in compliance with the License.
  8626. * You may obtain a copy of the License at
  8627. *
  8628. * http://www.apache.org/licenses/LICENSE-2.0
  8629. *
  8630. * Unless required by applicable law or agreed to in writing, software
  8631. * distributed under the License is distributed on an "AS IS" BASIS,
  8632. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8633. * See the License for the specific language governing permissions and
  8634. * limitations under the License.
  8635. *
  8636. * speedy-vector.js
  8637. * Vectors
  8638. */
  8639. /**
  8640. * 2D vector of floating-point numbers
  8641. */
  8642. class SpeedyVector2 {
  8643. /**
  8644. * Create a 2D vector
  8645. * @param {number} x
  8646. * @param {number} y
  8647. */
  8648. constructor(x, y) {
  8649. /** @type {number} x coordinate */
  8650. this._x = +x;
  8651. /** @type {number} y coordinate */
  8652. this._y = +y;
  8653. }
  8654. //
  8655. // ===== METHODS =====
  8656. //
  8657. /**
  8658. * x-coordinate
  8659. * @returns {number}
  8660. */
  8661. get x() {
  8662. return this._x;
  8663. }
  8664. /**
  8665. * x-coordinate
  8666. * @param {number} value
  8667. */
  8668. set x(value) {
  8669. this._x = +value;
  8670. }
  8671. /**
  8672. * y-coordinate
  8673. * @returns {number}
  8674. */
  8675. get y() {
  8676. return this._y;
  8677. }
  8678. /**
  8679. * y-coordinate
  8680. * @param {number} value
  8681. */
  8682. set y(value) {
  8683. this._y = +value;
  8684. }
  8685. /**
  8686. * Convert to string
  8687. * @returns {string}
  8688. */
  8689. toString() {
  8690. return `SpeedyVector2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
  8691. }
  8692. /**
  8693. * Is this vector equal to v?
  8694. * @param {SpeedyVector2} v
  8695. * @returns {boolean}
  8696. */
  8697. equals(v) {
  8698. return this.x === v.x && this.y === v.y;
  8699. }
  8700. /**
  8701. * Dot product between this vector and another vector
  8702. * @param {SpeedyVector2} v another vector
  8703. * @returns {number}
  8704. */
  8705. dot(v) {
  8706. return this.x * v.x + this.y * v.y;
  8707. }
  8708. /**
  8709. * The distance between this vector and another vector
  8710. * @param {SpeedyVector2} v another vector
  8711. * @returns {number}
  8712. */
  8713. distanceTo(v) {
  8714. const dx = this.x - v.x;
  8715. const dy = this.y - v.y;
  8716. return Math.sqrt(dx * dx + dy * dy);
  8717. }
  8718. /**
  8719. * Euclidean norm
  8720. * @returns {number}
  8721. */
  8722. length() {
  8723. return Math.sqrt(this.x * this.x + this.y * this.y);
  8724. }
  8725. /**
  8726. * Returns a normalized version of this vector
  8727. * @returns {SpeedyVector2}
  8728. */
  8729. normalized() {
  8730. const len = this.length();
  8731. if (len > 0.0) return new SpeedyVector2(this.x / len, this.y / len);else return new SpeedyVector2(0.0, 0.0);
  8732. }
  8733. /**
  8734. * Returns a copy of this vector translated by offset
  8735. * @param {SpeedyVector2} offset
  8736. * @returns {SpeedyVector2}
  8737. */
  8738. plus(offset) {
  8739. return new SpeedyVector2(this.x + offset.x, this.y + offset.y);
  8740. }
  8741. /**
  8742. * Returns a copy of this vector translated by -offset
  8743. * @param {SpeedyVector2} offset
  8744. * @returns {SpeedyVector2}
  8745. */
  8746. minus(offset) {
  8747. return new SpeedyVector2(this.x - offset.x, this.y - offset.y);
  8748. }
  8749. /**
  8750. * Returns a copy of this vector scaled by a scalar
  8751. * @param {number} scalar
  8752. * @returns {SpeedyVector2}
  8753. */
  8754. times(scalar) {
  8755. return new SpeedyVector2(this.x * scalar, this.y * scalar);
  8756. }
  8757. }
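/*
 * Usage sketch: basic 2D vector arithmetic with SpeedyVector2. All operations
 * return new vectors; only the x/y setters mutate in place.
 *
 *   const u = new SpeedyVector2(3, 4);
 *   u.length();                           // 5
 *   u.normalized().toString();            // SpeedyVector2(0.60000, 0.80000)
 *   u.plus(new SpeedyVector2(1, 1));      // SpeedyVector2(4, 5)
 *   u.dot(new SpeedyVector2(1, 0));       // 3
 */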
  8758. ;// CONCATENATED MODULE: ./src/core/speedy-point.js
  8759. /*
  8760. * speedy-vision.js
  8761. * GPU-accelerated Computer Vision for JavaScript
  8762. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8763. *
  8764. * Licensed under the Apache License, Version 2.0 (the "License");
  8765. * you may not use this file except in compliance with the License.
  8766. * You may obtain a copy of the License at
  8767. *
  8768. * http://www.apache.org/licenses/LICENSE-2.0
  8769. *
  8770. * Unless required by applicable law or agreed to in writing, software
  8771. * distributed under the License is distributed on an "AS IS" BASIS,
  8772. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8773. * See the License for the specific language governing permissions and
  8774. * limitations under the License.
  8775. *
  8776. * speedy-point.js
  8777. * Points in space
  8778. */
  8779. /**
  8780. * 2D point
  8781. */
  8782. class SpeedyPoint2 {
  8783. /**
  8784. * Create a 2D point
  8785. * @param {number} x
  8786. * @param {number} y
  8787. */
  8788. constructor(x, y) {
  8789. /** @type {number} x coordinate */
  8790. this._x = +x;
  8791. /** @type {number} y coordinate */
  8792. this._y = +y;
  8793. }
  8794. //
  8795. // ===== METHODS =====
  8796. //
  8797. /**
  8798. * x-coordinate
  8799. * @returns {number}
  8800. */
  8801. get x() {
  8802. return this._x;
  8803. }
  8804. /**
  8805. * x-coordinate
  8806. * @param {number} value
  8807. */
  8808. set x(value) {
  8809. this._x = +value;
  8810. }
  8811. /**
  8812. * y-coordinate
  8813. * @returns {number}
  8814. */
  8815. get y() {
  8816. return this._y;
  8817. }
  8818. /**
  8819. * y-coordinate
  8820. * @param {number} value
  8821. */
  8822. set y(value) {
  8823. this._y = +value;
  8824. }
  8825. /**
  8826. * Convert to string
  8827. * @returns {string}
  8828. */
  8829. toString() {
  8830. return `SpeedyPoint2(${this.x.toFixed(5)}, ${this.y.toFixed(5)})`;
  8831. }
  8832. /**
  8833. * Add a vector to this point
  8834. * @param {SpeedyVector2} v
  8835. * @returns {SpeedyPoint2}
  8836. */
  8837. plus(v) {
  8838. return new SpeedyPoint2(this.x + v.x, this.y + v.y);
  8839. }
  8840. /**
  8841. * Subtracts a point p from this point
  8842. * @param {SpeedyPoint2} p
  8843. * @returns {SpeedyVector2}
  8844. */
  8845. minus(p) {
  8846. return new SpeedyVector2(this.x - p.x, this.y - p.y);
  8847. }
  8848. /**
  8849. * Is this point equal to p?
  8850. * @param {SpeedyPoint2} p
  8851. * @returns {boolean}
  8852. */
  8853. equals(p) {
  8854. return this.x === p.x && this.y === p.y;
  8855. }
  8856. }
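/*
 * Usage sketch: points and vectors are distinct types. Subtracting two points
 * yields a SpeedyVector2; adding a vector to a point yields a SpeedyPoint2.
 *
 *   const p = new SpeedyPoint2(10, 10);
 *   const q = new SpeedyPoint2(13, 14);
 *   const d = q.minus(p);                 // SpeedyVector2(3, 4)
 *   p.plus(d).equals(q);                  // true
 */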
  8857. // EXTERNAL MODULE: ./src/core/speedy-matrix-expr.js
  8858. var speedy_matrix_expr = __nested_webpack_require_314174__(6306);
  8859. // EXTERNAL MODULE: ./src/core/speedy-matrix-wasm.js
  8860. var speedy_matrix_wasm = __nested_webpack_require_314174__(6465);
  8861. // EXTERNAL MODULE: ./src/core/speedy-matrix.js
  8862. var speedy_matrix = __nested_webpack_require_314174__(4188);
  8863. ;// CONCATENATED MODULE: ./src/core/speedy-matrix-factory.js
  8864. /*
  8865. * speedy-vision.js
  8866. * GPU-accelerated Computer Vision for JavaScript
  8867. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  8868. *
  8869. * Licensed under the Apache License, Version 2.0 (the "License");
  8870. * you may not use this file except in compliance with the License.
  8871. * You may obtain a copy of the License at
  8872. *
  8873. * http://www.apache.org/licenses/LICENSE-2.0
  8874. *
  8875. * Unless required by applicable law or agreed to in writing, software
  8876. * distributed under the License is distributed on an "AS IS" BASIS,
  8877. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  8878. * See the License for the specific language governing permissions and
  8879. * limitations under the License.
  8880. *
  8881. * speedy-matrix-factory.js
  8882. * A factory of matrices
  8883. */
  8884. /**
  8885. * Matrix routines
  8886. */
  8887. class SpeedyMatrixFactory extends Function {
  8888. /**
  8889. * Constructor
  8890. */
  8891. constructor() {
  8892. // This factory can be invoked as a function
  8893. super('...args', 'return args.length > 1 ? this._create(...args) : this._from(args[0])');
  8894. return this.bind(this);
  8895. }
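/*
 * Note on the constructor above: extending Function and returning this.bind(this)
 * makes each SpeedyMatrixFactory instance directly callable. Two or more
 * arguments route to _create(rows, columns, entries); a single argument routes
 * to _from(expr). Sketch, assuming the instance is exposed as Speedy.Matrix in
 * the public API:
 *
 *   Speedy.Matrix(3, 3, [1,0,0,  0,1,0,  0,0,1]);  // 3x3 identity, column-major entries
 *   Speedy.Matrix(someMatrixExpr);                 // evaluate a matrix expression
 */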
  8896. /**
  8897. * @private
  8898. *
  8899. * Create a new matrix filled with the specified size and entries
  8900. * @param {number} rows
  8901. * @param {number} [columns]
  8902. * @param {number[]} [entries] in column-major format
  8903. * @returns {SpeedyMatrix}
  8904. */
  8905. _create(rows, columns = rows, entries = []) {
  8906. return speedy_matrix.SpeedyMatrix.Create(rows, columns, entries);
  8907. }
  8908. /**
  8909. * @private
  8910. *
  8911. * Evaluate an expression synchronously and store the result in a new matrix
  8912. * @param {SpeedyMatrixExpr} expr matrix expression
  8913. * @returns {SpeedyMatrix}
  8914. */
  8915. _from(expr) {
  8916. return speedy_matrix.SpeedyMatrix.From(expr);
  8917. }
  8918. /**
  8919. * Create a new matrix filled with zeros with the specified size
  8920. * @param {number} rows
  8921. * @param {number} [columns]
  8922. * @returns {SpeedyMatrix}
  8923. */
  8924. Zeros(rows, columns = rows) {
  8925. return speedy_matrix.SpeedyMatrix.Zeros(rows, columns);
  8926. }
  8927. /**
  8928. * Create a new matrix filled with ones with the specified size
  8929. * @param {number} rows
  8930. * @param {number} [columns]
  8931. * @returns {SpeedyMatrix}
  8932. */
  8933. Ones(rows, columns = rows) {
  8934. return speedy_matrix.SpeedyMatrix.Ones(rows, columns);
  8935. }
  8936. /**
  8937. * Create an identity matrix with the specified size
  8938. * @param {number} rows
  8939. * @param {number} [columns]
  8940. * @returns {SpeedyMatrix}
  8941. */
  8942. Eye(rows, columns = rows) {
  8943. return speedy_matrix.SpeedyMatrix.Eye(rows, columns);
  8944. }
  8945. /**
  8946. * Returns a promise that resolves immediately if the WebAssembly routines
  8947. * are ready to be used, or as soon as they do become ready
  8948. * @returns {SpeedyPromise<void>}
  8949. */
  8950. ready() {
  8951. return speedy_matrix.SpeedyMatrix.ready();
  8952. }
  8953. /**
  8954. * QR decomposition
  8955. * @param {SpeedyMatrix} Q is m x n (reduced) or m x m (full), output
  8956. * @param {SpeedyMatrix} R is n x n (reduced) or m x n (full), output
  8957. * @param {SpeedyMatrix} mat is m x n, input
  8958. * @param {object} [options]
  8959. * @param {'reduced'|'full'} [options.mode]
  8960. * @returns {SpeedyPromise<[SpeedyMatrix,SpeedyMatrix]>} resolves to [Q,R]
  8961. */
  8962. qr(Q, R, mat, {
  8963. mode = 'reduced'
  8964. } = {}) {
  8965. const A = mat,
  8966. m = mat.rows,
  8967. n = mat.columns;
  8968. // validate shapes & mode
  8969. if (mode == 'reduced') {
  8970. if (Q.rows != m || Q.columns != n || R.rows != n || R.columns != n) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for reduced QR`);
  8971. } else if (mode == 'full') {
  8972. if (Q.rows != m || Q.columns != m || R.rows != m || R.columns != n) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape for full QR`);
  8973. } else throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mode for QR: "${mode}"`);
  8974. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  8975. wasm,
  8976. memory
  8977. }) => {
  8978. // allocate matrices
  8979. const Qptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, Q);
  8980. const Rptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, R);
  8981. const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
  8982. // copy input matrices to WASM memory
  8983. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
  8984. // run the WASM routine
  8985. if (mode == 'reduced') wasm.exports.Mat32_qr_reduced(Qptr, Rptr, Aptr);else wasm.exports.Mat32_qr_full(Qptr, Rptr, Aptr);
  8986. // copy output matrices from WASM memory
  8987. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Qptr, Q);
  8988. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, Rptr, R);
  8989. // deallocate matrices
  8990. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
  8991. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Rptr);
  8992. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Qptr);
  8993. // done!
  8994. return [Q, R];
  8995. });
  8996. }
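/*
 * Usage sketch: reduced QR decomposition of a 3x2 matrix. Q and R must be
 * pre-allocated with the shapes documented above; entries are column-major.
 * The factory instance is assumed to be exposed as Speedy.Matrix.
 *
 *   const A = Speedy.Matrix(3, 2, [1, 1, 0,   0, 1, 1]);
 *   const Q = Speedy.Matrix.Zeros(3, 2);
 *   const R = Speedy.Matrix.Zeros(2, 2);
 *   Speedy.Matrix.qr(Q, R, A).then(([q, r]) => console.log(q.toString(), r.toString()));
 */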
  8997. /**
  8998. * Solve a possibly overdetermined system of linear
  8999. * equations Ax = b for x using ordinary least squares
  9000. * @param {SpeedyMatrix} solution n x 1, output
  9001. * @param {SpeedyMatrix} A m x n, m >= n, input
9002. * @param {SpeedyMatrix} b m x 1, input
  9003. * @param {object} [options]
9004. * @param {'qr'} [options.method] solution method
  9005. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
  9006. */
  9007. ols(solution, A, b, {
  9008. method = 'qr'
  9009. } = {}) {
  9010. const m = A.rows,
  9011. n = A.columns;
  9012. const x = solution;
  9013. // validate shapes
  9014. if (m < n || n == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an underdetermined system of equations`);else if (b.rows != m || b.columns != 1 || x.rows != n || x.columns != 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
  9015. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9016. wasm,
  9017. memory
  9018. }) => {
  9019. // allocate matrices
  9020. const Aptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, A);
  9021. const bptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, b);
  9022. const xptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, x);
  9023. // copy input matrices to WASM memory
  9024. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, Aptr, A);
  9025. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, bptr, b);
  9026. // run the WASM routine
  9027. switch (method) {
  9028. case 'qr':
  9029. wasm.exports.Mat32_qr_ols(xptr, Aptr, bptr, 2);
  9030. break;
  9031. default:
  9032. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
  9033. }
  9034. // copy output matrix from WASM memory
  9035. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, xptr, x);
  9036. // deallocate matrices
  9037. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, xptr);
  9038. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, bptr);
  9039. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, Aptr);
  9040. // done!
  9041. return solution;
  9042. });
  9043. }
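/*
 * Usage sketch: ordinary least squares for an overdetermined system Ax = b,
 * here fitting a line y = c0 + c1 * t to three samples (t, y) = (0,1), (1,3),
 * (2,5). Entries are column-major; Speedy.Matrix is the assumed entry point.
 *
 *   const A = Speedy.Matrix(3, 2, [1, 1, 1,   0, 1, 2]);  // columns: ones, t
 *   const b = Speedy.Matrix(3, 1, [1, 3, 5]);
 *   const x = Speedy.Matrix.Zeros(2, 1);
 *   Speedy.Matrix.ols(x, A, b).then(sol => console.log(sol.toString())); // ~[1, 2]
 */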
  9044. /**
  9045. * Solve a system of linear equations Ax = b for x
  9046. * @param {SpeedyMatrix} solution m x 1, output
  9047. * @param {SpeedyMatrix} A m x m, input
9048. * @param {SpeedyMatrix} b m x 1, input
  9049. * @param {object} [options]
9050. * @param {'qr'} [options.method] solution method
  9051. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to solution
  9052. */
  9053. solve(solution, A, b, {
  9054. method = 'qr'
  9055. } = {}) {
  9056. const m = A.rows,
  9057. n = A.columns;
  9058. const x = solution;
  9059. // validate shapes
  9060. if (m != n) throw new utils_errors/* IllegalArgumentError */.qw(`Can't solve an over or underdetermined system of equations`);else if (b.rows != m || b.columns != 1 || x.rows != m || x.columns != 1) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);
  9061. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9062. wasm,
  9063. memory
  9064. }) => {
  9065. // select method
  9066. switch (method) {
  9067. case 'qr':
  9068. return this.ols(x, A, b, {
  9069. method
  9070. });
  9071. /*case 'lu':
  9072. break;*/
  9073. default:
  9074. throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${method}"`);
  9075. }
  9076. });
  9077. }
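/*
 * Usage sketch: solving a square system Ax = b. Internally this delegates to
 * ols() with the QR method. Speedy.Matrix is the assumed public entry point.
 *
 *   const A = Speedy.Matrix(2, 2, [3, 0,   0, 2]);  // column-major: diag(3, 2)
 *   const b = Speedy.Matrix(2, 1, [6, 4]);
 *   const x = Speedy.Matrix.Zeros(2, 1);
 *   Speedy.Matrix.solve(x, A, b).then(sol => console.log(sol.toString())); // [2, 2]
 */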
  9078. /**
  9079. * Compute a perspective transformation using 4 correspondences of points
  9080. * @param {SpeedyMatrix} homography 3x3 output - homography matrix
  9081. * @param {SpeedyMatrix} src 2x4 input points - source coordinates
  9082. * @param {SpeedyMatrix} dest 2x4 input points - destination coordinates
  9083. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
  9084. */
  9085. perspective(homography, src, dest) {
  9086. // validate shapes
  9087. if (src.rows != 2 || src.columns != 4 || dest.rows != 2 || dest.columns != 4) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x4 input matrices to compute a perspective transformation`);else if (homography.rows != 3 || homography.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of perspective() is a 3x3 homography`);
  9088. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9089. wasm,
  9090. memory
  9091. }) => {
  9092. // allocate matrices
  9093. const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
  9094. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9095. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9096. // copy input matrices to WASM memory
  9097. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9098. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9099. // run the WASM routine
  9100. wasm.exports.Mat32_homography_ndlt4(homptr, srcptr, destptr);
  9101. // copy output matrix from WASM memory
  9102. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
  9103. // deallocate matrices
  9104. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9105. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9106. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
  9107. // done!
  9108. return homography;
  9109. });
  9110. }
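/*
 * Usage sketch: homography mapping the unit square to a quadrilateral, from
 * exactly 4 correspondences. Points are stored one per column (2 x 4),
 * column-major. Speedy.Matrix is the assumed public entry point.
 *
 *   const src = Speedy.Matrix(2, 4, [0,0,  1,0,  1,1,  0,1]);
 *   const dst = Speedy.Matrix(2, 4, [10,10,  110,8,  120,90,  12,100]);
 *   const H = Speedy.Matrix.Zeros(3, 3);
 *   Speedy.Matrix.perspective(H, src, dst).then(h => console.log(h.toString()));
 */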
  9111. /**
  9112. * Compute a perspective transformation using n >= 4 correspondences of points
  9113. * @param {SpeedyMatrix} homography 3x3 output - homography matrix
  9114. * @param {SpeedyMatrix} src 2 x n input points - source coordinates
  9115. * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
  9116. * @param {object} [options]
  9117. * @param {'default'|'pransac'} [options.method] method of computation
  9118. * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
  9119. * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
  9120. * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
  9121. * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
  9122. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to homography
  9123. */
  9124. findHomography(homography, src, dest, {
  9125. method = 'default',
  9126. mask = null,
  9127. reprojectionError = 3,
  9128. numberOfHypotheses = 512,
  9129. bundleSize = 128
  9130. } = {}) {
  9131. // validate shapes
  9132. if (src.rows != 2 || src.columns < 4 || dest.rows != 2 || dest.columns != src.columns) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 4) input matrices to compute a homography`);else if (homography.rows != 3 || homography.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of findHomography() is a 3x3 homography`);else if (mask != null && (mask.rows != 1 || mask.columns != src.columns)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
  9133. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9134. wasm,
  9135. memory
  9136. }) => {
  9137. // allocate matrices
  9138. const homptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, homography);
  9139. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9140. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9141. const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
  9142. // copy input matrices to WASM memory
  9143. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9144. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9145. // run the WASM routine
  9146. switch (method) {
  9147. case 'pransac':
  9148. utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
  9149. wasm.exports.Mat32_pransac_homography(homptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
  9150. break;
  9151. case 'default':
  9152. case 'dlt':
  9153. // obsolete
  9154. wasm.exports.Mat32_homography_ndlt(homptr, srcptr, destptr);
  9155. break;
  9156. default:
  9157. throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findHomography(): "${method}"`);
  9158. }
  9159. // copy output matrices from WASM memory
  9160. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, homptr, homography);
  9161. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
  9162. // deallocate matrices
  9163. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
  9164. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9165. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9166. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, homptr);
  9167. // done!
  9168. return homography;
  9169. });
  9170. }
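/*
 * Usage sketch: robust homography estimation with the PRANSAC method and an
 * inliers mask, from n >= 4 noisy correspondences. srcPts and dstPts are
 * assumed 2 x n matrices (one point per column); Speedy.Matrix is the assumed
 * public entry point.
 *
 *   const H = Speedy.Matrix.Zeros(3, 3);
 *   const mask = Speedy.Matrix.Zeros(1, srcPts.columns);
 *   Speedy.Matrix.findHomography(H, srcPts, dstPts, {
 *       method: 'pransac',
 *       mask,
 *       reprojectionError: 3     // pixels
 *   }).then(h => {
 *       // mask now holds 1 for inliers and 0 for outliers
 *   });
 */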
  9171. /**
  9172. * Apply a perspective transformation to a set of 2D points
  9173. * @param {SpeedyMatrix} dest 2 x n output matrix
  9174. * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
  9175. * @param {SpeedyMatrix} transform 3x3 homography matrix
  9176. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
  9177. */
  9178. applyPerspectiveTransform(dest, src, transform) {
  9179. // validate shapes
  9180. if (src.rows != 2 || dest.rows != 2 || src.columns != dest.columns) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);else if (transform.rows != 3 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The perspective transformation must be a 3x3 matrix`);
  9181. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9182. wasm,
  9183. memory
  9184. }) => {
  9185. // allocate matrices
  9186. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9187. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9188. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9189. // copy input matrices to WASM memory
  9190. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9191. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
  9192. // run the WASM routine
  9193. wasm.exports.Mat32_transform_perspective(destptr, srcptr, matptr);
  9194. // copy output matrix from WASM memory
  9195. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
  9196. // deallocate matrices
  9197. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9198. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9199. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9200. // done!
  9201. return dest;
  9202. });
  9203. }
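/*
 * Usage sketch: warping a set of points by a 3x3 homography H (e.g., obtained
 * from perspective() or findHomography() above). The output matrix must be
 * pre-allocated with the same 2 x n shape as the input.
 *
 *   const pts = Speedy.Matrix(2, 3, [0,0,  1,0,  0,1]);  // 3 points, one per column
 *   const out = Speedy.Matrix.Zeros(2, 3);
 *   Speedy.Matrix.applyPerspectiveTransform(out, pts, H).then(m => console.log(m.toString()));
 */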
  9204. /**
  9205. * Compute an affine transform using 3 correspondences of points
  9206. * @param {SpeedyMatrix} transform 2x3 output - affine transform
  9207. * @param {SpeedyMatrix} src 2x3 input points - source coordinates
  9208. * @param {SpeedyMatrix} dest 2x3 input points - destination coordinates
9209. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to transform
  9210. */
  9211. affine(transform, src, dest) {
  9212. // validate shapes
  9213. if (src.rows != 2 || src.columns != 3 || dest.rows != 2 || dest.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2x3 input matrices to compute an affine transform`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of affine() is a 2x3 matrix`);
  9214. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9215. wasm,
  9216. memory
  9217. }) => {
  9218. // allocate matrices
  9219. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9220. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9221. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9222. // copy input matrices to WASM memory
  9223. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9224. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9225. // run the WASM routine
  9226. wasm.exports.Mat32_affine_direct3(matptr, srcptr, destptr);
  9227. // copy output matrix from WASM memory
  9228. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
  9229. // deallocate matrices
  9230. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9231. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9232. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9233. // done!
  9234. return transform;
  9235. });
  9236. }
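/*
 * Usage sketch: affine transform from exactly 3 correspondences. Inputs are
 * 2x3 matrices with one point per column; the output is a 2x3 transform.
 * Speedy.Matrix is the assumed public entry point.
 *
 *   const src = Speedy.Matrix(2, 3, [0,0,  1,0,  0,1]);
 *   const dst = Speedy.Matrix(2, 3, [5,5,  7,5,  5,9]);
 *   const T = Speedy.Matrix.Zeros(2, 3);
 *   Speedy.Matrix.affine(T, src, dst).then(t => console.log(t.toString()));
 */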
  9237. /**
  9238. * Compute an affine transformation using n >= 3 correspondences of points
  9239. * @param {SpeedyMatrix} transform 2x3 output - affine transform
  9240. * @param {SpeedyMatrix} src 2 x n input points - source coordinates
  9241. * @param {SpeedyMatrix} dest 2 x n input points - destination coordinates
  9242. * @param {object} [options]
  9243. * @param {'default'|'pransac'} [options.method] method of computation
  9244. * @param {SpeedyMatrix|null} [options.mask] (pransac) 1 x n output: i-th entry will be 1 if the i-th input point is an inlier, or 0 otherwise
  9245. * @param {number} [options.reprojectionError] (pransac) given in pixels, used to separate inliers from outliers of a particular model (e.g., 1 pixel)
  9246. * @param {number} [options.numberOfHypotheses] (pransac) number of hypotheses to be generated up-front (e.g., 512)
  9247. * @param {number} [options.bundleSize] (pransac) how many points should we check before reducing the number of viable hypotheses (e.g., 128)
  9248. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to an affine transform
  9249. */
  9250. findAffineTransform(transform, src, dest, {
  9251. method = 'default',
  9252. mask = null,
  9253. reprojectionError = 3,
  9254. numberOfHypotheses = 512,
  9255. bundleSize = 128
  9256. } = {}) {
  9257. // validate shapes
  9258. if (src.rows != 2 || src.columns < 3 || dest.rows != 2 || dest.columns != src.columns) throw new utils_errors/* IllegalArgumentError */.qw(`You need two 2 x n (n >= 3) input matrices to compute an affine transform`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The output of findAffineTransform() is a 2x3 matrix`);else if (mask != null && (mask.rows != 1 || mask.columns != src.columns)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shape of the inliers mask`);
  9259. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9260. wasm,
  9261. memory
  9262. }) => {
  9263. // allocate matrices
  9264. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9265. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9266. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9267. const maskptr = mask != null ? speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, mask) : 0;
  9268. // copy input matrices to WASM memory
  9269. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9270. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, destptr, dest);
  9271. // run the WASM routine
  9272. switch (method) {
  9273. case 'pransac':
  9274. utils/* Utils */.A.assert(reprojectionError >= 0 && numberOfHypotheses > 0 && bundleSize > 0);
  9275. wasm.exports.Mat32_pransac_affine(matptr, maskptr, srcptr, destptr, numberOfHypotheses, bundleSize, reprojectionError);
  9276. break;
  9277. case 'default':
  9278. wasm.exports.Mat32_affine_direct(matptr, srcptr, destptr);
  9279. break;
  9280. default:
  9281. throw new utils_errors/* IllegalArgumentError */.qw(`Illegal method for findAffineTransform(): "${method}"`);
  9282. }
  9283. // copy output matrices from WASM memory
  9284. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, matptr, transform);
  9285. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, maskptr, mask);
  9286. // deallocate matrices
  9287. if (mask != null) speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, maskptr);
  9288. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9289. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9290. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9291. // done!
  9292. return transform;
  9293. });
  9294. }
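/*
 * Usage sketch: same calling pattern as findHomography(), but the output is a
 * 2x3 affine transform and at least 3 correspondences are required. srcPts and
 * dstPts are assumed 2 x n matrices.
 *
 *   const T = Speedy.Matrix.Zeros(2, 3);
 *   Speedy.Matrix.findAffineTransform(T, srcPts, dstPts, { method: 'pransac', reprojectionError: 2 })
 *       .then(t => console.log(t.toString()));
 */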
  9295. /**
  9296. * Apply an affine transformation to a set of 2D points
  9297. * @param {SpeedyMatrix} dest 2 x n output matrix
  9298. * @param {SpeedyMatrix} src 2 x n input matrix (a set of points)
  9299. * @param {SpeedyMatrix} transform 2x3 affine transform
  9300. * @returns {SpeedyPromise<SpeedyMatrix>} resolves to dest
  9301. */
  9302. applyAffineTransform(dest, src, transform) {
  9303. // validate shapes
  9304. if (src.rows != 2 || dest.rows != 2 || src.columns != dest.columns) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid shapes`);else if (transform.rows != 2 || transform.columns != 3) throw new utils_errors/* IllegalArgumentError */.qw(`The affine transformation must be a 2x3 matrix`);
  9305. return speedy_matrix_wasm/* SpeedyMatrixWASM */.U.ready().then(({
  9306. wasm,
  9307. memory
  9308. }) => {
  9309. // allocate matrices
  9310. const matptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, transform);
  9311. const srcptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, src);
  9312. const destptr = speedy_matrix_wasm/* SpeedyMatrixWASM */.U.allocateMat32(wasm, memory, dest);
  9313. // copy input matrices to WASM memory
  9314. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, srcptr, src);
  9315. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyToMat32(wasm, memory, matptr, transform);
  9316. // run the WASM routine
  9317. wasm.exports.Mat32_transform_affine(destptr, srcptr, matptr);
  9318. // copy output matrix from WASM memory
  9319. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.copyFromMat32(wasm, memory, destptr, dest);
  9320. // deallocate matrices
  9321. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, destptr);
  9322. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, srcptr);
  9323. speedy_matrix_wasm/* SpeedyMatrixWASM */.U.deallocateMat32(wasm, memory, matptr);
  9324. // done!
  9325. return dest;
  9326. });
  9327. }
  9328. }
  9329. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-message.js
  9330. /*
  9331. * speedy-vision.js
  9332. * GPU-accelerated Computer Vision for JavaScript
  9333. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9334. *
  9335. * Licensed under the Apache License, Version 2.0 (the "License");
  9336. * you may not use this file except in compliance with the License.
  9337. * You may obtain a copy of the License at
  9338. *
  9339. * http://www.apache.org/licenses/LICENSE-2.0
  9340. *
  9341. * Unless required by applicable law or agreed to in writing, software
  9342. * distributed under the License is distributed on an "AS IS" BASIS,
  9343. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9344. * See the License for the specific language governing permissions and
  9345. * limitations under the License.
  9346. *
  9347. * pipeline-message.js
  9348. * A message that is shared between nodes of a pipeline
  9349. */
  9350. /**
  9351. * Types of messages
  9352. * @enum {Symbol}
  9353. */
  9354. const SpeedyPipelineMessageType = Object.freeze({
  9355. Nothing: Symbol('Nothing'),
  9356. Image: Symbol('Image'),
  9357. Keypoints: Symbol('Keypoints'),
  9358. Vector2: Symbol('Vector2'),
  9359. LSHTables: Symbol('LSHTables'),
  9360. KeypointMatches: Symbol('KeypointMatches')
  9361. });
  9362. /**
  9363. * Diagnostic data
  9364. * @typedef {Object.<string, string|number>} SpeedyPipelineMessageDiagnosticData
  9365. */
  9366. /**
  9367. * A message that is shared between nodes of a pipeline
  9368. * @abstract
  9369. */
  9370. class SpeedyPipelineMessage {
  9371. /**
  9372. * Constructor
  9373. * @param {SpeedyPipelineMessageType} type message type
  9374. */
  9375. constructor(type) {
  9376. /** @type {SpeedyPipelineMessageType} message type */
  9377. this._type = type;
  9378. }
  9379. /**
  9380. * Message type
  9381. * @returns {SpeedyPipelineMessageType}
  9382. */
  9383. get type() {
  9384. return this._type;
  9385. }
  9386. /**
  9387. * Checks if the type of this message is equal to parameter type
  9388. * @param {SpeedyPipelineMessageType} type
  9389. * @returns {boolean}
  9390. */
  9391. hasType(type) {
  9392. return this._type === type;
  9393. }
  9394. /**
  9395. * Is this an empty message?
  9396. * @returns {boolean}
  9397. */
  9398. isEmpty() {
  9399. return this.hasType(SpeedyPipelineMessageType.Nothing);
  9400. }
  9401. /**
  9402. * Convert to string
  9403. * @returns {string}
  9404. */
  9405. toString() {
  9406. const type = Object.keys(SpeedyPipelineMessageType).find(type => SpeedyPipelineMessageType[type] === this.type);
  9407. return `message of type ${type}`;
  9408. }
  9409. /**
  9410. * Inspect this message for debugging purposes
  9411. * @param {SpeedyGPU} gpu
  9412. * @returns {SpeedyPipelineMessageDiagnosticData}
  9413. */
  9414. inspect(gpu) {
  9415. throw new utils_errors/* AbstractMethodError */.aQ();
  9416. }
  9417. /**
  9418. * Set parameters
  9419. * @abstract
  9420. * @param {...any} args
  9421. * @returns {SpeedyPipelineMessage} this message
  9422. */
  9423. set(...args) {
  9424. throw new utils_errors/* AbstractMethodError */.aQ();
  9425. }
  9426. /**
  9427. * Create a message of the specified type
  9428. * @param {SpeedyPipelineMessageType} type
  9429. * @returns {SpeedyPipelineMessage}
  9430. */
  9431. static create(type) {
  9432. return createMessage(type);
  9433. }
  9434. }
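/*
 * Usage sketch (internal API): pipeline nodes exchange typed messages. The
 * static factory dispatches on the message type via createMessage(), defined
 * elsewhere in this module. `texture` is an assumed SpeedyDrawableTexture.
 *
 *   const msg = SpeedyPipelineMessage.create(SpeedyPipelineMessageType.Image);
 *   msg.set(texture);                              // format defaults to RGBA
 *   msg.hasType(SpeedyPipelineMessageType.Image);  // true
 *   msg.isEmpty();                                 // false
 */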
  9435. /**
  9436. * An empty message carrying nothing
  9437. */
  9438. class SpeedyPipelineMessageWithNothing extends SpeedyPipelineMessage {
  9439. /**
  9440. * Constructor
  9441. */
  9442. constructor() {
  9443. super(SpeedyPipelineMessageType.Nothing);
  9444. }
  9445. /**
  9446. * Set parameters
  9447. * @returns {SpeedyPipelineMessage} this message
  9448. */
  9449. set() {
  9450. return this;
  9451. }
  9452. /**
  9453. * Inspect this message for debugging purposes
  9454. * @param {SpeedyGPU} gpu
  9455. * @returns {SpeedyPipelineMessageDiagnosticData}
  9456. */
  9457. inspect(gpu) {
  9458. return {
  9459. type: this.constructor.name
  9460. };
  9461. }
  9462. }
  9463. /**
  9464. * A message transporting an image
  9465. */
  9466. class SpeedyPipelineMessageWithImage extends SpeedyPipelineMessage {
  9467. /**
  9468. * Constructor
  9469. */
  9470. constructor() {
  9471. super(SpeedyPipelineMessageType.Image);
  9472. /** @type {SpeedyDrawableTexture} the image we carry */
  9473. this._image = null;
  9474. /** @type {ImageFormat} image format */
  9475. this._format = types/* ImageFormat */.f5.RGBA;
  9476. }
  9477. /**
  9478. * Set parameters
  9479. * @param {SpeedyDrawableTexture} image the image we carry
  9480. * @param {ImageFormat} [format] image format
  9481. * @returns {SpeedyPipelineMessage} this message
  9482. */
  9483. set(image, format = types/* ImageFormat */.f5.RGBA) {
  9484. // set parameters
  9485. this._image = image;
  9486. this._format = format;
  9487. // done!
  9488. return this;
  9489. }
  9490. /**
  9491. * Inspect this message for debugging purposes
  9492. * @param {SpeedyGPU} gpu
  9493. * @returns {SpeedyPipelineMessageDiagnosticData}
  9494. */
  9495. inspect(gpu) {
  9496. const formatName = Object.keys(types/* ImageFormat */.f5).find(format => types/* ImageFormat */.f5[format] === this.format);
  9497. return {
  9498. type: this.constructor.name,
  9499. format: String(formatName),
  9500. imageSize: this.image ? `${this.image.width}x${this.image.height}` : '0x0',
  9501. image: this.image ? '<image data>' /* possibly MBs of data */ : '',
  9502. hasMipmaps: this.image && this.image.hasMipmaps() ? 'yes' : 'no'
  9503. };
  9504. }
  9505. /**
  9506. * The image we carry
  9507. * @returns {SpeedyDrawableTexture}
  9508. */
  9509. get image() {
  9510. return this._image;
  9511. }
  9512. /**
  9513. * Image format
  9514. * @returns {ImageFormat}
  9515. */
  9516. get format() {
  9517. return this._format;
  9518. }
  9519. }
  9520. /**
  9521. * A message transporting keypoints
  9522. */
  9523. class SpeedyPipelineMessageWithKeypoints extends SpeedyPipelineMessage {
  9524. /**
  9525. * Constructor
  9526. */
  9527. constructor() {
  9528. super(SpeedyPipelineMessageType.Keypoints);
  9529. /** @type {SpeedyDrawableTexture} encoded keypoints */
  9530. this._encodedKeypoints = null;
  9531. /** @type {number} descriptor size in bytes */
  9532. this._descriptorSize = 0;
  9533. /** @type {number} extra size in bytes */
  9534. this._extraSize = 0;
  9535. /** @type {number} encoder length */
  9536. this._encoderLength = 1;
  9537. }
  9538. /**
  9539. * Set parameters
  9540. * @param {SpeedyDrawableTexture} encodedKeypoints encoded keypoints
  9541. * @param {number} descriptorSize in bytes
  9542. * @param {number} extraSize in bytes
  9543. * @param {number} encoderLength positive integer
  9544. * @returns {SpeedyPipelineMessage} this message
  9545. */
  9546. set(encodedKeypoints, descriptorSize, extraSize, encoderLength) {
  9547. // set parameters
  9548. this._encodedKeypoints = encodedKeypoints;
  9549. this._descriptorSize = descriptorSize | 0;
  9550. this._extraSize = extraSize | 0;
  9551. this._encoderLength = encoderLength | 0;
  9552. // validate
  9553. utils/* Utils */.A.assert(this._descriptorSize >= 0 && this._extraSize >= 0);
  9554. utils/* Utils */.A.assert(this._encoderLength === this._encodedKeypoints.width, 'Invalid encoderLength');
  9555. utils/* Utils */.A.assert(this._encodedKeypoints.width === this._encodedKeypoints.height, 'Invalid encodedKeypoints texture');
  9556. // done!
  9557. return this;
  9558. }
  9559. /**
  9560. * Inspect this message for debugging purposes
  9561. * @param {SpeedyGPU} gpu
  9562. * @returns {SpeedyPipelineMessageDiagnosticData}
  9563. */
  9564. inspect(gpu) {
  9565. return {
  9566. type: this.constructor.name,
  9567. descriptorSize: this.descriptorSize,
  9568. extraSize: this.extraSize,
  9569. encoderLength: this.encoderLength,
  9570. encodedKeypointsSize: this.encodedKeypoints ? `${this.encodedKeypoints.width}x${this.encodedKeypoints.height}` : '0x0',
  9571. encodedKeypoints: this.encodedKeypoints ? utils/* Utils */.A.formatBinaryData(this.encodedKeypoints.inspect(gpu).buffer) : ''
  9572. };
  9573. }
  9574. /**
  9575. * Encoded keypoints
  9576. * @returns {SpeedyDrawableTexture}
  9577. */
  9578. get encodedKeypoints() {
  9579. return this._encodedKeypoints;
  9580. }
  9581. /**
  9582. * Descriptor size, in bytes
  9583. * @returns {number}
  9584. */
  9585. get descriptorSize() {
  9586. return this._descriptorSize;
  9587. }
  9588. /**
  9589. * Extra size, in bytes
  9590. * @returns {number}
  9591. */
  9592. get extraSize() {
  9593. return this._extraSize;
  9594. }
  9595. /**
  9596. * Encoder length
  9597. * @returns {number}
  9598. */
  9599. get encoderLength() {
  9600. return this._encoderLength;
  9601. }
  9602. }
9603. /**
  9604. * A message transporting a set of 2D vectors
  9605. */
  9606. class SpeedyPipelineMessageWith2DVectors extends SpeedyPipelineMessage {
  9607. /**
  9608. * Constructor
  9609. */
  9610. constructor() {
  9611. super(SpeedyPipelineMessageType.Vector2);
  9612. /** @type {SpeedyDrawableTexture} the set of vectors */
  9613. this._vectors = null;
  9614. }
  9615. /**
  9616. * Set parameters
  9617. * @param {SpeedyDrawableTexture} vectors the set of vectors
  9618. * @returns {SpeedyPipelineMessage} this message
  9619. */
  9620. set(vectors) {
  9621. // set parameters
  9622. this._vectors = vectors;
  9623. // done!
  9624. return this;
  9625. }
  9626. /**
  9627. * Inspect this message for debugging purposes
  9628. * @param {SpeedyGPU} gpu
  9629. * @returns {SpeedyPipelineMessageDiagnosticData}
  9630. */
  9631. inspect(gpu) {
  9632. return {
  9633. type: this.constructor.name,
  9634. vectorsSize: this.vectors ? `${this.vectors.width}x${this.vectors.height}` : '0x0',
  9635. vectors: this.vectors ? utils/* Utils */.A.formatBinaryData(this.vectors.inspect(gpu).buffer) : ''
  9636. };
  9637. }
  9638. /**
  9639. * The set of vectors
  9640. * @returns {SpeedyDrawableTexture}
  9641. */
  9642. get vectors() {
  9643. return this._vectors;
  9644. }
  9645. }
  9646. /**
  9647. * A message transporting LSH tables
  9648. */
  9649. class SpeedyPipelineMessageWithLSHTables extends SpeedyPipelineMessage {
  9650. /**
  9651. * Constructor
  9652. */
  9653. constructor() {
  9654. super(SpeedyPipelineMessageType.LSHTables);
  9655. /** @type {SpeedyLSH} LSH data structure */
  9656. this._lsh = null;
  9657. }
  9658. /**
  9659. * Set parameters
  9660. * @param {SpeedyLSH} lsh
  9661. * @returns {SpeedyPipelineMessage} this message
  9662. */
  9663. set(lsh) {
  9664. // set parameters
  9665. this._lsh = lsh;
  9666. // done!
  9667. return this;
  9668. }
  9669. /**
  9670. * Inspect this message for debugging purposes
  9671. * @param {SpeedyGPU} gpu
  9672. * @returns {SpeedyPipelineMessageDiagnosticData}
  9673. */
  9674. inspect(gpu) {
  9675. return {
  9676. type: this.constructor.name,
  9677. lsh: '<LSH tables>'
  9678. };
  9679. }
  9680. /**
  9681. * LSH data structure
  9682. * @returns {SpeedyLSH}
  9683. */
  9684. get lsh() {
  9685. return this._lsh;
  9686. }
  9687. }
9688. /**
  9689. * A message transporting a set of keypoint matches
  9690. */
  9691. class SpeedyPipelineMessageWithKeypointMatches extends SpeedyPipelineMessage {
  9692. /**
  9693. * Constructor
  9694. */
  9695. constructor() {
  9696. super(SpeedyPipelineMessageType.KeypointMatches);
  9697. /** @type {SpeedyDrawableTexture} keypoint matches (note: 1 pixel encodes 1 match) */
  9698. this._encodedMatches = null;
  9699. /** @type {number} number of matches per keypoint */
  9700. this._matchesPerKeypoint = 1;
  9701. }
  9702. /**
  9703. * Set parameters
  9704. * @param {SpeedyDrawableTexture} encodedMatches
  9705. * @param {number} matchesPerKeypoint
  9706. * @returns {SpeedyPipelineMessage} this message
  9707. */
  9708. set(encodedMatches, matchesPerKeypoint) {
  9709. // set parameters
  9710. this._encodedMatches = encodedMatches;
  9711. this._matchesPerKeypoint = matchesPerKeypoint | 0;
  9712. // validate
  9713. utils/* Utils */.A.assert(this._matchesPerKeypoint > 0);
  9714. // done!
  9715. return this;
  9716. }
  9717. /**
  9718. * Inspect this message for debugging purposes
  9719. * @param {SpeedyGPU} gpu
  9720. * @returns {SpeedyPipelineMessageDiagnosticData}
  9721. */
  9722. inspect(gpu) {
  9723. return {
  9724. type: this.constructor.name,
  9725. matchesPerKeypoint: this.matchesPerKeypoint,
  9726. encodedMatchesSize: this.encodedMatches ? `${this.encodedMatches.width}x${this.encodedMatches.height}` : '0x0',
  9727. encodedMatches: this.encodedMatches ? utils/* Utils */.A.formatBinaryData(this.encodedMatches.inspect(gpu).buffer) : ''
  9728. };
  9729. }
  9730. /**
  9731. * The matches
  9732. * @returns {SpeedyDrawableTexture}
  9733. */
  9734. get encodedMatches() {
  9735. return this._encodedMatches;
  9736. }
  9737. /**
  9738. * Number of matches per keypoint
  9739. * @returns {number}
  9740. */
  9741. get matchesPerKeypoint() {
  9742. return this._matchesPerKeypoint;
  9743. }
  9744. }
  9745. //
  9746. // Utilities
  9747. //
  9748. /** Map message type to message class */
  9749. const MESSAGE_CLASS = Object.freeze({
  9750. [SpeedyPipelineMessageType.Nothing]: SpeedyPipelineMessageWithNothing,
  9751. [SpeedyPipelineMessageType.Image]: SpeedyPipelineMessageWithImage,
  9752. [SpeedyPipelineMessageType.Keypoints]: SpeedyPipelineMessageWithKeypoints,
  9753. [SpeedyPipelineMessageType.Vector2]: SpeedyPipelineMessageWith2DVectors,
  9754. [SpeedyPipelineMessageType.LSHTables]: SpeedyPipelineMessageWithLSHTables,
  9755. [SpeedyPipelineMessageType.KeypointMatches]: SpeedyPipelineMessageWithKeypointMatches
  9756. });
  9757. /**
  9758. * Create a message of the specified type
  9759. * @param {SpeedyPipelineMessageType} type
  9760. * @returns {SpeedyPipelineMessage}
  9761. */
  9762. function createMessage(type) {
  9763. //return Reflect.construct(MESSAGE_CLASS[type], []);
  9764. return new MESSAGE_CLASS[
  9765. // error TS2538: Type 'Symbol' cannot be used as an index type.
9766. // the @type cast below works around that compiler limitation
  9767. /** @type {any} */
  9768. type]();
  9769. }
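// --- Editor's illustrative sketch (not part of the library) ---
// A minimal sketch of how the message classes above are meant to be used,
// assuming module scope; `someDrawableTexture` stands for a hypothetical
// SpeedyDrawableTexture. The function is defined only for illustration and
// is never invoked.
function exampleCreateImageMessage(someDrawableTexture)
{
    // createMessage() picks the concrete class from MESSAGE_CLASS
    const message = createMessage(SpeedyPipelineMessageType.Image);
    // set() fills in the payload and returns the message itself
    message.set(someDrawableTexture); // format defaults to ImageFormat.RGBA
    // hasType() is how ports check what a message carries
    return message.hasType(SpeedyPipelineMessageType.Image); // true
}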
  9770. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portspec.js
  9771. /*
  9772. * speedy-vision.js
  9773. * GPU-accelerated Computer Vision for JavaScript
  9774. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9775. *
  9776. * Licensed under the Apache License, Version 2.0 (the "License");
  9777. * you may not use this file except in compliance with the License.
  9778. * You may obtain a copy of the License at
  9779. *
  9780. * http://www.apache.org/licenses/LICENSE-2.0
  9781. *
  9782. * Unless required by applicable law or agreed to in writing, software
  9783. * distributed under the License is distributed on an "AS IS" BASIS,
  9784. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9785. * See the License for the specific language governing permissions and
  9786. * limitations under the License.
  9787. *
  9788. * pipeline-portspec.js
  9789. * Specification (requirements) of a port of a node of a pipeline
  9790. */
  9791. /**
  9792. * A message constraint is a message validation predicate
  9793. * @typedef {function(SpeedyPipelineMessage): boolean} SpeedyPipelineMessageConstraint
  9794. */
  9795. /**
  9796. * A validation predicate that validates all messages
  9797. * @type {SpeedyPipelineMessageConstraint}
  9798. */
  9799. const always = message => true;
  9800. /**
  9801. * Specification (requirements) of a port of a node of a pipeline
  9802. */
  9803. class SpeedyPipelinePortSpec {
  9804. /**
  9805. * Constructor
  9806. * @param {SpeedyPipelineMessageType} expectedMessageType expected message type
  9807. * @param {SpeedyPipelineMessageConstraint} [messageConstraint] message validation function
  9808. */
  9809. constructor(expectedMessageType, messageConstraint = always) {
  9810. /** @type {SpeedyPipelineMessageType} expected message type */
  9811. this._expectedMessageType = expectedMessageType;
  9812. /** @type {SpeedyPipelineMessageConstraint} message validation function */
  9813. this._isValidMessage = typeof messageConstraint === 'function' ? messageConstraint : always;
  9814. // expect a valid type
  9815. utils/* Utils */.A.assert(this._expectedMessageType != SpeedyPipelineMessageType.Nothing);
  9816. }
  9817. /**
  9818. * Checks if two specs have the same expected type
  9819. * @param {SpeedyPipelinePortSpec} spec
  9820. * @returns {boolean}
  9821. */
  9822. isCompatibleWith(spec) {
  9823. return this._expectedMessageType == spec._expectedMessageType;
  9824. }
  9825. /**
  9826. * Is the given message accepted by a port that abides by this specification?
  9827. * @param {SpeedyPipelineMessage} message
  9828. * @returns {boolean}
  9829. */
  9830. accepts(message) {
  9831. return message.hasType(this._expectedMessageType) && this._isValidMessage(message);
  9832. }
  9833. /**
  9834. * Convert to string
  9835. * @returns {string}
  9836. */
  9837. toString() {
  9838. const type = Object.keys(SpeedyPipelineMessageType).find(type => SpeedyPipelineMessageType[type] === this._expectedMessageType);
  9839. return `Port expects ${type} satisfying ${this._isValidMessage}`;
  9840. }
  9841. /**
  9842. * Expected message type
  9843. * @returns {SpeedyPipelineMessageType}
  9844. */
  9845. get expectedMessageType() {
  9846. return this._expectedMessageType;
  9847. }
  9848. }
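// --- Editor's illustrative sketch (not part of the library) ---
// A port spec pairs an expected message type with an optional validation
// predicate. The RGBA-only constraint below is a hypothetical example;
// defining the function has no side effects.
function exampleRgbaOnlySpec()
{
    const rgbaOnly = message => message.format === types/* ImageFormat */.f5.RGBA;
    const spec = new SpeedyPipelinePortSpec(SpeedyPipelineMessageType.Image, rgbaOnly);
    // spec.accepts(message) is true only for Image messages satisfying rgbaOnly
    return spec;
}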
  9849. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-port.js
  9850. /*
  9851. * speedy-vision.js
  9852. * GPU-accelerated Computer Vision for JavaScript
  9853. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  9854. *
  9855. * Licensed under the Apache License, Version 2.0 (the "License");
  9856. * you may not use this file except in compliance with the License.
  9857. * You may obtain a copy of the License at
  9858. *
  9859. * http://www.apache.org/licenses/LICENSE-2.0
  9860. *
  9861. * Unless required by applicable law or agreed to in writing, software
  9862. * distributed under the License is distributed on an "AS IS" BASIS,
  9863. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9864. * See the License for the specific language governing permissions and
  9865. * limitations under the License.
  9866. *
  9867. * pipeline-port.js
  9868. * Port of a node of a pipeline
  9869. */
  9870. // Constants
  9871. const DEFAULT_INPUT_PORT_NAME = 'in';
  9872. const DEFAULT_OUTPUT_PORT_NAME = 'out';
  9873. const ACCEPTABLE_PORT_NAME = /^[a-z][a-zA-Z0-9]*$/;
  9874. const EMPTY_MESSAGE = new SpeedyPipelineMessageWithNothing();
  9875. /**
  9876. * Diagnostic data
  9877. * @typedef {import('./pipeline-message.js').SpeedyPipelineMessageDiagnosticData} SpeedyPipelinePortDiagnosticData
  9878. */
  9879. /**
  9880. * Port of a node of a pipeline
  9881. * @abstract
  9882. */
  9883. class SpeedyPipelinePort {
  9884. /**
  9885. * Constructor
  9886. * @param {string} name the name of this port
  9887. * @param {SpeedyPipelinePortSpec} spec port specification
  9888. * @param {SpeedyPipelineNode} node the node to which this port belongs
  9889. */
  9890. constructor(name, spec, node) {
  9891. /** @type {string} the name of this port */
  9892. this._name = String(name);
  9893. /** @type {SpeedyPipelinePortSpec} the specification of this port */
  9894. this._spec = spec;
  9895. /** @type {SpeedyPipelineNode} the node to which this port belongs */
  9896. this._node = node;
  9897. /** @type {SpeedyPipelineMessage} the message located in this port */
  9898. this._message = EMPTY_MESSAGE;
  9899. // check if we've got an acceptable port name
  9900. utils/* Utils */.A.assert(ACCEPTABLE_PORT_NAME.test(this._name), `Port name "${this._name}" is not acceptable`);
  9901. }
  9902. /**
  9903. * The name of this port
  9904. * @returns {string}
  9905. */
  9906. get name() {
  9907. return this._name;
  9908. }
  9909. /**
  9910. * The node to which this port belongs
  9911. * @returns {SpeedyPipelineNode}
  9912. */
  9913. get node() {
  9914. return this._node;
  9915. }
  9916. /**
  9917. * Connect this port to another
  9918. * @abstract
  9919. * @param {SpeedyPipelinePort} port
  9920. */
  9921. connectTo(port) {
  9922. throw new utils_errors/* AbstractMethodError */.aQ();
  9923. }
  9924. /**
  9925. * Is this an input port?
  9926. * @abstract
  9927. * @returns {boolean}
  9928. */
  9929. isInputPort() {
  9930. throw new utils_errors/* AbstractMethodError */.aQ();
  9931. }
  9932. /**
  9933. * Is this an output port?
  9934. * @returns {boolean}
  9935. */
  9936. isOutputPort() {
  9937. return !this.isInputPort();
  9938. }
  9939. /**
  9940. * Clear the message stored in this port
  9941. */
  9942. clearMessage() {
  9943. this._message = EMPTY_MESSAGE;
  9944. }
  9945. /**
  9946. * Is there a valid message located in this port?
  9947. * @returns {boolean}
  9948. */
  9949. hasMessage() {
  9950. return !this._message.isEmpty();
  9951. }
  9952. /**
  9953. * Read the message that is in this port
  9954. * @returns {SpeedyPipelineMessage}
  9955. */
  9956. read() {
  9957. if (this._message.isEmpty()) throw new utils_errors/* IllegalOperationError */.Er(`Can't read from port ${this.name}: nothing to read`);
  9958. return this._message;
  9959. }
  9960. /**
  9961. * Write a message to this port
  9962. * @param {SpeedyPipelineMessage} message
  9963. */
  9964. write(message) {
  9965. throw new utils_errors/* NotSupportedError */.EM(`Can't write ${message} to port ${this.name}: unsupported operation`);
  9966. }
  9967. /**
  9968. * Inspect this port for debugging purposes
  9969. * @param {SpeedyGPU} gpu
  9970. * @returns {SpeedyPipelinePortDiagnosticData} diagnostic data
  9971. */
  9972. inspect(gpu) {
  9973. return this._message.inspect(gpu);
  9974. }
  9975. /**
  9976. * Default port name
  9977. * @abstract
  9978. * @returns {string}
  9979. */
  9980. static get DEFAULT_NAME() {
  9981. throw new utils_errors/* AbstractMethodError */.aQ();
  9982. }
  9983. }
  9984. /**
  9985. * Output port
  9986. */
  9987. class SpeedyPipelineOutputPort extends SpeedyPipelinePort {
  9988. /**
  9989. * Constructor
  9990. * @param {string} name the name of this port
  9991. * @param {SpeedyPipelinePortSpec} spec port specification
  9992. * @param {SpeedyPipelineNode} node the node to which this port belongs
  9993. */
  9994. constructor(name, spec, node) {
  9995. super(name, spec, node);
  9996. /** @type {SpeedyPipelineMessage} cached message */
  9997. this._cachedMessage = null;
  9998. }
  9999. /**
  10000. * Connect this port to another
  10001. * @param {SpeedyPipelineInputPort} port
  10002. */
  10003. connectTo(port) {
  10004. if (!port.isInputPort()) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect output port ${this.name} to port ${port.name}: expected an input port`);
  10005. port.connectTo(this);
  10006. }
  10007. /**
  10008. * Is this an input port?
  10009. * @returns {boolean}
  10010. */
  10011. isInputPort() {
  10012. return false;
  10013. }
  10014. /**
  10015. * Write a message to this port
  10016. * @param {SpeedyPipelineMessage} message
  10017. */
  10018. write(message) {
  10019. if (!this._spec.accepts(message)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't write ${message} to port ${this.name}. ${this._spec}`);
  10020. this._message = message;
  10021. }
  10022. /**
  10023. * Write a message to this port using a cached message object
  10024. * @param {...any} args to be passed to SpeedyPipelineMessage.set()
  10025. */
  10026. swrite(...args) {
  10027. if (this._cachedMessage == null) this._cachedMessage = SpeedyPipelineMessage.create(this._spec.expectedMessageType);
  10028. this.write(this._cachedMessage.set(...args));
  10029. }
  10030. /**
  10031. * Default port name
  10032. * @returns {string}
  10033. */
  10034. static get DEFAULT_NAME() {
  10035. return DEFAULT_OUTPUT_PORT_NAME;
  10036. }
  10037. }
  10038. /**
  10039. * Input port
  10040. */
  10041. class SpeedyPipelineInputPort extends SpeedyPipelinePort {
  10042. /**
  10043. * Constructor
  10044. * @param {string} name the name of this port
  10045. * @param {SpeedyPipelinePortSpec} spec port specification
  10046. * @param {SpeedyPipelineNode} node the node to which this port belongs
  10047. */
  10048. constructor(name, spec, node) {
  10049. super(name, spec, node);
  10050. /** @type {SpeedyPipelineOutputPort|null} incoming link */
  10051. this._incomingLink = null;
  10052. }
  10053. /**
  10054. * Incoming link
  10055. * @returns {SpeedyPipelineOutputPort|null}
  10056. */
  10057. get incomingLink() {
  10058. return this._incomingLink;
  10059. }
  10060. /**
  10061. * Connect this port to another
  10062. * @param {SpeedyPipelineOutputPort} port
  10063. */
  10064. connectTo(port) {
  10065. if (!port.isOutputPort()) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect input port ${this.name} of "${this.node.fullName}" to input port ${port.name} of "${port.node.fullName}": expected an output port`);else if (!this._spec.isCompatibleWith(port._spec)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't connect port ${this.name} of "${this.node.fullName}" to port ${port.name} of "${port.node.fullName}": incompatible types`);
  10066. this._incomingLink = port;
  10067. }
  10068. /**
  10069. * Unlink this port
  10070. */
  10071. disconnect() {
  10072. this._incomingLink = null;
  10073. }
  10074. /**
  10075. * Is this an input port?
  10076. * @returns {boolean}
  10077. */
  10078. isInputPort() {
  10079. return true;
  10080. }
  10081. /**
  10082. * Receive a message using the incoming link
  10083. * @param {string} [nodeName]
  10084. * @returns {SpeedyPipelineMessage}
  10085. */
  10086. pullMessage(nodeName = '') {
  10087. const name = nodeName.length > 0 ? `${this.name} of ${nodeName}` : this.name;
  10088. if (this._incomingLink == null) throw new utils_errors/* IllegalOperationError */.Er(`No incoming link for input port ${name}`);
  10089. const message = this._incomingLink.read();
  10090. if (!this._spec.accepts(message)) throw new utils_errors/* IllegalArgumentError */.qw(`Can't receive ${message} at port ${name}: ${this._spec}`);
  10091. return this._message = message;
  10092. }
  10093. /**
  10094. * Default port name
  10095. * @returns {string}
  10096. */
  10097. static get DEFAULT_NAME() {
  10098. return DEFAULT_INPUT_PORT_NAME;
  10099. }
  10100. }
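// --- Editor's illustrative sketch (not part of the library) ---
// Connecting ports: the link is always stored on the input side, so
// out.connectTo(in) simply delegates to in.connectTo(out). `outputPort` and
// `inputPort` are hypothetical, already-built ports with compatible specs.
function exampleConnectPorts(outputPort, inputPort)
{
    outputPort.connectTo(inputPort); // same as inputPort.connectTo(outputPort)
    return inputPort.incomingLink === outputPort; // true after a successful link
}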
  10101. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-portbuilder.js
  10102. /*
  10103. * speedy-vision.js
  10104. * GPU-accelerated Computer Vision for JavaScript
  10105. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10106. *
  10107. * Licensed under the Apache License, Version 2.0 (the "License");
  10108. * you may not use this file except in compliance with the License.
  10109. * You may obtain a copy of the License at
  10110. *
  10111. * http://www.apache.org/licenses/LICENSE-2.0
  10112. *
  10113. * Unless required by applicable law or agreed to in writing, software
  10114. * distributed under the License is distributed on an "AS IS" BASIS,
  10115. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10116. * See the License for the specific language governing permissions and
  10117. * limitations under the License.
  10118. *
  10119. * pipeline-portbuilder.js
  10120. * Builder of a port of a node of a pipeline
  10121. */
  10122. /**
  10123. * @typedef {import('./pipeline-portspec').SpeedyPipelineMessageConstraint} SpeedyPipelineMessageConstraint
  10124. */
  10125. /**
  10126. * Builder of a port of a node of a pipeline
  10127. */
  10128. class SpeedyPipelinePortBuilder {
  10129. /**
  10130. * Constructor
  10131. * @param {typeof SpeedyPipelinePort} portClass input or output?
  10132. * @param {string} portName
  10133. */
  10134. constructor(portClass, portName) {
  10135. /** @type {typeof SpeedyPipelinePort} input or output? */
  10136. this._class = portClass;
  10137. /** @type {string} port name */
  10138. this._name = String(portName);
  10139. /** @type {SpeedyPipelineMessageType} accepted message type */
  10140. this._type = SpeedyPipelineMessageType.Nothing;
  10141. /** @type {SpeedyPipelineMessageConstraint} message validation function */
  10142. this._messageConstraint = undefined;
  10143. }
  10144. /**
  10145. * Declare that the new port expects a certain type of message
  10146. * @param {SpeedyPipelineMessageType} type expected type
  10147. * @returns {SpeedyPipelinePortBuilder} this builder
  10148. */
  10149. expects(type) {
  10150. utils/* Utils */.A.assert(this._type == SpeedyPipelineMessageType.Nothing);
  10151. utils/* Utils */.A.assert(type != SpeedyPipelineMessageType.Nothing);
  10152. this._type = type;
  10153. return this;
  10154. }
  10155. /**
  10156. * Declare that the new port expects messages satisfying a constraint
  10157. * @param {SpeedyPipelineMessageConstraint} constraint
  10158. * @returns {SpeedyPipelinePortBuilder} this builder
  10159. */
  10160. satisfying(constraint) {
  10161. utils/* Utils */.A.assert(this._type != SpeedyPipelineMessageType.Nothing, 'You must first declare what type of message this port expects');
  10162. utils/* Utils */.A.assert(this._messageConstraint === undefined);
  10163. utils/* Utils */.A.assert(typeof constraint === 'function');
  10164. this._messageConstraint = constraint;
  10165. return this;
  10166. }
  10167. /**
  10168. * Build a port
  10169. * @param {SpeedyPipelineNode} node the node to which the new port will belong
  10170. * @returns {SpeedyPipelinePort}
  10171. */
  10172. build(node) {
  10173. const spec = new SpeedyPipelinePortSpec(this._type, this._messageConstraint);
  10174. return Reflect.construct(this._class, [this._name, spec, node]);
  10175. }
  10176. }
  10177. /**
  10178. * Creates a builder for an input port
  10179. * @param {string} [portName]
  10180. * @returns {SpeedyPipelinePortBuilder}
  10181. */
  10182. function InputPort(portName = SpeedyPipelineInputPort.DEFAULT_NAME) {
  10183. return new SpeedyPipelinePortBuilder(SpeedyPipelineInputPort, portName);
  10184. }
  10185. /**
  10186. * Creates a builder for an output port
  10187. * @param {string} [portName]
  10188. * @returns {SpeedyPipelinePortBuilder}
  10189. */
  10190. function OutputPort(portName = SpeedyPipelineOutputPort.DEFAULT_NAME) {
  10191. return new SpeedyPipelinePortBuilder(SpeedyPipelineOutputPort, portName);
  10192. }
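// --- Editor's illustrative sketch (not part of the library) ---
// Port builders are meant to be passed to a node's constructor. Note that
// expects() must be called before satisfying(). The constraint below is a
// hypothetical example; the function just returns the builders.
function examplePortBuilders()
{
    return [
        InputPort().expects(SpeedyPipelineMessageType.Image)
                   .satisfying(message => message.format === types/* ImageFormat */.f5.RGBA),
        OutputPort().expects(SpeedyPipelineMessageType.Keypoints)
    ];
}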
  10193. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline-node.js
  10194. /*
  10195. * speedy-vision.js
  10196. * GPU-accelerated Computer Vision for JavaScript
  10197. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10198. *
  10199. * Licensed under the Apache License, Version 2.0 (the "License");
  10200. * you may not use this file except in compliance with the License.
  10201. * You may obtain a copy of the License at
  10202. *
  10203. * http://www.apache.org/licenses/LICENSE-2.0
  10204. *
  10205. * Unless required by applicable law or agreed to in writing, software
  10206. * distributed under the License is distributed on an "AS IS" BASIS,
  10207. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10208. * See the License for the specific language governing permissions and
  10209. * limitations under the License.
  10210. *
  10211. * pipeline-node.js
  10212. * Node of a pipeline
  10213. */
  10214. /** @typedef {Object<string,SpeedyPipelineInputPort>} InputPortDictionary */
  10215. /** @typedef {Object<string,SpeedyPipelineOutputPort>} OutputPortDictionary */
  10216. /** Generate a random name for a node */
10217. const generateRandomName = () => Math.random().toString(16).slice(2);
  10218. /** Create an empty input port dictionary */
  10219. const createInputPortDictionary = () => ( /** @type {InputPortDictionary} */Object.create(null));
  10220. /** Create an empty output port dictionary */
  10221. const createOutputPortDictionary = () => ( /** @type {OutputPortDictionary} */Object.create(null));
  10222. /**
  10223. * Map an array of input ports to an InputPortDictionary whose keys are their names
  10224. * @param {SpeedyPipelineInputPort[]} ports
  10225. * @returns {InputPortDictionary}
  10226. */
  10227. function InputPortDictionary(ports) {
  10228. return ports.reduce((dict, port) => (dict[port.name] = port, dict), createInputPortDictionary());
  10229. }
  10230. /**
  10231. * Map an array of output ports to an OutputPortDictionary whose keys are their names
  10232. * @param {SpeedyPipelineOutputPort[]} ports
  10233. * @returns {OutputPortDictionary}
  10234. */
  10235. function OutputPortDictionary(ports) {
  10236. return ports.reduce((dict, port) => (dict[port.name] = port, dict), createOutputPortDictionary());
  10237. }
  10238. /** A flag used for debugging purposes */
  10239. let _texView = false;
  10240. /**
  10241. * Node of a pipeline
  10242. * @abstract
  10243. */
  10244. class SpeedyPipelineNode {
  10245. /**
  10246. * Constructor
  10247. * @param {string} [name] the name of this node
  10248. * @param {number} [texCount] number of work textures
  10249. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  10250. */
  10251. constructor(name = generateRandomName(), texCount = 0, portBuilders = []) {
  10252. /** @type {string} the name of this node */
  10253. this._name = String(name);
  10254. /** @type {SpeedyDrawableTexture[]} work texture(s) */
  10255. this._tex = new Array(texCount).fill(null);
  10256. // build the ports
  10257. const ports = portBuilders.map(builder => builder.build(this));
  10258. const inputPorts = /** @type {SpeedyPipelineInputPort[]} */ports.filter(port => port.isInputPort());
  10259. const outputPorts = /** @type {SpeedyPipelineOutputPort[]} */ports.filter(port => port.isOutputPort());
  10260. /** @type {InputPortDictionary} input ports */
  10261. this._inputPorts = InputPortDictionary(inputPorts);
  10262. /** @type {OutputPortDictionary} output ports */
  10263. this._outputPorts = OutputPortDictionary(outputPorts);
  10264. // validate
  10265. if (this._name.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid name "${this._name}" for node ${this.fullName}`);else if (portBuilders.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`No ports have been found in node ${this.fullName}`);
  10266. }
  10267. /**
  10268. * The name of this node
  10269. * @returns {string}
  10270. */
  10271. get name() {
  10272. return this._name;
  10273. }
  10274. /**
  10275. * Name and type of this node
  10276. * @returns {string}
  10277. */
  10278. get fullName() {
  10279. return `${this.constructor.name}[${this.name}]`;
  10280. }
  10281. /**
  10282. * Find input port by name
  10283. * @param {string} [portName]
  10284. * @returns {SpeedyPipelineInputPort}
  10285. */
  10286. input(portName = SpeedyPipelineInputPort.DEFAULT_NAME) {
  10287. if (portName in this._inputPorts) return this._inputPorts[portName];
  10288. throw new utils_errors/* IllegalArgumentError */.qw(`Can't find input port ${portName} in node ${this.fullName}`);
  10289. }
  10290. /**
  10291. * Find output port by name
  10292. * @param {string} [portName]
  10293. * @returns {SpeedyPipelineOutputPort}
  10294. */
  10295. output(portName = SpeedyPipelineOutputPort.DEFAULT_NAME) {
  10296. if (portName in this._outputPorts) return this._outputPorts[portName];
  10297. throw new utils_errors/* IllegalArgumentError */.qw(`Can't find output port ${portName} in node ${this.fullName}`);
  10298. }
  10299. /**
  10300. * Get data from the input ports and execute
  10301. * the task that this node is supposed to!
  10302. * @param {SpeedyGPU} gpu
  10303. * @returns {void|SpeedyPromise<void>}
  10304. */
  10305. execute(gpu) {
  10306. let portName;
  10307. // clear output ports
  10308. for (portName in this._outputPorts) this._outputPorts[portName].clearMessage();
  10309. // let the input ports receive what is due
  10310. for (portName in this._inputPorts) this._inputPorts[portName].pullMessage(this.fullName);
  10311. // run the task
  10312. const runTask = this._run(gpu);
  10313. if (typeof runTask === 'undefined') return void this._finishExecution(gpu);else return runTask.then(() => this._finishExecution(gpu));
  10314. }
  10315. /**
  10316. * Finish the execution of this node;
  10317. * to be called after execute()
  10318. * @param {SpeedyGPU} gpu
  10319. */
  10320. _finishExecution(gpu) {
  10321. // ensure that no output ports are empty
  10322. for (const portName in this._outputPorts) {
  10323. utils/* Utils */.A.assert(this._outputPorts[portName].hasMessage(), `Did you forget to write data to the output port ${portName} of ${this.fullName}?`);
  10324. }
10325. // log diagnostic data for the node / pipeline
  10326. if (settings/* Settings */.w.logging === 'diagnostic') {
  10327. utils/* Utils */.A.log(`%c ${this.fullName} `, 'font-size:12pt;font-weight:bold;color:white;background:blue');
  10328. // Inspecting the data has performance implications.
  10329. // It is for diagnostic purposes only, not meant to be done in production!
  10330. for (const portName in this._inputPorts) utils/* Utils */.A.log(`%c-> ${portName}:`, 'font-size:10pt;font-weight:bold', this._inputPorts[portName].inspect(gpu));
  10331. for (const portName in this._outputPorts) utils/* Utils */.A.log(`%c<- ${portName}:`, 'font-size:10pt;font-weight:bold', this._outputPorts[portName].inspect(gpu));
  10332. }
  10333. }
  10334. /**
  10335. * Run the specific task of this node
  10336. * @abstract
  10337. * @param {SpeedyGPU} gpu
  10338. * @returns {void|SpeedyPromise<void>}
  10339. */
  10340. _run(gpu) {
  10341. throw new utils_errors/* AbstractMethodError */.aQ();
  10342. }
  10343. /**
  10344. * Initializes this node
  10345. * @param {SpeedyGPU} gpu
  10346. */
  10347. init(gpu) {
  10348. gpu.subscribe(this._allocateWorkTextures, this, gpu);
  10349. this._allocateWorkTextures(gpu);
  10350. }
  10351. /**
  10352. * Releases this node
  10353. * @param {SpeedyGPU} gpu
  10354. */
  10355. release(gpu) {
  10356. this._deallocateWorkTextures(gpu);
  10357. gpu.unsubscribe(this._allocateWorkTextures, this);
  10358. }
  10359. /**
  10360. * Clear all ports
  10361. */
  10362. clearPorts() {
  10363. let portName;
  10364. for (portName in this._inputPorts) this._inputPorts[portName].clearMessage();
  10365. for (portName in this._outputPorts) this._outputPorts[portName].clearMessage();
  10366. }
  10367. /**
  10368. * Find all nodes that feed input to this node
  10369. * @returns {SpeedyPipelineNode[]}
  10370. */
  10371. inputNodes() {
  10372. const nodes = [];
  10373. for (const portName in this._inputPorts) {
  10374. const port = this._inputPorts[portName];
  10375. if (port.incomingLink != null) nodes.push(port.incomingLink.node);
  10376. }
  10377. return nodes;
  10378. }
  10379. /**
  10380. * Is this a source of the pipeline?
  10381. * @returns {boolean}
  10382. */
  10383. isSource() {
  10384. return false;
  10385. }
  10386. /**
  10387. * Is this a sink of the pipeline?
  10388. * @returns {boolean}
  10389. */
  10390. isSink() {
  10391. return false;
  10392. // note: a portal sink has no output ports, but it isn't a sink of the pipeline!
  10393. //return Object.keys(this._outputPorts).length == 0;
  10394. }
  10395. /**
  10396. * Allocate work texture(s)
  10397. * @param {SpeedyGPU} gpu
  10398. */
  10399. _allocateWorkTextures(gpu) {
  10400. for (let j = 0; j < this._tex.length; j++) this._tex[j] = gpu.texturePool.allocate();
  10401. }
  10402. /**
  10403. * Deallocate work texture(s)
  10404. * @param {SpeedyGPU} gpu
  10405. */
  10406. _deallocateWorkTextures(gpu) {
  10407. for (let j = this._tex.length - 1; j >= 0; j--) this._tex[j] = gpu.texturePool.free(this._tex[j]);
  10408. }
  10409. /**
  10410. * Visually inspect a texture for debugging purposes
  10411. * @param {SpeedyGPU} gpu
  10412. * @param {SpeedyDrawableTexture} texture
  10413. */
  10414. _visualize(gpu, texture) {
  10415. const canvas = gpu.renderToCanvas(texture);
  10416. if (!_texView) {
  10417. document.body.appendChild(canvas);
  10418. _texView = true;
  10419. }
  10420. }
  10421. }
  10422. /**
  10423. * Source node (a node with no input ports)
  10424. * @abstract
  10425. */
  10426. class SpeedyPipelineSourceNode extends SpeedyPipelineNode {
  10427. /**
  10428. * Constructor
  10429. * @param {string} [name] the name of this node
  10430. * @param {number} [texCount] number of work textures
  10431. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  10432. */
  10433. constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
  10434. super(name, texCount, portBuilders);
  10435. utils/* Utils */.A.assert(Object.keys(this._inputPorts).length == 0);
  10436. }
  10437. /**
  10438. * Is this a source of the pipeline?
  10439. * @returns {boolean}
  10440. */
  10441. isSource() {
  10442. return true;
  10443. }
  10444. }
  10445. /**
  10446. * Sink node (a node with no output ports)
  10447. * @abstract
  10448. */
  10449. class SpeedyPipelineSinkNode extends SpeedyPipelineNode {
  10450. /**
  10451. * Constructor
  10452. * @param {string} [name] the name of this node
  10453. * @param {number} [texCount] number of work textures
  10454. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  10455. */
  10456. constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
  10457. super(name, texCount, portBuilders);
  10458. utils/* Utils */.A.assert(Object.keys(this._outputPorts).length == 0);
  10459. }
  10460. /**
  10461. * Export data from this node to the user
  10462. * @abstract
  10463. * @returns {SpeedyPromise<any>}
  10464. */
  10465. export() {
  10466. throw new utils_errors/* AbstractMethodError */.aQ();
  10467. }
  10468. /**
  10469. * Is this a sink of the pipeline?
  10470. * @returns {boolean}
  10471. */
  10472. isSink() {
  10473. return true;
  10474. }
  10475. }
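// --- Editor's illustrative sketch (not part of the library) ---
// A minimal custom node, assuming the classes and builders defined above:
// it pulls an image from its input port and forwards it unchanged to its
// output port. The class is defined only for illustration and never
// instantiated here.
class ExamplePassthroughNode extends SpeedyPipelineNode
{
    constructor(name = undefined)
    {
        super(name, 0, [
            InputPort().expects(SpeedyPipelineMessageType.Image),
            OutputPort().expects(SpeedyPipelineMessageType.Image)
        ]);
    }
    _run(gpu)
    {
        // execute() has already pulled the message into the input port
        const { image, format } = this.input().read();
        // swrite() reuses a cached message object for the output port
        this.output().swrite(image, format);
    }
}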
  10476. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-match.js
  10477. /*
  10478. * speedy-vision.js
  10479. * GPU-accelerated Computer Vision for JavaScript
  10480. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10481. *
  10482. * Licensed under the Apache License, Version 2.0 (the "License");
  10483. * you may not use this file except in compliance with the License.
  10484. * You may obtain a copy of the License at
  10485. *
  10486. * http://www.apache.org/licenses/LICENSE-2.0
  10487. *
  10488. * Unless required by applicable law or agreed to in writing, software
  10489. * distributed under the License is distributed on an "AS IS" BASIS,
  10490. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10491. * See the License for the specific language governing permissions and
  10492. * limitations under the License.
  10493. *
10494. * speedy-keypoint-match.js
  10495. * A match between two keypoint descriptors
  10496. */
  10497. // Constants
  10498. const MATCH_NOT_FOUND = -1;
  10499. /**
  10500. * A match between two keypoint descriptors
  10501. */
  10502. class SpeedyKeypointMatch {
  10503. /**
  10504. * Constructor
  10505. * @param {number} index index of the stored keypoint, a non-negative integer
  10506. * @param {number} distance a measure of the quality of the match, a non-negative number
  10507. */
  10508. constructor(index, distance) {
  10509. const isValid = distance < globals.MATCH_MAX_DISTANCE;
  10510. /** @type {number} index of the stored keypoint */
  10511. this._index = isValid ? index | 0 : MATCH_NOT_FOUND;
  10512. /** @type {number} a measure of the quality of the match */
  10513. this._distance = isValid ? +distance : Number.POSITIVE_INFINITY;
  10514. // done!
  10515. return Object.freeze(this);
  10516. }
  10517. /**
  10518. * The index of the stored keypoint
  10519. * @returns {number}
  10520. */
  10521. get index() {
  10522. return this._index;
  10523. }
  10524. /**
  10525. * A measure of the quality of the match (lower values indicate better matches)
  10526. * @returns {number}
  10527. */
  10528. get distance() {
  10529. return this._distance;
  10530. }
  10531. /**
  10532. * A string representation of the keypoint match
  10533. * @returns {string}
  10534. */
  10535. toString() {
  10536. return `SpeedyKeypointMatch(${this.index},${this.distance})`;
  10537. }
  10538. }
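// --- Editor's illustrative sketch (not part of the library) ---
// A match is only considered valid if its distance is below
// globals.MATCH_MAX_DISTANCE; otherwise its index becomes MATCH_NOT_FOUND (-1)
// and its distance becomes +Infinity. The inputs here are arbitrary.
function exampleMatchValidity(index, distance)
{
    const match = new SpeedyKeypointMatch(index, distance);
    return match.index !== MATCH_NOT_FOUND; // false when the match was rejected
}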
  10539. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint.js
  10540. /*
  10541. * speedy-vision.js
  10542. * GPU-accelerated Computer Vision for JavaScript
  10543. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10544. *
  10545. * Licensed under the Apache License, Version 2.0 (the "License");
  10546. * you may not use this file except in compliance with the License.
  10547. * You may obtain a copy of the License at
  10548. *
  10549. * http://www.apache.org/licenses/LICENSE-2.0
  10550. *
  10551. * Unless required by applicable law or agreed to in writing, software
  10552. * distributed under the License is distributed on an "AS IS" BASIS,
  10553. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10554. * See the License for the specific language governing permissions and
  10555. * limitations under the License.
  10556. *
  10557. * speedy-keypoint.js
  10558. * Keypoint class
  10559. */
  10560. /**
  10561. * Represents a keypoint
  10562. */
  10563. class SpeedyKeypoint {
  10564. /**
  10565. * Constructor
  10566. * @param {number} x X position
  10567. * @param {number} y Y position
  10568. * @param {number} [lod] Level-of-detail
  10569. * @param {number} [rotation] Rotation in radians
  10570. * @param {number} [score] Cornerness measure
  10571. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  10572. */
  10573. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null) {
  10574. /** @type {SpeedyPoint2} keypoint position */
  10575. this._position = new SpeedyPoint2(+x, +y);
  10576. /** @type {number} level of detail */
  10577. this._lod = +lod;
  10578. /** @type {number} rotation in radians */
  10579. this._rotation = +rotation;
  10580. /** @type {number} a cornerness measure */
  10581. this._score = +score;
  10582. /** @type {SpeedyKeypointDescriptor|null} keypoint descriptor, if any */
  10583. this._descriptor = descriptor;
  10584. }
  10585. /**
  10586. * Converts this keypoint to a descriptive string
  10587. * @returns {string}
  10588. */
  10589. toString() {
  10590. return `SpeedyKeypoint(${this.x},${this.y})`;
  10591. }
  10592. /**
  10593. * The position of this keypoint
  10594. * @returns {SpeedyPoint2}
  10595. */
  10596. get position() {
  10597. return this._position;
  10598. }
  10599. /**
  10600. * The x-position of this keypoint
  10601. * @returns {number}
  10602. */
  10603. get x() {
  10604. return this._position.x;
  10605. }
  10606. /**
  10607. * The x-position of this keypoint
  10608. * @param {number} value
  10609. */
  10610. set x(value) {
  10611. this._position.x = +value;
  10612. }
  10613. /**
  10614. * The y-position of this keypoint
  10615. * @returns {number}
  10616. */
  10617. get y() {
  10618. return this._position.y;
  10619. }
  10620. /**
  10621. * The y-position of this keypoint
  10622. * @param {number} value
  10623. */
  10624. set y(value) {
  10625. this._position.y = +value;
  10626. }
  10627. /**
  10628. * The pyramid level-of-detail from which this keypoint was extracted
  10629. * @returns {number}
  10630. */
  10631. get lod() {
  10632. return this._lod;
  10633. }
  10634. /**
  10635. * Scale: 2^lod
  10636. * @returns {number}
  10637. */
  10638. get scale() {
  10639. return Math.pow(2, this._lod);
  10640. }
  10641. /**
  10642. * The orientation of the keypoint, in radians
  10643. * @returns {number} Angle in radians
  10644. */
  10645. get rotation() {
  10646. return this._rotation;
  10647. }
  10648. /**
  10649. * Score: a cornerness measure
  10650. * @returns {number} Score
  10651. */
  10652. get score() {
  10653. return this._score;
  10654. }
  10655. /**
  10656. * Keypoint descriptor
  10657. * @return {SpeedyKeypointDescriptor|null}
  10658. */
  10659. get descriptor() {
  10660. return this._descriptor;
  10661. }
  10662. }
  10663. /**
  10664. * Represents a tracked keypoint
  10665. */
  10666. class SpeedyTrackedKeypoint extends SpeedyKeypoint {
  10667. /**
  10668. * Constructor
  10669. * @param {number} x X position
  10670. * @param {number} y Y position
  10671. * @param {number} [lod] Level-of-detail
  10672. * @param {number} [rotation] Rotation in radians
  10673. * @param {number} [score] Cornerness measure
  10674. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  10675. * @param {SpeedyVector2} [flow] flow vector
  10676. */
  10677. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, flow = new SpeedyVector2(0, 0)) {
  10678. super(x, y, lod, rotation, score, descriptor);
  10679. /** @type {SpeedyVector2} flow vector */
  10680. this._flow = flow;
  10681. }
  10682. /**
  10683. * Flow vector
  10684. * @returns {SpeedyVector2}
  10685. */
  10686. get flow() {
  10687. return this._flow;
  10688. }
  10689. }
  10690. /**
  10691. * Represents a matched keypoint
  10692. */
  10693. class SpeedyMatchedKeypoint extends SpeedyKeypoint {
  10694. /**
  10695. * Constructor
  10696. * @param {number} x X position
  10697. * @param {number} y Y position
  10698. * @param {number} [lod] Level-of-detail
  10699. * @param {number} [rotation] Rotation in radians
  10700. * @param {number} [score] Cornerness measure
  10701. * @param {SpeedyKeypointDescriptor|null} [descriptor] Keypoint descriptor, if any
  10702. * @param {SpeedyKeypointMatch[]} [matches] Keypoint matches, if any
  10703. */
  10704. constructor(x, y, lod = 0.0, rotation = 0.0, score = 0.0, descriptor = null, matches = []) {
  10705. super(x, y, lod, rotation, score, descriptor);
  10706. /** @type {SpeedyKeypointMatch[]} keypoint matches */
  10707. this._matches = matches;
  10708. }
  10709. /**
  10710. * Keypoint matches
  10711. * @returns {SpeedyKeypointMatch[]}
  10712. */
  10713. get matches() {
  10714. return this._matches;
  10715. }
  10716. }
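// --- Editor's illustrative sketch (not part of the library) ---
// Constructing a keypoint and reading its derived scale (2^lod); the
// coordinates below are arbitrary example values.
function exampleKeypointScale()
{
    const keypoint = new SpeedyKeypoint(160, 120, 2.0); // x, y, lod
    return keypoint.scale; // 2^2 === 4
}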
  10717. ;// CONCATENATED MODULE: ./src/core/pipeline/pipeline.js
  10718. /*
  10719. * speedy-vision.js
  10720. * GPU-accelerated Computer Vision for JavaScript
  10721. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10722. *
  10723. * Licensed under the Apache License, Version 2.0 (the "License");
  10724. * you may not use this file except in compliance with the License.
  10725. * You may obtain a copy of the License at
  10726. *
  10727. * http://www.apache.org/licenses/LICENSE-2.0
  10728. *
  10729. * Unless required by applicable law or agreed to in writing, software
  10730. * distributed under the License is distributed on an "AS IS" BASIS,
  10731. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10732. * See the License for the specific language governing permissions and
  10733. * limitations under the License.
  10734. *
  10735. * pipeline.js
  10736. * A pipeline is a network of nodes in which data flows to a sink
  10737. */
  10738. /**
  10739. * A dictionary indexed by the names of the sink nodes
  10740. * @typedef {Object<string,any>} SpeedyPipelineOutput
  10741. */
  10742. /** @type {SpeedyGPU} shared GPU programs & textures */
  10743. let gpu = null;
  10744. /** @type {number} gpu reference count */
  10745. let referenceCount = 0;
  10746. /**
  10747. * A pipeline is a network of nodes in which data flows to a sink
  10748. */
  10749. class SpeedyPipeline {
  10750. /**
  10751. * Constructor
  10752. */
  10753. constructor() {
  10754. /** @type {SpeedyPipelineNode[]} the collection of all nodes that belong to this pipeline */
  10755. this._nodes = [];
  10756. /** @type {SpeedyPipelineNode[]} a sequence of nodes: from the source(s) to the sink */
  10757. this._sequence = [];
  10758. /** @type {boolean} are we running the pipeline at this moment? */
  10759. this._busy = false;
  10760. }
  10761. /**
  10762. * Find a node by its name
10763. * @template {SpeedyPipelineNode} T
  10764. * @param {string} name
  10765. * @returns {T|null}
  10766. */
  10767. node(name) {
  10768. for (let i = 0, n = this._nodes.length; i < n; i++) {
  10769. if (this._nodes[i].name === name) return this._nodes[i];
  10770. }
  10771. return null;
  10772. }
  10773. /**
  10774. * Initialize the pipeline
  10775. * @param {...SpeedyPipelineNode} nodes
  10776. * @returns {SpeedyPipeline} this pipeline
  10777. */
  10778. init(...nodes) {
  10779. // validate
  10780. if (this._nodes.length > 0) throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been initialized`);else if (nodes.length == 0) throw new utils_errors/* IllegalArgumentError */.qw(`Can't initialize the pipeline. Please specify its nodes`);
  10781. // create a GPU instance and increase the reference count
  10782. if (0 == referenceCount++) {
  10783. utils/* Utils */.A.assert(!gpu, 'Duplicate SpeedyGPU instance');
  10784. gpu = new SpeedyGPU();
  10785. }
  10786. // add nodes to the network
  10787. for (let i = 0; i < nodes.length; i++) {
  10788. const node = nodes[i];
  10789. if (!this._nodes.includes(node)) this._nodes.push(node);
  10790. }
  10791. // generate the sequence of nodes
  10792. this._sequence = SpeedyPipeline._tsort(this._nodes);
  10793. SpeedyPipeline._validateSequence(this._sequence);
  10794. // initialize nodes
  10795. for (let i = 0; i < this._sequence.length; i++) this._sequence[i].init(gpu);
  10796. // done!
  10797. return this;
  10798. }
  10799. /**
  10800. * Release the resources associated with this pipeline
  10801. * @returns {null}
  10802. */
  10803. release() {
  10804. if (this._nodes.length == 0) throw new utils_errors/* IllegalOperationError */.Er(`The pipeline has already been released or has never been initialized`);
  10805. // release nodes
  10806. for (let i = this._sequence.length - 1; i >= 0; i--) this._sequence[i].release(gpu);
  10807. this._sequence.length = 0;
  10808. this._nodes.length = 0;
  10809. // decrease reference count and release GPU if necessary
  10810. if (0 == --referenceCount) gpu = gpu.release();
  10811. // done!
  10812. return null;
  10813. }
  10814. /**
  10815. * Run the pipeline
  10816. * @returns {SpeedyPromise<SpeedyPipelineOutput>} results are indexed by the names of the sink nodes
  10817. */
  10818. run() {
  10819. utils/* Utils */.A.assert(this._sequence.length > 0, `The pipeline has not been initialized or has been released`);
  10820. // is the pipeline busy?
  10821. if (this._busy) {
  10822. // if so, we need to wait 'til it finishes
  10823. return new speedy_promise/* SpeedyPromise */.i((resolve, reject) => {
  10824. setTimeout(() => this.run().then(resolve, reject), 0);
  10825. });
  10826. } else {
  10827. // the pipeline is now busy and won't accept concurrent tasks
  10828. // (we allocate textures using a single pool)
  10829. this._busy = true;
  10830. }
  10831. // find the sinks
  10832. const sinks = /** @type {SpeedyPipelineSinkNode[]} */this._sequence.filter(node => node.isSink());
  10833. // create output template
  10834. const template = SpeedyPipeline._createOutputTemplate(sinks);
  10835. // diagnostic log
  10836. if (settings/* Settings */.w.logging === 'diagnostic') utils/* Utils */.A.log('%c RUNNING PIPELINE ', 'background:red;color:white;font-size:28pt;font-weight:bold');
  10837. // run the pipeline
  10838. return SpeedyPipeline._runSequence(this._sequence).then(() =>
  10839. // export results
  10840. speedy_promise/* SpeedyPromise */.i.all(sinks.map(sink => sink.export().turbocharge())).then(results =>
  10841. // aggregate results by the names of the sinks
  10842. results.reduce((obj, val, idx) => (obj[sinks[idx].name] = val, obj), template))).finally(() => {
  10843. // clear all ports
  10844. for (let i = this._sequence.length - 1; i >= 0; i--) this._sequence[i].clearPorts();
  10845. // the pipeline is no longer busy
  10846. this._busy = false;
  10847. // diagnostic log
  10848. if (settings/* Settings */.w.logging === 'diagnostic') {
  10849. utils/* Utils */.A.log('%c PIPELINE OUTPUT \n', 'background:green;color:white;font-size:16pt;font-weight:bold');
  10850. Object.keys(template).forEach(entry => {
  10851. utils/* Utils */.A.log('%c' + entry + ':', 'font-size:10pt;font-weight:bold', template[entry]);
  10852. });
  10853. }
  10854. }).turbocharge();
  10855. }
  10856. /**
  10857. * @internal
  10858. *
  10859. * GPU instance
  10860. * @returns {SpeedyGPU}
  10861. */
  10862. get _gpu() {
  10863. return gpu;
  10864. }
  10865. /**
  10866. * Execute the tasks of a sequence of nodes
  10867. * @param {SpeedyPipelineNode[]} sequence sequence of nodes
  10868. * @param {number} [i] in [0,n)
  10869. * @param {number} [n] number of nodes
  10870. * @returns {SpeedyPromise<void>}
  10871. */
  10872. static _runSequence(sequence, i = 0, n = sequence.length) {
  10873. for (; i < n; i++) {
  10874. const runTask = sequence[i].execute(gpu);
  10875. // this call greatly improves performance when downloading pixel data using PBOs
  10876. gpu.gl.flush();
  10877. if (typeof runTask !== 'undefined') return runTask.then(() => SpeedyPipeline._runSequence(sequence, i + 1, n));
  10878. }
  10879. return speedy_promise/* SpeedyPromise */.i.resolve();
  10880. }
  10881. /**
  10882. * Topological sorting
  10883. * @param {SpeedyPipelineNode[]} nodes
  10884. * @returns {SpeedyPipelineNode[]}
  10885. */
  10886. static _tsort(nodes) {
  10887. /** @typedef {[SpeedyPipelineNode, boolean]} StackNode */
  10888. const outlinks = SpeedyPipeline._outlinks(nodes);
  10889. const stack = nodes.map(node => ( /** @type {StackNode} */[node, false]));
  10890. const trash = new Set();
  10891. const sorted = new Array(nodes.length);
  10892. let j = sorted.length;
  10893. while (stack.length > 0) {
  10894. const [node, done] = stack.pop();
  10895. if (!done) {
  10896. if (!trash.has(node)) {
  10897. const outnodes = outlinks.get(node);
  10898. trash.add(node);
  10899. stack.push([node, true]);
  10900. stack.push(...outnodes.map(node => ( /** @type {StackNode} */[node, false])));
  10901. if (outnodes.some(node => trash.has(node) && !sorted.includes(node))) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline networks cannot have cycles!`);
  10902. }
  10903. } else sorted[--j] = node;
  10904. }
  10905. return sorted;
  10906. }
  10907. /**
  10908. * Figure out the outgoing links of all nodes
  10909. * @param {SpeedyPipelineNode[]} nodes
  10910. * @returns {Map<SpeedyPipelineNode,SpeedyPipelineNode[]>}
  10911. */
  10912. static _outlinks(nodes) {
  10913. const outlinks = new Map();
  10914. for (let k = 0; k < nodes.length; k++) outlinks.set(nodes[k], []);
  10915. for (let i = 0; i < nodes.length; i++) {
  10916. const to = nodes[i];
  10917. const inputs = to.inputNodes();
  10918. for (let j = 0; j < inputs.length; j++) {
  10919. const from = inputs[j];
  10920. const links = outlinks.get(from);
  10921. if (!links) throw new utils_errors/* IllegalOperationError */.Er(`Can't initialize the pipeline. Missing node: ${from.fullName}. Did you forget to add it to the initialization list?`);
  10922. if (!links.includes(to)) links.push(to);
  10923. }
  10924. }
  10925. return outlinks;
  10926. }
  10927. /**
  10928. * Generate the output template by aggregating the names of the sinks
  10929. * @param {SpeedyPipelineNode[]} [sinks]
  10930. * @returns {SpeedyPipelineOutput}
  10931. */
  10932. static _createOutputTemplate(sinks = []) {
  10933. const template = Object.create(null);
  10934. for (let i = sinks.length - 1; i >= 0; i--) template[sinks[i].name] = null;
  10935. return template;
  10936. }
  10937. /**
  10938. * Validate a sequence of nodes
  10939. * @param {SpeedyPipelineNode[]} sequence
  10940. */
  10941. static _validateSequence(sequence) {
  10942. if (sequence.length == 0) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have nodes`);else if (!sequence[0].isSource()) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a source`);else if (!sequence.find(node => node.isSink())) throw new utils_errors/* IllegalOperationError */.Er(`Pipeline doesn't have a sink`);
  10943. }
  10944. }
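// --- Editor's illustrative sketch (not part of the library) ---
// Typical pipeline lifecycle: init() registers the nodes, sorts them
// topologically and boots a shared SpeedyGPU; run() resolves to an object
// keyed by the names of the sink nodes; release() frees GPU resources.
// `source` and `sink` are hypothetical, already-configured nodes.
function examplePipelineLifecycle(source, sink)
{
    const pipeline = new SpeedyPipeline().init(source, sink);
    return pipeline.run()
        .then(output => output[sink.name]) // data exported by that sink
        .finally(() => pipeline.release());
}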
  10945. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/source.js
  10946. /*
  10947. * speedy-vision.js
  10948. * GPU-accelerated Computer Vision for JavaScript
  10949. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  10950. *
  10951. * Licensed under the Apache License, Version 2.0 (the "License");
  10952. * you may not use this file except in compliance with the License.
  10953. * You may obtain a copy of the License at
  10954. *
  10955. * http://www.apache.org/licenses/LICENSE-2.0
  10956. *
  10957. * Unless required by applicable law or agreed to in writing, software
  10958. * distributed under the License is distributed on an "AS IS" BASIS,
  10959. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  10960. * See the License for the specific language governing permissions and
  10961. * limitations under the License.
  10962. *
  10963. * image-input.js
  10964. * Gets an image into a pipeline
  10965. */
  10966. // Constants
  10967. const UPLOAD_BUFFER_SIZE = 2; // how many textures we allocate for uploading data
  10968. /**
  10969. * Gets an image into a pipeline
  10970. */
  10971. class SpeedyPipelineNodeImageSource extends SpeedyPipelineSourceNode {
  10972. /**
  10973. * Constructor
  10974. * @param {string} [name] name of the node
  10975. */
  10976. constructor(name = undefined) {
  10977. super(name, UPLOAD_BUFFER_SIZE, [OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  10978. /** @type {SpeedyMedia|null} source media */
  10979. this._media = null;
  10980. /** @type {number} texture index */
  10981. this._textureIndex = 0;
  10982. }
  10983. /**
  10984. * Source media
  10985. * @returns {SpeedyMedia|null}
  10986. */
  10987. get media() {
  10988. return this._media;
  10989. }
  10990. /**
  10991. * Source media
  10992. * @param {SpeedyMedia|null} media
  10993. */
  10994. set media(media) {
  10995. if (media !== null && !(media instanceof SpeedyMedia)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a SpeedyMedia: ${media}`);
  10996. this._media = media;
  10997. }
  10998. /**
  10999. * Run the specific task of this node
  11000. * @param {SpeedyGPU} gpu
  11001. * @returns {void|SpeedyPromise<void>}
  11002. */
  11003. _run(gpu) {
  11004. if (this._media == null) throw new utils_errors/* IllegalOperationError */.Er(`Did you forget to set the media of ${this.fullName}?`);
  11005. // use round-robin to mitigate WebGL's implicit synchronization
  11006. // and maybe minimize texture upload times
  11007. this._textureIndex = (this._textureIndex + 1) % this._tex.length;
  11008. // upload texture
  11009. const outputTexture = this._tex[this._textureIndex];
  11010. gpu.upload(this._media._source, outputTexture);
  11011. this.output().swrite(outputTexture, this._media._format);
  11012. }
  11013. }
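// --- Editor's illustrative sketch (not part of the library) ---
// Feeding an image into a pipeline: create the source node and assign a
// SpeedyMedia to it before running the pipeline. `someSpeedyMedia` is a
// hypothetical, already-loaded SpeedyMedia instance.
function exampleImageSource(someSpeedyMedia)
{
    const source = new SpeedyPipelineNodeImageSource('my-source');
    source.media = someSpeedyMedia; // must be a SpeedyMedia (or null)
    return source;
}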
  11014. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/sink.js
  11015. /*
  11016. * speedy-vision.js
  11017. * GPU-accelerated Computer Vision for JavaScript
  11018. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11019. *
  11020. * Licensed under the Apache License, Version 2.0 (the "License");
  11021. * you may not use this file except in compliance with the License.
  11022. * You may obtain a copy of the License at
  11023. *
  11024. * http://www.apache.org/licenses/LICENSE-2.0
  11025. *
  11026. * Unless required by applicable law or agreed to in writing, software
  11027. * distributed under the License is distributed on an "AS IS" BASIS,
  11028. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11029. * See the License for the specific language governing permissions and
  11030. * limitations under the License.
  11031. *
11032. * sink.js
  11033. * Gets an image out of a pipeline
  11034. */
  11035. /** @typedef {"bitmap" | "data"} SpeedyPipelineNodeImageSinkExportedMediaType exported media type */
  11036. /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} default exported media type */
  11037. const DEFAULT_MEDIA_TYPE = "bitmap";
  11038. /**
  11039. * Gets an image out of a pipeline
  11040. */
  11041. class SpeedyPipelineNodeImageSink extends SpeedyPipelineSinkNode {
  11042. /**
  11043. * Constructor
  11044. * @param {string} [name] name of the node
  11045. */
  11046. constructor(name = 'image') {
  11047. super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Image)]);
  11048. /** @type {SpeedyPipelineNodeImageSinkExportedMediaType} the media type that is exported from this node */
  11049. this._mediaType = DEFAULT_MEDIA_TYPE;
  11050. /** @type {ImageBitmap} output bitmap */
  11051. this._bitmap = null;
  11052. /** @type {ImageData} output pixel data */
  11053. this._data = null;
  11054. /** @type {ImageFormat} output format */
  11055. this._format = types/* ImageFormat */.f5.RGBA;
  11056. /** @type {SpeedyTextureReader} texture reader */
  11057. this._textureReader = new SpeedyTextureReader(1);
  11058. }
  11059. /**
  11060. * The media type that is exported from this node
  11061. * @returns {SpeedyPipelineNodeImageSinkExportedMediaType}
  11062. */
  11063. get mediaType() {
  11064. return this._mediaType;
  11065. }
  11066. /**
  11067. * The media type that is exported from this node
  11068. * @param {SpeedyPipelineNodeImageSinkExportedMediaType} value
  11069. */
  11070. set mediaType(value) {
  11071. if (value != 'bitmap' && value != 'data') throw new utils_errors/* IllegalArgumentError */.qw(`Invalid mediaType for ${this.fullName}: "${value}"`);
  11072. this._mediaType = value;
  11073. }
  11074. /**
  11075. * Initializes this node
  11076. * @param {SpeedyGPU} gpu
  11077. */
  11078. init(gpu) {
  11079. super.init(gpu);
  11080. this._textureReader.init(gpu);
  11081. }
  11082. /**
  11083. * Releases this node
  11084. * @param {SpeedyGPU} gpu
  11085. */
  11086. release(gpu) {
  11087. this._textureReader.release(gpu);
  11088. super.release(gpu);
  11089. }
  11090. /**
  11091. * Export data from this node to the user
  11092. * @returns {SpeedyPromise<SpeedyMedia>}
  11093. */
  11094. export() {
  11095. const bitmapOrData = this._mediaType != 'data' ? this._bitmap : this._data;
  11096. utils/* Utils */.A.assert(bitmapOrData != null);
  11097. return SpeedyMedia.load(bitmapOrData, {
  11098. format: this._format
  11099. }, false);
  11100. }
  11101. /**
  11102. * Run the specific task of this node
  11103. * @param {SpeedyGPU} gpu
  11104. * @returns {void|SpeedyPromise<void>}
  11105. */
  11106. _run(gpu) {
  11107. const {
  11108. image,
  11109. format
  11110. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11111. if (this._mediaType != 'data') {
  11112. /* Create an ImageBitmap (default) */
  11113. return new speedy_promise/* SpeedyPromise */.i(resolve => {
  11114. const canvas = gpu.renderToCanvas(image);
  11115. createImageBitmap(canvas, 0, canvas.height - image.height, image.width, image.height).then(bitmap => {
  11116. this._bitmap = bitmap;
  11117. this._format = format;
  11118. this._data = null;
  11119. resolve();
  11120. });
  11121. });
  11122. } else {
  11123. /* Create an ImageData */
  11124. return this._textureReader.readPixelsAsync(image, 0, 0, image.width, image.height, false).then(pixels => {
  11125. const dataArray = new Uint8ClampedArray(pixels.buffer);
  11126. this._data = new ImageData(dataArray, image.width, image.height);
  11127. this._format = format;
  11128. this._bitmap = null;
  11129. });
  11130. }
  11131. }
  11132. }
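// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): configuring the image sink defined above. Depending on
// `mediaType`, export() wraps either an ImageBitmap ('bitmap', the default) or
// an ImageData ('data') in a SpeedyMedia; any other value throws. The pipeline
// machinery that calls _run() and export() is assumed and not shown here.
function exampleConfigureImageSink() {
    const sink = new SpeedyPipelineNodeImageSink('image');
    sink.mediaType = 'data'; // read back raw pixels instead of an ImageBitmap
    return sink;
}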
  11133. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/multiplexer.js
  11134. /*
  11135. * speedy-vision.js
  11136. * GPU-accelerated Computer Vision for JavaScript
  11137. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11138. *
  11139. * Licensed under the Apache License, Version 2.0 (the "License");
  11140. * you may not use this file except in compliance with the License.
  11141. * You may obtain a copy of the License at
  11142. *
  11143. * http://www.apache.org/licenses/LICENSE-2.0
  11144. *
  11145. * Unless required by applicable law or agreed to in writing, software
  11146. * distributed under the License is distributed on an "AS IS" BASIS,
  11147. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11148. * See the License for the specific language governing permissions and
  11149. * limitations under the License.
  11150. *
  11151. * multiplexer.js
  11152. * Image multiplexer
  11153. */
  11154. /** @type {string[]} the names of the input ports indexed by their number */
  11155. const INPUT_PORT = ['in0', 'in1'];
  11156. /**
  11157. * Image multiplexer
  11158. */
  11159. class SpeedyPipelineNodeImageMultiplexer extends SpeedyPipelineNode {
  11160. /**
  11161. * Constructor
  11162. * @param {string} [name] name of the node
  11163. */
  11164. constructor(name = undefined) {
  11165. super(name, 0, [...INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Image)), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11166. /** @type {number} which port should be linked to the output? */
  11167. this._port = 0;
  11168. }
  11169. /**
  11170. * The number of the port that should be linked to the output
  11171. * @returns {number}
  11172. */
  11173. get port() {
  11174. return this._port;
  11175. }
  11176. /**
  11177. * The number of the port that should be linked to the output
  11178. * @param {number} port
  11179. */
  11180. set port(port) {
  11181. if (port < 0 || port >= INPUT_PORT.length) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
  11182. this._port = port | 0;
  11183. }
  11184. /**
  11185. * Run the specific task of this node
  11186. * @param {SpeedyGPU} gpu
  11187. * @returns {void|SpeedyPromise<void>}
  11188. */
  11189. _run(gpu) {
  11190. const message = this.input(INPUT_PORT[this._port]).read();
  11191. this.output().write(message);
  11192. }
  11193. }
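// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): the multiplexer defined above forwards one of its two image
// inputs ('in0' or 'in1') to the output; `port` selects which one and must be
// 0 or 1, otherwise the setter throws IllegalArgumentError.
function exampleSelectMultiplexerPort(useSecondInput) {
    const mux = new SpeedyPipelineNodeImageMultiplexer('mux');
    mux.port = useSecondInput ? 1 : 0; // route 'in1' or 'in0' to the output
    return mux;
}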
  11194. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/buffer.js
  11195. /*
  11196. * speedy-vision.js
  11197. * GPU-accelerated Computer Vision for JavaScript
  11198. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11199. *
  11200. * Licensed under the Apache License, Version 2.0 (the "License");
  11201. * you may not use this file except in compliance with the License.
  11202. * You may obtain a copy of the License at
  11203. *
  11204. * http://www.apache.org/licenses/LICENSE-2.0
  11205. *
  11206. * Unless required by applicable law or agreed to in writing, software
  11207. * distributed under the License is distributed on an "AS IS" BASIS,
  11208. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11209. * See the License for the specific language governing permissions and
  11210. * limitations under the License.
  11211. *
  11212. * buffer.js
  11213. * Image Buffer
  11214. */
  11215. /**
  11216. * Image Buffer: a node with memory.
  11217. * At time t, it outputs the image received at time t-1
  11218. */
  11219. class SpeedyPipelineNodeImageBuffer extends SpeedyPipelineNode {
  11220. /**
  11221. * Constructor
  11222. * @param {string} [name] name of the node
  11223. */
  11224. constructor(name = undefined) {
  11225. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11226. /** @type {number} current page: 0 or 1 */
  11227. this._pageIndex = 0;
  11228. /** @type {boolean} first run? */
  11229. this._initialized = false;
  11230. /** @type {ImageFormat} previous image format */
  11231. this._previousFormat = types/* ImageFormat */.f5.RGBA;
  11232. /** @type {boolean} frozen buffer? */
  11233. this._frozen = false;
  11234. }
  11235. /**
  11236. * A frozen buffer discards the input, effectively increasing the buffering time
  11237. * @returns {boolean}
  11238. */
  11239. get frozen() {
  11240. return this._frozen;
  11241. }
  11242. /**
  11243. * A frozen buffer discards the input, effectively increasing the buffering time
  11244. * @param {boolean} value
  11245. */
  11246. set frozen(value) {
  11247. this._frozen = Boolean(value);
  11248. }
  11249. /**
  11250. * Releases this node
  11251. * @param {SpeedyGPU} gpu
  11252. */
  11253. release(gpu) {
  11254. this._initialized = false;
  11255. super.release(gpu);
  11256. }
  11257. /**
  11258. * Run the specific task of this node
  11259. * @param {SpeedyGPU} gpu
  11260. * @returns {void|SpeedyPromise<void>}
  11261. */
  11262. _run(gpu) {
  11263. const {
  11264. image,
  11265. format
  11266. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11267. const previousFormat = this._previousFormat;
  11268. const page = this._tex;
  11269. const previousInputTexture = page[1 - this._pageIndex];
  11270. const outputTexture = page[this._pageIndex];
  11271. // can't store pyramids
  11272. if (image.hasMipmaps()) throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't bufferize a pyramid`);
  11273. // bufferize
  11274. if (!this._frozen || !this._initialized) {
  11275. // store input
  11276. this._previousFormat = format;
  11277. previousInputTexture.resize(image.width, image.height);
  11278. image.copyTo(previousInputTexture);
  11279. // page flipping
  11280. this._pageIndex = 1 - this._pageIndex;
  11281. }
  11282. // first run?
  11283. if (!this._initialized) {
  11284. this._initialized = true;
  11285. this.output().swrite(previousInputTexture, format);
  11286. return;
  11287. }
  11288. // done!
  11289. this.output().swrite(outputTexture, previousFormat);
  11290. }
  11291. }
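// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): the buffer defined above delays its input by one frame (the
// output at time t is the input received at time t-1). Freezing it discards new
// input and keeps the stored image, effectively lengthening the delay; pyramids
// (textures with mipmaps) cannot be buffered.
function exampleToggleImageBuffer(hold) {
    const buffer = new SpeedyPipelineNodeImageBuffer('one-frame-delay');
    buffer.frozen = Boolean(hold); // true: keep current contents; false: buffer new frames
    return buffer;
}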
  11292. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/pyramid.js
  11293. /*
  11294. * speedy-vision.js
  11295. * GPU-accelerated Computer Vision for JavaScript
  11296. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11297. *
  11298. * Licensed under the Apache License, Version 2.0 (the "License");
  11299. * you may not use this file except in compliance with the License.
  11300. * You may obtain a copy of the License at
  11301. *
  11302. * http://www.apache.org/licenses/LICENSE-2.0
  11303. *
  11304. * Unless required by applicable law or agreed to in writing, software
  11305. * distributed under the License is distributed on an "AS IS" BASIS,
  11306. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11307. * See the License for the specific language governing permissions and
  11308. * limitations under the License.
  11309. *
  11310. * pyramid.js
  11311. * Generate pyramid
  11312. */
  11313. // Constants
  11314. const MAX_LEVELS = globals.PYRAMID_MAX_LEVELS; //14; // supposing image size <= 8K = 2^13 (downto 1)
  11315. const MAX_TEXTURES = 2 * MAX_LEVELS; //MAX_LEVELS;
  11316. /**
  11317. * Generate pyramid
  11318. */
  11319. class SpeedyPipelineNodeImagePyramid extends SpeedyPipelineNode {
  11320. /**
  11321. * Constructor
  11322. * @param {string} [name] name of the node
  11323. */
  11324. constructor(name = undefined) {
  11325. super(name, MAX_TEXTURES + 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11326. }
  11327. /**
  11328. * Run the specific task of this node
  11329. * @param {SpeedyGPU} gpu
  11330. * @returns {void|SpeedyPromise<void>}
  11331. */
  11332. _run(gpu) {
  11333. const {
  11334. image,
  11335. format
  11336. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11337. const outputTexture = this._tex[0];
  11338. const pyramids = gpu.programs.pyramids;
  11339. let width = image.width,
  11340. height = image.height;
  11341. // number of mipmap levels according to the OpenGL ES 3.0 spec (sec 3.8.10.4)
  11342. const mipLevels = 1 + Math.floor(Math.log2(Math.max(width, height)));
  11343. // get work textures
  11344. const mip = new Array(MAX_TEXTURES + 1);
  11345. for (let i = MAX_TEXTURES; i >= 1; i--) mip[i - 1] = this._tex[i];
  11346. // get a copy of the input image
  11347. mip[0].resize(width, height);
  11348. image.copyTo(mip[0]);
  11349. // generate gaussian pyramid
  11350. const numLevels = Math.min(mipLevels, MAX_LEVELS);
  11351. for (let level = 1; level < numLevels; level++) {
11352. // use max(1, floor(size / 2^lod)), in accordance with
  11353. // the OpenGL ES 3.0 spec sec 3.8.10.4 (Mipmapping)
  11354. const halfWidth = Math.max(1, width >>> 1);
  11355. const halfHeight = Math.max(1, height >>> 1);
  11356. // reduce operation
  11357. const tmp = level - 1 + MAX_LEVELS;
  11358. pyramids.smoothX.outputs(width, height, mip[tmp])(mip[level - 1]);
  11359. pyramids.smoothY.outputs(width, height, mip[level - 1])(mip[tmp]);
  11360. pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level])(mip[level - 1]);
  11361. /*
  11362. (pyramids.reduce.outputs(width, height, mip[tmp]))(mip[level-1]);
  11363. (pyramids.downsample2.outputs(halfWidth, halfHeight, mip[level]))(mip[tmp]);
  11364. */
  11365. // flush
  11366. gpu.gl.flush();
  11367. // next level
  11368. width = halfWidth;
  11369. height = halfHeight;
  11370. /*
  11371. // debug: view pyramid
  11372. const view = mip[level-1];
  11373. const canvas = gpu.renderToCanvas(view);
  11374. if(!window._ww) document.body.appendChild(canvas);
  11375. window._ww = 1;
  11376. */
  11377. }
  11378. // copy to output & set mipmap
  11379. outputTexture.resize(image.width, image.height);
  11380. outputTexture.clear();
  11381. image.copyTo(outputTexture);
  11382. outputTexture.generateMipmaps(mip.slice(0, numLevels));
  11383. // done!
  11384. this.output().swrite(outputTexture, format);
  11385. }
  11386. }
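// Worked example (hypothetical helper, not part of speedy-vision and never
// invoked by this bundle): how many pyramid levels the node above generates.
// For a 1920x1080 input, mipLevels = 1 + floor(log2(1920)) = 11, so the
// Gaussian pyramid gets min(11, PYRAMID_MAX_LEVELS) levels.
function examplePyramidLevelCount(width, height) {
    const mipLevels = 1 + Math.floor(Math.log2(Math.max(width, height)));
    return Math.min(mipLevels, globals.PYRAMID_MAX_LEVELS);
}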
  11387. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/mixer.js
  11388. /*
  11389. * speedy-vision.js
  11390. * GPU-accelerated Computer Vision for JavaScript
  11391. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11392. *
  11393. * Licensed under the Apache License, Version 2.0 (the "License");
  11394. * you may not use this file except in compliance with the License.
  11395. * You may obtain a copy of the License at
  11396. *
  11397. * http://www.apache.org/licenses/LICENSE-2.0
  11398. *
  11399. * Unless required by applicable law or agreed to in writing, software
  11400. * distributed under the License is distributed on an "AS IS" BASIS,
  11401. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11402. * See the License for the specific language governing permissions and
  11403. * limitations under the License.
  11404. *
  11405. * mixer.js
  11406. * Image Mixer
  11407. */
  11408. /**
  11409. * Image Mixer
  11410. */
  11411. class SpeedyPipelineNodeImageMixer extends SpeedyPipelineNode {
  11412. /**
  11413. * Constructor
  11414. * @param {string} [name] name of the node
  11415. */
  11416. constructor(name = undefined) {
  11417. super(name, 1, [InputPort('in0').expects(SpeedyPipelineMessageType.Image), InputPort('in1').expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11418. /** @type {number} alpha coefficient (applied to image0) */
  11419. this._alpha = 0.5;
  11420. /** @type {number} beta coefficient (applied to image1) */
  11421. this._beta = 0.5;
  11422. /** @type {number} gamma coefficient (brightness control) */
  11423. this._gamma = 0.0;
  11424. }
  11425. /**
  11426. * Alpha coefficient (applied to image0)
  11427. * @returns {number}
  11428. */
  11429. get alpha() {
  11430. return this._alpha;
  11431. }
  11432. /**
  11433. * Alpha coefficient (applied to image0)
  11434. * @param {number} value
  11435. */
  11436. set alpha(value) {
  11437. this._alpha = +value;
  11438. }
  11439. /**
  11440. * Beta coefficient (applied to image1)
  11441. * @returns {number}
  11442. */
  11443. get beta() {
  11444. return this._beta;
  11445. }
  11446. /**
  11447. * Beta coefficient (applied to image1)
  11448. * @param {number} value
  11449. */
  11450. set beta(value) {
  11451. this._beta = +value;
  11452. }
  11453. /**
  11454. * Gamma coefficient (brightness control)
  11455. * @returns {number}
  11456. */
  11457. get gamma() {
  11458. return this._gamma;
  11459. }
  11460. /**
  11461. * Gamma coefficient (brightness control)
  11462. * @param {number} value
  11463. */
  11464. set gamma(value) {
  11465. this._gamma = +value;
  11466. }
  11467. /**
  11468. * Run the specific task of this node
  11469. * @param {SpeedyGPU} gpu
  11470. * @returns {void|SpeedyPromise<void>}
  11471. */
  11472. _run(gpu) {
  11473. const in0 = /** @type {SpeedyPipelineMessageWithImage} */this.input('in0').read();
  11474. const in1 = /** @type {SpeedyPipelineMessageWithImage} */this.input('in1').read();
  11475. const image0 = in0.image,
  11476. image1 = in1.image;
  11477. const format0 = in0.format,
  11478. format1 = in1.format;
  11479. const width = Math.max(image0.width, image1.width);
  11480. const height = Math.max(image0.height, image1.height);
  11481. const alpha = this._alpha,
  11482. beta = this._beta,
  11483. gamma = this._gamma;
  11484. const outputTexture = this._tex[0];
  11485. if (format0 != format1) throw new utils_errors/* NotSupportedError */.EM(`Can't mix images of different formats`);
  11486. gpu.programs.transforms.additiveMix.outputs(width, height, outputTexture);
  11487. gpu.programs.transforms.additiveMix(image0, image1, alpha, beta, gamma);
  11488. this.output().swrite(outputTexture, format0);
  11489. }
  11490. }
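// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): the mixer defined above blends its two inputs additively,
// roughly alpha * in0 + beta * in1 + gamma, and requires both inputs to share
// the same image format. A 70/30 cross-fade:
function exampleConfigureImageMixer() {
    const mixer = new SpeedyPipelineNodeImageMixer('blend');
    mixer.alpha = 0.7; // weight of the image connected to 'in0'
    mixer.beta = 0.3;  // weight of the image connected to 'in1'
    mixer.gamma = 0.0; // extra brightness
    return mixer;
}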
  11491. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/images/portal.js
  11492. /*
  11493. * speedy-vision.js
  11494. * GPU-accelerated Computer Vision for JavaScript
  11495. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11496. *
  11497. * Licensed under the Apache License, Version 2.0 (the "License");
  11498. * you may not use this file except in compliance with the License.
  11499. * You may obtain a copy of the License at
  11500. *
  11501. * http://www.apache.org/licenses/LICENSE-2.0
  11502. *
  11503. * Unless required by applicable law or agreed to in writing, software
  11504. * distributed under the License is distributed on an "AS IS" BASIS,
  11505. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11506. * See the License for the specific language governing permissions and
  11507. * limitations under the License.
  11508. *
  11509. * portal.js
  11510. * Image Portals
  11511. */
  11512. /**
  11513. * A sink of an Image Portal
  11514. * This is not a pipeline sink - it doesn't export any data!
  11515. */
  11516. class SpeedyPipelineNodeImagePortalSink extends SpeedyPipelineNode {
  11517. /**
  11518. * Constructor
  11519. * @param {string} [name] name of the node
  11520. */
  11521. constructor(name = undefined) {
  11522. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image)]);
  11523. /** @type {ImageFormat} stored image format */
  11524. this._format = types/* ImageFormat */.f5.RGBA;
  11525. /** @type {boolean} is this node initialized? */
  11526. this._initialized = false;
  11527. }
  11528. /**
  11529. * Stored image
  11530. * @returns {SpeedyTexture}
  11531. */
  11532. get image() {
  11533. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  11534. return this._tex[0];
  11535. }
  11536. /**
  11537. * Stored image format
  11538. * @returns {ImageFormat}
  11539. */
  11540. get format() {
  11541. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  11542. return this._format;
  11543. }
  11544. /**
  11545. * Initializes this node
  11546. * @param {SpeedyGPU} gpu
  11547. */
  11548. init(gpu) {
  11549. super.init(gpu);
  11550. this._tex[0].resize(1, 1).clear(); // initial texture
  11551. this._format = types/* ImageFormat */.f5.RGBA;
  11552. this._initialized = true;
  11553. }
  11554. /**
  11555. * Releases this node
  11556. * @param {SpeedyGPU} gpu
  11557. */
  11558. release(gpu) {
  11559. this._initialized = false;
  11560. super.release(gpu);
  11561. }
  11562. /**
  11563. * Run the specific task of this node
  11564. * @param {SpeedyGPU} gpu
  11565. * @returns {void|SpeedyPromise<void>}
  11566. */
  11567. _run(gpu) {
  11568. const {
  11569. image,
  11570. format
  11571. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11572. const tex = this._tex[0];
  11573. // can't store pyramids
  11574. if (image.hasMipmaps()) throw new utils_errors/* NotSupportedError */.EM(`${this.fullName} can't store a pyramid`);
  11575. // copy input
  11576. this._format = format;
  11577. tex.resize(image.width, image.height);
  11578. image.copyTo(tex);
  11579. }
  11580. }
  11581. /**
  11582. * A source of an Image Portal
  11583. */
  11584. class SpeedyPipelineNodeImagePortalSource extends SpeedyPipelineSourceNode {
  11585. /**
  11586. * Constructor
  11587. * @param {string} [name] name of the node
  11588. */
  11589. constructor(name = undefined) {
  11590. super(name, 0, [OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11591. /** @type {SpeedyPipelineNodeImagePortalSink|null} portal sink */
  11592. this._source = null;
  11593. }
  11594. /**
  11595. * Data source
  11596. * @returns {SpeedyPipelineNodeImagePortalSink|null}
  11597. */
  11598. get source() {
  11599. return this._source;
  11600. }
  11601. /**
  11602. * Data source
  11603. * @param {SpeedyPipelineNodeImagePortalSink|null} node
  11604. */
  11605. set source(node) {
  11606. if (node !== null && !(node instanceof SpeedyPipelineNodeImagePortalSink)) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
  11607. this._source = node;
  11608. }
  11609. /**
  11610. * Run the specific task of this node
  11611. * @param {SpeedyGPU} gpu
  11612. * @returns {void|SpeedyPromise<void>}
  11613. */
  11614. _run(gpu) {
  11615. if (this._source == null) throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
  11616. this.output().swrite(this._source.image, this._source.format);
  11617. }
  11618. }
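// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): an image portal pairs a sink, which stores an incoming image,
// with a source whose `source` property points to that sink and re-emits the
// stored image elsewhere in the graph (or in another pipeline). Pyramids cannot
// go through a portal.
function exampleLinkImagePortal() {
    const portalSink = new SpeedyPipelineNodeImagePortalSink('portal-in');
    const portalSource = new SpeedyPipelineNodeImagePortalSource('portal-out');
    portalSource.source = portalSink; // must be an image portal sink (or null)
    return { portalSink, portalSource };
}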
  11619. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/image-factory.js
  11620. /*
  11621. * speedy-vision.js
  11622. * GPU-accelerated Computer Vision for JavaScript
  11623. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11624. *
  11625. * Licensed under the Apache License, Version 2.0 (the "License");
  11626. * you may not use this file except in compliance with the License.
  11627. * You may obtain a copy of the License at
  11628. *
  11629. * http://www.apache.org/licenses/LICENSE-2.0
  11630. *
  11631. * Unless required by applicable law or agreed to in writing, software
  11632. * distributed under the License is distributed on an "AS IS" BASIS,
  11633. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11634. * See the License for the specific language governing permissions and
  11635. * limitations under the License.
  11636. *
  11637. * image-factory.js
  11638. * Image-related nodes
  11639. */
  11640. /**
  11641. * Portal nodes
  11642. */
  11643. class SpeedyPipelineImagePortalFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  11644. /**
  11645. * Create an image portal source
  11646. * @param {string} [name] name of the node
  11647. * @returns {SpeedyPipelineNodeImagePortalSource}
  11648. */
  11649. static Source(name = undefined) {
  11650. return new SpeedyPipelineNodeImagePortalSource(name);
  11651. }
  11652. /**
  11653. * Create an image portal sink
  11654. * @param {string} [name] name of the node
  11655. * @returns {SpeedyPipelineNodeImagePortalSink}
  11656. */
  11657. static Sink(name = undefined) {
  11658. return new SpeedyPipelineNodeImagePortalSink(name);
  11659. }
  11660. }
  11661. /**
  11662. * Image nodes
  11663. */
  11664. class SpeedyPipelineImageFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  11665. /**
  11666. * Create an image source
  11667. * @param {string} [name] name of the node
  11668. * @returns {SpeedyPipelineNodeImageSource}
  11669. */
  11670. static Source(name = undefined) {
  11671. return new SpeedyPipelineNodeImageSource(name);
  11672. }
  11673. /**
  11674. * Create an image sink
  11675. * @param {string} [name] name of the node
  11676. * @returns {SpeedyPipelineNodeImageSink}
  11677. */
  11678. static Sink(name = undefined) {
  11679. return new SpeedyPipelineNodeImageSink(name);
  11680. }
  11681. /**
  11682. * Create an image multiplexer
  11683. * @param {string} [name] name of the node
  11684. * @returns {SpeedyPipelineNodeImageMultiplexer}
  11685. */
  11686. static Multiplexer(name = undefined) {
  11687. return new SpeedyPipelineNodeImageMultiplexer(name);
  11688. }
  11689. /**
  11690. * Create an image buffer
  11691. * @param {string} [name] name of the node
  11692. * @returns {SpeedyPipelineNodeImageBuffer}
  11693. */
  11694. static Buffer(name = undefined) {
  11695. return new SpeedyPipelineNodeImageBuffer(name);
  11696. }
  11697. /**
  11698. * Image Pyramid
  11699. * @param {string} [name] name of the node
  11700. * @returns {SpeedyPipelineNodeImagePyramid}
  11701. */
  11702. static Pyramid(name = undefined) {
  11703. return new SpeedyPipelineNodeImagePyramid(name);
  11704. }
  11705. /**
  11706. * Image Mixer (blending)
  11707. * @param {string} [name] name of the node
  11708. * @returns {SpeedyPipelineNodeImageMixer}
  11709. */
  11710. static Mixer(name = undefined) {
  11711. return new SpeedyPipelineNodeImageMixer(name);
  11712. }
  11713. /**
  11714. * Image Portals
  11715. * @returns {typeof SpeedyPipelineImagePortalFactory}
  11716. */
  11717. static get Portal() {
  11718. return SpeedyPipelineImagePortalFactory;
  11719. }
  11720. }
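// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): creating image nodes through the factories defined above.
// How these factories are exposed to user code (e.g. under a public namespace
// such as Speedy.Image) is an assumption not shown in this excerpt.
function exampleCreateImageNodes() {
    const source = SpeedyPipelineImageFactory.Source('webcam');
    const pyramid = SpeedyPipelineImageFactory.Pyramid();
    const sink = SpeedyPipelineImageFactory.Sink('image');
    const portalSink = SpeedyPipelineImageFactory.Portal.Sink('portal-in');
    return { source, pyramid, sink, portalSink };
}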
  11721. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/greyscale.js
  11722. /*
  11723. * speedy-vision.js
  11724. * GPU-accelerated Computer Vision for JavaScript
  11725. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11726. *
  11727. * Licensed under the Apache License, Version 2.0 (the "License");
  11728. * you may not use this file except in compliance with the License.
  11729. * You may obtain a copy of the License at
  11730. *
  11731. * http://www.apache.org/licenses/LICENSE-2.0
  11732. *
  11733. * Unless required by applicable law or agreed to in writing, software
  11734. * distributed under the License is distributed on an "AS IS" BASIS,
  11735. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11736. * See the License for the specific language governing permissions and
  11737. * limitations under the License.
  11738. *
  11739. * greyscale.js
  11740. * Convert an image to greyscale
  11741. */
  11742. /**
  11743. * Convert an image to greyscale
  11744. */
  11745. class SpeedyPipelineNodeGreyscale extends SpeedyPipelineNode {
  11746. /**
  11747. * Constructor
  11748. * @param {string} [name] name of the node
  11749. */
  11750. constructor(name = undefined) {
  11751. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11752. }
  11753. /**
  11754. * Run the specific task of this node
  11755. * @param {SpeedyGPU} gpu
  11756. * @returns {void|SpeedyPromise<void>}
  11757. */
  11758. _run(gpu) {
  11759. const {
  11760. image,
  11761. format
  11762. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11763. const width = image.width,
  11764. height = image.height;
  11765. const outputTexture = this._tex[0];
  11766. const filters = gpu.programs.filters;
  11767. filters.rgb2grey.outputs(width, height, outputTexture);
  11768. filters.rgb2grey(image);
  11769. this.output().swrite(outputTexture, types/* ImageFormat */.f5.GREY);
  11770. }
  11771. }
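// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): the greyscale node defined above always emits
// ImageFormat.GREY, which some filters further below (e.g. median blur,
// normalize) require on their input port.
function exampleCreateGreyscaleNode() {
    return new SpeedyPipelineNodeGreyscale('to-grey');
}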
  11772. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/gaussian-blur.js
  11773. /*
  11774. * speedy-vision.js
  11775. * GPU-accelerated Computer Vision for JavaScript
  11776. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11777. *
  11778. * Licensed under the Apache License, Version 2.0 (the "License");
  11779. * you may not use this file except in compliance with the License.
  11780. * You may obtain a copy of the License at
  11781. *
  11782. * http://www.apache.org/licenses/LICENSE-2.0
  11783. *
  11784. * Unless required by applicable law or agreed to in writing, software
  11785. * distributed under the License is distributed on an "AS IS" BASIS,
  11786. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11787. * See the License for the specific language governing permissions and
  11788. * limitations under the License.
  11789. *
  11790. * gaussian-blur.js
  11791. * Gaussian Blur
  11792. */
  11793. /**
  11794. * Default kernels for different sizes: 3x3, 5x5, 7x7... (use sigma_x = sigma_y)
  11795. * Heuristics: in order to pick a sigma, we set radius = 2 * sigma. Since
  11796. * ksize = 1 + 2 * radius, it follows that sigma = (ksize - 1) / 4. When
  11797. * ksize is 3, we set sigma = 1. Therefore, sigma = max(1, (ksize - 1) / 4).
  11798. */
  11799. const DEFAULT_KERNEL = Object.freeze({
  11800. 3: [0.27901008925473514, 0.44197982149052983, 0.27901008925473514],
  11801. // 1D convolution (sigma = 1)
  11802. 5: [0.06135959781344021, 0.2447701955296099, 0.3877404133138998, 0.2447701955296099, 0.06135959781344021],
  11803. // 1D convolution (separable kernel)
  11804. 7: [0.03873542500847274, 0.11308485700794121, 0.2150068609928349, 0.26634571398150225, 0.2150068609928349, 0.11308485700794121, 0.03873542500847274],
  11805. 9: [0.028532262603370988, 0.067234535494912, 0.12400932997922749, 0.17904386461741617, 0.20236001461014655, 0.17904386461741617, 0.12400932997922749, 0.067234535494912, 0.028532262603370988],
  11806. 11: [0.022656882730580346, 0.04610857898527292, 0.08012661469398517, 0.11890414969751599, 0.15067709325491124, 0.16305336127546846, 0.15067709325491124, 0.11890414969751599, 0.08012661469398517, 0.04610857898527292, 0.022656882730580346],
  11807. 13: [0.018815730430644363, 0.03447396964662016, 0.05657737457255748, 0.08317258170844948, 0.10952340502389682, 0.12918787500405662, 0.13649812722755, 0.12918787500405662, 0.10952340502389682, 0.08317258170844948, 0.05657737457255748, 0.03447396964662016, 0.018815730430644363],
  11808. 15: [0.016100340991695383, 0.027272329212157102, 0.042598338587449644, 0.06135478775568558, 0.08148767614129326, 0.09979838342934616, 0.11270444144735056, 0.11736740487004466, 0.11270444144735056, 0.09979838342934616, 0.08148767614129326, 0.06135478775568558, 0.042598338587449644, 0.027272329212157102, 0.016100340991695383]
  11809. //3: [ 0.25, 0.5, 0.25 ],
  11810. //5: [ 0.05, 0.25, 0.4, 0.25, 0.05 ],
  11811. });
  11812. /** Zero vector. When we set sigma_x = sigma_y = 0, we use the default rule to compute the actual sigma */
  11813. const DEFAULT_SIGMA = new SpeedyVector2(0, 0);
  11814. /** convolution programs (x-axis) */
  11815. const CONVOLUTION_X = Object.freeze({
  11816. 3: 'convolution3x',
  11817. 5: 'convolution5x',
  11818. 7: 'convolution7x',
  11819. 9: 'convolution9x',
  11820. 11: 'convolution11x',
  11821. 13: 'convolution13x',
  11822. 15: 'convolution15x'
  11823. });
  11824. /** convolution programs (y-axis) */
  11825. const CONVOLUTION_Y = Object.freeze({
  11826. 3: 'convolution3y',
  11827. 5: 'convolution5y',
  11828. 7: 'convolution7y',
  11829. 9: 'convolution9y',
  11830. 11: 'convolution11y',
  11831. 13: 'convolution13y',
  11832. 15: 'convolution15y'
  11833. });
  11834. /**
  11835. * @typedef {object} SeparableConvolutionKernel
  11836. * @property {number[]} x
  11837. * @property {number[]} y
  11838. */
  11839. /**
  11840. * Gaussian Blur
  11841. */
  11842. class SpeedyPipelineNodeGaussianBlur extends SpeedyPipelineNode {
  11843. /**
  11844. * Constructor
  11845. * @param {string} [name] name of the node
  11846. */
  11847. constructor(name = undefined) {
  11848. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11849. /** @type {SpeedySize} size of the kernel */
  11850. this._kernelSize = new SpeedySize(5, 5);
  11851. /** @type {SpeedyVector2} sigma of the Gaussian kernel (0 means: use default settings) */
  11852. this._sigma = DEFAULT_SIGMA;
  11853. /** @type {SeparableConvolutionKernel} convolution kernel */
  11854. this._kernel = {
  11855. x: DEFAULT_KERNEL[this._kernelSize.width],
  11856. y: DEFAULT_KERNEL[this._kernelSize.height]
  11857. };
  11858. }
  11859. /**
  11860. * Size of the kernel
  11861. * @returns {SpeedySize}
  11862. */
  11863. get kernelSize() {
  11864. return this._kernelSize;
  11865. }
  11866. /**
  11867. * Size of the kernel
  11868. * @param {SpeedySize} kernelSize
  11869. */
  11870. set kernelSize(kernelSize) {
  11871. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  11872. const kw = kernelSize.width,
  11873. kh = kernelSize.height;
  11874. if (kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0) throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
  11875. this._kernelSize = kernelSize;
  11876. this._updateKernel();
  11877. }
  11878. /**
  11879. * Sigma of the Gaussian kernel
  11880. * @returns {SpeedyVector2}
  11881. */
  11882. get sigma() {
  11883. return this._sigma;
  11884. }
  11885. /**
  11886. * Sigma of the Gaussian kernel
  11887. * @param {SpeedyVector2} sigma
  11888. */
  11889. set sigma(sigma) {
  11890. utils/* Utils */.A.assert(sigma instanceof SpeedyVector2, `Sigma must be a SpeedyVector2`);
  11891. utils/* Utils */.A.assert(sigma.x >= 0 && sigma.y >= 0);
  11892. this._sigma = sigma;
  11893. this._updateKernel();
  11894. }
  11895. /**
  11896. * Run the specific task of this node
  11897. * @param {SpeedyGPU} gpu
  11898. * @returns {void|SpeedyPromise<void>}
  11899. */
  11900. _run(gpu) {
  11901. const {
  11902. image,
  11903. format
  11904. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  11905. const width = image.width,
  11906. height = image.height;
  11907. const kernX = this._kernel.x;
  11908. const kernY = this._kernel.y;
  11909. const convX = CONVOLUTION_X[this._kernelSize.width];
  11910. const convY = CONVOLUTION_Y[this._kernelSize.height];
  11911. const tex = this._tex[0];
  11912. const outputTexture = this._tex[1];
  11913. gpu.programs.filters[convX].outputs(width, height, tex)(image, kernX);
  11914. gpu.programs.filters[convY].outputs(width, height, outputTexture)(tex, kernY);
  11915. this.output().swrite(outputTexture, format);
  11916. }
  11917. /**
  11918. * Update the internal kernel to match
  11919. * sigma and kernelSize
  11920. */
  11921. _updateKernel() {
  11922. if (this._sigma.x == DEFAULT_SIGMA.x) this._kernel.x = DEFAULT_KERNEL[this._kernelSize.width];else this._kernel.x = utils/* Utils */.A.gaussianKernel(this._sigma.x, this._kernelSize.width, true);
  11923. if (this._sigma.y == DEFAULT_SIGMA.y) this._kernel.y = DEFAULT_KERNEL[this._kernelSize.height];else this._kernel.y = utils/* Utils */.A.gaussianKernel(this._sigma.y, this._kernelSize.height, true);
  11924. }
  11925. }
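// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): only odd kernel sizes from 3x3 to 15x15 are accepted by the
// node above. With the default sigma (0,0), the rule documented earlier applies:
// sigma = max(1, (ksize - 1) / 4), e.g. a 9x9 kernel uses sigma = 2. A custom
// sigma may also be set explicitly:
function exampleConfigureGaussianBlur() {
    const blur = new SpeedyPipelineNodeGaussianBlur('blur');
    blur.kernelSize = new SpeedySize(9, 9);   // odd sizes in [3,15] only
    blur.sigma = new SpeedyVector2(2.0, 2.0); // (0,0) means "use the default rule"
    return blur;
}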
  11926. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/simple-blur.js
  11927. /*
  11928. * speedy-vision.js
  11929. * GPU-accelerated Computer Vision for JavaScript
  11930. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  11931. *
  11932. * Licensed under the Apache License, Version 2.0 (the "License");
  11933. * you may not use this file except in compliance with the License.
  11934. * You may obtain a copy of the License at
  11935. *
  11936. * http://www.apache.org/licenses/LICENSE-2.0
  11937. *
  11938. * Unless required by applicable law or agreed to in writing, software
  11939. * distributed under the License is distributed on an "AS IS" BASIS,
  11940. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  11941. * See the License for the specific language governing permissions and
  11942. * limitations under the License.
  11943. *
  11944. * simple-blur.js
  11945. * Simple Blur (Box Filter)
  11946. */
  11947. /** 1D convolution filters */
  11948. const BOX_FILTER = Object.freeze({
  11949. 3: new Array(3).fill(1 / 3),
  11950. 5: new Array(5).fill(1 / 5),
  11951. 7: new Array(7).fill(1 / 7),
  11952. 9: new Array(9).fill(1 / 9),
  11953. 11: new Array(11).fill(1 / 11),
  11954. 13: new Array(13).fill(1 / 13),
  11955. 15: new Array(15).fill(1 / 15)
  11956. });
  11957. /** convolution programs (x-axis) */
  11958. const simple_blur_CONVOLUTION_X = Object.freeze({
  11959. 3: 'convolution3x',
  11960. 5: 'convolution5x',
  11961. 7: 'convolution7x',
  11962. 9: 'convolution9x',
  11963. 11: 'convolution11x',
  11964. 13: 'convolution13x',
  11965. 15: 'convolution15x'
  11966. });
  11967. /** convolution programs (y-axis) */
  11968. const simple_blur_CONVOLUTION_Y = Object.freeze({
  11969. 3: 'convolution3y',
  11970. 5: 'convolution5y',
  11971. 7: 'convolution7y',
  11972. 9: 'convolution9y',
  11973. 11: 'convolution11y',
  11974. 13: 'convolution13y',
  11975. 15: 'convolution15y'
  11976. });
  11977. /**
  11978. * @typedef {object} SeparableConvolutionKernel
  11979. * @property {number[]} x
  11980. * @property {number[]} y
  11981. */
  11982. /**
  11983. * Simple Blur (Box Filter)
  11984. */
  11985. class SpeedyPipelineNodeSimpleBlur extends SpeedyPipelineNode {
  11986. /**
  11987. * Constructor
  11988. * @param {string} [name] name of the node
  11989. */
  11990. constructor(name = undefined) {
  11991. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  11992. /** @type {SpeedySize} size of the kernel */
  11993. this._kernelSize = new SpeedySize(5, 5);
  11994. /** @type {SeparableConvolutionKernel} convolution kernel */
  11995. this._kernel = {
  11996. x: BOX_FILTER[this._kernelSize.width],
  11997. y: BOX_FILTER[this._kernelSize.height]
  11998. };
  11999. }
  12000. /**
  12001. * Size of the kernel
  12002. * @returns {SpeedySize}
  12003. */
  12004. get kernelSize() {
  12005. return this._kernelSize;
  12006. }
  12007. /**
  12008. * Size of the kernel
  12009. * @param {SpeedySize} kernelSize
  12010. */
  12011. set kernelSize(kernelSize) {
  12012. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  12013. const kw = kernelSize.width,
  12014. kh = kernelSize.height;
  12015. if (kw < 3 || kh < 3 || kw > 15 || kh > 15 || kw % 2 == 0 || kh % 2 == 0) throw new utils_errors/* NotSupportedError */.EM(`Unsupported kernel size: ${kw}x${kh}`);
  12016. this._kernelSize = kernelSize;
  12017. this._kernel.x = BOX_FILTER[this._kernelSize.width];
  12018. this._kernel.y = BOX_FILTER[this._kernelSize.height];
  12019. }
  12020. /**
  12021. * Run the specific task of this node
  12022. * @param {SpeedyGPU} gpu
  12023. * @returns {void|SpeedyPromise<void>}
  12024. */
  12025. _run(gpu) {
  12026. const {
  12027. image,
  12028. format
  12029. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12030. const width = image.width,
  12031. height = image.height;
  12032. const kernX = this._kernel.x;
  12033. const kernY = this._kernel.y;
  12034. const convX = simple_blur_CONVOLUTION_X[this._kernelSize.width];
  12035. const convY = simple_blur_CONVOLUTION_Y[this._kernelSize.height];
  12036. const tex = this._tex[0];
  12037. const outputTexture = this._tex[1];
  12038. gpu.programs.filters[convX].outputs(width, height, tex)(image, kernX);
  12039. gpu.programs.filters[convY].outputs(width, height, outputTexture)(tex, kernY);
  12040. this.output().swrite(outputTexture, format);
  12041. }
  12042. }
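// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): the box filter above convolves the image with normalized 1D
// kernels (e.g. five taps of 1/5 for a 5x5 blur), applied separably along x and
// y; as with the Gaussian blur, only odd sizes from 3x3 to 15x15 are accepted.
function exampleConfigureSimpleBlur() {
    const blur = new SpeedyPipelineNodeSimpleBlur('box-blur');
    blur.kernelSize = new SpeedySize(7, 7);
    return blur;
}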
  12043. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/median-blur.js
  12044. /*
  12045. * speedy-vision.js
  12046. * GPU-accelerated Computer Vision for JavaScript
  12047. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12048. *
  12049. * Licensed under the Apache License, Version 2.0 (the "License");
  12050. * you may not use this file except in compliance with the License.
  12051. * You may obtain a copy of the License at
  12052. *
  12053. * http://www.apache.org/licenses/LICENSE-2.0
  12054. *
  12055. * Unless required by applicable law or agreed to in writing, software
  12056. * distributed under the License is distributed on an "AS IS" BASIS,
  12057. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12058. * See the License for the specific language governing permissions and
  12059. * limitations under the License.
  12060. *
  12061. * median-blur.js
  12062. * Median Blur
  12063. */
  12064. // Median programs
  12065. const MEDIAN = {
  12066. 3: 'median3',
  12067. 5: 'median5',
  12068. 7: 'median7'
  12069. };
  12070. /**
  12071. * Median Blur
  12072. */
  12073. class SpeedyPipelineNodeMedianBlur extends SpeedyPipelineNode {
  12074. /**
  12075. * Constructor
  12076. * @param {string} [name] name of the node
  12077. */
  12078. constructor(name = undefined) {
  12079. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12080. /** @type {SpeedySize} size of the kernel (assumed to be square) */
  12081. this._kernelSize = new SpeedySize(5, 5);
  12082. }
  12083. /**
  12084. * Size of the kernel
  12085. * @returns {SpeedySize}
  12086. */
  12087. get kernelSize() {
  12088. return this._kernelSize;
  12089. }
  12090. /**
  12091. * Size of the kernel
  12092. * @param {SpeedySize} kernelSize
  12093. */
  12094. set kernelSize(kernelSize) {
  12095. utils/* Utils */.A.assert(kernelSize instanceof SpeedySize);
  12096. const ksize = kernelSize.width;
  12097. if (!(ksize == 3 || ksize == 5 || ksize == 7)) throw new utils_errors/* NotSupportedError */.EM(`Supported kernel sizes: 3x3, 5x5, 7x7`);else if (kernelSize.width != kernelSize.height) throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);
  12098. this._kernelSize = kernelSize;
  12099. }
  12100. /**
  12101. * Run the specific task of this node
  12102. * @param {SpeedyGPU} gpu
  12103. * @returns {void|SpeedyPromise<void>}
  12104. */
  12105. _run(gpu) {
  12106. const {
  12107. image,
  12108. format
  12109. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12110. const width = image.width,
  12111. height = image.height;
  12112. const ksize = this._kernelSize.width;
  12113. const med = MEDIAN[ksize];
  12114. const outputTexture = this._tex[0];
  12115. gpu.programs.filters[med].outputs(width, height, outputTexture)(image);
  12116. this.output().swrite(outputTexture, format);
  12117. }
  12118. }
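// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): the median blur above expects a greyscale input and a square
// kernel of size 3x3, 5x5 or 7x7; anything else throws NotSupportedError.
function exampleConfigureMedianBlur() {
    const median = new SpeedyPipelineNodeMedianBlur('denoise');
    median.kernelSize = new SpeedySize(3, 3);
    return median;
}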
  12119. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/convolution.js
  12120. /*
  12121. * speedy-vision.js
  12122. * GPU-accelerated Computer Vision for JavaScript
  12123. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12124. *
  12125. * Licensed under the Apache License, Version 2.0 (the "License");
  12126. * you may not use this file except in compliance with the License.
  12127. * You may obtain a copy of the License at
  12128. *
  12129. * http://www.apache.org/licenses/LICENSE-2.0
  12130. *
  12131. * Unless required by applicable law or agreed to in writing, software
  12132. * distributed under the License is distributed on an "AS IS" BASIS,
  12133. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12134. * See the License for the specific language governing permissions and
  12135. * limitations under the License.
  12136. *
  12137. * convolution.js
  12138. * Image convolution
  12139. */
  12140. // 2D convolution programs
  12141. const CONVOLUTION = {
  12142. 3: 'convolution3',
  12143. 5: 'convolution5',
  12144. 7: 'convolution7'
  12145. };
  12146. /**
  12147. * Image convolution
  12148. */
  12149. class SpeedyPipelineNodeConvolution extends SpeedyPipelineNode {
  12150. /**
  12151. * Constructor
  12152. * @param {string} [name] name of the node
  12153. */
  12154. constructor(name = undefined) {
  12155. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12156. /** @type {SpeedyMatrix} convolution kernel (square matrix) */
  12157. this._kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [0, 0, 0, 0, 1, 0, 0, 0, 0]); // identity transform
  12158. }
  12159. /**
  12160. * Convolution kernel
  12161. * @returns {SpeedyMatrix}
  12162. */
  12163. get kernel() {
  12164. return this._kernel;
  12165. }
  12166. /**
  12167. * Convolution kernel
  12168. * @param {SpeedyMatrix} kernel
  12169. */
  12170. set kernel(kernel) {
  12171. if (kernel.rows != kernel.columns) throw new utils_errors/* NotSupportedError */.EM(`Use a square kernel`);else if (!(kernel.rows == 3 || kernel.rows == 5 || kernel.rows == 7)) throw new utils_errors/* NotSupportedError */.EM(`Invalid kernel size. Supported sizes: 3x3, 5x5, 7x7`);
  12172. this._kernel = kernel;
  12173. }
  12174. /**
  12175. * Run the specific task of this node
  12176. * @param {SpeedyGPU} gpu
  12177. * @returns {void|SpeedyPromise<void>}
  12178. */
  12179. _run(gpu) {
  12180. const {
  12181. image,
  12182. format
  12183. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12184. const width = image.width,
  12185. height = image.height;
  12186. const outputTexture = this._tex[0];
  12187. const ksize = this._kernel.rows;
  12188. const conv = CONVOLUTION[ksize];
  12189. const kernel = this._kernel.read();
  12190. gpu.programs.filters[conv].outputs(width, height, outputTexture)(image, kernel);
  12191. this.output().swrite(outputTexture, format);
  12192. }
  12193. }
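// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): the convolution node above takes a square SpeedyMatrix of
// size 3x3, 5x5 or 7x7 (the default is the identity kernel). A common 3x3
// sharpening kernel, which is symmetric and thus independent of storage order:
function exampleConfigureConvolution() {
    const conv = new SpeedyPipelineNodeConvolution('sharpen');
    conv.kernel = speedy_matrix.SpeedyMatrix.Create(3, 3, [
        0, -1,  0,
       -1,  5, -1,
        0, -1,  0
    ]);
    return conv;
}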
  12194. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/nightvision.js
  12195. /*
  12196. * speedy-vision.js
  12197. * GPU-accelerated Computer Vision for JavaScript
  12198. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12199. *
  12200. * Licensed under the Apache License, Version 2.0 (the "License");
  12201. * you may not use this file except in compliance with the License.
  12202. * You may obtain a copy of the License at
  12203. *
  12204. * http://www.apache.org/licenses/LICENSE-2.0
  12205. *
  12206. * Unless required by applicable law or agreed to in writing, software
  12207. * distributed under the License is distributed on an "AS IS" BASIS,
  12208. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12209. * See the License for the specific language governing permissions and
  12210. * limitations under the License.
  12211. *
  12212. * nightvision.js
  12213. * Nightvision filter
  12214. */
  12215. /**
  12216. * @typedef {"high"|"medium"|"low"} NightvisionQualityLevel
  12217. */
  12218. /**
  12219. * Nightvision filter: "see in the dark"
  12220. */
  12221. class SpeedyPipelineNodeNightvision extends SpeedyPipelineNode {
  12222. /**
  12223. * Constructor
  12224. * @param {string} [name] name of the node
  12225. */
  12226. constructor(name = undefined) {
  12227. super(name, 3, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.RGBA || msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12228. /** @type {number} a value typically in [0,1]: larger number => higher contrast */
  12229. this._gain = 0.5;
  12230. /** @type {number} a value typically in [0,1]: controls brightness */
  12231. this._offset = 0.5;
  12232. /** @type {number} gain decay, a value in [0,1] */
  12233. this._decay = 0.0;
  12234. /** @type {NightvisionQualityLevel} quality level */
  12235. this._quality = 'medium';
  12236. }
  12237. /**
  12238. * Gain, a value typically in [0,1]: larger number => higher contrast
  12239. * @returns {number}
  12240. */
  12241. get gain() {
  12242. return this._gain;
  12243. }
  12244. /**
  12245. * Gain, a value typically in [0,1]: larger number => higher contrast
  12246. * @param {number} gain
  12247. */
  12248. set gain(gain) {
  12249. this._gain = +gain;
  12250. }
  12251. /**
  12252. * Offset, a value typically in [0,1] that controls the brightness
  12253. * @returns {number}
  12254. */
  12255. get offset() {
  12256. return this._offset;
  12257. }
  12258. /**
  12259. * Offset, a value typically in [0,1] that controls the brightness
  12260. * @param {number} offset
  12261. */
  12262. set offset(offset) {
  12263. this._offset = +offset;
  12264. }
  12265. /**
  12266. * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
  12267. * @returns {number}
  12268. */
  12269. get decay() {
  12270. return this._decay;
  12271. }
  12272. /**
  12273. * Gain decay, a value in [0,1] that controls how the gain decays from the center of the image
  12274. * @param {number} decay
  12275. */
  12276. set decay(decay) {
  12277. this._decay = Math.max(0.0, Math.min(+decay, 1.0));
  12278. }
  12279. /**
  12280. * Quality level of the filter
  12281. * @returns {NightvisionQualityLevel}
  12282. */
  12283. get quality() {
  12284. return this._quality;
  12285. }
  12286. /**
  12287. * Quality level of the filter
  12288. * @param {NightvisionQualityLevel} quality
  12289. */
  12290. set quality(quality) {
  12291. if (quality === 'high' || quality === 'medium' || quality === 'low') this._quality = quality;else throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level for the Nightvision filter: "${quality}"`);
  12292. }
  12293. /**
  12294. * Run the specific task of this node
  12295. * @param {SpeedyGPU} gpu
  12296. * @returns {void|SpeedyPromise<void>}
  12297. */
  12298. _run(gpu) {
  12299. const {
  12300. image,
  12301. format
  12302. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12303. const width = image.width,
  12304. height = image.height;
  12305. const gain = this._gain;
  12306. const offset = this._offset;
  12307. const decay = this._decay;
  12308. const quality = this._quality;
  12309. const filters = gpu.programs.filters;
  12310. const tmp = this._tex[0];
  12311. const illuminationMap = this._tex[1];
  12312. const outputTexture = this._tex[2];
  12313. // compute illumination map
  12314. if (quality == 'medium') {
  12315. filters.illuminationMapX.outputs(width, height, tmp);
  12316. filters.illuminationMapY.outputs(width, height, illuminationMap);
  12317. filters.illuminationMapX(image);
  12318. filters.illuminationMapY(tmp);
  12319. } else if (quality == 'high') {
  12320. filters.illuminationMapHiX.outputs(width, height, tmp);
  12321. filters.illuminationMapHiY.outputs(width, height, illuminationMap);
  12322. filters.illuminationMapHiX(image);
  12323. filters.illuminationMapHiY(tmp);
  12324. } else if (quality == 'low') {
  12325. filters.illuminationMapLoX.outputs(width, height, tmp);
  12326. filters.illuminationMapLoY.outputs(width, height, illuminationMap);
  12327. filters.illuminationMapLoX(image);
  12328. filters.illuminationMapLoY(tmp);
  12329. }
  12330. // run nightvision
  12331. if (format === types/* ImageFormat */.f5.GREY) {
  12332. filters.nightvisionGreyscale.outputs(width, height, outputTexture);
  12333. filters.nightvisionGreyscale(image, illuminationMap, gain, offset, decay);
  12334. } else if (format === types/* ImageFormat */.f5.RGBA) {
  12335. filters.nightvision.outputs(width, height, outputTexture);
  12336. filters.nightvision(image, illuminationMap, gain, offset, decay);
  12337. }
  12338. // done!
  12339. this.output().swrite(outputTexture, format);
  12340. }
  12341. }
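// Usage sketch (hypothetical helper, not part of speedy-vision and never invoked
// by this bundle): the nightvision filter above accepts RGBA or greyscale input;
// gain raises contrast, offset raises brightness, decay attenuates the gain away
// from the center of the image (clamped to [0,1]), and quality selects one of
// the illumination-map programs ('low', 'medium' or 'high').
function exampleConfigureNightvision() {
    const nightvision = new SpeedyPipelineNodeNightvision('see-in-the-dark');
    nightvision.gain = 0.4;
    nightvision.offset = 0.6;
    nightvision.decay = 0.1;
    nightvision.quality = 'high'; // invalid strings throw IllegalArgumentError
    return nightvision;
}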
  12342. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/filters/normalize.js
  12343. /*
  12344. * speedy-vision.js
  12345. * GPU-accelerated Computer Vision for JavaScript
  12346. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12347. *
  12348. * Licensed under the Apache License, Version 2.0 (the "License");
  12349. * you may not use this file except in compliance with the License.
  12350. * You may obtain a copy of the License at
  12351. *
  12352. * http://www.apache.org/licenses/LICENSE-2.0
  12353. *
  12354. * Unless required by applicable law or agreed to in writing, software
  12355. * distributed under the License is distributed on an "AS IS" BASIS,
  12356. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12357. * See the License for the specific language governing permissions and
  12358. * limitations under the License.
  12359. *
  12360. * normalize.js
  12361. * Normalize image to a range
  12362. */
  12363. /**
  12364. * Normalize image to a range
  12365. */
  12366. class SpeedyPipelineNodeNormalize extends SpeedyPipelineNode {
  12367. /**
  12368. * Constructor
  12369. * @param {string} [name] name of the node
  12370. */
  12371. constructor(name = undefined) {
  12372. super(name, 4, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12373. /** @type {number} a value in [0,255] */
  12374. this._minValue = 0;
  12375. /** @type {number} a value in [0,255] */
  12376. this._maxValue = 255;
  12377. }
  12378. /**
  12379. * Minimum intensity in the output image, a value in [0,255]
  12380. * @returns {number}
  12381. */
  12382. get minValue() {
  12383. return this._minValue;
  12384. }
  12385. /**
  12386. * Minimum intensity in the output image, a value in [0,255]
  12387. * @param {number} minValue
  12388. */
  12389. set minValue(minValue) {
  12390. this._minValue = Math.max(0, Math.min(+minValue, 255));
  12391. }
  12392. /**
  12393. * Maximum intensity in the output image, a value in [0,255]
  12394. * @returns {number}
  12395. */
  12396. get maxValue() {
  12397. return this._maxValue;
  12398. }
  12399. /**
  12400. * Maximum intensity in the output image, a value in [0,255]
  12401. * @param {number} maxValue
  12402. */
  12403. set maxValue(maxValue) {
  12404. this._maxValue = Math.max(0, Math.min(+maxValue, 255));
  12405. }
  12406. /**
  12407. * Run the specific task of this node
  12408. * @param {SpeedyGPU} gpu
  12409. * @returns {void|SpeedyPromise<void>}
  12410. */
  12411. _run(gpu) {
  12412. const {
  12413. image,
  12414. format
  12415. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12416. const width = image.width,
  12417. height = image.height;
  12418. const outputTexture = this._tex[3];
  12419. let minValue = this._minValue;
  12420. let maxValue = this._maxValue;
  12421. if (minValue > maxValue) minValue = maxValue = (minValue + maxValue) / 2;
  12422. const minmax = this._scanMinMax(gpu, image, types/* PixelComponent */.kQ.GREEN);
  12423. gpu.programs.filters.normalizeGreyscale.outputs(width, height, outputTexture);
  12424. gpu.programs.filters.normalizeGreyscale(minmax, minValue, maxValue);
  12425. this.output().swrite(outputTexture, format);
  12426. }
  12427. /**
  12428. * Scan a single component in all pixels of the image and find the min & max intensities
  12429. * @param {SpeedyGPU} gpu
  12430. * @param {SpeedyTexture} image input image
  12431. * @param {PixelComponent} pixelComponent a single PixelComponent flag
  12432. * @returns {SpeedyDrawableTexture} RGBA = (max, min, max - min, original_pixel)
  12433. */
  12434. _scanMinMax(gpu, image, pixelComponent) {
  12435. const tex = this._tex;
  12436. const program = gpu.programs.utils;
  12437. const width = image.width,
  12438. height = image.height;
  12439. const numIterations = Math.ceil(Math.log2(Math.max(width, height))) | 0;
  12440. utils/* Utils */.A.assert(types/* ColorComponentId */.kg[pixelComponent] !== undefined);
  12441. program.copyComponents.outputs(width, height, tex[2]);
  12442. program.scanMinMax2D.outputs(width, height, tex[0], tex[1]);
  12443. let texture = program.copyComponents(image, image, types/* PixelComponent */.kQ.ALL, types/* ColorComponentId */.kg[pixelComponent]);
  12444. for (let i = 0; i < numIterations; i++) texture = program.scanMinMax2D(texture, i);
  12445. return texture;
  12446. }
  12447. }
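/*
 * Editor's note (illustrative sketch, not part of the library source): the setters above
 * clamp both bounds to [0,255], and _run() collapses an inverted range to its average.
 * Assuming a node instance `normalize`:
 *
 *   normalize.minValue = -20;   // stored as 0
 *   normalize.maxValue = 300;   // stored as 255
 *   // if minValue were set above maxValue, e.g. 200 and 100,
 *   // _run() would use 150 for both, producing a flat output image
 */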
  12448. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/filter-factory.js
  12449. /*
  12450. * speedy-vision.js
  12451. * GPU-accelerated Computer Vision for JavaScript
  12452. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12453. *
  12454. * Licensed under the Apache License, Version 2.0 (the "License");
  12455. * you may not use this file except in compliance with the License.
  12456. * You may obtain a copy of the License at
  12457. *
  12458. * http://www.apache.org/licenses/LICENSE-2.0
  12459. *
  12460. * Unless required by applicable law or agreed to in writing, software
  12461. * distributed under the License is distributed on an "AS IS" BASIS,
  12462. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12463. * See the License for the specific language governing permissions and
  12464. * limitations under the License.
  12465. *
  12466. * filter-factory.js
  12467. * Image filters
  12468. */
  12469. /**
  12470. * Image filters
  12471. */
  12472. class SpeedyPipelineFilterFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  12473. /**
  12474. * Convert image to greyscale
  12475. * @param {string} [name]
  12476. * @returns {SpeedyPipelineNodeGreyscale}
  12477. */
  12478. static Greyscale(name = undefined) {
  12479. return new SpeedyPipelineNodeGreyscale(name);
  12480. }
  12481. /**
  12482. * Gaussian Blur
  12483. * @param {string} [name]
  12484. * @returns {SpeedyPipelineNodeGaussianBlur}
  12485. */
  12486. static GaussianBlur(name = undefined) {
  12487. return new SpeedyPipelineNodeGaussianBlur(name);
  12488. }
  12489. /**
  12490. * Simple Blur (Box Filter)
  12491. * @param {string} [name]
  12492. * @returns {SpeedyPipelineNodeSimpleBlur}
  12493. */
  12494. static SimpleBlur(name = undefined) {
  12495. return new SpeedyPipelineNodeSimpleBlur(name);
  12496. }
  12497. /**
  12498. * Median Blur
  12499. * @param {string} [name]
  12500. * @returns {SpeedyPipelineNodeMedianBlur}
  12501. */
  12502. static MedianBlur(name = undefined) {
  12503. return new SpeedyPipelineNodeMedianBlur(name);
  12504. }
  12505. /**
  12506. * Image Convolution
  12507. * @param {string} [name]
  12508. * @returns {SpeedyPipelineNodeConvolution}
  12509. */
  12510. static Convolution(name = undefined) {
  12511. return new SpeedyPipelineNodeConvolution(name);
  12512. }
  12513. /**
  12514. * Nightvision
  12515. * @param {string} [name]
  12516. * @returns {SpeedyPipelineNodeNightvision}
  12517. */
  12518. static Nightvision(name = undefined) {
  12519. return new SpeedyPipelineNodeNightvision(name);
  12520. }
  12521. /**
  12522. * Normalize image
  12523. * @param {string} [name]
  12524. * @returns {SpeedyPipelineNodeNormalize}
  12525. */
  12526. static Normalize(name = undefined) {
  12527. return new SpeedyPipelineNodeNormalize(name);
  12528. }
  12529. }
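/*
 * Editor's note: a hedged usage sketch of this factory. It assumes the bundle's public
 * namespace exposes these static methods as Speedy.Filter.*, alongside Speedy.Pipeline(),
 * Speedy.Image.Source() and Speedy.Image.Sink(); everything other than the factory
 * methods defined above is an assumption, not something guaranteed by this excerpt.
 *
 *   const source = Speedy.Image.Source();
 *   const greyscale = Speedy.Filter.Greyscale();
 *   const normalize = Speedy.Filter.Normalize();
 *   const sink = Speedy.Image.Sink();
 *   source.output().connectTo(greyscale.input());
 *   greyscale.output().connectTo(normalize.input());   // Normalize expects a GREY image
 *   normalize.output().connectTo(sink.input());
 *   const pipeline = Speedy.Pipeline();
 *   pipeline.init(source, greyscale, normalize, sink);
 *   // source.media = some SpeedyMedia (e.g. obtained via Speedy.load(video));
 *   // then: const { image } = await pipeline.run();
 */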
  12530. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/perspective-warp.js
  12531. /*
  12532. * speedy-vision.js
  12533. * GPU-accelerated Computer Vision for JavaScript
  12534. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12535. *
  12536. * Licensed under the Apache License, Version 2.0 (the "License");
  12537. * you may not use this file except in compliance with the License.
  12538. * You may obtain a copy of the License at
  12539. *
  12540. * http://www.apache.org/licenses/LICENSE-2.0
  12541. *
  12542. * Unless required by applicable law or agreed to in writing, software
  12543. * distributed under the License is distributed on an "AS IS" BASIS,
  12544. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12545. * See the License for the specific language governing permissions and
  12546. * limitations under the License.
  12547. *
  12548. * perspective-warp.js
  12549. * Warp an image using a perspective transformation
  12550. */
  12551. // Used when an invalid matrix is provided
  12552. const SINGULAR_MATRIX = [0, 0, 0, 0, 0, 0, 0, 0, 1];
  12553. /**
  12554. * Warp an image using a perspective transformation
  12555. */
  12556. class SpeedyPipelineNodePerspectiveWarp extends SpeedyPipelineNode {
  12557. /**
  12558. * Constructor
  12559. * @param {string} [name] name of the node
  12560. */
  12561. constructor(name = undefined) {
  12562. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12563. /** @type {SpeedyMatrix} perspective transformation */
  12564. this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
  12565. }
  12566. /**
  12567. * Perspective transform, a 3x3 homography matrix
  12568. * @returns {SpeedyMatrix}
  12569. */
  12570. get transform() {
  12571. return this._transform;
  12572. }
  12573. /**
  12574. * Perspective transform, a 3x3 homography matrix
  12575. * @param {SpeedyMatrix} transform
  12576. */
  12577. set transform(transform) {
  12578. if (!(transform.rows == 3 && transform.columns == 3)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
  12579. this._transform = transform;
  12580. }
  12581. /**
  12582. * Run the specific task of this node
  12583. * @param {SpeedyGPU} gpu
  12584. * @returns {void|SpeedyPromise<void>}
  12585. */
  12586. _run(gpu) {
  12587. const {
  12588. image,
  12589. format
  12590. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12591. const width = image.width,
  12592. height = image.height;
  12593. const outputTexture = this._tex[0];
  12594. const homography = this._transform.read();
  12595. const inverseHomography = this._inverse3(homography);
  12596. const isValidHomography = !Number.isNaN(inverseHomography[0]);
  12597. gpu.programs.transforms.warpPerspective.outputs(width, height, outputTexture);
  12598. gpu.programs.transforms.warpPerspective(image, isValidHomography ? inverseHomography : SINGULAR_MATRIX);
  12599. this.output().swrite(outputTexture, format);
  12600. }
  12601. /**
  12602. * Compute the inverse of a 3x3 matrix IN-PLACE (do it fast!)
  12603. * @param {number[]} mat 3x3 matrix in column-major format
  12604. * @param {number} [eps] epsilon
  12605. * @returns {number[]} 3x3 inverse matrix in column-major format
  12606. */
  12607. _inverse3(mat, eps = 1e-6) {
  12608. // read the entries of the matrix
  12609. const a11 = mat[0];
  12610. const a21 = mat[1];
  12611. const a31 = mat[2];
  12612. const a12 = mat[3];
  12613. const a22 = mat[4];
  12614. const a32 = mat[5];
  12615. const a13 = mat[6];
  12616. const a23 = mat[7];
  12617. const a33 = mat[8];
  12618. // compute cofactors
  12619. const b1 = a33 * a22 - a32 * a23; // b11
  12620. const b2 = a33 * a12 - a32 * a13; // b21
  12621. const b3 = a23 * a12 - a22 * a13; // b31
  12622. // compute the determinant
  12623. const det = a11 * b1 - a21 * b2 + a31 * b3;
  12624. // set up the inverse
  12625. if (!(Math.abs(det) < eps)) {
  12626. const d = 1.0 / det;
  12627. mat[0] = b1 * d;
  12628. mat[1] = -(a33 * a21 - a31 * a23) * d;
  12629. mat[2] = (a32 * a21 - a31 * a22) * d;
  12630. mat[3] = -b2 * d;
  12631. mat[4] = (a33 * a11 - a31 * a13) * d;
  12632. mat[5] = -(a32 * a11 - a31 * a12) * d;
  12633. mat[6] = b3 * d;
  12634. mat[7] = -(a23 * a11 - a21 * a13) * d;
  12635. mat[8] = (a22 * a11 - a21 * a12) * d;
  12636. } else mat.fill(Number.NaN, 0, 9);
  12637. // done!
  12638. return mat;
  12639. }
  12640. }
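/*
 * Editor's note: _run() passes the INVERSE of the homography to the shader, which is
 * consistent with backward warping (output pixels are mapped back to input coordinates).
 * A worked example in the same column-major layout used by _inverse3(): the translation
 * homography
 *   [1 0 2]
 *   [0 1 3]    i.e. mat = [1,0,0, 0,1,0, 2,3,1]
 *   [0 0 1]
 * has det = 1, and _inverse3(mat) yields [1,0,0, 0,1,0, -2,-3,1],
 * i.e. the translation by (-2,-3), as expected.
 */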
  12641. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/transforms/resize.js
  12642. /*
  12643. * speedy-vision.js
  12644. * GPU-accelerated Computer Vision for JavaScript
  12645. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12646. *
  12647. * Licensed under the Apache License, Version 2.0 (the "License");
  12648. * you may not use this file except in compliance with the License.
  12649. * You may obtain a copy of the License at
  12650. *
  12651. * http://www.apache.org/licenses/LICENSE-2.0
  12652. *
  12653. * Unless required by applicable law or agreed to in writing, software
  12654. * distributed under the License is distributed on an "AS IS" BASIS,
  12655. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12656. * See the License for the specific language governing permissions and
  12657. * limitations under the License.
  12658. *
  12659. * resize.js
  12660. * Resize image
  12661. */
  12662. /** @typedef {"bilinear"|"nearest"} SpeedyPipelineNodeResizeMethod */
  12663. /**
  12664. * Resize image
  12665. */
  12666. class SpeedyPipelineNodeResize extends SpeedyPipelineNode {
  12667. /**
  12668. * Constructor
  12669. * @param {string} [name] name of the node
  12670. */
  12671. constructor(name = undefined) {
  12672. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Image), OutputPort().expects(SpeedyPipelineMessageType.Image)]);
  12673. /** @type {SpeedySize} size of the output image, in pixels */
  12674. this._size = new SpeedySize(0, 0);
  12675. /** @type {SpeedyVector2} size of the output relative to the size of the input */
  12676. this._scale = new SpeedyVector2(1, 1);
  12677. /** @type {SpeedyPipelineNodeResizeMethod} interpolation method */
  12678. this._method = 'bilinear';
  12679. }
  12680. /**
  12681. * Size of the output image, in pixels (use 0 to use scale)
  12682. * @returns {SpeedySize}
  12683. */
  12684. get size() {
  12685. return this._size;
  12686. }
  12687. /**
  12688. * Size of the output image, in pixels (use 0 to use scale)
  12689. * @param {SpeedySize} size
  12690. */
  12691. set size(size) {
  12692. this._size = size;
  12693. }
  12694. /**
  12695. * Size of the output image relative to the size of the input image
  12696. * @returns {SpeedyVector2}
  12697. */
  12698. get scale() {
  12699. return this._scale;
  12700. }
  12701. /**
  12702. * Size of the output image relative to the size of the input image
  12703. * @param {SpeedyVector2} scale
  12704. */
  12705. set scale(scale) {
  12706. this._scale = scale;
  12707. }
  12708. /**
  12709. * Interpolation method
  12710. * @returns {SpeedyPipelineNodeResizeMethod}
  12711. */
  12712. get method() {
  12713. return this._method;
  12714. }
  12715. /**
  12716. * Interpolation method
  12717. * @param {SpeedyPipelineNodeResizeMethod} method
  12718. */
  12719. set method(method) {
12720. if (method !== 'nearest' && method !== 'bilinear') throw new utils_errors/* IllegalArgumentError */.qw(`Invalid interpolation method: "${method}"`);
  12721. this._method = method;
  12722. }
  12723. /**
  12724. * Run the specific task of this node
  12725. * @param {SpeedyGPU} gpu
  12726. * @returns {void|SpeedyPromise<void>}
  12727. */
  12728. _run(gpu) {
  12729. const {
  12730. image,
  12731. format
  12732. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
  12733. const width = image.width,
  12734. height = image.height;
  12735. const outputTexture = this._tex[0];
  12736. const method = this._method;
  12737. const newWidth = this._size.width || Math.max(1, this._scale.x * width);
  12738. const newHeight = this._size.height || Math.max(1, this._scale.y * height);
  12739. if (method == 'bilinear') {
  12740. gpu.programs.transforms.resizeBilinear.outputs(newWidth, newHeight, outputTexture)(image);
  12741. } else if (method == 'nearest') {
  12742. gpu.programs.transforms.resizeNearest.outputs(newWidth, newHeight, outputTexture)(image);
  12743. }
  12744. this.output().swrite(outputTexture, format);
  12745. }
  12746. }
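/*
 * Editor's note (illustrative): in _run() above, a zero size component falls back to the
 * scale. For a 640x480 input:
 *   size = (0, 0),   scale = (0.5, 0.5)  ->  output 320x240
 *   size = (320, 0), scale = (1, 1)      ->  output 320x480 (width from size, height from scale)
 */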
  12747. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/transform-factory.js
  12748. /*
  12749. * speedy-vision.js
  12750. * GPU-accelerated Computer Vision for JavaScript
  12751. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12752. *
  12753. * Licensed under the Apache License, Version 2.0 (the "License");
  12754. * you may not use this file except in compliance with the License.
  12755. * You may obtain a copy of the License at
  12756. *
  12757. * http://www.apache.org/licenses/LICENSE-2.0
  12758. *
  12759. * Unless required by applicable law or agreed to in writing, software
  12760. * distributed under the License is distributed on an "AS IS" BASIS,
  12761. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12762. * See the License for the specific language governing permissions and
  12763. * limitations under the License.
  12764. *
  12765. * transform-factory.js
  12766. * Image transforms
  12767. */
  12768. /**
  12769. * Image transforms
  12770. */
  12771. class SpeedyPipelineTransformFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  12772. /**
  12773. * Resize image
  12774. * @param {string} [name]
  12775. * @returns {SpeedyPipelineNodeResize}
  12776. */
  12777. static Resize(name = undefined) {
  12778. return new SpeedyPipelineNodeResize(name);
  12779. }
  12780. /**
  12781. * Warp an image using a perspective transformation
  12782. * @param {string} [name]
  12783. * @returns {SpeedyPipelineNodePerspectiveWarp}
  12784. */
  12785. static PerspectiveWarp(name = undefined) {
  12786. return new SpeedyPipelineNodePerspectiveWarp(name);
  12787. }
  12788. }
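/*
 * Editor's note: a hedged sketch of configuring these transform nodes. It assumes the public
 * namespace exposes them as Speedy.Transform.* and that Speedy.Size(), Speedy.Vector2() and
 * Speedy.Matrix() helpers are available; those names are assumptions beyond this excerpt.
 *
 *   const resize = Speedy.Transform.Resize();
 *   resize.size = Speedy.Size(0, 0);            // 0 means: use the scale instead
 *   resize.scale = Speedy.Vector2(0.5, 0.5);
 *   resize.method = 'bilinear';
 *
 *   const warp = Speedy.Transform.PerspectiveWarp();
 *   warp.transform = Speedy.Matrix(3, 3, [1,0,0, 0,1,0, 20,30,1]); // column-major translation
 */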
  12789. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/detector.js
  12790. /*
  12791. * speedy-vision.js
  12792. * GPU-accelerated Computer Vision for JavaScript
  12793. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  12794. *
  12795. * Licensed under the Apache License, Version 2.0 (the "License");
  12796. * you may not use this file except in compliance with the License.
  12797. * You may obtain a copy of the License at
  12798. *
  12799. * http://www.apache.org/licenses/LICENSE-2.0
  12800. *
  12801. * Unless required by applicable law or agreed to in writing, software
  12802. * distributed under the License is distributed on an "AS IS" BASIS,
  12803. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12804. * See the License for the specific language governing permissions and
  12805. * limitations under the License.
  12806. *
  12807. * detector.js
  12808. * Abstract keypoint detectors
  12809. */
  12810. // Constants
  12811. const MAX_CAPACITY = globals.MAX_ENCODER_CAPACITY; // maximum capacity of the encoder (up to this many keypoints can be stored)
  12812. const detector_DEFAULT_CAPACITY = globals.DEFAULT_ENCODER_CAPACITY; // default capacity of the encoder
  12813. const DEFAULT_SCALE_FACTOR = 1.4142135623730951; // sqrt(2)
  12814. const NUMBER_OF_RGBA16_TEXTURES = 2;
  12815. // legacy constants
  12816. const NUMBER_OF_INTERNAL_TEXTURES = 0; //5; // number of internal textures used to encode the keypoints
  12817. const ENCODER_PASSES = 4; // number of passes of the keypoint encoder: directly impacts performance
  12818. const LONG_SKIP_OFFSET_PASSES = 2; // number of passes of the long skip offsets shader
  12819. /**
  12820. * Abstract keypoint detector
  12821. * @abstract
  12822. */
  12823. class SpeedyPipelineNodeKeypointDetector extends SpeedyPipelineNode {
  12824. /**
  12825. * Constructor
  12826. * @param {string} [name] name of the node
  12827. * @param {number} [texCount] number of work textures
  12828. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  12829. */
  12830. constructor(name = undefined, texCount = 0, portBuilders = undefined) {
  12831. super(name, texCount + NUMBER_OF_INTERNAL_TEXTURES, portBuilders);
  12832. /** @type {number} encoder capacity */
  12833. this._capacity = detector_DEFAULT_CAPACITY; // must not be greater than MAX_ENCODER_CAPACITY
  12834. /** @type {GLint} auxiliary storage */
  12835. this._oldWrapS = 0;
  12836. /** @type {SpeedyDrawableTexture[]} textures with 8-bytes per pixel */
  12837. this._tex16 = new Array(NUMBER_OF_RGBA16_TEXTURES).fill(null);
  12838. }
  12839. /**
  12840. * Initialize this node
  12841. * @param {SpeedyGPU} gpu
  12842. */
  12843. init(gpu) {
  12844. // initialize
  12845. super.init(gpu);
  12846. // encodeKeypointSkipOffsets() relies on this
  12847. this._oldWrapS = this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, gpu.gl.REPEAT);
  12848. // allocate RGBA16 textures
  12849. this._allocateTex16(gpu);
  12850. gpu.subscribe(this._allocateTex16, this, gpu);
  12851. }
  12852. /**
  12853. * Release this node
  12854. * @param {SpeedyGPU} gpu
  12855. */
  12856. release(gpu) {
  12857. // deallocate RGBA16 textures
  12858. gpu.unsubscribe(this._allocateTex16, this);
  12859. this._deallocateTex16(gpu);
  12860. // we need to restore the texture parameter because textures come from a pool!
  12861. this._setupSpecialTexture(gpu.gl.TEXTURE_WRAP_S, this._oldWrapS);
  12862. // release
  12863. super.release(gpu);
  12864. }
  12865. /**
  12866. * Set a parameter of the special texture
  12867. * @param {GLenum} pname
  12868. * @param {GLint} param new value
  12869. * @returns {GLint} old value of param
  12870. */
  12871. _setupSpecialTexture(pname, param) {
12872. if (NUMBER_OF_INTERNAL_TEXTURES == 0) return 0; // nothing to restore; keep the declared GLint return type
  12873. // legacy code
  12874. const texture = this._tex[this._tex.length - 1];
  12875. const gl = texture.gl;
  12876. gl.bindTexture(gl.TEXTURE_2D, texture.glTexture);
  12877. const oldval = gl.getTexParameter(gl.TEXTURE_2D, pname);
  12878. gl.texParameteri(gl.TEXTURE_2D, pname, param);
  12879. gl.bindTexture(gl.TEXTURE_2D, null);
  12880. return oldval;
  12881. }
  12882. /**
  12883. * We can encode up to this many keypoints. If you find a
  12884. * tight bound for this, download times will be faster.
  12885. * @returns {number}
  12886. */
  12887. get capacity() {
  12888. return this._capacity;
  12889. }
  12890. /**
  12891. * We can encode up to this many keypoints. If you find a
  12892. * tight bound for this, download times will be faster.
  12893. * @param {number} capacity
  12894. */
  12895. set capacity(capacity) {
  12896. this._capacity = Math.min(Math.max(0, capacity | 0), MAX_CAPACITY);
  12897. }
  12898. /**
  12899. * Create a tiny texture with encoded keypoints out of
  12900. * an encoded corners texture
  12901. * @param {SpeedyGPU} gpu
  12902. * @param {SpeedyTexture} corners input
  12903. * @param {SpeedyDrawableTexture} encodedKeypoints output
  12904. * @param {number} [descriptorSize] in bytes
  12905. * @param {number} [extraSize] in bytes
  12906. * @returns {SpeedyDrawableTexture} encodedKeypoints
  12907. */
  12908. _encodeKeypoints(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
  12909. const encoderCapacity = this._capacity;
  12910. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(encoderCapacity, descriptorSize, extraSize);
  12911. const width = 1 << (Math.ceil(Math.log2(corners.width * corners.height)) >>> 1); // power of two
  12912. const height = Math.ceil(corners.width * corners.height / width); // probabilistic approach in Parallel Ale Sort 2D
  12913. //const width = corners.width, height = corners.height; // independent texture reads approach in Parallel Ale Sort 2D
  12914. const maxSize = Math.max(width, height);
  12915. const keypoints = gpu.programs.keypoints;
  12916. // prepare programs
  12917. keypoints.initLookupTable.outputs(width, height, this._tex16[1]);
  12918. keypoints.sortLookupTable.outputs(width, height, this._tex16[0], this._tex16[1]);
  12919. keypoints.encodeKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
  12920. // compute lookup table
  12921. let lookupTable = keypoints.initLookupTable(corners);
  12922. for (let b = 1; b < maxSize; b *= 2) lookupTable = keypoints.sortLookupTable(lookupTable, b, width, height);
  12923. /*
  12924. // debug: view texture
  12925. const lookupView = (keypoints.viewLookupTable.outputs(
  12926. width, height, this._tex[0]
  12927. ))(lookupTable);
  12928. const canvas = gpu.renderToCanvas(lookupView);
  12929. if(!this._ww) document.body.appendChild(canvas);
  12930. this._ww = 1;
  12931. */
  12932. // encode keypoints
  12933. return keypoints.encodeKeypoints(corners, lookupTable, width, descriptorSize, extraSize, encoderLength, encoderCapacity);
  12934. }
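/**
 * Legacy keypoint encoder, kept for reference: it requires NUMBER_OF_INTERNAL_TEXTURES
 * internal textures, which is currently set to 0 (see the legacy constants above)
 * @param {SpeedyGPU} gpu
 * @param {SpeedyTexture} corners input
 * @param {SpeedyDrawableTexture} encodedKeypoints output
 * @param {number} [descriptorSize] in bytes
 * @param {number} [extraSize] in bytes
 * @returns {SpeedyDrawableTexture} encodedKeypoints
 */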
  12935. _encodeKeypointsOLD(gpu, corners, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
  12936. const capacity = this._capacity;
  12937. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  12938. const width = corners.width,
  12939. height = corners.height;
  12940. const imageSize = [width, height];
  12941. const tex = this._tex.slice(this._tex.length - NUMBER_OF_INTERNAL_TEXTURES); // array of internal textures
  12942. const keypoints = gpu.programs.keypoints;
  12943. const specialTexture = tex.pop(); // gl.TEXTURE_WRAP_S is set to gl.REPEAT
  12944. // prepare programs
  12945. keypoints.encodeKeypointSkipOffsets.outputs(width, height, tex[0]);
  12946. keypoints.encodeKeypointLongSkipOffsets.outputs(width, height, tex[1], tex[0]);
  12947. keypoints.encodeKeypointPositions.outputs(encoderLength, encoderLength, tex[2], tex[3]);
  12948. keypoints.encodeKeypointProperties.outputs(encoderLength, encoderLength, encodedKeypoints);
  12949. // copy the input corners to a special texture
  12950. // that is needed by encodeKeypointSkipOffsets()
  12951. corners = gpu.programs.utils.copy.outputs(width, height, specialTexture)(corners);
  12952. // encode skip offsets
  12953. let offsets = keypoints.encodeKeypointSkipOffsets(corners, imageSize);
  12954. for (let i = 0; i < LONG_SKIP_OFFSET_PASSES; i++) {
  12955. // to boost performance
  12956. // the maximum skip offset of pass p=1,2,3... is 7 * (1+m)^p,
  12957. // where m = MAX_ITERATIONS of encodeKeypointLongSkipOffsets()
  12958. offsets = keypoints.encodeKeypointLongSkipOffsets(offsets, imageSize); // **bottleneck**
  12959. }
  12960. /*
  12961. // debug: view corners
  12962. let cornerview = offsets;
  12963. const canvas = gpu.renderToCanvas(cornerview);
  12964. if(!window._ww) document.body.appendChild(canvas);
  12965. window._ww = 1;
  12966. */
  12967. // encode keypoint positions
  12968. let encodedKps = tex[3].clear();
  12969. for (let j = 0; j < ENCODER_PASSES; j++) encodedKps = keypoints.encodeKeypointPositions(offsets, imageSize, j, ENCODER_PASSES, capacity, encodedKps, descriptorSize, extraSize, encoderLength);
  12970. // encode keypoint properties
  12971. return keypoints.encodeKeypointProperties(corners, encodedKps, descriptorSize, extraSize, encoderLength);
  12972. }
  12973. /**
  12974. * Create a tiny texture with zero encoded keypoints
  12975. * @param {SpeedyGPU} gpu
  12976. * @param {SpeedyDrawableTexture} encodedKeypoints output texture
  12977. * @param {number} [descriptorSize] in bytes
  12978. * @param {number} [extraSize] in bytes
  12979. * @returns {SpeedyDrawableTexture} encodedKeypoints
  12980. */
  12981. _encodeZeroKeypoints(gpu, encodedKeypoints, descriptorSize = 0, extraSize = 0) {
  12982. const capacity = 0;
  12983. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  12984. const keypoints = gpu.programs.keypoints;
  12985. keypoints.encodeNullKeypoints.outputs(encoderLength, encoderLength, encodedKeypoints);
  12986. return keypoints.encodeNullKeypoints();
  12987. }
  12988. /**
  12989. * Allocate RGBA16 textures
  12990. * @param {SpeedyGPU} gpu
  12991. */
  12992. _allocateTex16(gpu) {
  12993. const gl = gpu.gl;
  12994. // RGBA16UI is color renderable according to the OpenGL ES 3 spec
  12995. for (let i = 0; i < this._tex16.length; i++) this._tex16[i] = new SpeedyDrawableTexture(gl, 1, 1, gl.RGBA_INTEGER, gl.RGBA16UI, gl.UNSIGNED_SHORT, gl.NEAREST, gl.CLAMP_TO_EDGE);
  12996. }
  12997. /**
  12998. * Deallocate RGBA16 textures
  12999. * @param {SpeedyGPU} gpu
  13000. */
  13001. _deallocateTex16(gpu) {
  13002. for (let i = 0; i < this._tex16.length; i++) this._tex16[i] = this._tex16[i].release();
  13003. }
  13004. /**
  13005. * Compute the length of the keypoint encoder, given its capacity
  13006. * @param {number} encoderCapacity how many keypoints can we fit?
  13007. * @param {number} descriptorSize in bytes
  13008. * @param {number} extraSize in bytes
  13009. */
  13010. static encoderLength(encoderCapacity, descriptorSize, extraSize) {
  13011. const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
  13012. const numberOfPixels = encoderCapacity * pixelsPerKeypoint;
  13013. return Math.max(globals.MIN_ENCODER_LENGTH, Math.ceil(Math.sqrt(numberOfPixels)));
  13014. }
  13015. /**
  13016. * The maximum number of keypoints we can store using
  13017. * a particular configuration of a keypoint encoder
  13018. * @param {number} descriptorSize in bytes
  13019. * @param {number} extraSize in bytes
  13020. * @param {number} encoderLength
  13021. */
  13022. static encoderCapacity(descriptorSize, extraSize, encoderLength) {
  13023. const pixelsPerKeypoint = Math.ceil((globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize) / 4);
  13024. const numberOfPixels = encoderLength * encoderLength;
  13025. return Math.floor(numberOfPixels / pixelsPerKeypoint);
  13026. }
  13027. }
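/*
 * Editor's note: a worked example of the encoder geometry above, assuming (not shown in this
 * excerpt) globals.MIN_KEYPOINT_SIZE = 8 bytes and provided globals.MIN_ENCODER_LENGTH does
 * not exceed the result. With a 32-byte descriptor and no extra bytes:
 *   pixelsPerKeypoint = ceil((8 + 32 + 0) / 4) = 10
 *   encoderLength(800, 32, 0) = ceil(sqrt(800 * 10)) = 90     (89^2 = 7921 < 8000)
 *   encoderCapacity(32, 0, 90) = floor(90 * 90 / 10) = 810
 * i.e. the square texture is rounded up, so the usable capacity is >= the requested one.
 */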
  13028. /**
  13029. * Abstract scale-space keypoint detector
  13030. * @abstract
  13031. */
  13032. class SpeedyPipelineNodeMultiscaleKeypointDetector extends SpeedyPipelineNodeKeypointDetector {
  13033. /**
  13034. * Constructor
  13035. * @param {string} [name] name of the node
  13036. * @param {number} [texCount] number of work textures
  13037. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  13038. */
  13039. constructor(name = undefined, texCount = undefined, portBuilders = undefined) {
  13040. super(name, texCount, portBuilders);
  13041. /** @type {number} number of pyramid levels */
  13042. this._levels = 1;
  13043. /** @type {number} scale factor between two pyramid levels */
  13044. this._scaleFactor = DEFAULT_SCALE_FACTOR;
  13045. }
  13046. /**
  13047. * Number of pyramid levels
  13048. * @returns {number}
  13049. */
  13050. get levels() {
  13051. return this._levels;
  13052. }
  13053. /**
  13054. * Number of pyramid levels
  13055. * @param {number} levels
  13056. */
  13057. set levels(levels) {
  13058. this._levels = Math.max(1, levels | 0);
  13059. }
  13060. /**
  13061. * Scale factor between two pyramid levels
  13062. * @returns {number}
  13063. */
  13064. get scaleFactor() {
  13065. return this._scaleFactor;
  13066. }
  13067. /**
  13068. * Scale factor between two pyramid levels
  13069. * @param {number} scaleFactor should be greater than 1
  13070. */
  13071. set scaleFactor(scaleFactor) {
  13072. this._scaleFactor = Math.max(1.0, Math.min(+scaleFactor, 2.0));
  13073. }
  13074. }
  13075. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/source.js
  13076. /*
  13077. * speedy-vision.js
  13078. * GPU-accelerated Computer Vision for JavaScript
  13079. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13080. *
  13081. * Licensed under the Apache License, Version 2.0 (the "License");
  13082. * you may not use this file except in compliance with the License.
  13083. * You may obtain a copy of the License at
  13084. *
  13085. * http://www.apache.org/licenses/LICENSE-2.0
  13086. *
  13087. * Unless required by applicable law or agreed to in writing, software
  13088. * distributed under the License is distributed on an "AS IS" BASIS,
  13089. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13090. * See the License for the specific language governing permissions and
  13091. * limitations under the License.
  13092. *
  13093. * source.js
  13094. * Gets keypoints into the pipeline
  13095. */
  13096. // Constants
  13097. const UBO_MAX_BYTES = 16384; // UBOs can hold at least 16KB of data: gl.MAX_UNIFORM_BLOCK_SIZE >= 16384 according to the GL ES 3 reference
  13098. const BUFFER_SIZE = 1024; // how many keypoints we can upload in one pass of the shader (as defined in the shader program)
  13099. const SIZEOF_VEC4 = Float32Array.BYTES_PER_ELEMENT * 4; // 16 bytes
  13100. /**
  13101. * Gets keypoints into the pipeline
  13102. */
  13103. class SpeedyPipelineNodeKeypointSource extends SpeedyPipelineSourceNode {
  13104. /**
  13105. * Constructor
  13106. * @param {string} [name] name of the node
  13107. */
  13108. constructor(name = undefined) {
  13109. super(name, 2, [OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13110. /** @type {SpeedyKeypoint[]} keypoints to be uploaded to the GPU */
  13111. this._keypoints = [];
  13112. /** @type {Float32Array} upload buffer (UBO) */
  13113. this._buffer = SpeedyPipelineNodeKeypointSource._createUploadBuffer(BUFFER_SIZE);
  13114. /** @type {number} maximum number of keypoints */
  13115. this._capacity = globals.DEFAULT_ENCODER_CAPACITY;
  13116. }
  13117. /**
  13118. * Keypoints to be uploaded
  13119. * @returns {SpeedyKeypoint[]}
  13120. */
  13121. get keypoints() {
  13122. return this._keypoints;
  13123. }
  13124. /**
  13125. * Keypoints to be uploaded
  13126. * @param {SpeedyKeypoint[]} keypoints
  13127. */
  13128. set keypoints(keypoints) {
  13129. if (!Array.isArray(keypoints)) throw new utils_errors/* IllegalArgumentError */.qw(`Not an array of keypoints`);
  13130. this._keypoints = keypoints;
  13131. }
  13132. /**
  13133. * The maximum number of keypoints we'll accept.
  13134. * This should be a tight bound for better performance.
  13135. * @returns {number}
  13136. */
  13137. get capacity() {
  13138. return this._capacity;
  13139. }
  13140. /**
  13141. * The maximum number of keypoints we'll accept.
  13142. * This should be a tight bound for better performance.
  13143. * @param {number} capacity
  13144. */
  13145. set capacity(capacity) {
  13146. this._capacity = Math.min(Math.max(0, capacity | 0), globals.MAX_ENCODER_CAPACITY);
  13147. }
  13148. /**
  13149. * Run the specific task of this node
  13150. * @param {SpeedyGPU} gpu
  13151. * @returns {void|SpeedyPromise<void>}
  13152. */
  13153. _run(gpu) {
  13154. // Orientation, descriptors and extra bytes will be lost
  13155. const descriptorSize = 0,
  13156. extraSize = 0;
  13157. const keypoints = this._keypoints;
  13158. const maxKeypoints = this._capacity;
  13159. const numKeypoints = Math.min(keypoints.length, maxKeypoints);
  13160. const numPasses = Math.max(1, Math.ceil(numKeypoints / BUFFER_SIZE));
  13161. const buffer = this._buffer;
  13162. const uploadKeypoints = gpu.programs.keypoints.uploadKeypoints;
  13163. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize); // we're using maxKeypoints to avoid constant texture resize (slow on Firefox)
  13164. uploadKeypoints.outputs(encoderLength, encoderLength, this._tex[0], this._tex[1]);
  13165. let startIndex = 0,
  13166. encodedKeypoints = uploadKeypoints.clear();
  13167. for (let i = 0; i < numPasses; i++) {
  13168. const n = Math.min(BUFFER_SIZE, numKeypoints - startIndex);
  13169. const endIndex = startIndex + n;
  13170. uploadKeypoints.setUBO('KeypointBuffer', SpeedyPipelineNodeKeypointSource._fillUploadBuffer(buffer, keypoints, startIndex, endIndex));
  13171. encodedKeypoints = uploadKeypoints(encodedKeypoints, startIndex, endIndex, descriptorSize, extraSize, encoderLength);
  13172. startIndex = endIndex;
  13173. }
  13174. this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13175. }
  13176. /**
  13177. * Create an upload buffer
  13178. * @param {number} bufferSize number of keypoints
  13179. * @returns {Float32Array}
  13180. */
  13181. static _createUploadBuffer(bufferSize) {
  13182. const internalBuffer = new ArrayBuffer(SIZEOF_VEC4 * bufferSize);
  13183. utils/* Utils */.A.assert(internalBuffer.byteLength <= UBO_MAX_BYTES);
  13184. return new Float32Array(internalBuffer);
  13185. }
  13186. /**
  13187. * Fill upload buffer with keypoint data
  13188. * @param {Float32Array} buffer
  13189. * @param {SpeedyKeypoint[]} keypoints
  13190. * @param {number} start index, inclusive
  13191. * @param {number} end index, exclusive
  13192. * @returns {Float32Array} buffer
  13193. */
  13194. static _fillUploadBuffer(buffer, keypoints, start, end) {
  13195. const n = end - start;
  13196. for (let i = 0; i < n; i++) {
  13197. const keypoint = keypoints[start + i];
  13198. const hasPos = keypoint.position !== undefined;
  13199. const j = i * 4;
  13200. // Format data as follows:
  13201. // vec4(xpos, ypos, lod, score)
  13202. buffer[j] = +(hasPos ? keypoint.position.x : keypoint.x) || 0;
  13203. buffer[j + 1] = +(hasPos ? keypoint.position.y : keypoint.y) || 0;
  13204. buffer[j + 2] = +keypoint.lod || 0;
  13205. buffer[j + 3] = +keypoint.score || 0;
  13206. }
  13207. // done!
  13208. return buffer;
  13209. }
  13210. }
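/*
 * Editor's note (illustrative): each upload pass sends BUFFER_SIZE = 1024 keypoints as one
 * 16 KB UBO (1024 * vec4(x, y, lod, score)). Uploading e.g. 2500 keypoints therefore takes
 * ceil(2500 / 1024) = 3 passes, the last one carrying 452 keypoints; orientation, descriptors
 * and extra bytes are not uploaded, as noted in _run() above.
 */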
  13211. ;// CONCATENATED MODULE: ./src/core/speedy-keypoint-descriptor.js
  13212. /*
  13213. * speedy-vision.js
  13214. * GPU-accelerated Computer Vision for JavaScript
  13215. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13216. *
  13217. * Licensed under the Apache License, Version 2.0 (the "License");
  13218. * you may not use this file except in compliance with the License.
  13219. * You may obtain a copy of the License at
  13220. *
  13221. * http://www.apache.org/licenses/LICENSE-2.0
  13222. *
  13223. * Unless required by applicable law or agreed to in writing, software
  13224. * distributed under the License is distributed on an "AS IS" BASIS,
  13225. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13226. * See the License for the specific language governing permissions and
  13227. * limitations under the License.
  13228. *
  13229. * speedy-keypoint-descriptor.js
  13230. * Keypoint descriptor
  13231. */
  13232. /**
  13233. * Represents a keypoint descriptor
  13234. */
  13235. class SpeedyKeypointDescriptor {
  13236. /**
  13237. * Constructor
  13238. * @param {Uint8Array} data descriptor bytes
  13239. */
  13240. constructor(data) {
  13241. this._data = data;
  13242. return Object.freeze(this);
  13243. }
  13244. /**
  13245. * Descriptor data
  13246. * @returns {Uint8Array}
  13247. */
  13248. get data() {
  13249. return this._data;
  13250. }
  13251. /**
  13252. * The size of the descriptor, in bytes
  13253. * @returns {number}
  13254. */
  13255. get size() {
  13256. return this._data.byteLength;
  13257. }
  13258. /**
  13259. * A string representation of the keypoint descriptor
  13260. * @returns {string}
  13261. */
  13262. toString() {
  13263. return `SpeedyKeypointDescriptor(${this._data.join(',')})`;
  13264. }
  13265. }
  13266. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/sink.js
  13267. /*
  13268. * speedy-vision.js
  13269. * GPU-accelerated Computer Vision for JavaScript
  13270. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13271. *
  13272. * Licensed under the Apache License, Version 2.0 (the "License");
  13273. * you may not use this file except in compliance with the License.
  13274. * You may obtain a copy of the License at
  13275. *
  13276. * http://www.apache.org/licenses/LICENSE-2.0
  13277. *
  13278. * Unless required by applicable law or agreed to in writing, software
  13279. * distributed under the License is distributed on an "AS IS" BASIS,
  13280. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13281. * See the License for the specific language governing permissions and
  13282. * limitations under the License.
  13283. *
  13284. * sink.js
  13285. * Gets keypoints out of the pipeline
  13286. */
  13287. /** next power of 2 */
  13288. const sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  13289. /** empty array of bytes */
  13290. const ZERO_BYTES = new Uint8Array([]);
  13291. /**
  13292. * Gets keypoints out of the pipeline
  13293. * @template {SpeedyKeypoint} T
  13294. * @abstract
  13295. */
  13296. class SpeedyPipelineNodeAbstractKeypointSink extends SpeedyPipelineSinkNode {
  13297. /**
  13298. * Constructor
  13299. * @param {string} [name] name of the node
  13300. * @param {number} [texCount]
  13301. * @param {SpeedyPipelinePortBuilder[]} [portBuilders]
  13302. */
  13303. constructor(name = 'keypoints', texCount = 0, portBuilders = []) {
  13304. super(name, texCount + 2, portBuilders);
  13305. /** @type {Array<T|null>} keypoints (output) */
  13306. this._keypoints = [];
  13307. /** @type {SpeedyTextureReader} texture reader */
  13308. this._textureReader = new SpeedyTextureReader();
  13309. /** @type {number} page flipping index */
  13310. this._page = 0;
  13311. /** @type {boolean} accelerate GPU-CPU transfers */
  13312. this._turbo = false;
  13313. /** @type {boolean} should discarded keypoints be exported as null or dropped altogether? */
  13314. this._includeDiscarded = false;
  13315. }
  13316. /**
  13317. * Accelerate GPU-CPU transfers
  13318. * @returns {boolean}
  13319. */
  13320. get turbo() {
  13321. return this._turbo;
  13322. }
  13323. /**
  13324. * Accelerate GPU-CPU transfers
  13325. * @param {boolean} value
  13326. */
  13327. set turbo(value) {
  13328. this._turbo = Boolean(value);
  13329. }
  13330. /**
  13331. * Should discarded keypoints be exported as null or dropped altogether?
  13332. * @returns {boolean}
  13333. */
  13334. get includeDiscarded() {
  13335. return this._includeDiscarded;
  13336. }
  13337. /**
  13338. * Should discarded keypoints be exported as null or dropped altogether?
  13339. * @param {boolean} value
  13340. */
  13341. set includeDiscarded(value) {
  13342. this._includeDiscarded = Boolean(value);
  13343. }
  13344. /**
  13345. * Initializes this node
  13346. * @param {SpeedyGPU} gpu
  13347. */
  13348. init(gpu) {
  13349. super.init(gpu);
  13350. this._textureReader.init(gpu);
  13351. }
  13352. /**
  13353. * Releases this node
  13354. * @param {SpeedyGPU} gpu
  13355. */
  13356. release(gpu) {
  13357. this._textureReader.release(gpu);
  13358. super.release(gpu);
  13359. }
  13360. /**
  13361. * Export data from this node to the user
  13362. * @returns {SpeedyPromise<Array<T|null>>}
  13363. */
  13364. export() {
  13365. return speedy_promise/* SpeedyPromise */.i.resolve(this._keypoints);
  13366. }
  13367. /**
  13368. * Run the specific task of this node
  13369. * @param {SpeedyGPU} gpu
  13370. * @returns {void|SpeedyPromise<void>}
  13371. */
  13372. _run(gpu) {
  13373. const {
  13374. encodedKeypoints,
  13375. descriptorSize,
  13376. extraSize,
  13377. encoderLength
  13378. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13379. return this._download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13380. }
  13381. /**
  13382. * Download and decode keypoints from the GPU
  13383. * @param {SpeedyGPU} gpu
  13384. * @param {SpeedyDrawableTexture} encodedKeypoints
  13385. * @param {number} descriptorSize
  13386. * @param {number} extraSize
  13387. * @param {number} encoderLength
  13388. * @returns {SpeedyPromise<void>}
  13389. */
  13390. _download(gpu, encodedKeypoints, descriptorSize, extraSize, encoderLength) {
  13391. const useBufferedDownloads = this._turbo;
  13392. /*
  13393. I have found experimentally that, in Firefox, readPixelsAsync()
  13394. performs MUCH better if the width of the target texture is a power
  13395. of two. I have no idea why this is the case, nor if it's related to
  13396. some interaction with the GL drivers, somehow. This seems to make no
  13397. difference on Chrome, however. In any case, let's convert the input
  13398. texture to POT.
  13399. */
  13400. const encoderWidth = sink_nextPot(encoderLength);
  13401. //const encoderHeight = nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
  13402. const encoderHeight = Math.ceil(encoderLength * encoderLength / encoderWidth);
  13403. //const encoderWidth=encoderLength,encoderHeight=encoderLength;
  13404. // copy the set of keypoints to an internal texture
  13405. const copiedTexture = this._tex[this._tex.length - 1 - this._page];
  13406. gpu.programs.utils.copyKeypoints.outputs(encoderWidth, encoderHeight, copiedTexture)(encodedKeypoints);
  13407. // flip page
  13408. this._page = 1 - this._page;
  13409. // download the internal texture
  13410. return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
  13411. // decode the keypoints and store them in this._keypoints
  13412. this._keypoints = this._decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight);
  13413. });
  13414. }
  13415. /**
  13416. * Decode a sequence of keypoints, given a flattened image of encoded pixels
  13417. * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
  13418. * @param {number} descriptorSize in bytes
  13419. * @param {number} extraSize in bytes
  13420. * @param {number} encoderWidth
  13421. * @param {number} encoderHeight
  13422. * @returns {Array<T|null>} keypoints
  13423. */
  13424. _decode(pixels, descriptorSize, extraSize, encoderWidth, encoderHeight) {
  13425. const bytesPerKeypoint = globals.MIN_KEYPOINT_SIZE + descriptorSize + extraSize;
  13426. const m = globals.LOG2_PYRAMID_MAX_SCALE,
  13427. h = globals.PYRAMID_MAX_LEVELS;
  13428. const piOver255 = Math.PI / 255.0;
  13429. const keypoints = /** @type {Array<T|null>} */[];
  13430. const includeDiscarded = this._includeDiscarded;
  13431. let descriptorBytes = ZERO_BYTES,
  13432. extraBytes = ZERO_BYTES;
  13433. let x, y, z, w, lod, rotation, score;
  13434. let keypoint;
  13435. // validate
  13436. if (descriptorSize % 4 != 0 || extraSize % 4 != 0) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid descriptorSize (${descriptorSize}) / extraSize (${extraSize})`);
  13437. // how many bytes should we read?
  13438. const e2 = encoderWidth * encoderHeight * 4;
  13439. const size = pixels.byteLength;
  13440. if (size != e2) utils/* Utils */.A.warning(`Expected ${e2} bytes when decoding a set of keypoints, found ${size}`);
  13441. // copy the data (we use shared buffers when receiving pixels[])
  13442. if (descriptorSize + extraSize > 0) pixels = new Uint8Array(pixels);
  13443. // for each encoded keypoint
  13444. for (let i = 0; i < size; i += bytesPerKeypoint) {
  13445. // extract encoded header
  13446. x = pixels[i + 1] << 8 | pixels[i];
  13447. y = pixels[i + 3] << 8 | pixels[i + 2];
  13448. z = pixels[i + 5] << 8 | pixels[i + 4];
  13449. w = pixels[i + 7] << 8 | pixels[i + 6];
  13450. // the keypoint is "null": we have reached the end of the list
  13451. if (x == 0xFFFF && y == 0xFFFF) break;
  13452. // the header is zero: discard the keypoint
  13453. if (x + y + z + w == 0) {
  13454. if (includeDiscarded) keypoints.push(null);
  13455. continue;
  13456. }
  13457. // extract extra & descriptor bytes
  13458. if (extraSize > 0) {
  13459. extraBytes = pixels.subarray(8 + i, 8 + i + extraSize);
  13460. if (extraBytes.byteLength < extraSize) {
  13461. utils/* Utils */.A.warning(`KeypointSink: expected ${extraSize} extra bytes when decoding the ${i / bytesPerKeypoint}-th keypoint, found ${extraBytes.byteLength} instead`);
  13462. continue; // something is off here; discard
  13463. }
  13464. }
  13465. if (descriptorSize > 0) {
  13466. descriptorBytes = pixels.subarray(8 + i + extraSize, 8 + i + extraSize + descriptorSize);
  13467. if (descriptorBytes.byteLength < descriptorSize) {
  13468. utils/* Utils */.A.warning(`KeypointSink: expected ${descriptorSize} descriptor bytes when decoding the ${i / bytesPerKeypoint}-th keypoint, found ${descriptorBytes.byteLength} instead`);
  13469. continue; // something is off here; discard
  13470. }
  13471. }
  13472. // decode position: convert from fixed-point
  13473. x /= globals.FIX_RESOLUTION;
  13474. y /= globals.FIX_RESOLUTION;
  13475. // decode level-of-detail
  13476. lod = pixels[i + 4] < 255 ? -m + (m + h) * pixels[i + 4] / 255.0 : 0.0;
  13477. // decode orientation
  13478. rotation = (2 * pixels[i + 5] - 255) * piOver255;
  13479. // decode score
  13480. score = utils/* Utils */.A.decodeFloat16(w);
  13481. // create keypoint
  13482. keypoint = this._createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes);
  13483. // register keypoint
  13484. keypoints.push(keypoint);
  13485. }
  13486. // done!
  13487. return keypoints;
  13488. }
  13489. /**
  13490. * Instantiate a new keypoint
  13491. * @param {number} x
  13492. * @param {number} y
  13493. * @param {number} lod
  13494. * @param {number} rotation
  13495. * @param {number} score
  13496. * @param {Uint8Array} descriptorBytes
  13497. * @param {Uint8Array} extraBytes
  13498. * @returns {T}
  13499. */
  13500. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13501. throw new utils_errors/* AbstractMethodError */.aQ();
  13502. }
  13503. /**
  13504. * Allocate extra space
  13505. * @param {SpeedyGPU} gpu
  13506. * @param {SpeedyDrawableTexture} output output texture
  13507. * @param {SpeedyTexture} inputEncodedKeypoints input with no extra space
  13508. * @param {number} inputDescriptorSize in bytes, must be positive
  13509. * @param {number} inputExtraSize must be 0
  13510. * @param {number} outputDescriptorSize must be inputDescriptorSize
  13511. * @param {number} outputExtraSize in bytes, must be positive and a multiple of 4
  13512. * @returns {SpeedyDrawableTexture} encodedKeypoints with extra space
  13513. */
  13514. _allocateExtra(gpu, output, inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize) {
  13515. utils/* Utils */.A.assert(inputExtraSize === 0);
  13516. utils/* Utils */.A.assert(outputDescriptorSize === inputDescriptorSize && outputExtraSize > 0 && outputExtraSize % 4 === 0);
  13517. const inputEncoderLength = inputEncodedKeypoints.width;
  13518. const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
  13519. const outputEncoderCapacity = inputEncoderCapacity;
  13520. const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
  13521. return gpu.programs.keypoints.allocateExtra.outputs(outputEncoderLength, outputEncoderLength, output)(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
  13522. }
  13523. }
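/*
 * Editor's note: per _decode() above, each encoded keypoint occupies
 * MIN_KEYPOINT_SIZE + descriptorSize + extraSize bytes, laid out as:
 *   bytes 0-1  x position (little-endian fixed point, divided by FIX_RESOLUTION)
 *   bytes 2-3  y position (same encoding)
 *   byte  4    level-of-detail (255 maps to lod 0; see the decoding formula above)
 *   byte  5    orientation, mapped to (2*b - 255) * PI / 255
 *   bytes 6-7  score as a half-float
 * followed by extraSize extra bytes and then descriptorSize descriptor bytes.
 * An all-0xFF x,y pair marks the end of the list; an all-zero header marks a discarded keypoint.
 */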
  13524. /**
  13525. * Gets standard keypoints out of the pipeline
  13526. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyKeypoint>}
  13527. */
  13528. class SpeedyPipelineNodeKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
  13529. /**
  13530. * Constructor
  13531. * @param {string} [name] name of the node
  13532. */
  13533. constructor(name = 'keypoints') {
  13534. super(name, 0, [InputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13535. }
  13536. /**
  13537. * Instantiate a new keypoint
  13538. * @param {number} x
  13539. * @param {number} y
  13540. * @param {number} lod
  13541. * @param {number} rotation
  13542. * @param {number} score
  13543. * @param {Uint8Array} descriptorBytes
  13544. * @param {Uint8Array} extraBytes
  13545. * @returns {SpeedyKeypoint}
  13546. */
  13547. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13548. const descriptorSize = descriptorBytes.byteLength;
  13549. // read descriptor, if any
  13550. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  13551. // create keypoint
  13552. return new SpeedyKeypoint(x, y, lod, rotation, score, descriptor);
  13553. }
  13554. }
  13555. /**
  13556. * Gets tracked keypoints out of the pipeline
  13557. * @extends {SpeedyPipelineNodeAbstractKeypointSink<SpeedyTrackedKeypoint>}
  13558. */
  13559. class SpeedyPipelineNodeTrackedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
  13560. /**
  13561. * Constructor
  13562. * @param {string} [name] name of the node
  13563. */
  13564. constructor(name = 'keypoints') {
  13565. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.extraSize == 0), InputPort('flow').expects(SpeedyPipelineMessageType.Vector2)]);
  13566. }
  13567. /**
  13568. * Run the specific task of this node
  13569. * @param {SpeedyGPU} gpu
  13570. * @returns {void|SpeedyPromise<void>}
  13571. */
  13572. _run(gpu) {
  13573. const {
  13574. encodedKeypoints,
  13575. descriptorSize,
  13576. extraSize,
  13577. encoderLength
  13578. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13579. const {
  13580. vectors
  13581. } = /** @type {SpeedyPipelineMessageWith2DVectors} */this.input('flow').read();
  13582. // allocate extra space
  13583. const newDescriptorSize = descriptorSize;
  13584. const newExtraSize = 4; // 1 pixel per flow vector per keypoint
  13585. const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
  13586. // attach flow vectors
  13587. const newEncoderLength = encodedKeypointsWithExtraSpace.width;
  13588. const newEncodedKeypoints = gpu.programs.keypoints.transferToExtra.outputs(newEncoderLength, newEncoderLength, this._tex[1])(vectors, vectors.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
  13589. // done!
  13590. return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
  13591. }
  13592. /**
  13593. * Instantiate a new keypoint
  13594. * @param {number} x
  13595. * @param {number} y
  13596. * @param {number} lod
  13597. * @param {number} rotation
  13598. * @param {number} score
  13599. * @param {Uint8Array} descriptorBytes
  13600. * @param {Uint8Array} extraBytes
  13601. * @returns {SpeedyTrackedKeypoint}
  13602. */
  13603. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13604. const descriptorSize = descriptorBytes.byteLength;
  13605. const extraSize = extraBytes.byteLength;
  13606. // read descriptor, if any
  13607. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  13608. // read flow vector
  13609. const fx = utils/* Utils */.A.decodeFloat16(extraBytes[1] << 8 | extraBytes[0]);
  13610. const fy = utils/* Utils */.A.decodeFloat16(extraBytes[3] << 8 | extraBytes[2]);
  13611. const flow = new SpeedyVector2(fx, fy);
  13612. // create keypoint
  13613. return new SpeedyTrackedKeypoint(x, y, lod, rotation, score, descriptor, flow);
  13614. }
  13615. }
  13616. /**
  13617. * Gets matched keypoints out of the pipeline
  13618. * @extends SpeedyPipelineNodeAbstractKeypointSink<SpeedyMatchedKeypoint>
  13619. */
  13620. class SpeedyPipelineNodeMatchedKeypointSink extends SpeedyPipelineNodeAbstractKeypointSink {
  13621. /**
  13622. * Constructor
  13623. * @param {string} [name] name of the node
  13624. */
  13625. constructor(name = 'keypoints') {
  13626. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.extraSize == 0), InputPort('matches').expects(SpeedyPipelineMessageType.KeypointMatches)]);
  13627. }
  13628. /**
  13629. * Run the specific task of this node
  13630. * @param {SpeedyGPU} gpu
  13631. * @returns {void|SpeedyPromise<void>}
  13632. */
  13633. _run(gpu) {
  13634. const {
  13635. encodedKeypoints,
  13636. descriptorSize,
  13637. extraSize,
  13638. encoderLength
  13639. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13640. const {
  13641. encodedMatches,
  13642. matchesPerKeypoint
  13643. } = /** @type {SpeedyPipelineMessageWithKeypointMatches} */this.input('matches').read();
  13644. // allocate space for the matches
  13645. const newDescriptorSize = descriptorSize;
  13646. const newExtraSize = matchesPerKeypoint * 4; // 4 bytes per pixel
  13647. const encodedKeypointsWithExtraSpace = this._allocateExtra(gpu, this._tex[0], encodedKeypoints, descriptorSize, extraSize, newDescriptorSize, newExtraSize);
  13648. // transfer matches to a new texture
  13649. const newEncoderLength = encodedKeypointsWithExtraSpace.width;
  13650. const newEncodedKeypoints = gpu.programs.keypoints.transferToExtra.outputs(newEncoderLength, newEncoderLength, this._tex[1])(encodedMatches, encodedMatches.width, encodedKeypointsWithExtraSpace, newDescriptorSize, newExtraSize, newEncoderLength);
  13651. // done!
  13652. return this._download(gpu, newEncodedKeypoints, newDescriptorSize, newExtraSize, newEncoderLength);
  13653. }
  13654. /**
  13655. * Instantiate a new keypoint
  13656. * @param {number} x
  13657. * @param {number} y
  13658. * @param {number} lod
  13659. * @param {number} rotation
  13660. * @param {number} score
  13661. * @param {Uint8Array} descriptorBytes
  13662. * @param {Uint8Array} extraBytes
  13663. * @returns {SpeedyMatchedKeypoint}
  13664. */
  13665. _createKeypoint(x, y, lod, rotation, score, descriptorBytes, extraBytes) {
  13666. const descriptorSize = descriptorBytes.byteLength;
  13667. const extraSize = extraBytes.byteLength;
  13668. // read descriptor, if any
  13669. const descriptor = descriptorSize > 0 ? new SpeedyKeypointDescriptor(descriptorBytes) : null;
  13670. // decode matches
  13671. const matchesPerKeypoint = extraSize / 4;
  13672. const matches = /** @type {SpeedyKeypointMatch[]} */new Array(matchesPerKeypoint);
  13673. for (let matchIndex = 0; matchIndex < matchesPerKeypoint; matchIndex++) {
  13674. const base = matchIndex * 4;
  13675. const u32 = extraBytes[base] | extraBytes[base + 1] << 8 | extraBytes[base + 2] << 16 | extraBytes[base + 3] << 24;
  13676. const match = new SpeedyKeypointMatch(u32 & globals.MATCH_INDEX_MASK, u32 >>> globals.MATCH_INDEX_BITS);
  13677. matches[matchIndex] = match;
  13678. }
  13679. // done!
  13680. return new SpeedyMatchedKeypoint(x, y, lod, rotation, score, descriptor, matches);
  13681. }
  13682. }
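/*
 * Editor's note (illustrative): each match occupies 4 extra bytes, decoded above as a
 * little-endian 32-bit word whose low globals.MATCH_INDEX_BITS bits hold the index of the
 * matched keypoint; the remaining high bits become the second argument of SpeedyKeypointMatch
 * (presumably a match distance; its meaning is not shown in this excerpt).
 */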
  13683. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/clipper.js
  13684. /*
  13685. * speedy-vision.js
  13686. * GPU-accelerated Computer Vision for JavaScript
  13687. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13688. *
  13689. * Licensed under the Apache License, Version 2.0 (the "License");
  13690. * you may not use this file except in compliance with the License.
  13691. * You may obtain a copy of the License at
  13692. *
  13693. * http://www.apache.org/licenses/LICENSE-2.0
  13694. *
  13695. * Unless required by applicable law or agreed to in writing, software
  13696. * distributed under the License is distributed on an "AS IS" BASIS,
  13697. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13698. * See the License for the specific language governing permissions and
  13699. * limitations under the License.
  13700. *
  13701. * clipper.js
  13702. * Keypoint clipper
  13703. */
  13704. // Constants
  13705. const LOG2_STRIDE = 5;
  13706. const MAX_SIZE = globals.MAX_ENCODER_CAPACITY;
  13707. /**
  13708. * Keypoint clipper: filters the best keypoints from a stream
  13709. */
  13710. class SpeedyPipelineNodeKeypointClipper extends SpeedyPipelineNode {
  13711. /**
  13712. * Constructor
  13713. * @param {string} [name] name of the node
  13714. */
  13715. constructor(name = undefined) {
  13716. super(name, 4, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13717. /** @type {number} the maximum number of keypoints in the output */
  13718. this._size = MAX_SIZE;
  13719. }
  13720. /**
  13721. * The maximum number of keypoints in the output
  13722. * @returns {number}
  13723. */
  13724. get size() {
  13725. return this._size;
  13726. }
  13727. /**
  13728. * The maximum number of keypoints in the output
  13729. * @param {number} size
  13730. */
  13731. set size(size) {
  13732. this._size = Math.max(0, Math.min(size | 0, MAX_SIZE));
  13733. }
  13734. /**
  13735. * Run the specific task of this node
  13736. * @param {SpeedyGPU} gpu
  13737. * @returns {void|SpeedyPromise<void>}
  13738. */
  13739. _run(gpu) {
  13740. const {
  13741. encodedKeypoints,
  13742. descriptorSize,
  13743. extraSize,
  13744. encoderLength
  13745. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13746. const keypoints = gpu.programs.keypoints;
  13747. const clipValue = this._size;
  13748. const tex = this._tex;
  13749. const outputTexture = this._tex[3];
  13750. // find the minimum power of 2 pot such that pot >= capacity
  13751. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  13752. //const pot = 1 << (Math.ceil(Math.log2(capacity)) | 0);
  13753. // find the dimensions of the sorting shaders
  13754. const stride = 1 << LOG2_STRIDE; // must be a power of 2
  13755. //const height = Math.max(1, pot >>> LOG2_STRIDE); // this is also a power of 2
  13756. const height = Math.ceil(capacity / stride); // more economical, maybe not a power of 2
  13757. const numberOfPixels = stride * height;
  13758. // find the dimensions of the output texture
  13759. const newCapacity = Math.min(capacity, clipValue);
  13760. const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(newCapacity, descriptorSize, extraSize);
  13761. // generate permutation of keypoints
  13762. keypoints.sortCreatePermutation.outputs(stride, height, tex[0]);
  13763. let permutation = keypoints.sortCreatePermutation(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13764. // sort permutation
  13765. const numPasses = Math.ceil(Math.log2(numberOfPixels));
  13766. keypoints.sortMergePermutation.outputs(stride, height, tex[1], tex[2]);
  13767. for (let i = 1; i <= numPasses; i++) {
  13768. const blockSize = 1 << i; // 2, 4, 8...
  13769. const dblLog2BlockSize = i << 1; // 2 * log2(blockSize)
  13770. permutation = keypoints.sortMergePermutation(permutation, blockSize, dblLog2BlockSize);
  13771. }
  13772. // apply permutation
  13773. keypoints.sortApplyPermutation.outputs(newEncoderLength, newEncoderLength, outputTexture);
  13774. keypoints.sortApplyPermutation(permutation, newCapacity, encodedKeypoints, descriptorSize, extraSize);
  13775. /*
  13776. // debug (read the contents of the permutation)
  13777. const pixels = permutation.inspect(gpu), debug = [];
  13778. for(let i = 0; i < pixels.length; i += 4) {
  13779. let id = pixels[i] | (pixels[i+1] << 8);
  13780. let score = pixels[i+2] / 255.0;
  13781. let valid = pixels[i+3] / 255.0;
  13782. debug.push([ id, valid, score, ].join(', '));
  13783. }
  13784. console.log(debug);
  13785. */
  13786. // done!
  13787. this.output().swrite(outputTexture, descriptorSize, extraSize, newEncoderLength);
  13788. }
  13789. }
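/*
 * Usage sketch for the keypoint clipper (illustrative; the factory name
 * Speedy.Keypoint.Clipper() is assumed from the public speedy-vision API and is
 * not verified in this bundle):
 *
 *     const clipper = Speedy.Keypoint.Clipper();
 *     clipper.size = 800; // keep at most the 800 highest-scoring keypoints
 *     detector.output().connectTo(clipper.input());
 *     clipper.output().connectTo(sink.input());
 */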
  13790. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/border-clipper.js
  13791. /*
  13792. * speedy-vision.js
  13793. * GPU-accelerated Computer Vision for JavaScript
  13794. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13795. *
  13796. * Licensed under the Apache License, Version 2.0 (the "License");
  13797. * you may not use this file except in compliance with the License.
  13798. * You may obtain a copy of the License at
  13799. *
  13800. * http://www.apache.org/licenses/LICENSE-2.0
  13801. *
  13802. * Unless required by applicable law or agreed to in writing, software
  13803. * distributed under the License is distributed on an "AS IS" BASIS,
  13804. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13805. * See the License for the specific language governing permissions and
  13806. * limitations under the License.
  13807. *
  13808. * border-clipper.js
  13809. * Keypoint Border Clipper
  13810. */
  13811. /**
* The Border Clipper removes all keypoints that lie within a border along the edges of an image
  13813. */
  13814. class SpeedyPipelineNodeKeypointBorderClipper extends SpeedyPipelineNode {
  13815. /**
  13816. * Constructor
  13817. * @param {string} [name] name of the node
  13818. */
  13819. constructor(name = undefined) {
  13820. super(name, 5, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13821. /** @type {SpeedySize} image size, in pixels */
  13822. this._imageSize = new SpeedySize(0, 0);
  13823. /** @type {SpeedyVector2} border size, in pixels */
  13824. this._borderSize = new SpeedyVector2(0, 0);
  13825. }
  13826. /**
  13827. * Image size, in pixels
  13828. * @returns {SpeedySize}
  13829. */
  13830. get imageSize() {
  13831. return this._imageSize;
  13832. }
  13833. /**
  13834. * Image size, in pixels
  13835. * @param {SpeedySize} imageSize
  13836. */
  13837. set imageSize(imageSize) {
  13838. this._imageSize = imageSize;
  13839. }
  13840. /**
  13841. * Border size, in pixels
  13842. * @returns {SpeedyVector2}
  13843. */
  13844. get borderSize() {
  13845. return this._borderSize;
  13846. }
  13847. /**
  13848. * Border size, in pixels
  13849. * @param {SpeedyVector2} borderSize
  13850. */
  13851. set borderSize(borderSize) {
  13852. this._borderSize = borderSize;
  13853. }
  13854. /**
  13855. * Run the specific task of this node
  13856. * @param {SpeedyGPU} gpu
  13857. * @returns {void|SpeedyPromise<void>}
  13858. */
  13859. _run(gpu) {
  13860. const {
  13861. encodedKeypoints,
  13862. descriptorSize,
  13863. extraSize,
  13864. encoderLength
  13865. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13866. const keypoints = gpu.programs.keypoints;
  13867. const imageSize = this._imageSize;
  13868. const borderSize = this._borderSize;
const imageWidth = imageSize.width;
const imageHeight = imageSize.height;
const borderLeft = borderSize.x;
const borderRight = borderSize.x;
const borderTop = borderSize.y;
const borderBottom = borderSize.y;
  13875. const tex = this._tex;
  13876. // validate
  13877. if (imageWidth == 0 || imageHeight == 0) throw new utils_errors/* IllegalOperationError */.Er(`BorderClipper: did you forget to set the image size?`);
  13878. // find the capacity of the keypoint stream
  13879. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  13880. const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  13881. // prepare programs
  13882. keypoints.clipBorder.outputs(encoderLength, encoderLength, tex[0]);
  13883. keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  13884. keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
  13885. keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
  13886. // clip keypoints
  13887. let clippedKeypoints = keypoints.clipBorder(imageWidth, imageHeight, borderTop, borderRight, borderBottom, borderLeft, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  13888. // sort keypoints
  13889. let sortedKeypoints = keypoints.mixKeypointsInit(clippedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
  13890. for (let b = 1; b < capacity; b *= 2) sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
  13891. clippedKeypoints = keypoints.mixKeypointsApply(sortedKeypoints, clippedKeypoints, descriptorSize, extraSize, encoderLength);
  13892. /*
  13893. // debug: view keypoints
  13894. keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  13895. this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
  13896. */
  13897. // done!
  13898. this.output().swrite(clippedKeypoints, descriptorSize, extraSize, encoderLength);
  13899. }
  13900. }
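/*
 * Usage sketch for the border clipper (illustrative; the factory name
 * Speedy.Keypoint.BorderClipper() and the Speedy.Size() / Speedy.Vector2()
 * helpers are assumptions). imageSize must be set, otherwise _run() throws:
 *
 *     const borderClipper = Speedy.Keypoint.BorderClipper();
 *     borderClipper.imageSize = Speedy.Size(640, 480);   // size of the input image
 *     borderClipper.borderSize = Speedy.Vector2(16, 16); // drop keypoints within 16px of the edges
 *     detector.output().connectTo(borderClipper.input());
 */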
  13901. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/buffer.js
  13902. /*
  13903. * speedy-vision.js
  13904. * GPU-accelerated Computer Vision for JavaScript
  13905. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  13906. *
  13907. * Licensed under the Apache License, Version 2.0 (the "License");
  13908. * you may not use this file except in compliance with the License.
  13909. * You may obtain a copy of the License at
  13910. *
  13911. * http://www.apache.org/licenses/LICENSE-2.0
  13912. *
  13913. * Unless required by applicable law or agreed to in writing, software
  13914. * distributed under the License is distributed on an "AS IS" BASIS,
  13915. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13916. * See the License for the specific language governing permissions and
  13917. * limitations under the License.
  13918. *
  13919. * buffer.js
  13920. * Keypoint Buffer
  13921. */
  13922. /**
  13923. * Keypoint Buffer: a node with memory.
  13924. * At time t, it outputs the keypoints received at time t-1
  13925. */
  13926. class SpeedyPipelineNodeKeypointBuffer extends SpeedyPipelineNode {
  13927. /**
  13928. * Constructor
  13929. * @param {string} [name] name of the node
  13930. */
  13931. constructor(name = undefined) {
  13932. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  13933. /** @type {number} current page: 0 or 1 */
  13934. this._pageIndex = 0;
  13935. /** @type {boolean} first run? */
  13936. this._initialized = false;
  13937. /** @type {number} previous descriptor size, in bytes */
  13938. this._previousDescriptorSize = 0;
  13939. /** @type {number} previous extra size, in bytes */
  13940. this._previousExtraSize = 0;
  13941. /** @type {number} previous encoder length */
  13942. this._previousEncoderLength = 0;
  13943. /** @type {boolean} frozen buffer? */
  13944. this._frozen = false;
  13945. }
  13946. /**
  13947. * A frozen buffer discards the input, effectively increasing the buffering time
  13948. * @returns {boolean}
  13949. */
  13950. get frozen() {
  13951. return this._frozen;
  13952. }
  13953. /**
  13954. * A frozen buffer discards the input, effectively increasing the buffering time
  13955. * @param {boolean} value
  13956. */
  13957. set frozen(value) {
  13958. this._frozen = Boolean(value);
  13959. }
  13960. /**
  13961. * Releases this node
  13962. * @param {SpeedyGPU} gpu
  13963. */
  13964. release(gpu) {
  13965. this._initialized = false;
  13966. super.release(gpu);
  13967. }
  13968. /**
  13969. * Run the specific task of this node
  13970. * @param {SpeedyGPU} gpu
  13971. * @returns {void|SpeedyPromise<void>}
  13972. */
  13973. _run(gpu) {
  13974. const {
  13975. encodedKeypoints,
  13976. descriptorSize,
  13977. extraSize,
  13978. encoderLength
  13979. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  13980. const previousDescriptorSize = this._previousDescriptorSize;
  13981. const previousExtraSize = this._previousExtraSize;
  13982. const previousEncoderLength = this._previousEncoderLength;
  13983. const page = this._tex;
  13984. const previousInputTexture = page[1 - this._pageIndex];
  13985. const outputTexture = page[this._pageIndex];
  13986. // bufferize
  13987. if (!this._frozen || !this._initialized) {
  13988. // store input
  13989. this._previousDescriptorSize = descriptorSize;
  13990. this._previousExtraSize = extraSize;
  13991. this._previousEncoderLength = encoderLength;
  13992. previousInputTexture.resize(encoderLength, encoderLength);
  13993. encodedKeypoints.copyTo(previousInputTexture);
  13994. // page flipping
  13995. this._pageIndex = 1 - this._pageIndex;
  13996. }
  13997. // first run?
  13998. if (!this._initialized) {
  13999. this._initialized = true;
  14000. this.output().swrite(previousInputTexture, descriptorSize, extraSize, encoderLength);
  14001. return;
  14002. }
  14003. // done!
  14004. this.output().swrite(outputTexture, previousDescriptorSize, previousExtraSize, previousEncoderLength);
  14005. }
  14006. }
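/*
 * Usage sketch for the keypoint buffer (illustrative; the factory name
 * Speedy.Keypoint.Buffer() is assumed). The buffer delays the keypoint stream by
 * one frame, which is the typical way to feed "previous" keypoints to a tracker;
 * setting frozen = true discards new input and keeps re-emitting the stored frame:
 *
 *     const buffer = Speedy.Keypoint.Buffer();
 *     detector.output().connectTo(buffer.input());
 *     buffer.output().connectTo(tracker.input('previousKeypoints'));
 */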
  14007. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/mixer.js
  14008. /*
  14009. * speedy-vision.js
  14010. * GPU-accelerated Computer Vision for JavaScript
  14011. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14012. *
  14013. * Licensed under the Apache License, Version 2.0 (the "License");
  14014. * you may not use this file except in compliance with the License.
  14015. * You may obtain a copy of the License at
  14016. *
  14017. * http://www.apache.org/licenses/LICENSE-2.0
  14018. *
  14019. * Unless required by applicable law or agreed to in writing, software
  14020. * distributed under the License is distributed on an "AS IS" BASIS,
  14021. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14022. * See the License for the specific language governing permissions and
  14023. * limitations under the License.
  14024. *
  14025. * mixer.js
  14026. * Keypoint Mixer
  14027. */
  14028. /**
  14029. * Keypoint Mixer: merges two sets of keypoints
  14030. */
  14031. class SpeedyPipelineNodeKeypointMixer extends SpeedyPipelineNode {
  14032. /**
  14033. * Constructor
  14034. * @param {string} [name] name of the node
  14035. */
  14036. constructor(name = undefined) {
  14037. super(name, 5, [InputPort('in0').expects(SpeedyPipelineMessageType.Keypoints), InputPort('in1').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14038. }
  14039. /**
  14040. * Run the specific task of this node
  14041. * @param {SpeedyGPU} gpu
  14042. * @returns {void|SpeedyPromise<void>}
  14043. */
  14044. _run(gpu) {
  14045. const kps0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in0').read();
  14046. const kps1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in1').read();
  14047. const descriptorSize = kps0.descriptorSize;
  14048. const extraSize = kps0.extraSize;
  14049. const keypoints = gpu.programs.keypoints;
  14050. const tex = this._tex;
  14051. // ensure that the format of kps0 equals the format of kps1
if (!(kps0.descriptorSize === kps1.descriptorSize && kps0.extraSize === kps1.extraSize)) throw new utils_errors/* IllegalOperationError */.Er(`Can't merge two sets of keypoints that have different formats`);
  14053. // find the capacity of kps0 + kps1
  14054. const cap0 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps0.descriptorSize, kps0.extraSize, kps0.encoderLength);
  14055. const cap1 = SpeedyPipelineNodeKeypointDetector.encoderCapacity(kps1.descriptorSize, kps1.extraSize, kps1.encoderLength);
  14056. const capacity = cap0 + cap1;
  14057. // find the dimensions of the output texture
  14058. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(capacity, descriptorSize, extraSize);
  14059. const mixEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  14060. // prepare programs
  14061. keypoints.mixKeypointsPreInit.outputs(encoderLength, encoderLength, tex[0]);
  14062. keypoints.mixKeypointsInit.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  14063. keypoints.mixKeypointsSort.outputs(mixEncoderLength, mixEncoderLength, tex[2], tex[3]);
  14064. keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, tex[4]);
  14065. // mix keypoints
  14066. let mixedKeypoints = keypoints.mixKeypointsPreInit(kps0.encodedKeypoints, kps1.encodedKeypoints, kps0.encoderLength, kps1.encoderLength, cap0, cap1, descriptorSize, extraSize, encoderLength);
  14067. let sortedKeypoints = keypoints.mixKeypointsInit(mixedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
  14068. for (let b = 1; b < capacity; b *= 2) sortedKeypoints = keypoints.mixKeypointsSort(sortedKeypoints, b);
  14069. mixedKeypoints = keypoints.mixKeypointsApply(sortedKeypoints, mixedKeypoints, descriptorSize, extraSize, encoderLength);
  14070. /*
  14071. // debug: view keypoints
  14072. keypoints.mixKeypointsView.outputs(mixEncoderLength, mixEncoderLength, tex[1]);
  14073. this._visualize(gpu, keypoints.mixKeypointsView(sortedKeypoints));
  14074. */
  14075. this.output().swrite(mixedKeypoints, descriptorSize, extraSize, encoderLength);
  14076. }
  14077. }
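/*
 * Usage sketch for the keypoint mixer (illustrative; the factory name
 * Speedy.Keypoint.Mixer() is assumed). Both inputs must carry keypoints with the
 * same descriptor/extra format, e.g. freshly detected corners merged with
 * tracked ones:
 *
 *     const mixer = Speedy.Keypoint.Mixer();
 *     detector.output().connectTo(mixer.input('in0'));
 *     tracker.output().connectTo(mixer.input('in1'));
 *     mixer.output().connectTo(sink.input());
 */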
  14078. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/shuffler.js
  14079. /*
  14080. * speedy-vision.js
  14081. * GPU-accelerated Computer Vision for JavaScript
  14082. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14083. *
  14084. * Licensed under the Apache License, Version 2.0 (the "License");
  14085. * you may not use this file except in compliance with the License.
  14086. * You may obtain a copy of the License at
  14087. *
  14088. * http://www.apache.org/licenses/LICENSE-2.0
  14089. *
  14090. * Unless required by applicable law or agreed to in writing, software
  14091. * distributed under the License is distributed on an "AS IS" BASIS,
  14092. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14093. * See the License for the specific language governing permissions and
  14094. * limitations under the License.
  14095. *
  14096. * shuffler.js
  14097. * Keypoint Shuffler
  14098. */
  14099. /**
  14100. * The Keypoint Shuffler shuffles a list of keypoints
  14101. */
  14102. class SpeedyPipelineNodeKeypointShuffler extends SpeedyPipelineNode {
  14103. /**
  14104. * Constructor
  14105. * @param {string} [name] name of the node
  14106. */
  14107. constructor(name = undefined) {
  14108. super(name, 6, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14109. /** @type {number} maximum number of keypoints */
  14110. this._maxKeypoints = Number.NaN;
  14111. }
  14112. /**
  14113. * Maximum number of keypoints (optional)
  14114. * @returns {number}
  14115. */
  14116. get maxKeypoints() {
  14117. return this._maxKeypoints;
  14118. }
  14119. /**
  14120. * Maximum number of keypoints (optional)
  14121. * @param {number} value
  14122. */
  14123. set maxKeypoints(value) {
if (!Number.isNaN(value)) this._maxKeypoints = Math.max(0, value | 0);
else this._maxKeypoints = Number.NaN;
  14125. }
  14126. /**
  14127. * Run the specific task of this node
  14128. * @param {SpeedyGPU} gpu
  14129. * @returns {void|SpeedyPromise<void>}
  14130. */
  14131. _run(gpu) {
  14132. let {
  14133. encodedKeypoints,
  14134. descriptorSize,
  14135. extraSize,
  14136. encoderLength
  14137. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  14138. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14139. const maxKeypoints = this._maxKeypoints;
  14140. // shuffle the keypoints (including nulls)
  14141. const permutationMaxLength = gpu.programs.keypoints.shuffle.definedConstant('PERMUTATION_MAXLEN');
  14142. const permutationLength = Math.min(permutationMaxLength, capacity);
  14143. const permutation = this._generatePermutation(permutationLength, permutationMaxLength);
  14144. encodedKeypoints = gpu.programs.keypoints.shuffle.setUBO('Permutation', permutation).outputs(encoderLength, encoderLength, this._tex[0])(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14145. // sort the keypoints
  14146. gpu.programs.keypoints.mixKeypointsInit.outputs(encoderLength, encoderLength, this._tex[1]);
  14147. gpu.programs.keypoints.mixKeypointsSort.outputs(encoderLength, encoderLength, this._tex[2], this._tex[3]);
  14148. gpu.programs.keypoints.mixKeypointsApply.outputs(encoderLength, encoderLength, this._tex[4]);
  14149. let sortedKeypoints = gpu.programs.keypoints.mixKeypointsInit(encodedKeypoints, descriptorSize, extraSize, encoderLength, capacity);
  14150. for (let b = 1; b < capacity; b *= 2) sortedKeypoints = gpu.programs.keypoints.mixKeypointsSort(sortedKeypoints, b);
  14151. encodedKeypoints = gpu.programs.keypoints.mixKeypointsApply(sortedKeypoints, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14152. // clip the output?
  14153. if (!Number.isNaN(maxKeypoints) && maxKeypoints < capacity) {
  14154. const newEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(maxKeypoints, descriptorSize, extraSize);
  14155. encodedKeypoints = gpu.programs.keypoints.clip.outputs(newEncoderLength, newEncoderLength, this._tex[5])(encodedKeypoints, descriptorSize, extraSize, encoderLength, maxKeypoints);
  14156. encoderLength = newEncoderLength;
  14157. }
  14158. // done!
  14159. this.output().swrite(encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14160. }
/**
* Generate a permutation p of { 0, 1, ..., n-1 } such that p(p(x)) = x for all x
* @param {number} n positive integer
* @param {number} [bufsize] size of the output array
* @returns {Int32Array} permutation
*/
_generatePermutation(n, bufsize = n) {
const array = new Int32Array(bufsize);
const p = array.subarray(0, n).fill(-1);
const q = utils/* Utils */.A.shuffle(utils/* Utils */.A.range(n));
for (let i = 0, j = 0; i < n; i++) {
if (p[i] < 0) {
do {
p[i] = q[j++];
} while (p[i] < i);
p[p[i]] = i;
}
}
return array; // padded with zeros
}
  14181. }
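/*
 * Note on _generatePermutation() above: its first n entries form a random
 * involution, i.e. a permutation p with p(p(x)) = x, so the shuffle amounts to a
 * set of pairwise swaps of keypoint slots. Illustrative check (not part of the
 * bundle):
 *
 *     function isInvolution(p) {
 *         return Array.from(p).every((v, i) => p[v] === i);
 *     }
 */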
  14182. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/multiplexer.js
  14183. /*
  14184. * speedy-vision.js
  14185. * GPU-accelerated Computer Vision for JavaScript
  14186. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14187. *
  14188. * Licensed under the Apache License, Version 2.0 (the "License");
  14189. * you may not use this file except in compliance with the License.
  14190. * You may obtain a copy of the License at
  14191. *
  14192. * http://www.apache.org/licenses/LICENSE-2.0
  14193. *
  14194. * Unless required by applicable law or agreed to in writing, software
  14195. * distributed under the License is distributed on an "AS IS" BASIS,
  14196. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14197. * See the License for the specific language governing permissions and
  14198. * limitations under the License.
  14199. *
  14200. * multiplexer.js
  14201. * Keypoint multiplexer
  14202. */
  14203. /** @type {string[]} the names of the input ports indexed by their number */
  14204. const multiplexer_INPUT_PORT = ['in0', 'in1'];
  14205. /**
  14206. * Keypoint multiplexer
  14207. */
  14208. class SpeedyPipelineNodeKeypointMultiplexer extends SpeedyPipelineNode {
  14209. /**
  14210. * Constructor
  14211. * @param {string} [name] name of the node
  14212. */
  14213. constructor(name = undefined) {
  14214. super(name, 0, [...multiplexer_INPUT_PORT.map(portName => InputPort(portName).expects(SpeedyPipelineMessageType.Keypoints)), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14215. /** @type {number} which port should be linked to the output? */
  14216. this._port = 0;
  14217. }
  14218. /**
  14219. * The number of the port that should be linked to the output
  14220. * @returns {number}
  14221. */
  14222. get port() {
  14223. return this._port;
  14224. }
  14225. /**
  14226. * The number of the port that should be linked to the output
  14227. * @param {number} port
  14228. */
  14229. set port(port) {
  14230. if (port < 0 || port >= multiplexer_INPUT_PORT.length) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid port: ${port}`);
  14231. this._port = port | 0;
  14232. }
  14233. /**
  14234. * Run the specific task of this node
  14235. * @param {SpeedyGPU} gpu
  14236. * @returns {void|SpeedyPromise<void>}
  14237. */
  14238. _run(gpu) {
  14239. const message = this.input(multiplexer_INPUT_PORT[this._port]).read();
  14240. this.output().write(message);
  14241. }
  14242. }
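/*
 * Usage sketch for the keypoint multiplexer (illustrative; the factory name
 * Speedy.Keypoint.Multiplexer() is assumed). It simply forwards one of its two
 * inputs, which is handy for switching between a detection branch and a
 * tracking branch at runtime:
 *
 *     const mux = Speedy.Keypoint.Multiplexer();
 *     detector.output().connectTo(mux.input('in0'));
 *     tracker.output().connectTo(mux.input('in1'));
 *     mux.port = 1; // forward 'in1' to the output
 */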
  14243. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/transformer.js
  14244. /*
  14245. * speedy-vision.js
  14246. * GPU-accelerated Computer Vision for JavaScript
  14247. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14248. *
  14249. * Licensed under the Apache License, Version 2.0 (the "License");
  14250. * you may not use this file except in compliance with the License.
  14251. * You may obtain a copy of the License at
  14252. *
  14253. * http://www.apache.org/licenses/LICENSE-2.0
  14254. *
  14255. * Unless required by applicable law or agreed to in writing, software
  14256. * distributed under the License is distributed on an "AS IS" BASIS,
  14257. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14258. * See the License for the specific language governing permissions and
  14259. * limitations under the License.
  14260. *
  14261. * transformer.js
  14262. * Apply a transformation matrix to a set of keypoints
  14263. */
  14264. /**
  14265. * Apply a transformation matrix to a set of keypoints
  14266. */
  14267. class SpeedyPipelineNodeKeypointTransformer extends SpeedyPipelineNode {
  14268. /**
  14269. * Constructor
  14270. * @param {string} [name] name of the node
  14271. */
  14272. constructor(name = undefined) {
  14273. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14274. /** @type {SpeedyMatrix} transformation matrix */
  14275. this._transform = speedy_matrix.SpeedyMatrix.Create(3, 3, [1, 0, 0, 0, 1, 0, 0, 0, 1]); // identity matrix
  14276. }
  14277. /**
  14278. * Transformation matrix
  14279. * @returns {SpeedyMatrix}
  14280. */
  14281. get transform() {
  14282. return this._transform;
  14283. }
  14284. /**
  14285. * Transformation matrix. Must be 3x3
  14286. * @param {SpeedyMatrix} transform
  14287. */
  14288. set transform(transform) {
  14289. if (!(transform.rows == 3 && transform.columns == 3)) throw new utils_errors/* IllegalArgumentError */.qw(`Not a 3x3 transformation matrix: ${transform}`);
  14290. this._transform = transform;
  14291. }
  14292. /**
  14293. * Run the specific task of this node
  14294. * @param {SpeedyGPU} gpu
  14295. * @returns {void|SpeedyPromise<void>}
  14296. */
  14297. _run(gpu) {
  14298. const {
  14299. encodedKeypoints,
  14300. descriptorSize,
  14301. extraSize,
  14302. encoderLength
  14303. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  14304. const outputTexture = this._tex[0];
  14305. const homography = this._transform.read();
  14306. // apply homography
  14307. gpu.programs.keypoints.applyHomography.outputs(encodedKeypoints.width, encodedKeypoints.height, outputTexture)(homography, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14308. // done!
  14309. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  14310. }
  14311. }
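/*
 * Usage sketch for the keypoint transformer (illustrative; the factory name
 * Speedy.Keypoint.Transformer() and the column-major entry order of
 * Speedy.Matrix() are assumptions). The transform must be a 3x3 homography:
 *
 *     const transformer = Speedy.Keypoint.Transformer();
 *     transformer.transform = Speedy.Matrix(3, 3, [
 *         1, 0, 0, // identity homography: keypoints are left unchanged
 *         0, 1, 0,
 *         0, 0, 1
 *     ]);
 */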
  14312. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/subpixel.js
  14313. /*
  14314. * speedy-vision.js
  14315. * GPU-accelerated Computer Vision for JavaScript
  14316. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14317. *
  14318. * Licensed under the Apache License, Version 2.0 (the "License");
  14319. * you may not use this file except in compliance with the License.
  14320. * You may obtain a copy of the License at
  14321. *
  14322. * http://www.apache.org/licenses/LICENSE-2.0
  14323. *
  14324. * Unless required by applicable law or agreed to in writing, software
  14325. * distributed under the License is distributed on an "AS IS" BASIS,
  14326. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14327. * See the License for the specific language governing permissions and
  14328. * limitations under the License.
  14329. *
  14330. * subpixel.js
  14331. * Subpixel refinement of keypoint location
  14332. */
  14333. /** @typedef {"quadratic1d"|"taylor2d"|"bicubic-upsample"|"bilinear-upsample"} SubpixelRefinementMethod */
  14334. /** @const {Object<SubpixelRefinementMethod,string>} method name to program name */
  14335. const METHOD2PROGRAM = Object.freeze({
  14336. 'quadratic1d': 'subpixelQuadratic1d',
  14337. 'taylor2d': 'subpixelTaylor2d',
  14338. 'bicubic-upsample': 'subpixelBicubic',
  14339. 'bilinear-upsample': 'subpixelBilinear'
  14340. });
  14341. /**
  14342. * Subpixel refinement of keypoint location
  14343. */
  14344. class SpeedyPipelineNodeKeypointSubpixelRefiner extends SpeedyPipelineNode {
  14345. /**
  14346. * Constructor
  14347. * @param {string} [name] name of the node
  14348. */
  14349. constructor(name = undefined) {
  14350. super(name, 2, [InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort('displacements').expects(SpeedyPipelineMessageType.Vector2)]);
  14351. /** @type {SubpixelRefinementMethod} subpixel refinement method */
  14352. this._method = 'quadratic1d';
  14353. /** @type {number} max iterations for the upsampling methods */
  14354. this._maxIterations = 6;
  14355. /** @type {number} convergence threshold for the upsampling methods */
  14356. this._epsilon = 0.1;
  14357. }
  14358. /**
  14359. * Subpixel refinement method
  14360. * @returns {SubpixelRefinementMethod}
  14361. */
  14362. get method() {
  14363. return this._method;
  14364. }
  14365. /**
  14366. * Subpixel refinement method
  14367. * @param {SubpixelRefinementMethod} name
  14368. */
  14369. set method(name) {
  14370. if (!Object.prototype.hasOwnProperty.call(METHOD2PROGRAM, name)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid method: "${name}"`);
  14371. this._method = name;
  14372. }
  14373. /**
  14374. * Max. iterations for the upsampling methods
  14375. * @returns {number}
  14376. */
  14377. get maxIterations() {
  14378. return this._maxIterations;
  14379. }
  14380. /**
  14381. * Max. iterations for the upsampling methods
  14382. * @param {number} value
  14383. */
  14384. set maxIterations(value) {
  14385. this._maxIterations = Math.max(0, +value);
  14386. }
  14387. /**
  14388. * Convergence threshold for the upsampling methods
  14389. * @returns {number}
  14390. */
  14391. get epsilon() {
  14392. return this._epsilon;
  14393. }
  14394. /**
  14395. * Convergence threshold for the upsampling methods
  14396. * @param {number} value
  14397. */
  14398. set epsilon(value) {
  14399. this._epsilon = Math.max(0, +value);
  14400. }
  14401. /**
  14402. * Run the specific task of this node
  14403. * @param {SpeedyGPU} gpu
  14404. * @returns {void|SpeedyPromise<void>}
  14405. */
  14406. _run(gpu) {
  14407. const {
  14408. encodedKeypoints,
  14409. descriptorSize,
  14410. extraSize,
  14411. encoderLength
  14412. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  14413. const {
  14414. image,
  14415. format
  14416. } = /** @type {SpeedyPipelineMessageWithImage} */this.input('image').read();
  14417. const tex = this._tex;
  14418. const program = METHOD2PROGRAM[this._method];
  14419. const maxIterations = this._maxIterations;
  14420. const epsilon = this._epsilon;
  14421. // note: if you detected the keypoints using a pyramid,
  14422. // you need to pass that pyramid as input!
  14423. // we'll compute the offsets for each keypoint
  14424. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14425. const offsetEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per refinement offset
  14426. const offsets = gpu.programs.keypoints[program].outputs(offsetEncoderLength, offsetEncoderLength, tex[0])(image, encodedKeypoints, descriptorSize, extraSize, encoderLength, maxIterations, epsilon);
  14427. // apply the offsets to the keypoints
  14428. const refinedKeypoints = gpu.programs.keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[1])(offsets, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14429. // done!
  14430. this.output().swrite(refinedKeypoints, descriptorSize, extraSize, encoderLength);
  14431. this.output('displacements').swrite(offsets);
  14432. }
  14433. }
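/*
 * Usage sketch for the subpixel refiner (illustrative; the factory name
 * Speedy.Keypoint.SubpixelRefiner() is assumed). The method must be one of the
 * keys of METHOD2PROGRAM above; the upsampling methods also honor maxIterations
 * and epsilon. Remember to feed the same (pyramid) image that was used for
 * detection:
 *
 *     const refiner = Speedy.Keypoint.SubpixelRefiner();
 *     refiner.method = 'quadratic1d';
 *     greyscale.output().connectTo(refiner.input('image'));
 *     detector.output().connectTo(refiner.input('keypoints'));
 */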
  14434. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/fast.js
  14435. /*
  14436. * speedy-vision.js
  14437. * GPU-accelerated Computer Vision for JavaScript
  14438. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14439. *
  14440. * Licensed under the Apache License, Version 2.0 (the "License");
  14441. * you may not use this file except in compliance with the License.
  14442. * You may obtain a copy of the License at
  14443. *
  14444. * http://www.apache.org/licenses/LICENSE-2.0
  14445. *
  14446. * Unless required by applicable law or agreed to in writing, software
  14447. * distributed under the License is distributed on an "AS IS" BASIS,
  14448. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14449. * See the License for the specific language governing permissions and
  14450. * limitations under the License.
  14451. *
  14452. * fast.js
  14453. * FAST corner detector
  14454. */
  14455. // Constants
  14456. const DEFAULT_THRESHOLD = 20;
  14457. /**
  14458. * FAST corner detector
  14459. */
  14460. class SpeedyPipelineNodeFASTKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector {
  14461. /**
  14462. * Constructor
  14463. * @param {string} [name] name of the node
  14464. */
  14465. constructor(name = undefined) {
  14466. super(name, 5, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14467. /** @type {number} FAST threshold in [0,255] */
  14468. this._threshold = DEFAULT_THRESHOLD;
  14469. }
  14470. /**
  14471. * FAST threshold in [0,255]
  14472. * @returns {number}
  14473. */
  14474. get threshold() {
  14475. return this._threshold;
  14476. }
  14477. /**
  14478. * FAST threshold in [0,255]
  14479. * @param {number} threshold
  14480. */
  14481. set threshold(threshold) {
  14482. this._threshold = Math.max(0, Math.min(threshold | 0, 255));
  14483. }
  14484. /**
  14485. * Run the specific task of this node
  14486. * @param {SpeedyGPU} gpu
  14487. * @returns {void|SpeedyPromise<void>}
  14488. */
  14489. _run(gpu) {
  14490. const {
  14491. image,
  14492. format
  14493. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
const width = image.width;
const height = image.height;
  14496. const tex = this._tex;
  14497. const capacity = this._capacity;
  14498. const threshold = this._threshold;
  14499. const lodStep = Math.log2(this.scaleFactor);
  14500. const levels = this.levels;
  14501. // validate pyramid
  14502. if (!(levels == 1 || image.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
  14503. // skip if the capacity is zero
  14504. if (capacity == 0) {
  14505. const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[4]);
  14506. const encoderLength = encodedKeypoints.width;
  14507. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14508. return;
  14509. }
  14510. // FAST
  14511. gpu.programs.keypoints.fast9_16.outputs(width, height, tex[0], tex[1]);
  14512. gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[2]);
  14513. let corners = tex[1].clear();
  14514. let numPasses = Math.max(1, Math.min(levels, globals.PYRAMID_MAX_LEVELS / lodStep | 0));
  14515. for (let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
  14516. corners = gpu.programs.keypoints.fast9_16(corners, image, lod, threshold);
  14517. //corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
  14518. }
  14519. // Same-scale non-maximum suppression
  14520. // *nicer results inside the loop; faster outside
  14521. // Hard to notice a difference when using FAST
  14522. corners = gpu.programs.keypoints.nonmaxSpace(corners);
  14523. // Multi-scale non-maximum suppression
  14524. // (doesn't seem to remove many keypoints)
  14525. if (levels > 1) {
  14526. corners = gpu.programs.keypoints.nonmaxScaleSimple.outputs(width, height, tex[1])(corners, image, lodStep);
  14527. }
  14528. // encode keypoints
  14529. let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[3]);
  14530. const encoderLength = encodedKeypoints.width;
  14531. // scale refinement
  14532. if (levels > 1) {
  14533. encodedKeypoints = gpu.programs.keypoints.refineScaleFAST916.outputs(encoderLength, encoderLength, tex[4])(image, lodStep, encodedKeypoints, 0, 0, encoderLength, threshold);
  14534. }
  14535. // done!
  14536. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14537. }
  14538. }
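/*
 * Usage sketch for the FAST detector (illustrative; the factory name
 * Speedy.Keypoint.Detector.FAST() and the capacity property inherited from the
 * multiscale detector base class are assumptions). When levels > 1, the input
 * image must be a pyramid (see the validation in _run() above):
 *
 *     const fast = Speedy.Keypoint.Detector.FAST();
 *     fast.threshold = 50; // FAST threshold in [0,255]
 *     greyscale.output().connectTo(fast.input());
 *     fast.output().connectTo(sink.input());
 */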
  14539. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/detectors/harris.js
  14540. /*
  14541. * speedy-vision.js
  14542. * GPU-accelerated Computer Vision for JavaScript
  14543. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14544. *
  14545. * Licensed under the Apache License, Version 2.0 (the "License");
  14546. * you may not use this file except in compliance with the License.
  14547. * You may obtain a copy of the License at
  14548. *
  14549. * http://www.apache.org/licenses/LICENSE-2.0
  14550. *
  14551. * Unless required by applicable law or agreed to in writing, software
  14552. * distributed under the License is distributed on an "AS IS" BASIS,
  14553. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14554. * See the License for the specific language governing permissions and
  14555. * limitations under the License.
  14556. *
  14557. * harris.js
  14558. * Harris corner detector
  14559. */
  14560. /** Window size helper */
  14561. const HARRIS = Object.freeze({
  14562. 1: 'harris1',
  14563. 3: 'harris3',
  14564. 5: 'harris5',
  14565. 7: 'harris7'
  14566. });
  14567. /**
  14568. * Harris corner detector
  14569. */
  14570. class SpeedyPipelineNodeHarrisKeypointDetector extends SpeedyPipelineNodeMultiscaleKeypointDetector {
  14571. /**
  14572. * Constructor
  14573. * @param {string} [name] name of the node
  14574. */
  14575. constructor(name = undefined) {
  14576. super(name, 6, [InputPort().expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14577. /** @type {SpeedySize} neighborhood size */
  14578. this._windowSize = new SpeedySize(3, 3);
  14579. /** @type {number} min corner quality in [0,1] */
  14580. this._quality = 0.1;
  14581. }
  14582. /**
  14583. * Minimum corner quality in [0,1] - this is a fraction of
  14584. * the largest min. eigenvalue of the autocorrelation matrix
  14585. * over the entire image
  14586. * @returns {number}
  14587. */
  14588. get quality() {
  14589. return this._quality;
  14590. }
  14591. /**
  14592. * Minimum corner quality in [0,1]
  14593. * @param {number} quality
  14594. */
  14595. set quality(quality) {
  14596. this._quality = Math.max(0.0, Math.min(+quality, 1.0));
  14597. }
  14598. /**
  14599. * Neighborhood size
  14600. * @returns {SpeedySize}
  14601. */
  14602. get windowSize() {
  14603. return this._windowSize;
  14604. }
  14605. /**
  14606. * Neighborhood size
  14607. * @param {SpeedySize} windowSize
  14608. */
  14609. set windowSize(windowSize) {
  14610. const d = windowSize.width;
  14611. if (!(d == windowSize.height && (d == 1 || d == 3 || d == 5 || d == 7))) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid window: ${windowSize}. Acceptable sizes: 1x1, 3x3, 5x5, 7x7`);
  14612. this._windowSize = windowSize;
  14613. }
  14614. /**
  14615. * Run the specific task of this node
  14616. * @param {SpeedyGPU} gpu
  14617. * @returns {void|SpeedyPromise<void>}
  14618. */
  14619. _run(gpu) {
  14620. const {
  14621. image,
  14622. format
  14623. } = /** @type {SpeedyPipelineMessageWithImage} */this.input().read();
const width = image.width;
const height = image.height;
  14626. const capacity = this._capacity;
  14627. const quality = this._quality;
  14628. const windowSize = this._windowSize.width;
  14629. const levels = this.levels;
  14630. const lodStep = Math.log2(this.scaleFactor);
  14631. const intFactor = levels > 1 ? this.scaleFactor : 1;
  14632. const harris = gpu.programs.keypoints[HARRIS[windowSize]];
  14633. const tex = this._tex;
  14634. // validate pyramid
  14635. if (!(levels == 1 || image.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`Expected a pyramid in ${this.fullName}`);
  14636. // skip if the capacity is zero
  14637. if (capacity == 0) {
  14638. const encodedKeypoints = this._encodeZeroKeypoints(gpu, tex[5]);
  14639. const encoderLength = encodedKeypoints.width;
  14640. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14641. return;
  14642. }
  14643. // compute corner response map
  14644. harris.outputs(width, height, tex[0], tex[1]);
  14645. gpu.programs.utils.sobelDerivatives.outputs(width, height, tex[2]);
  14646. gpu.programs.keypoints.nonmaxSpace.outputs(width, height, tex[3]);
  14647. let corners = tex[1].clear();
  14648. let numPasses = Math.max(1, Math.min(levels, globals.PYRAMID_MAX_LEVELS / lodStep | 0));
  14649. for (let lod = lodStep * (numPasses - 1); numPasses-- > 0; lod -= lodStep) {
  14650. const gaussian = utils/* Utils */.A.gaussianKernel(intFactor * (1 + lod), windowSize);
  14651. const derivatives = gpu.programs.utils.sobelDerivatives(image, lod);
  14652. corners = harris(corners, image, derivatives, lod, lodStep, gaussian);
  14653. corners = gpu.programs.keypoints.nonmaxSpace(corners); // see below*
  14654. }
  14655. // Same-scale non-maximum suppression
  14656. // *performs better inside the loop
  14657. //corners = gpu.programs.keypoints.nonmaxSpace(corners);
  14658. // Multi-scale non-maximum suppression
  14659. // (doesn't seem to remove many keypoints)
  14660. if (levels > 1) {
  14661. const laplacian = gpu.programs.keypoints.laplacian.outputs(width, height, tex[0])(corners, image, lodStep, 0);
  14662. corners = gpu.programs.keypoints.nonmaxScale.outputs(width, height, tex[2])(corners, image, laplacian, lodStep);
  14663. }
  14664. // find the maximum corner response over the entire image
  14665. gpu.programs.keypoints.harrisScoreFindMax.outputs(width, height, tex[0], tex[1]);
  14666. numPasses = Math.ceil(Math.log2(Math.max(width, height)));
  14667. let maxScore = corners;
  14668. for (let j = 0; j < numPasses; j++) maxScore = gpu.programs.keypoints.harrisScoreFindMax(maxScore, j);
  14669. // discard corners below a quality level
  14670. corners = gpu.programs.keypoints.harrisScoreCutoff.outputs(width, height, maxScore == tex[0] ? tex[1] : tex[0])(corners, maxScore, quality);
  14671. // encode keypoints
  14672. let encodedKeypoints = this._encodeKeypoints(gpu, corners, tex[4]);
  14673. const encoderLength = encodedKeypoints.width;
  14674. // scale refinement
  14675. if (levels > 1) {
  14676. encodedKeypoints = gpu.programs.keypoints.refineScaleLoG.outputs(encoderLength, encoderLength, tex[5])(image, lodStep, encodedKeypoints, 0, 0, encoderLength);
  14677. }
  14678. // done!
  14679. this.output().swrite(encodedKeypoints, 0, 0, encoderLength);
  14680. }
  14681. }
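/*
 * Usage sketch for the Harris detector (illustrative; the factory names
 * Speedy.Keypoint.Detector.Harris() and Speedy.Size() are assumptions). quality
 * is a fraction of the largest corner response over the image and windowSize
 * must be 1x1, 3x3, 5x5 or 7x7:
 *
 *     const harris = Speedy.Keypoint.Detector.Harris();
 *     harris.quality = 0.10;
 *     harris.windowSize = Speedy.Size(3, 3);
 *     greyscale.output().connectTo(harris.input());
 */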
  14682. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/descriptor.js
  14683. /*
  14684. * speedy-vision.js
  14685. * GPU-accelerated Computer Vision for JavaScript
  14686. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14687. *
  14688. * Licensed under the Apache License, Version 2.0 (the "License");
  14689. * you may not use this file except in compliance with the License.
  14690. * You may obtain a copy of the License at
  14691. *
  14692. * http://www.apache.org/licenses/LICENSE-2.0
  14693. *
  14694. * Unless required by applicable law or agreed to in writing, software
  14695. * distributed under the License is distributed on an "AS IS" BASIS,
  14696. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14697. * See the License for the specific language governing permissions and
  14698. * limitations under the License.
  14699. *
  14700. * descriptor.js
  14701. * Abstract keypoint descriptor
  14702. */
  14703. /**
  14704. * Abstract keypoint descriptor
  14705. * @abstract
  14706. */
  14707. class SpeedyPipelineNodeKeypointDescriptor extends SpeedyPipelineNode {
  14708. /**
  14709. * Constructor
  14710. * @param {string} [name] name of the node
  14711. * @param {number} [texCount] number of work textures
  14712. * @param {SpeedyPipelinePortBuilder[]} [portBuilders] port builders
  14713. */
  14714. constructor(name = undefined, texCount = 0, portBuilders = undefined) {
  14715. super(name, texCount + 1, portBuilders);
  14716. }
  14717. /**
  14718. *
  14719. * Allocate space for keypoint descriptors
  14720. * @param {SpeedyGPU} gpu
  14721. * @param {number} inputDescriptorSize should be 0
  14722. * @param {number} inputExtraSize must be non-negative
  14723. * @param {number} outputDescriptorSize in bytes, must be a multiple of 4
  14724. * @param {number} outputExtraSize must be inputExtraSize
  14725. * @param {SpeedyTexture} inputEncodedKeypoints input with no descriptors
  14726. * @returns {SpeedyDrawableTexture} encodedKeypoints
  14727. */
  14728. _allocateDescriptors(gpu, inputDescriptorSize, inputExtraSize, outputDescriptorSize, outputExtraSize, inputEncodedKeypoints) {
  14729. utils/* Utils */.A.assert(inputDescriptorSize >= 0 && inputExtraSize >= 0);
  14730. utils/* Utils */.A.assert(outputDescriptorSize >= 0 && outputDescriptorSize % 4 === 0 && outputExtraSize === inputExtraSize);
  14731. const inputEncoderLength = inputEncodedKeypoints.width;
  14732. const inputEncoderCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(inputDescriptorSize, inputExtraSize, inputEncoderLength);
  14733. const outputEncoderCapacity = inputEncoderCapacity;
  14734. const outputEncoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(outputEncoderCapacity, outputDescriptorSize, outputExtraSize);
  14735. const tex = this._tex[this._tex.length - 1];
  14736. return gpu.programs.keypoints.allocateDescriptors.outputs(outputEncoderLength, outputEncoderLength, tex)(inputEncodedKeypoints, inputDescriptorSize, inputExtraSize, inputEncoderLength, outputDescriptorSize, outputExtraSize, outputEncoderLength);
  14737. }
  14738. }
  14739. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/descriptors/orb.js
  14740. /*
  14741. * speedy-vision.js
  14742. * GPU-accelerated Computer Vision for JavaScript
  14743. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14744. *
  14745. * Licensed under the Apache License, Version 2.0 (the "License");
  14746. * you may not use this file except in compliance with the License.
  14747. * You may obtain a copy of the License at
  14748. *
  14749. * http://www.apache.org/licenses/LICENSE-2.0
  14750. *
  14751. * Unless required by applicable law or agreed to in writing, software
  14752. * distributed under the License is distributed on an "AS IS" BASIS,
  14753. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14754. * See the License for the specific language governing permissions and
  14755. * limitations under the License.
  14756. *
  14757. * orb.js
  14758. * ORB descriptors
  14759. */
  14760. // Constants
  14761. const DESCRIPTOR_SIZE = 32; // 256 bits
  14762. /**
  14763. * ORB descriptors
  14764. */
  14765. class SpeedyPipelineNodeORBKeypointDescriptor extends SpeedyPipelineNodeKeypointDescriptor {
  14766. /**
  14767. * Constructor
  14768. * @param {string} [name] name of the node
  14769. */
  14770. constructor(name = undefined) {
  14771. super(name, 3, [InputPort('image').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  14772. }
  14773. /**
  14774. * Run the specific task of this node
  14775. * @param {SpeedyGPU} gpu
  14776. * @returns {void|SpeedyPromise<void>}
  14777. */
  14778. _run(gpu) {
  14779. const {
  14780. encodedKeypoints,
  14781. descriptorSize,
  14782. extraSize,
  14783. encoderLength
  14784. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  14785. const image = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('image').read()).image;
  14786. const tex = this._tex;
  14787. const outputTexture = this._tex[2];
  14788. // compute orientation
  14789. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14790. const orientationEncoderLength = Math.max(1, Math.ceil(Math.sqrt(capacity))); // 1 pixel per keypoint
  14791. const encodedOrientations = gpu.programs.keypoints.orbOrientation.outputs(orientationEncoderLength, orientationEncoderLength, tex[0])(image, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14792. const orientedKeypoints = gpu.programs.keypoints.transferOrientation.outputs(encoderLength, encoderLength, tex[1])(encodedOrientations, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  14793. // allocate space
  14794. const encodedKps = this._allocateDescriptors(gpu, descriptorSize, extraSize, DESCRIPTOR_SIZE, extraSize, orientedKeypoints);
  14795. const newEncoderLength = encodedKps.width;
  14796. // compute descriptors (it's a good idea to blur the image)
  14797. const describedKeypoints = gpu.programs.keypoints.orbDescriptor.outputs(newEncoderLength, newEncoderLength, outputTexture)(image, encodedKps, extraSize, newEncoderLength);
  14798. // done!
  14799. this.output().swrite(describedKeypoints, DESCRIPTOR_SIZE, extraSize, newEncoderLength);
  14800. }
  14801. }
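/*
 * Usage sketch for the ORB descriptor (illustrative; the factory name
 * Speedy.Keypoint.Descriptor.ORB() is assumed). It appends 32-byte (256-bit)
 * binary descriptors to the keypoint stream; as the comment in _run() notes,
 * feeding a blurred greyscale image tends to give more stable descriptors:
 *
 *     const orb = Speedy.Keypoint.Descriptor.ORB();
 *     blurredGreyscale.output().connectTo(orb.input('image'));
 *     detector.output().connectTo(orb.input('keypoints'));
 *     orb.output().connectTo(sink.input());
 */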
  14802. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/trackers/lk.js
  14803. /*
  14804. * speedy-vision.js
  14805. * GPU-accelerated Computer Vision for JavaScript
  14806. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14807. *
  14808. * Licensed under the Apache License, Version 2.0 (the "License");
  14809. * you may not use this file except in compliance with the License.
  14810. * You may obtain a copy of the License at
  14811. *
  14812. * http://www.apache.org/licenses/LICENSE-2.0
  14813. *
  14814. * Unless required by applicable law or agreed to in writing, software
  14815. * distributed under the License is distributed on an "AS IS" BASIS,
  14816. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14817. * See the License for the specific language governing permissions and
  14818. * limitations under the License.
  14819. *
  14820. * lk.js
  14821. * LK optical-flow
  14822. */
  14823. // Constants
  14824. const DEFAULT_WINDOW_SIZE = new SpeedySize(11, 11); // nice on mobile?
  14825. const DEFAULT_DEPTH = Math.min(3, globals.PYRAMID_MAX_LEVELS);
  14826. const DEFAULT_NUMBER_OF_ITERATIONS = 30;
  14827. const DEFAULT_DISCARD_THRESHOLD = 0.0001;
  14828. const DEFAULT_EPSILON = 0.01;
  14829. const LK_PROGRAM = {
  14830. 3: 'lk3',
  14831. 5: 'lk5',
  14832. 7: 'lk7',
  14833. 9: 'lk9',
  14834. 11: 'lk11',
  14835. 13: 'lk13',
  14836. 15: 'lk15',
  14837. 17: 'lk17',
  14838. 19: 'lk19',
  14839. 21: 'lk21'
  14840. };
  14841. /**
  14842. * LK optical-flow
  14843. */
  14844. class SpeedyPipelineNodeLKKeypointTracker extends SpeedyPipelineNode {
  14845. /**
  14846. * Constructor
  14847. * @param {string} [name] name of the node
  14848. */
  14849. constructor(name = undefined) {
  14850. super(name, 3, [InputPort('previousImage').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('nextImage').expects(SpeedyPipelineMessageType.Image).satisfying(( /** @type {SpeedyPipelineMessageWithImage} */msg) => msg.format === types/* ImageFormat */.f5.GREY), InputPort('previousKeypoints').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints), OutputPort('flow').expects(SpeedyPipelineMessageType.Vector2)]);
  14851. /** @type {SpeedySize} window size */
  14852. this._windowSize = DEFAULT_WINDOW_SIZE;
  14853. /** @type {number} number of pyramid levels to use */
  14854. this._levels = DEFAULT_DEPTH;
  14855. /** @type {number} minimum acceptable corner response */
  14856. this._discardThreshold = DEFAULT_DISCARD_THRESHOLD;
  14857. /** @type {number} number of iterations per pyramid level (termination criteria) */
  14858. this._numberOfIterations = DEFAULT_NUMBER_OF_ITERATIONS;
  14859. /** @type {number} minimum increment per iteration (termination criteria) */
  14860. this._epsilon = DEFAULT_EPSILON;
  14861. }
  14862. /**
  14863. * Window size (use odd numbers)
  14864. * @returns {SpeedySize}
  14865. */
  14866. get windowSize() {
  14867. return this._windowSize;
  14868. }
  14869. /**
  14870. * Window size (use odd numbers)
  14871. * @param {SpeedySize} windowSize must be a square window
  14872. */
set windowSize(windowSize) {
if (windowSize.width != windowSize.height) {
throw new utils_errors/* NotSupportedError */.EM(`LK: window ${windowSize.toString()} is not square!`);
} else if (!Object.prototype.hasOwnProperty.call(LK_PROGRAM, windowSize.width)) {
const SUPPORTED_WINDOWS = Object.keys(LK_PROGRAM).sort((a, b) => a - b).map(k => k + 'x' + k).join(', ');
throw new utils_errors/* NotSupportedError */.EM(`LK: window of size ${windowSize.toString()} is not supported! Supported sizes: ${SUPPORTED_WINDOWS}`);
}
this._windowSize = windowSize;
}
  14882. /**
  14883. * Number of pyramid levels to use
  14884. * @returns {number}
  14885. */
  14886. get levels() {
  14887. return this._levels;
  14888. }
  14889. /**
  14890. * Number of pyramid levels to use
  14891. * @param {number} levels
  14892. */
  14893. set levels(levels) {
  14894. utils/* Utils */.A.assert(levels >= 1 && levels <= globals.PYRAMID_MAX_LEVELS);
  14895. this._levels = levels | 0;
  14896. }
  14897. /**
  14898. * Get the discard threshold, used to discard "bad" keypoints
  14899. * @returns {number}
  14900. */
  14901. get discardThreshold() {
  14902. return this._discardThreshold;
  14903. }
  14904. /**
  14905. * Set the discard threshold, used to discard "bad" keypoints
  14906. * @param {number} value typically 10^(-4) - increase to discard more
  14907. */
  14908. set discardThreshold(value) {
  14909. utils/* Utils */.A.assert(value >= 0);
  14910. this._discardThreshold = +value;
  14911. }
  14912. /**
  14913. * Get the maximum number of iterations of the pyramidal LK algorithm
  14914. * @returns {number}
  14915. */
  14916. get numberOfIterations() {
  14917. return this._numberOfIterations;
  14918. }
  14919. /**
  14920. * Set the maximum number of iterations of the pyramidal LK algorithm
  14921. * @param {number} value
  14922. */
  14923. set numberOfIterations(value) {
  14924. utils/* Utils */.A.assert(value >= 1);
  14925. this._numberOfIterations = value | 0;
  14926. }
  14927. /**
  14928. * Get the accuracy threshold, used to stop LK iterations
  14929. * @returns {number}
  14930. */
  14931. get epsilon() {
  14932. return this._epsilon;
  14933. }
  14934. /**
14935. * Set the accuracy threshold, used to stop LK iterations
  14936. * @param {number} value typically 0.01
  14937. */
  14938. set epsilon(value) {
  14939. utils/* Utils */.A.assert(value >= 0);
  14940. this._epsilon = +value;
  14941. }
  14942. /**
  14943. * Run the specific task of this node
  14944. * @param {SpeedyGPU} gpu
  14945. * @returns {void|SpeedyPromise<void>}
  14946. */
  14947. _run(gpu) {
  14948. const {
  14949. encodedKeypoints,
  14950. descriptorSize,
  14951. extraSize,
  14952. encoderLength
  14953. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('previousKeypoints').read();
  14954. const previousImage = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('previousImage').read()).image;
  14955. const nextImage = ( /** @type {SpeedyPipelineMessageWithImage} */this.input('nextImage').read()).image;
  14956. const previousKeypoints = encodedKeypoints;
  14957. const levels = this._levels;
  14958. const windowSize = this._windowSize;
  14959. const wsize = windowSize.width; // square window
  14960. const numberOfIterations = this._numberOfIterations;
  14961. const discardThreshold = this._discardThreshold;
  14962. const epsilon = this._epsilon;
  14963. const keypoints = gpu.programs.keypoints;
  14964. const tex = this._tex;
  14965. // do we need a pyramid?
  14966. if (!(levels == 1 || previousImage.hasMipmaps() && nextImage.hasMipmaps())) throw new utils_errors/* IllegalOperationError */.Er(`LK: a pyramid is required if levels > 1`);else if (previousImage.width !== nextImage.width || previousImage.height !== nextImage.height) throw new utils_errors/* IllegalOperationError */.Er(`LK: can't use input images of different size`);
  14967. // select the appropriate program
  14968. const lk = keypoints[LK_PROGRAM[wsize]];
  14969. // find the dimensions of the flow texture (1 pixel per flow vector)
  14970. const numKeypoints = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  14971. const lkEncoderLength = Math.max(1, Math.ceil(Math.sqrt(numKeypoints)));
  14972. lk.outputs(lkEncoderLength, lkEncoderLength, tex[0], tex[1]);
  14973. // compute optical-flow
  14974. let flow = lk.clear();
  14975. for (let lod = levels - 1; lod >= 0; lod--) flow = lk(flow, previousKeypoints, nextImage, previousImage, lod, levels, numberOfIterations, discardThreshold, epsilon, descriptorSize, extraSize, encoderLength);
  14976. // transfer optical-flow to nextKeypoints
  14977. keypoints.transferFlow.outputs(encoderLength, encoderLength, tex[2]);
  14978. const nextKeypoints = keypoints.transferFlow(flow, previousKeypoints, descriptorSize, extraSize, encoderLength);
  14979. // done!
  14980. this.output().swrite(nextKeypoints, descriptorSize, extraSize, encoderLength);
  14981. this.output('flow').swrite(flow);
  14982. }
  14983. }
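/*
 * Usage sketch (not part of the library): configuring the pyramidal LK tracker node.
 * It assumes that the factory below (keypoint-factory.js) is exposed publicly as
 * Speedy.Keypoint.Tracker.LK() and that 15x15 is one of the window sizes listed in
 * LK_PROGRAM. Only the properties defined above are used.
 *
 *     const lk = Speedy.Keypoint.Tracker.LK('lk');
 *     lk.windowSize = Speedy.Size(15, 15);   // must be square, with an odd, supported side
 *     lk.levels = 3;                         // levels > 1 requires pyramids on both images
 *     lk.discardThreshold = 1e-4;            // increase to discard more "bad" keypoints
 *     lk.numberOfIterations = 30;            // termination criteria
 *     lk.epsilon = 0.01;                     // termination criteria
 *     // wire 'previousImage', 'nextImage' and 'previousKeypoints'; read the default
 *     // output (tracked keypoints) and the optional 'flow' output (2D vectors).
 */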
  14984. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-static-tables.js
  14985. /*
  14986. * speedy-vision.js
  14987. * GPU-accelerated Computer Vision for JavaScript
  14988. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  14989. *
  14990. * Licensed under the Apache License, Version 2.0 (the "License");
  14991. * you may not use this file except in compliance with the License.
  14992. * You may obtain a copy of the License at
  14993. *
  14994. * http://www.apache.org/licenses/LICENSE-2.0
  14995. *
  14996. * Unless required by applicable law or agreed to in writing, software
  14997. * distributed under the License is distributed on an "AS IS" BASIS,
  14998. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14999. * See the License for the specific language governing permissions and
  15000. * limitations under the License.
  15001. *
  15002. * lsh-static-tables.js
  15003. * Static LSH tables
  15004. */
  15005. /**
  15006. * Static LSH tables
  15007. */
  15008. class SpeedyPipelineNodeStaticLSHTables extends SpeedyPipelineSourceNode {
  15009. /**
  15010. * Constructor
  15011. * @param {string} [name] name of the node
  15012. */
  15013. constructor(name = undefined) {
  15014. super(name, 2, [OutputPort().expects(SpeedyPipelineMessageType.LSHTables)]);
  15015. /** @type {SpeedyKeypoint[]} "training" keypoints */
  15016. this._keypoints = [];
  15017. /** @type {SpeedyKeypoint[]} internal copy of the "training" keypoints */
  15018. this._keypointsCopy = [];
  15019. /** @type {number} number of tables in the LSH data structure */
  15020. this._numberOfTables = LSH_DEFAULT_NUMBER_OF_TABLES;
  15021. /** @type {number} number of bits of a hash */
  15022. this._hashSize = LSH_DEFAULT_HASH_SIZE;
  15023. /** @type {SpeedyLSH|null} LSH data structure */
  15024. this._lsh = null;
  15025. }
  15026. /**
  15027. * "Training" keypoints
  15028. * @returns {SpeedyKeypoint[]}
  15029. */
  15030. get keypoints() {
  15031. return this._keypoints;
  15032. }
  15033. /**
  15034. * "Training" keypoints
  15035. * @param {SpeedyKeypoint[]} keypoints
  15036. */
  15037. set keypoints(keypoints) {
  15038. if (!Array.isArray(keypoints) || keypoints.find(keypoint => !(keypoint instanceof SpeedyKeypoint))) throw new utils_errors/* IllegalArgumentError */.qw(`Static LSH tables: an invalid set of keypoints has been provided`);
  15039. if (this._keypoints !== keypoints) {
  15040. this._keypoints = keypoints; // update internal pointer
  15041. this._keypointsCopy = keypoints.slice(0); // clone the array, so it won't be modified externally
  15042. this._lsh = null; // (re)train the model
  15043. }
  15044. }
  15045. /**
  15046. * Number of tables in the LSH data structure
  15047. * @returns {number}
  15048. */
  15049. get numberOfTables() {
  15050. return this._numberOfTables;
  15051. }
  15052. /**
  15053. * Number of tables in the LSH data structure
  15054. * @param {number} n
  15055. */
  15056. set numberOfTables(n) {
  15057. if (!LSH_ACCEPTABLE_NUMBER_OF_TABLES.includes(n)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid number of tables: ${n}. Acceptable values: ${LSH_ACCEPTABLE_NUMBER_OF_TABLES.join(', ')}`);
  15058. if (n !== this._numberOfTables) {
  15059. this._numberOfTables = n | 0;
  15060. this._lsh = null; // need to retrain the model
  15061. }
  15062. }
  15063. /**
  15064. * Number of bits of a hash
  15065. * @returns {number}
  15066. */
  15067. get hashSize() {
  15068. return this._hashSize;
  15069. }
  15070. /**
  15071. * Number of bits of a hash
  15072. * @param {number} h
  15073. */
  15074. set hashSize(h) {
  15075. if (!LSH_ACCEPTABLE_HASH_SIZES.includes(h)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid hash size: ${h}. Acceptable values: ${LSH_ACCEPTABLE_HASH_SIZES.join(', ')}`);
  15076. if (h !== this._hashSize) {
  15077. this._hashSize = h | 0;
  15078. this._lsh = null; // need to retrain the model
  15079. }
  15080. }
  15081. /**
  15082. * Run the specific task of this node
  15083. * @param {SpeedyGPU} gpu
  15084. * @returns {void|SpeedyPromise<void>}
  15085. */
  15086. _run(gpu) {
  15087. // Need to train the model?
  15088. if (this._lsh == null) {
  15089. // internal work textures are only available after initialization,
  15090. // i.e., after calling this._init()
  15091. this._lsh = this._train();
  15092. }
  15093. // Pass it forward
  15094. this.output().swrite(this._lsh);
  15095. }
  15096. /**
  15097. * Train the model
  15098. * @returns {SpeedyLSH}
  15099. */
  15100. _train() {
  15101. const keypoints = this._keypointsCopy;
  15102. const numberOfTables = this._numberOfTables;
  15103. const hashSize = this._hashSize;
  15104. if (keypoints.find(keypoint => keypoint.descriptor == null)) throw new utils_errors/* IllegalOperationError */.Er(`Static LSH tables: can't train the model with no keypoint descriptors!`);
  15105. const descriptors = keypoints.map(keypoint => keypoint.descriptor.data);
  15106. const lshTables = this._tex[0];
  15107. const descriptorDB = this._tex[1];
  15108. return new SpeedyLSH(lshTables, descriptorDB, descriptors, numberOfTables, hashSize);
  15109. }
  15110. }
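/*
 * Usage sketch (not part of the library): feeding "training" keypoints to the static
 * LSH tables node. It assumes the node is exposed as Speedy.Keypoint.Matcher.StaticLSHTables()
 * and that 8 tables / 15-bit hashes appear in LSH_ACCEPTABLE_NUMBER_OF_TABLES and
 * LSH_ACCEPTABLE_HASH_SIZES.
 *
 *     const lshTables = Speedy.Keypoint.Matcher.StaticLSHTables('lsh');
 *     lshTables.keypoints = trainingKeypoints; // SpeedyKeypoint[], all with descriptors
 *     lshTables.numberOfTables = 8;            // assumed acceptable value
 *     lshTables.hashSize = 15;                 // assumed acceptable value
 *     // changing any of these properties discards the model; it is re-trained lazily
 *     // on the next run of the pipeline (see _run() above).
 */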
  15111. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/lsh-knn.js
  15112. /*
  15113. * speedy-vision.js
  15114. * GPU-accelerated Computer Vision for JavaScript
  15115. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15116. *
  15117. * Licensed under the Apache License, Version 2.0 (the "License");
  15118. * you may not use this file except in compliance with the License.
  15119. * You may obtain a copy of the License at
  15120. *
  15121. * http://www.apache.org/licenses/LICENSE-2.0
  15122. *
  15123. * Unless required by applicable law or agreed to in writing, software
  15124. * distributed under the License is distributed on an "AS IS" BASIS,
  15125. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15126. * See the License for the specific language governing permissions and
  15127. * limitations under the License.
  15128. *
  15129. * lsh-knn.js
  15130. * K approximate nearest neighbors matcher
  15131. */
  15132. /** @typedef {'fastest' | 'default' | 'demanding'} LSHKNNQualityLevel quality of the approximate matching */
  15133. /** @type {number} how many neighbors to search for, by default */
  15134. const DEFAULT_K = 1;
  15135. /** @type {LSHKNNQualityLevel} default quality level */
  15136. const DEFAULT_QUALITY = 'default';
  15137. /** @type {{ [key in LSHKNNQualityLevel]: number }} maps quality level to bit swaps */
  15138. const NUMBER_OF_BIT_SWAPS = {
  15139. 'fastest': 0,
  15140. 'default': 1,
  15141. 'demanding': 2
  15142. };
  15143. /** @type {object} program names indexed as LSH_KNN[descriptorSize][hashSize][level] */
  15144. const LSH_KNN = (fd => LSH_ACCEPTABLE_DESCRIPTOR_SIZES.reduce((o, d) => (o[d] = fd(d), o), {}))(d => (fh => LSH_ACCEPTABLE_HASH_SIZES.reduce((o, h) => (o[h] = fh(h), o), {}))(h => (fl => [0, 1, 2].reduce((o, l) => (o[l] = fl(l), o), {}))(l => `lshKnn${d}h${h}lv${l}`)));
  15145. /**
  15146. * K approximate nearest neighbors matcher
  15147. */
  15148. class SpeedyPipelineNodeLSHKNNKeypointMatcher extends SpeedyPipelineNode {
  15149. /**
  15150. * Constructor
  15151. * @param {string} [name] name of the node
  15152. */
  15153. constructor(name = undefined) {
  15154. super(name, 6, [InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('lsh').expects(SpeedyPipelineMessageType.LSHTables), OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches)]);
  15155. /** @type {number} how many neighbors do you want? */
  15156. this._k = DEFAULT_K;
  15157. /** @type {LSHKNNQualityLevel} quality of the matching */
  15158. this._quality = DEFAULT_QUALITY;
  15159. }
  15160. /**
  15161. * How many neighbors do you want?
  15162. * @returns {number}
  15163. */
  15164. get k() {
  15165. return this._k;
  15166. }
  15167. /**
  15168. * How many neighbors do you want?
  15169. * @param {number} k number of neighbors
  15170. */
  15171. set k(k) {
  15172. this._k = Math.max(1, k | 0);
  15173. }
  15174. /**
  15175. * Quality of the matching
  15176. * @returns {LSHKNNQualityLevel}
  15177. */
  15178. get quality() {
  15179. return this._quality;
  15180. }
  15181. /**
  15182. * Quality of the matching
  15183. * @param {LSHKNNQualityLevel} quality
  15184. */
  15185. set quality(quality) {
  15186. if (!Object.prototype.hasOwnProperty.call(NUMBER_OF_BIT_SWAPS, quality)) throw new utils_errors/* IllegalArgumentError */.qw(`Invalid quality level: "${quality}"`);
  15187. this._quality = quality;
  15188. }
  15189. /**
  15190. * Run the specific task of this node
  15191. * @param {SpeedyGPU} gpu
  15192. * @returns {void|SpeedyPromise<void>}
  15193. */
  15194. _run(gpu) {
  15195. const {
  15196. encodedKeypoints,
  15197. descriptorSize,
  15198. extraSize,
  15199. encoderLength
  15200. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  15201. /** @type {SpeedyLSH} */
  15202. const lsh = this.input('lsh').read().lsh;
  15203. const keypoints = gpu.programs.keypoints;
  15204. const tables = lsh.tables;
  15205. const descriptorDB = lsh.descriptorDB;
  15206. const tablesStride = tables.width;
  15207. const descriptorDBStride = descriptorDB.width;
  15208. const tableCount = lsh.tableCount;
  15209. const hashSize = lsh.hashSize;
  15210. const bucketCapacity = lsh.bucketCapacity;
  15211. const bucketsPerTable = lsh.bucketsPerTable;
  15212. const sequences = lsh.sequences;
  15213. const candidatesA = this._tex[0];
  15214. const candidatesB = this._tex[1];
  15215. const candidatesC = this._tex[2];
  15216. const filters = this._tex[3];
  15217. const transferA = this._tex[4];
  15218. const transferB = this._tex[5];
  15219. const level = NUMBER_OF_BIT_SWAPS[this._quality];
  15220. const matchesPerKeypoint = this._k;
  15221. // validate parameters
  15222. if (descriptorSize !== lsh.descriptorSize) throw new utils_errors/* IllegalArgumentError */.qw(`Can't match different types of descriptors in ${this.fullName}`);
  15223. utils/* Utils */.A.assert(LSH_KNN[descriptorSize] != undefined);
  15224. utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize] != undefined);
  15225. utils/* Utils */.A.assert(LSH_KNN[descriptorSize][hashSize][level] != undefined);
  15226. // configure the output texture
  15227. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15228. const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
  15229. let encodedMatches = transferB;
  15230. keypoints.lshKnnTransfer.outputs(matcherLength, matcherLength, transferA, transferB);
  15231. // prepare the LSH matching
  15232. const kthMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  15233. keypoints.lshKnnInitCandidates.outputs(kthMatcherLength, kthMatcherLength, candidatesA);
  15234. keypoints.lshKnnInitFilters.outputs(kthMatcherLength, kthMatcherLength, filters);
  15235. const lshKnn = keypoints[LSH_KNN[descriptorSize][hashSize][level]];
  15236. lshKnn.outputs(kthMatcherLength, kthMatcherLength, candidatesB, candidatesC);
  15237. lshKnn.setUBO('LSHSequences', sequences);
  15238. // match keypoints
  15239. encodedMatches.clear();
  15240. keypoints.lshKnnInitFilters();
  15241. for (let i = 0; i < matchesPerKeypoint; i++) {
  15242. // find the (i+1)-th best match
  15243. let candidates = keypoints.lshKnnInitCandidates();
  15244. for (let tableIndex = 0; tableIndex < tableCount; tableIndex++) {
  15245. candidates = lshKnn(candidates, filters, kthMatcherLength, tables, descriptorDB, tableIndex, bucketCapacity, bucketsPerTable, tablesStride, descriptorDBStride, encodedKeypoints, descriptorSize, extraSize, encoderLength);
  15246. gpu.gl.flush();
  15247. }
  15248. candidates.copyTo(filters);
  15249. // transfer matches to an encoded matches texture
  15250. encodedMatches = keypoints.lshKnnTransfer(encodedMatches, candidates, matchesPerKeypoint, i);
  15251. }
  15252. // done
  15253. this.output().swrite(encodedMatches, matchesPerKeypoint);
  15254. /*
  15255. // debug
  15256. let data = filters.inspect32(gpu), debug = [];
  15257. for(let i = 0; i < data.length; i++) {
  15258. const bits = MATCH_INDEX_BITS;
  15259. const mask = (1 << bits) - 1;
  15260. const u32 = data[i];
  15261. const index = u32 & mask, distance = u32 >>> bits;
  15262. //debug.push('|'+[ u32 ].toString());
  15263. debug.push('|'+[ index, distance ].toString());
  15264. }
  15265. console.log(debug.join(','));
  15266. */
  15267. }
  15268. }
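/*
 * Usage sketch (not part of the library): approximate matching with LSH KNN, assuming
 * the node is exposed as Speedy.Keypoint.Matcher.LSHKNN().
 *
 *     const knn = Speedy.Keypoint.Matcher.LSHKNN('knn');
 *     knn.k = 2;               // 2 nearest neighbors per keypoint (e.g., for a ratio test)
 *     knn.quality = 'default'; // 'fastest' | 'default' | 'demanding' (number of bit swaps)
 *     // inputs: 'keypoints' (with descriptors) and 'lsh' (from a Static LSH Tables node);
 *     // output: KeypointMatches, typically read back with a sink of matched keypoints.
 */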
  15269. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/matchers/bf-knn.js
  15270. /*
  15271. * speedy-vision.js
  15272. * GPU-accelerated Computer Vision for JavaScript
  15273. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15274. *
  15275. * Licensed under the Apache License, Version 2.0 (the "License");
  15276. * you may not use this file except in compliance with the License.
  15277. * You may obtain a copy of the License at
  15278. *
  15279. * http://www.apache.org/licenses/LICENSE-2.0
  15280. *
  15281. * Unless required by applicable law or agreed to in writing, software
  15282. * distributed under the License is distributed on an "AS IS" BASIS,
  15283. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15284. * See the License for the specific language governing permissions and
  15285. * limitations under the License.
  15286. *
  15287. * bf-knn.js
  15288. * Brute Force KNN Keypoint Matcher
  15289. */
  15290. /** @type {Object<number,string>} program name indexed by descriptor size */
  15291. const PROGRAM_NAME = {
  15292. 32: 'bfMatcher32',
  15293. 64: 'bfMatcher64'
  15294. };
  15295. /**
  15296. * Brute Force KNN Keypoint Matcher. Make sure to use a Keypoint Clipper before
  15297. * invoking this (use a database of 50 keypoints or so - your mileage may vary)
  15298. */
  15299. class SpeedyPipelineNodeBruteForceKNNKeypointMatcher extends SpeedyPipelineNode {
  15300. /**
  15301. * Constructor
  15302. * @param {string} [name] name of the node
  15303. */
  15304. constructor(name = undefined) {
  15305. super(name, 6, [InputPort('keypoints').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('database').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), OutputPort().expects(SpeedyPipelineMessageType.KeypointMatches)]);
  15306. /** @type {number} number of matches per keypoint (the "k" of knn) */
  15307. this._matchesPerKeypoint = 1;
  15308. }
  15309. /**
  15310. * Number of matches per keypoint
  15311. * @returns {number}
  15312. */
  15313. get k() {
  15314. return this._matchesPerKeypoint;
  15315. }
  15316. /**
  15317. * Number of matches per keypoint
  15318. * @param {number} value
  15319. */
  15320. set k(value) {
  15321. this._matchesPerKeypoint = Math.max(1, value | 0);
  15322. }
  15323. /**
  15324. * Run the specific task of this node
  15325. * @param {SpeedyGPU} gpu
  15326. * @returns {void|SpeedyPromise<void>}
  15327. */
  15328. _run(gpu) {
  15329. const {
  15330. encodedKeypoints,
  15331. descriptorSize,
  15332. extraSize,
  15333. encoderLength
  15334. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('keypoints').read();
  15335. const database = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('database').read();
  15336. const candidatesA = this._tex[0];
  15337. const candidatesB = this._tex[1];
  15338. const candidatesC = this._tex[2];
  15339. const encodedFiltersA = this._tex[3];
  15340. const encodedMatchesA = this._tex[4];
  15341. const encodedMatchesB = this._tex[5];
  15342. const matchesPerKeypoint = this._matchesPerKeypoint;
  15343. const keypoints = gpu.programs.keypoints;
  15344. // validate parameters
  15345. if (descriptorSize !== database.descriptorSize) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible descriptors in ${this.fullName}`);else if (!Object.prototype.hasOwnProperty.call(PROGRAM_NAME, descriptorSize)) throw new utils_errors/* NotSupportedError */.EM(`Unsupported descriptor size (${descriptorSize}) in ${this.fullName}`);
  15346. // prepare the brute force matching
  15347. const bfMatcher = keypoints[PROGRAM_NAME[descriptorSize]];
  15348. const capacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(descriptorSize, extraSize, encoderLength);
  15349. const dbCapacity = SpeedyPipelineNodeKeypointDetector.encoderCapacity(database.descriptorSize, database.extraSize, database.encoderLength);
  15350. const numberOfKeypointsPerPass = bfMatcher.definedConstant('NUMBER_OF_KEYPOINTS_PER_PASS');
  15351. const numberOfPasses = Math.ceil(dbCapacity / numberOfKeypointsPerPass);
  15352. const partialMatcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity)));
  15353. const matcherLength = Math.max(1, Math.ceil(Math.sqrt(capacity * matchesPerKeypoint)));
  15354. keypoints.bfMatcherTransfer.outputs(matcherLength, matcherLength, encodedMatchesA, encodedMatchesB);
  15355. keypoints.bfMatcherInitCandidates.outputs(partialMatcherLength, partialMatcherLength, candidatesC);
  15356. keypoints.bfMatcherInitFilters.outputs(partialMatcherLength, partialMatcherLength, encodedFiltersA);
  15357. bfMatcher.outputs(partialMatcherLength, partialMatcherLength, candidatesA, candidatesB);
  15358. // match keypoints
  15359. let encodedMatches = encodedMatchesB.clear(); // will hold all best matches
  15360. let encodedFilters = keypoints.bfMatcherInitFilters();
  15361. for (let k = 0; k < matchesPerKeypoint; k++) {
  15362. let encodedPartialMatches = keypoints.bfMatcherInitCandidates(); // hold the (k+1)-th best matches
  15363. // find the (k+1)-th best match
  15364. for (let passId = 0; passId < numberOfPasses; passId++) {
  15365. encodedPartialMatches = bfMatcher(encodedPartialMatches, encodedFilters, partialMatcherLength, database.encodedKeypoints, database.descriptorSize, database.extraSize, database.encoderLength, encodedKeypoints, descriptorSize, extraSize, encoderLength, passId);
  15366. gpu.gl.flush();
  15367. }
  15368. //gpu.gl.flush();
  15369. // copy the (k+1)-th best match to the filter
  15370. if (matchesPerKeypoint > 1) encodedPartialMatches.copyTo(encodedFilters);
  15371. // aggregate matches
  15372. encodedMatches = keypoints.bfMatcherTransfer(encodedMatches, encodedPartialMatches, matchesPerKeypoint, k);
  15373. }
  15374. // done!
  15375. this.output().swrite(encodedMatches, matchesPerKeypoint);
  15376. }
  15377. }
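/*
 * Usage sketch (not part of the library): brute-force KNN matching. As noted above, clip
 * the database to a small set of keypoints beforehand. The factory names and the clipper's
 * `size` property are assumptions; only `k` is defined in this file.
 *
 *     const clipper = Speedy.Keypoint.Clipper();   // assumed public factory
 *     clipper.size = 50;                           // hypothetical property: cap the database
 *     const bfknn = Speedy.Keypoint.Matcher.BFKNN('bfknn');
 *     bfknn.k = 1;                                 // best match only
 *     // inputs: 'keypoints' and 'database', both carrying descriptors of the same size
 *     // (32 or 64 bytes, per PROGRAM_NAME above); output: KeypointMatches.
 */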
  15378. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/distance-filter.js
  15379. /*
  15380. * speedy-vision.js
  15381. * GPU-accelerated Computer Vision for JavaScript
  15382. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15383. *
  15384. * Licensed under the Apache License, Version 2.0 (the "License");
  15385. * you may not use this file except in compliance with the License.
  15386. * You may obtain a copy of the License at
  15387. *
  15388. * http://www.apache.org/licenses/LICENSE-2.0
  15389. *
  15390. * Unless required by applicable law or agreed to in writing, software
  15391. * distributed under the License is distributed on an "AS IS" BASIS,
  15392. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15393. * See the License for the specific language governing permissions and
  15394. * limitations under the License.
  15395. *
  15396. * distance-filter.js
  15397. * Given a set of pairs of keypoints, discard all pairs whose distance is
  15398. * above a user-defined threshold. Useful for bidirectional optical-flow.
  15399. */
  15400. /**
  15401. * Given a set of pairs of keypoints, discard all pairs whose distance is
  15402. * above a user-defined threshold. Useful for bidirectional optical-flow.
  15403. *
  15404. * The pairs of keypoints are provided as two separate sets, "in" and
  15405. * "reference". Keypoints that are kept will have their data extracted
  15406. * from the "in" set.
  15407. */
  15408. class SpeedyPipelineNodeKeypointDistanceFilter extends SpeedyPipelineNode {
  15409. /**
  15410. * Constructor
  15411. * @param {string} [name] name of the node
  15412. */
  15413. constructor(name = undefined) {
  15414. super(name, 1, [InputPort('in').expects(SpeedyPipelineMessageType.Keypoints), InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15415. /** @type {number} maximum accepted distance */
  15416. this._threshold = globals.MAX_TEXTURE_LENGTH + 1;
  15417. }
  15418. /**
  15419. * Maximum accepted distance
  15420. * @returns {number}
  15421. */
  15422. get threshold() {
  15423. return this._threshold;
  15424. }
  15425. /**
  15426. * Maximum accepted distance
  15427. * @param {number} value
  15428. */
  15429. set threshold(value) {
  15430. this._threshold = Math.max(0, +value);
  15431. }
  15432. /**
  15433. * Run the specific task of this node
  15434. * @param {SpeedyGPU} gpu
  15435. * @returns {void|SpeedyPromise<void>}
  15436. */
  15437. _run(gpu) {
  15438. const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in').read();
  15439. const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('reference').read();
  15440. const threshold = this._threshold;
  15441. // validate shapes
  15442. if (set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize) throw new utils_errors/* IllegalOperationError */.Er(`The distance filter requires two compatible shapes of keypoint streams`);
  15443. // calculate the shape of the output
  15444. const outputTexture = this._tex[0];
  15445. const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
  15446. const descriptorSize = set0.descriptorSize;
  15447. const extraSize = set0.extraSize;
  15448. // apply the distance filter
  15449. gpu.programs.keypoints.distanceFilter.outputs(encoderLength, encoderLength, outputTexture)(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
  15450. // done!
  15451. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  15452. }
  15453. }
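/*
 * Usage sketch (not part of the library): pairwise distance filtering, e.g. for a
 * forward-backward (bidirectional) optical-flow consistency check. It assumes the node
 * is exposed as Speedy.Keypoint.DistanceFilter(). Keypoints are compared pair by pair:
 * the i-th keypoint of 'in' against the i-th keypoint of 'reference'; pairs farther
 * apart than the threshold are dropped, and kept keypoints take their data from 'in'.
 *
 *     const consistency = Speedy.Keypoint.DistanceFilter('consistency');
 *     consistency.threshold = 2; // maximum accepted distance, in pixels
 */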
  15454. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/hamming-distance-filter.js
  15455. /*
  15456. * speedy-vision.js
  15457. * GPU-accelerated Computer Vision for JavaScript
  15458. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15459. *
  15460. * Licensed under the Apache License, Version 2.0 (the "License");
  15461. * you may not use this file except in compliance with the License.
  15462. * You may obtain a copy of the License at
  15463. *
  15464. * http://www.apache.org/licenses/LICENSE-2.0
  15465. *
  15466. * Unless required by applicable law or agreed to in writing, software
  15467. * distributed under the License is distributed on an "AS IS" BASIS,
  15468. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15469. * See the License for the specific language governing permissions and
  15470. * limitations under the License.
  15471. *
  15472. * hamming-distance-filter.js
  15473. * Given a set of pairs of keypoints, discard all pairs whose hamming
  15474. * distance (of descriptor) is above a user-defined threshold
  15475. */
  15476. /** @type {Object<number,string>} Program names */
  15477. const hamming_distance_filter_PROGRAM_NAME = {
  15478. 32: 'hammingDistanceFilter32',
  15479. 64: 'hammingDistanceFilter64'
  15480. };
  15481. /**
  15482. * Given a set of pairs of keypoints, discard all pairs whose hamming
  15483. * distance (of descriptor) is above a user-defined threshold
  15484. *
  15485. * The pairs of keypoints are provided as two separate sets, "in" and
  15486. * "reference". Keypoints that are kept will have their data extracted
  15487. * from the "in" set.
  15488. */
  15489. class SpeedyPipelineNodeKeypointHammingDistanceFilter extends SpeedyPipelineNode {
  15490. /**
  15491. * Constructor
  15492. * @param {string} [name] name of the node
  15493. */
  15494. constructor(name = undefined) {
  15495. super(name, 1, [InputPort('in').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), InputPort('reference').expects(SpeedyPipelineMessageType.Keypoints).satisfying(( /** @type {SpeedyPipelineMessageWithKeypoints} */msg) => msg.descriptorSize > 0), OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15496. /** @type {number} distance threshold, an integer */
  15497. this._threshold = globals.MAX_DESCRIPTOR_SIZE * 8; // convert from bytes to bits
  15498. }
  15499. /**
  15500. * Distance threshold, an integer
  15501. * @returns {number}
  15502. */
  15503. get threshold() {
  15504. return this._threshold;
  15505. }
  15506. /**
  15507. * Distance threshold, an integer
  15508. * @param {number} value
  15509. */
  15510. set threshold(value) {
  15511. this._threshold = Math.max(0, value | 0);
  15512. }
  15513. /**
  15514. * Run the specific task of this node
  15515. * @param {SpeedyGPU} gpu
  15516. * @returns {void|SpeedyPromise<void>}
  15517. */
  15518. _run(gpu) {
  15519. const set0 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('in').read();
  15520. const set1 = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input('reference').read();
  15521. const threshold = this._threshold;
  15522. // validate shapes
  15523. if (set0.descriptorSize != set1.descriptorSize || set0.extraSize != set1.extraSize) throw new utils_errors/* IllegalOperationError */.Er(`The Hamming distance filter requires two compatible shapes of keypoint streams`);
  15524. // validate descriptor size
  15525. if (!Object.prototype.hasOwnProperty.call(hamming_distance_filter_PROGRAM_NAME, set0.descriptorSize)) throw new utils_errors/* NotSupportedError */.EM(`Hamming distance filter - invalid descriptor size: ${set0.descriptorSize}`);
  15526. // calculate the shape of the output
  15527. const outputTexture = this._tex[0];
  15528. const encoderLength = Math.max(set0.encoderLength, set1.encoderLength);
  15529. const descriptorSize = set0.descriptorSize;
  15530. const extraSize = set0.extraSize;
  15531. // apply the distance filter
  15532. const program = hamming_distance_filter_PROGRAM_NAME[set0.descriptorSize];
  15533. gpu.programs.keypoints[program].outputs(encoderLength, encoderLength, outputTexture)(set0.encodedKeypoints, set0.encoderLength, set1.encodedKeypoints, set1.encoderLength, descriptorSize, extraSize, encoderLength, threshold);
  15534. // done!
  15535. this.output().swrite(outputTexture, descriptorSize, extraSize, encoderLength);
  15536. }
  15537. }
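/*
 * Usage sketch (not part of the library): dropping weak descriptor matches, assuming the
 * node is exposed as Speedy.Keypoint.HammingDistanceFilter(). The threshold is given in
 * bits and only 32- and 64-byte descriptors are supported (see the program names above).
 *
 *     const filter = Speedy.Keypoint.HammingDistanceFilter();
 *     filter.threshold = 64; // accept pairs whose descriptors differ by at most 64 bits
 *     // 'in' and 'reference' must have the same descriptor/extra sizes; kept keypoints
 *     // take their data from the 'in' set.
 */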
  15538. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/keypoints/portal.js
  15539. /*
  15540. * speedy-vision.js
  15541. * GPU-accelerated Computer Vision for JavaScript
  15542. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15543. *
  15544. * Licensed under the Apache License, Version 2.0 (the "License");
  15545. * you may not use this file except in compliance with the License.
  15546. * You may obtain a copy of the License at
  15547. *
  15548. * http://www.apache.org/licenses/LICENSE-2.0
  15549. *
  15550. * Unless required by applicable law or agreed to in writing, software
  15551. * distributed under the License is distributed on an "AS IS" BASIS,
  15552. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15553. * See the License for the specific language governing permissions and
  15554. * limitations under the License.
  15555. *
  15556. * portal.js
  15557. * Keypoint Portals
  15558. */
  15559. /**
  15560. * A sink of a Keypoint Portal
  15561. * This is not a pipeline sink - it doesn't export any data!
  15562. */
  15563. class SpeedyPipelineNodeKeypointPortalSink extends SpeedyPipelineNode {
  15564. /**
  15565. * Constructor
  15566. * @param {string} [name] name of the node
  15567. */
  15568. constructor(name = undefined) {
  15569. super(name, 1, [InputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15570. /** @type {number} descriptor size, in bytes */
  15571. this._descriptorSize = 0;
  15572. /** @type {number} extra size, in bytes */
  15573. this._extraSize = 0;
15574. /** @type {number} encoder length */
  15575. this._encoderLength = 0;
  15576. /** @type {boolean} is this node initialized? */
  15577. this._initialized = false;
  15578. }
  15579. /**
  15580. * Encoded keypoints
  15581. * @returns {SpeedyTexture}
  15582. */
  15583. get encodedKeypoints() {
  15584. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15585. return this._tex[0];
  15586. }
  15587. /**
  15588. * Descriptor size, in bytes
  15589. * @returns {number}
  15590. */
  15591. get descriptorSize() {
  15592. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15593. return this._descriptorSize;
  15594. }
  15595. /**
  15596. * Extra size, in bytes
  15597. * @returns {number}
  15598. */
  15599. get extraSize() {
  15600. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15601. return this._extraSize;
  15602. }
  15603. /**
  15604. * Encoder length
  15605. * @returns {number}
  15606. */
  15607. get encoderLength() {
  15608. if (!this._initialized) throw new utils_errors/* IllegalOperationError */.Er(`Portal error: ${this.fullName} holds no data`);
  15609. return this._encoderLength;
  15610. }
  15611. /**
  15612. * Initializes this node
  15613. * @param {SpeedyGPU} gpu
  15614. */
  15615. init(gpu) {
  15616. super.init(gpu);
  15617. const encoderLength = SpeedyPipelineNodeKeypointDetector.encoderLength(0, 0, 0);
  15618. this._tex[0].resize(encoderLength, encoderLength).clearToColor(1, 1, 1, 1); // initial texture
  15619. this._descriptorSize = this._extraSize = 0;
  15620. this._encoderLength = encoderLength;
  15621. this._initialized = true;
  15622. }
  15623. /**
  15624. * Releases this node
  15625. * @param {SpeedyGPU} gpu
  15626. */
  15627. release(gpu) {
  15628. this._initialized = false;
  15629. super.release(gpu);
  15630. }
  15631. /**
  15632. * Run the specific task of this node
  15633. * @param {SpeedyGPU} gpu
  15634. * @returns {void|SpeedyPromise<void>}
  15635. */
  15636. _run(gpu) {
  15637. const {
  15638. encodedKeypoints,
  15639. descriptorSize,
  15640. extraSize,
  15641. encoderLength
  15642. } = /** @type {SpeedyPipelineMessageWithKeypoints} */this.input().read();
  15643. const tex = this._tex[0];
  15644. // copy input
  15645. tex.resize(encodedKeypoints.width, encodedKeypoints.height);
  15646. encodedKeypoints.copyTo(tex);
  15647. this._descriptorSize = descriptorSize;
  15648. this._extraSize = extraSize;
  15649. this._encoderLength = encoderLength;
  15650. }
  15651. }
  15652. /**
  15653. * A source of a Keypoint Portal
  15654. */
  15655. class SpeedyPipelineNodeKeypointPortalSource extends SpeedyPipelineSourceNode {
  15656. /**
  15657. * Constructor
  15658. * @param {string} [name] name of the node
  15659. */
  15660. constructor(name = undefined) {
  15661. super(name, 0, [OutputPort().expects(SpeedyPipelineMessageType.Keypoints)]);
  15662. /** @type {SpeedyPipelineNodeKeypointPortalSink|null} portal sink */
  15663. this._source = null;
  15664. }
  15665. /**
  15666. * Data source
  15667. * @returns {SpeedyPipelineNodeKeypointPortalSink|null}
  15668. */
  15669. get source() {
  15670. return this._source;
  15671. }
  15672. /**
  15673. * Data source
  15674. * @param {SpeedyPipelineNodeKeypointPortalSink|null} node
  15675. */
  15676. set source(node) {
  15677. if (node !== null && !(node instanceof SpeedyPipelineNodeKeypointPortalSink)) throw new utils_errors/* IllegalArgumentError */.qw(`Incompatible source for ${this.fullName}`);
  15678. this._source = node;
  15679. }
  15680. /**
  15681. * Run the specific task of this node
  15682. * @param {SpeedyGPU} gpu
  15683. * @returns {void|SpeedyPromise<void>}
  15684. */
  15685. _run(gpu) {
  15686. if (this._source == null) throw new utils_errors/* IllegalOperationError */.Er(`${this.fullName} has no source`);
  15687. this.output().swrite(this._source.encodedKeypoints, this._source.descriptorSize, this._source.extraSize, this._source.encoderLength);
  15688. }
  15689. }
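/*
 * Usage sketch (not part of the library): a keypoint portal carries keypoints from one
 * pipeline (or one frame) to the next without a GPU-CPU round trip. The sink stores the
 * encoded keypoints; a source re-emits them once its `source` property points to that
 * sink. Factory names are assumed to be Speedy.Keypoint.Portal.Sink() / .Source().
 *
 *     const portalSink = Speedy.Keypoint.Portal.Sink('portal');   // in pipeline #1
 *     const portalSource = Speedy.Keypoint.Portal.Source();       // in pipeline #2
 *     portalSource.source = portalSink;
 *     // run pipeline #1 at least once before pipeline #2 reads from the portal;
 *     // until then the sink holds an initial, empty set of keypoints (see init() above).
 */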
  15690. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/keypoint-factory.js
  15691. /*
  15692. * speedy-vision.js
  15693. * GPU-accelerated Computer Vision for JavaScript
  15694. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15695. *
  15696. * Licensed under the Apache License, Version 2.0 (the "License");
  15697. * you may not use this file except in compliance with the License.
  15698. * You may obtain a copy of the License at
  15699. *
  15700. * http://www.apache.org/licenses/LICENSE-2.0
  15701. *
  15702. * Unless required by applicable law or agreed to in writing, software
  15703. * distributed under the License is distributed on an "AS IS" BASIS,
  15704. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15705. * See the License for the specific language governing permissions and
  15706. * limitations under the License.
  15707. *
  15708. * keypoint-factory.js
  15709. * Keypoint-related nodes
  15710. */
  15711. /**
  15712. * Keypoint detectors
  15713. */
  15714. class SpeedyPipelineKeypointDetectorFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15715. /**
  15716. * FAST corner detector
  15717. * @param {string} [name]
  15718. * @returns {SpeedyPipelineNodeFASTKeypointDetector}
  15719. */
  15720. static FAST(name = undefined) {
  15721. return new SpeedyPipelineNodeFASTKeypointDetector(name);
  15722. }
  15723. /**
  15724. * Harris corner detector
  15725. * @param {string} [name]
  15726. * @returns {SpeedyPipelineNodeHarrisKeypointDetector}
  15727. */
  15728. static Harris(name = undefined) {
  15729. return new SpeedyPipelineNodeHarrisKeypointDetector(name);
  15730. }
  15731. }
  15732. /**
  15733. * Keypoint descriptors
  15734. */
  15735. class SpeedyPipelineKeypointDescriptorFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15736. /**
  15737. * ORB descriptors
  15738. * @param {string} [name]
  15739. * @returns {SpeedyPipelineNodeORBKeypointDescriptor}
  15740. */
  15741. static ORB(name = undefined) {
  15742. return new SpeedyPipelineNodeORBKeypointDescriptor(name);
  15743. }
  15744. }
  15745. /**
  15746. * Keypoint trackers
  15747. */
  15748. class SpeedyPipelineKeypointTrackerFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15749. /**
  15750. * LK optical-flow
  15751. * @param {string} [name]
  15752. * @returns {SpeedyPipelineNodeLKKeypointTracker}
  15753. */
  15754. static LK(name = undefined) {
  15755. return new SpeedyPipelineNodeLKKeypointTracker(name);
  15756. }
  15757. }
  15758. /**
  15759. * Keypoint matchers
  15760. */
  15761. class SpeedyPipelineKeypointMatcherFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15762. /**
  15763. * Static LSH tables
  15764. * @param {string} [name]
  15765. * @returns {SpeedyPipelineNodeStaticLSHTables}
  15766. */
  15767. static StaticLSHTables(name = undefined) {
  15768. return new SpeedyPipelineNodeStaticLSHTables(name);
  15769. }
  15770. /**
  15771. * LSH-based K-approximate nearest neighbors
  15772. * @param {string} [name]
  15773. * @returns {SpeedyPipelineNodeLSHKNNKeypointMatcher}
  15774. */
  15775. static LSHKNN(name = undefined) {
  15776. return new SpeedyPipelineNodeLSHKNNKeypointMatcher(name);
  15777. }
  15778. /**
  15779. * Brute-force K-nearest neighbors keypoint matcher
  15780. * @param {string} [name]
  15781. * @returns {SpeedyPipelineNodeBruteForceKNNKeypointMatcher}
  15782. */
  15783. static BFKNN(name = undefined) {
  15784. return new SpeedyPipelineNodeBruteForceKNNKeypointMatcher(name);
  15785. }
  15786. }
  15787. /**
  15788. * Portal nodes
  15789. */
  15790. class SpeedyPipelineKeypointPortalFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15791. /**
15792. * Create a keypoint portal source
  15793. * @param {string} [name] name of the node
  15794. * @returns {SpeedyPipelineNodeKeypointPortalSource}
  15795. */
  15796. static Source(name = undefined) {
  15797. return new SpeedyPipelineNodeKeypointPortalSource(name);
  15798. }
  15799. /**
15800. * Create a keypoint portal sink
  15801. * @param {string} [name] name of the node
  15802. * @returns {SpeedyPipelineNodeKeypointPortalSink}
  15803. */
  15804. static Sink(name = undefined) {
  15805. return new SpeedyPipelineNodeKeypointPortalSink(name);
  15806. }
  15807. }
  15808. /**
  15809. * Keypoint-related nodes
  15810. */
  15811. class SpeedyPipelineKeypointFactory extends speedy_namespace/* SpeedyNamespace */.Q {
  15812. /**
  15813. * Keypoint detectors
  15814. * @returns {typeof SpeedyPipelineKeypointDetectorFactory}
  15815. */
  15816. static get Detector() {
  15817. return SpeedyPipelineKeypointDetectorFactory;
  15818. }
  15819. /**
  15820. * Keypoint descriptors
  15821. * @returns {typeof SpeedyPipelineKeypointDescriptorFactory}
  15822. */
  15823. static get Descriptor() {
  15824. return SpeedyPipelineKeypointDescriptorFactory;
  15825. }
  15826. /**
  15827. * Keypoint trackers
  15828. * @returns {typeof SpeedyPipelineKeypointTrackerFactory}
  15829. */
  15830. static get Tracker() {
  15831. return SpeedyPipelineKeypointTrackerFactory;
  15832. }
  15833. /**
  15834. * Keypoint matchers
  15835. * @returns {typeof SpeedyPipelineKeypointMatcherFactory}
  15836. */
  15837. static get Matcher() {
  15838. return SpeedyPipelineKeypointMatcherFactory;
  15839. }
  15840. /**
  15841. * Keypoint Portals
  15842. * @returns {typeof SpeedyPipelineKeypointPortalFactory}
  15843. */
  15844. static get Portal() {
  15845. return SpeedyPipelineKeypointPortalFactory;
  15846. }
  15847. /**
  15848. * Create a keypoint source
  15849. * @param {string} [name]
  15850. * @returns {SpeedyPipelineNodeKeypointSource}
  15851. */
  15852. static Source(name = undefined) {
  15853. return new SpeedyPipelineNodeKeypointSource(name);
  15854. }
  15855. /**
  15856. * Create a keypoint sink
  15857. * @param {string} [name]
  15858. * @returns {SpeedyPipelineNodeKeypointSink}
  15859. */
  15860. static Sink(name = undefined) {
  15861. return new SpeedyPipelineNodeKeypointSink(name);
  15862. }
  15863. /**
  15864. * Create a sink of tracked keypoints
  15865. * @param {string} [name]
  15866. * @returns {SpeedyPipelineNodeTrackedKeypointSink}
  15867. */
  15868. static SinkOfTrackedKeypoints(name = undefined) {
  15869. return new SpeedyPipelineNodeTrackedKeypointSink(name);
  15870. }
  15871. /**
  15872. * Create a sink of matched keypoints
  15873. * @param {string} [name]
  15874. * @returns {SpeedyPipelineNodeMatchedKeypointSink}
  15875. */
  15876. static SinkOfMatchedKeypoints(name = undefined) {
  15877. return new SpeedyPipelineNodeMatchedKeypointSink(name);
  15878. }
  15879. /**
  15880. * Keypoint clipper
  15881. * @param {string} [name]
  15882. * @returns {SpeedyPipelineNodeKeypointClipper}
  15883. */
  15884. static Clipper(name = undefined) {
  15885. return new SpeedyPipelineNodeKeypointClipper(name);
  15886. }
  15887. /**
  15888. * Border Clipper
  15889. * @param {string} [name]
  15890. * @returns {SpeedyPipelineNodeKeypointBorderClipper}
  15891. */
  15892. static BorderClipper(name = undefined) {
  15893. return new SpeedyPipelineNodeKeypointBorderClipper(name);
  15894. }
  15895. /**
  15896. * Create a keypoint buffer
  15897. * @param {string} [name]
  15898. * @returns {SpeedyPipelineNodeKeypointBuffer}
  15899. */
  15900. static Buffer(name = undefined) {
  15901. return new SpeedyPipelineNodeKeypointBuffer(name);
  15902. }
  15903. /**
  15904. * Create a keypoint mixer
  15905. * @param {string} [name]
  15906. * @returns {SpeedyPipelineNodeKeypointMixer}
  15907. */
  15908. static Mixer(name = undefined) {
  15909. return new SpeedyPipelineNodeKeypointMixer(name);
  15910. }
  15911. /**
  15912. * Create a keypoint shuffler
  15913. * @param {string} [name]
  15914. * @returns {SpeedyPipelineNodeKeypointShuffler}
  15915. */
  15916. static Shuffler(name = undefined) {
  15917. return new SpeedyPipelineNodeKeypointShuffler(name);
  15918. }
  15919. /**
  15920. * Create a keypoint multiplexer
  15921. * @param {string} [name]
  15922. * @returns {SpeedyPipelineNodeKeypointMultiplexer}
  15923. */
  15924. static Multiplexer(name = undefined) {
  15925. return new SpeedyPipelineNodeKeypointMultiplexer(name);
  15926. }
  15927. /**
  15928. * Create a keypoint transformer
  15929. * @param {string} [name]
  15930. * @returns {SpeedyPipelineNodeKeypointTransformer}
  15931. */
  15932. static Transformer(name = undefined) {
  15933. return new SpeedyPipelineNodeKeypointTransformer(name);
  15934. }
  15935. /**
  15936. * Create a subpixel refiner of keypoint locations
  15937. * @param {string} [name]
  15938. * @returns {SpeedyPipelineNodeKeypointSubpixelRefiner}
  15939. */
  15940. static SubpixelRefiner(name = undefined) {
  15941. return new SpeedyPipelineNodeKeypointSubpixelRefiner(name);
  15942. }
  15943. /**
  15944. * Distance filter
  15945. * @param {string} [name]
15946. * @returns {SpeedyPipelineNodeKeypointDistanceFilter}
  15947. */
  15948. static DistanceFilter(name = undefined) {
  15949. return new SpeedyPipelineNodeKeypointDistanceFilter(name);
  15950. }
  15951. /**
  15952. * Hamming distance filter
  15953. * @param {string} [name]
15954. * @returns {SpeedyPipelineNodeKeypointHammingDistanceFilter}
  15955. */
  15956. static HammingDistanceFilter(name = undefined) {
  15957. return new SpeedyPipelineNodeKeypointHammingDistanceFilter(name);
  15958. }
  15959. }
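/*
 * Usage sketch (not part of the library): this factory is assumed to be exposed to users
 * as Speedy.Keypoint, so the nodes above would be reached as follows.
 *
 *     const fast = Speedy.Keypoint.Detector.FAST();  // SpeedyPipelineNodeFASTKeypointDetector
 *     const orb  = Speedy.Keypoint.Descriptor.ORB(); // SpeedyPipelineNodeORBKeypointDescriptor
 *     const lk   = Speedy.Keypoint.Tracker.LK();     // SpeedyPipelineNodeLKKeypointTracker
 *     const knn  = Speedy.Keypoint.Matcher.LSHKNN(); // SpeedyPipelineNodeLSHKNNKeypointMatcher
 *     const sink = Speedy.Keypoint.Sink();           // SpeedyPipelineNodeKeypointSink
 *     // nodes are then wired port-to-port and run inside a SpeedyPipeline
 *     // (the wiring API itself is defined elsewhere in this bundle).
 */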
  15960. ;// CONCATENATED MODULE: ./src/core/pipeline/nodes/vector2/sink.js
  15961. /*
  15962. * speedy-vision.js
  15963. * GPU-accelerated Computer Vision for JavaScript
  15964. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  15965. *
  15966. * Licensed under the Apache License, Version 2.0 (the "License");
  15967. * you may not use this file except in compliance with the License.
  15968. * You may obtain a copy of the License at
  15969. *
  15970. * http://www.apache.org/licenses/LICENSE-2.0
  15971. *
  15972. * Unless required by applicable law or agreed to in writing, software
  15973. * distributed under the License is distributed on an "AS IS" BASIS,
  15974. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15975. * See the License for the specific language governing permissions and
  15976. * limitations under the License.
  15977. *
  15978. * sink.js
15979. * Gets 2D vectors out of the pipeline
  15980. */
  15981. // next power of 2
  15982. const vector2_sink_nextPot = x => x > 1 ? 1 << Math.ceil(Math.log2(x)) : 1;
  15983. /**
  15984. * Gets 2D vectors out of the pipeline
  15985. */
  15986. class SpeedyPipelineNodeVector2Sink extends SpeedyPipelineSinkNode {
  15987. /**
  15988. * Constructor
  15989. * @param {string} [name] name of the node
  15990. */
  15991. constructor(name = 'vec2') {
  15992. super(name, 2, [InputPort().expects(SpeedyPipelineMessageType.Vector2)]);
  15993. /** @type {SpeedyVector2[]} 2D vectors (output) */
  15994. this._vectors = [];
  15995. /** @type {SpeedyTextureReader} texture reader */
  15996. this._textureReader = new SpeedyTextureReader();
  15997. /** @type {number} page flipping index */
  15998. this._page = 0;
  15999. /** @type {boolean} accelerate GPU-CPU transfers */
  16000. this._turbo = false;
  16001. }
  16002. /**
  16003. * Accelerate GPU-CPU transfers
  16004. * @returns {boolean}
  16005. */
  16006. get turbo() {
  16007. return this._turbo;
  16008. }
  16009. /**
  16010. * Accelerate GPU-CPU transfers
  16011. * @param {boolean} value
  16012. */
  16013. set turbo(value) {
  16014. this._turbo = Boolean(value);
  16015. }
  16016. /**
  16017. * Initializes this node
  16018. * @param {SpeedyGPU} gpu
  16019. */
  16020. init(gpu) {
  16021. super.init(gpu);
  16022. this._textureReader.init(gpu);
  16023. }
  16024. /**
  16025. * Releases this node
  16026. * @param {SpeedyGPU} gpu
  16027. */
  16028. release(gpu) {
  16029. this._textureReader.release(gpu);
  16030. super.release(gpu);
  16031. }
  16032. /**
  16033. * Export data from this node to the user
  16034. * @returns {SpeedyPromise<SpeedyVector2[]>}
  16035. */
  16036. export() {
  16037. return speedy_promise/* SpeedyPromise */.i.resolve(this._vectors);
  16038. }
  16039. /**
  16040. * Run the specific task of this node
  16041. * @param {SpeedyGPU} gpu
  16042. * @returns {void|SpeedyPromise<void>}
  16043. */
  16044. _run(gpu) {
  16045. const {
  16046. vectors
  16047. } = /** @type {SpeedyPipelineMessageWith2DVectors} */this.input().read();
  16048. const useBufferedDownloads = this._turbo;
  16049. const encoderLength = vectors.width;
  16050. /*
  16051. I have found experimentally that, in Firefox, readPixelsAsync()
  16052. performs MUCH better if the width of the target texture is a power
  16053. of two. I have no idea why this is the case, nor if it's related to
  16054. some interaction with the GL drivers, somehow. This seems to make no
  16055. difference on Chrome, however. In any case, let's convert the input
  16056. texture to POT.
  16057. */
  16058. const encoderWidth = vector2_sink_nextPot(encoderLength);
  16059. const encoderHeight = vector2_sink_nextPot(Math.ceil(encoderLength * encoderLength / encoderWidth));
  16060. //const encoderHeight = (Math.ceil(encoderLength * encoderLength / encoderWidth));
  16061. // copy the set of vectors to an internal texture
  16062. const copiedTexture = this._tex[this._page];
  16063. gpu.programs.utils.copy2DVectors.outputs(encoderWidth, encoderHeight, copiedTexture)(vectors);
  16064. // flip page
  16065. this._page = 1 - this._page;
  16066. // download the internal texture
  16067. return this._textureReader.readPixelsAsync(copiedTexture, 0, 0, copiedTexture.width, copiedTexture.height, useBufferedDownloads).then(pixels => {
  16068. this._vectors = SpeedyPipelineNodeVector2Sink._decode(pixels, encoderWidth, encoderHeight);
  16069. });
  16070. }
  16071. /**
  16072. * Decode a sequence of vectors, given a flattened image of encoded pixels
  16073. * @param {Uint8Array} pixels pixels in the [r,g,b,a,...] format
  16074. * @param {number} encoderWidth
  16075. * @param {number} encoderHeight
  16076. * @returns {SpeedyVector2[]} vectors
  16077. */
  16078. static _decode(pixels, encoderWidth, encoderHeight) {
  16079. const bytesPerVector = 4; // 1 pixel per vector
  16080. const vectors = [];
  16081. let hi = 0,
  16082. lo = 0;
  16083. let x = 0,
  16084. y = 0;
  16085. // how many bytes should we read?
  16086. const e2 = encoderWidth * encoderHeight * bytesPerVector;
  16087. const size = Math.min(pixels.length, e2);
  16088. // for each encoded vector
  16089. for (let i = 0; i < size; i += bytesPerVector) {
  16090. // extract 16-bit words
  16091. lo = pixels[i + 1] << 8 | pixels[i];
  16092. hi = pixels[i + 3] << 8 | pixels[i + 2];
  16093. // the vector is "null": we have reached the end of the list
  16094. if (lo == 0xFFFF && hi == 0xFFFF) break;
  16095. // the vector must be discarded
  16096. if (lo == 0xFF00 && hi == 0xFF00) continue;
  16097. // decode floats
  16098. x = utils/* Utils */.A.decodeFloat16(lo);
  16099. y = utils/* Utils */.A.decodeFloat16(hi);
  16100. // register vector
  16101. vectors.push(new SpeedyVector2(x, y));
  16102. }
  16103. // done!
  16104. return vectors;
  16105. }
  16106. }
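/*
 * Usage sketch (not part of the library): reading optical-flow vectors. The 'flow' output
 * of the LK tracker can be fed into this sink (named 'vec2' by default); after the pipeline
 * runs, export() resolves to a SpeedyVector2[]. Assumes the factory below is exposed as
 * Speedy.Vector2 (see main.js below).
 *
 *     const flowSink = Speedy.Vector2.Sink(); // default name: 'vec2'
 *     flowSink.turbo = true;                  // accelerate GPU-CPU transfers (buffered reads)
 *     // connect the tracker's 'flow' output to flowSink's input; the pipeline's results
 *     // will then include the exported vectors under the sink's name.
 */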
  16107. ;// CONCATENATED MODULE: ./src/core/pipeline/factories/vector2-factory.js
  16108. /*
  16109. * speedy-vision.js
  16110. * GPU-accelerated Computer Vision for JavaScript
  16111. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16112. *
  16113. * Licensed under the Apache License, Version 2.0 (the "License");
  16114. * you may not use this file except in compliance with the License.
  16115. * You may obtain a copy of the License at
  16116. *
  16117. * http://www.apache.org/licenses/LICENSE-2.0
  16118. *
  16119. * Unless required by applicable law or agreed to in writing, software
  16120. * distributed under the License is distributed on an "AS IS" BASIS,
  16121. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16122. * See the License for the specific language governing permissions and
  16123. * limitations under the License.
  16124. *
  16125. * vector2-factory.js
  16126. * 2D vectors
  16127. */
  16128. /**
  16129. * 2D vectors
  16130. */
  16131. class SpeedyPipelineVector2Factory extends Function {
  16132. /**
  16133. * Constructor
  16134. */
  16135. constructor() {
  16136. // This factory can be invoked as a function
  16137. super('...args', 'return this._create(...args)');
  16138. return this.bind(this);
  16139. }
  16140. /**
  16141. * @private
  16142. *
  16143. * Create a 2D vector
  16144. * @param {number} x x-coordinate
  16145. * @param {number} y y-coordinate
  16146. * @returns {SpeedyVector2}
  16147. */
  16148. _create(x, y) {
  16149. return new SpeedyVector2(x, y);
  16150. }
  16151. /**
  16152. * Create a Vector2 sink
  16153. * @param {string} [name]
  16154. * @returns {SpeedyPipelineNodeVector2Sink}
  16155. */
  16156. Sink(name = undefined) {
  16157. return new SpeedyPipelineNodeVector2Sink(name);
  16158. }
  16159. }
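/*
 * Usage sketch (not part of the library): this factory doubles as a function (see the
 * constructor trick above) and is exposed as Speedy.Vector2 (see main.js below).
 *
 *     const v = Speedy.Vector2(7, -3);     // calls _create(7, -3) and returns a SpeedyVector2
 *     const sink = Speedy.Vector2.Sink();  // SpeedyPipelineNodeVector2Sink
 */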
  16160. ;// CONCATENATED MODULE: ./src/utils/fps-counter.js
  16161. /*
  16162. * speedy-vision.js
  16163. * GPU-accelerated Computer Vision for JavaScript
  16164. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16165. *
  16166. * Licensed under the Apache License, Version 2.0 (the "License");
  16167. * you may not use this file except in compliance with the License.
  16168. * You may obtain a copy of the License at
  16169. *
  16170. * http://www.apache.org/licenses/LICENSE-2.0
  16171. *
  16172. * Unless required by applicable law or agreed to in writing, software
  16173. * distributed under the License is distributed on an "AS IS" BASIS,
  16174. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16175. * See the License for the specific language governing permissions and
  16176. * limitations under the License.
  16177. *
  16178. * fps-counter.js
  16179. * A FPS counter
  16180. */
  16181. /** @const {number} update interval in milliseconds */
  16182. const UPDATE_INTERVAL = 500;
  16183. /** @type {FPSCounter|null} Singleton */
  16184. let instance = null;
  16185. /**
  16186. * FPS counter
  16187. */
  16188. class FPSCounter {
  16189. /**
  16190. * Creates a new FPSCounter
  16191. * @private
  16192. */
  16193. constructor() {
  16194. /** @type {number} current FPS rate */
  16195. this._fps = 60;
  16196. /** @type {number} frame counter */
  16197. this._frames = 0;
  16198. /** @type {number} update interval in milliseconds */
  16199. this._updateInterval = UPDATE_INTERVAL;
  16200. /** @type {number} time of the last update */
  16201. this._lastUpdate = performance.now();
  16202. /** @type {function(): void} bound update function */
  16203. this._boundUpdate = this._update.bind(this);
  16204. // this should never happen...
  16205. if (instance !== null) throw new utils_errors/* IllegalOperationError */.Er(`Can't have multiple instances of FPSCounter`);
  16206. // start FPS counter
  16207. this._boundUpdate();
  16208. }
  16209. /**
  16210. * Gets an instance of the FPS counter.
  16211. * We use lazy loading, i.e., we will not
  16212. * create a FPS counter unless we need to!
  16213. * @returns {FPSCounter}
  16214. */
  16215. static get instance() {
  16216. if (instance === null) instance = new FPSCounter();
  16217. return instance;
  16218. }
  16219. /**
  16220. * Get the FPS rate
  16221. * @returns {number} frames per second
  16222. */
  16223. get fps() {
  16224. return this._fps;
  16225. }
  16226. /**
  16227. * Updates the FPS counter
  16228. */
  16229. _update() {
  16230. const now = performance.now();
  16231. const deltaTime = now - this._lastUpdate;
  16232. if (deltaTime >= this._updateInterval) {
  16233. this._fps = Math.round(this._frames / (deltaTime * 0.001));
  16234. this._frames = 0;
  16235. this._lastUpdate = now;
  16236. }
  16237. this._frames++;
  16238. requestAnimationFrame(this._boundUpdate);
  16239. }
  16240. }
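/*
 * Worked note (not part of the library): the counter schedules itself with
 * requestAnimationFrame and, once at least UPDATE_INTERVAL (500 ms) has elapsed, computes
 * fps = round(frames / (deltaTime * 0.001)). For example, 30 frames counted over 500 ms
 * give round(30 / 0.5) = 60 fps. The singleton is created lazily:
 *
 *     console.log(FPSCounter.instance.fps); // current estimate, refreshed about twice a second
 */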
  16241. ;// CONCATENATED MODULE: ./src/main.js
  16242. /*
  16243. * speedy-vision.js
  16244. * GPU-accelerated Computer Vision for JavaScript
  16245. * Copyright 2020-2024 Alexandre Martins <alemartf(at)gmail.com>
  16246. *
  16247. * Licensed under the Apache License, Version 2.0 (the "License");
  16248. * you may not use this file except in compliance with the License.
  16249. * You may obtain a copy of the License at
  16250. *
  16251. * http://www.apache.org/licenses/LICENSE-2.0
  16252. *
  16253. * Unless required by applicable law or agreed to in writing, software
  16254. * distributed under the License is distributed on an "AS IS" BASIS,
  16255. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  16256. * See the License for the specific language governing permissions and
  16257. * limitations under the License.
  16258. *
  16259. * main.js
  16260. * The entry point of the library
  16261. */
  16262. /* eslint-disable no-undef */
  16263. /** @typedef {import('./core/speedy-matrix').SpeedyMatrix} SpeedyMatrix */
  16264. /** @typedef {import('./core/speedy-matrix-expr').SpeedyMatrixExpr} SpeedyMatrixExpr */
  16265. /** @typedef {import('./core/speedy-media').SpeedyMediaOptions} SpeedyMediaOptions */
  16266. /** @typedef {import('./core/speedy-media-source').SpeedyMediaSourceNativeElement} SpeedyMediaSourceNativeElement */
  16267. // Constants
  16268. /** @type {SpeedyMatrixFactory} */
  16269. const matrixFactory = new SpeedyMatrixFactory();
  16270. /** @type {SpeedyPipelineVector2Factory} */
  16271. const vector2Factory = new SpeedyPipelineVector2Factory();
  16272. /**
  16273. * GPU-accelerated Computer Vision for JavaScript
  16274. */
  16275. class Speedy {
  16276. /**
  16277. * The version of the library
  16278. * @returns {string}
  16279. */
  16280. static get version() {
16281. return "0.9.1";
  16282. }
  16283. /**
  16284. * Checks if Speedy can be executed in this machine & browser
  16285. * @returns {boolean}
  16286. */
  16287. static isSupported() {
  16288. return typeof WebAssembly !== 'undefined' && typeof WebGL2RenderingContext !== 'undefined' && speedy_gl/* SpeedyGL */.c.instance.gl != null;
  16289. }
  16290. /**
  16291. * Global settings
  16292. * @returns {typeof Settings}
  16293. */
  16294. static get Settings() {
  16295. return settings/* Settings */.w;
  16296. }
  16297. /**
  16298. * Create a 2D vector
  16299. * @returns {SpeedyPipelineVector2Factory & ((x: number, y: number) => SpeedyVector2)}
  16300. */
  16301. static get Vector2() {
  16302. return vector2Factory;
  16303. }
  16304. /**
  16305. * Create a 2D point
  16306. * @param {number} x
  16307. * @param {number} y
  16308. * @returns {SpeedyPoint2}
  16309. */
  16310. static Point2(x, y) {
  16311. return new SpeedyPoint2(x, y);
  16312. }
  16313. /**
  16314. * Create a new size object
  16315. * @param {number} width
  16316. * @param {number} height
  16317. * @returns {SpeedySize}
  16318. */
  16319. static Size(width, height) {
  16320. return new SpeedySize(width, height);
  16321. }
  16322. /**
  16323. * Create a Matrix (entries are given in column-major format)
  16324. * @returns {SpeedyMatrixFactory & ((rows: number, columns: number, entries: number[]) => SpeedyMatrix) & ((expr: SpeedyMatrixExpr) => SpeedyMatrix)}
  16325. */
  16326. static get Matrix() {
  16327. return matrixFactory;
  16328. }
  16329. /**
  16330. * Speedy Promises
  16331. * @returns {typeof SpeedyPromise}
  16332. */
  16333. static get Promise() {
  16334. return speedy_promise/* SpeedyPromise */.i;
  16335. }
  16336. /**
  16337. * Create a new Pipeline
  16338. * @returns {SpeedyPipeline}
  16339. */
  16340. static Pipeline() {
  16341. return new SpeedyPipeline();
  16342. }
  16343. /**
  16344. * Image-related nodes
  16345. * @returns {typeof SpeedyPipelineImageFactory}
  16346. */
  16347. static get Image() {
  16348. return SpeedyPipelineImageFactory;
  16349. }
  16350. /**
  16351. * Image filters
  16352. * @returns {typeof SpeedyPipelineFilterFactory}
  16353. */
  16354. static get Filter() {
  16355. return SpeedyPipelineFilterFactory;
  16356. }
  16357. /**
  16358. * Image transforms
  16359. * @returns {typeof SpeedyPipelineTransformFactory}
  16360. */
  16361. static get Transform() {
  16362. return SpeedyPipelineTransformFactory;
  16363. }
  16364. /**
  16365. * Keypoint-related nodes
  16366. * @returns {typeof SpeedyPipelineKeypointFactory}
  16367. */
  16368. static get Keypoint() {
  16369. return SpeedyPipelineKeypointFactory;
  16370. }
  16371. /**
  16372. * Loads a SpeedyMedia object based on the provided source element
  16373. * @param {SpeedyMediaSourceNativeElement} sourceElement The source media
  16374. * @param {SpeedyMediaOptions} [options] Additional options for advanced configuration
  16375. * @returns {SpeedyPromise<SpeedyMedia>}
  16376. */
  16377. static load(sourceElement, options = {}) {
  16378. return SpeedyMedia.load(sourceElement, options);
  16379. }
  16380. /**
  16381. * Loads a camera stream
16382. * @param {number | MediaStreamConstraints} [widthOrConstraints] width of the stream or constraints object
  16383. * @param {number} [height] height of the stream
  16384. * @returns {SpeedyPromise<SpeedyMedia>}
  16385. */
  16386. static camera(widthOrConstraints = 640, height = 360) {
  16387. const constraints = typeof widthOrConstraints === 'object' ? widthOrConstraints : {
  16388. audio: false,
  16389. video: {
  16390. width: widthOrConstraints | 0,
  16391. height: height | 0
  16392. }
  16393. };
  16394. return utils/* Utils */.A.requestCameraStream(constraints).then(video => SpeedyMedia.load(video));
  16395. }
  16396. /**
  16397. * Utilities to query information about the graphics driver
  16398. * @returns {typeof SpeedyPlatform}
  16399. */
  16400. static get Platform() {
  16401. return SpeedyPlatform;
  16402. }
  16403. /**
  16404. * The FPS rate
  16405. * @returns {number} Frames per second (FPS)
  16406. */
  16407. static get fps() {
  16408. return FPSCounter.instance.fps;
  16409. }
  16410. }
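// Usage sketch (illustrative only, not executed): a few entry points of the facade above.
// The camera resolution and what is done with the loaded media are assumptions of this example.
//
//     if (Speedy.isSupported()) {
//         const size = Speedy.Size(640, 480);        // SpeedySize
//         const v = Speedy.Vector2(3, 4);            // SpeedyVector2 via the callable factory
//         Speedy.camera(640, 360).then(media => {
//             console.log('camera stream loaded', media);
//             console.log(Speedy.fps, 'fps');
//         });
//     }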
  16411. // Freeze the namespace
  16412. Object.freeze(Speedy);
  16413. // Display a notice
  16414. utils/* Utils */.A.log(`Speedy Vision version ${Speedy.version}. ` + `GPU-accelerated Computer Vision for JavaScript by Alexandre Martins. ` + "https://github.com/alemart/speedy-vision");
  16415. // Big-endian machine? Currently untested.
  16416. if (!globals.LITTLE_ENDIAN) utils/* Utils */.A.warning('Running on a big-endian machine');
  16417. })();
  16418. __nested_webpack_exports__ = __nested_webpack_exports__["default"];
  16419. /******/ return __nested_webpack_exports__;
  16420. /******/ })()
  16421. ;
  16422. });
  16423. /***/ })
  16424. /******/ });
  16425. /************************************************************************/
  16426. /******/ // The module cache
  16427. /******/ var __webpack_module_cache__ = {};
  16428. /******/
  16429. /******/ // The require function
  16430. /******/ function __webpack_require__(moduleId) {
  16431. /******/ // Check if module is in cache
  16432. /******/ var cachedModule = __webpack_module_cache__[moduleId];
  16433. /******/ if (cachedModule !== undefined) {
  16434. /******/ return cachedModule.exports;
  16435. /******/ }
  16436. /******/ // Create a new module (and put it into the cache)
  16437. /******/ var module = __webpack_module_cache__[moduleId] = {
  16438. /******/ // no module.id needed
  16439. /******/ // no module.loaded needed
  16440. /******/ exports: {}
  16441. /******/ };
  16442. /******/
  16443. /******/ // Execute the module function
  16444. /******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
  16445. /******/
  16446. /******/ // Return the exports of the module
  16447. /******/ return module.exports;
  16448. /******/ }
  16449. /******/
  16450. /************************************************************************/
  16451. /******/ /* webpack/runtime/compat get default export */
  16452. /******/ (() => {
  16453. /******/ // getDefaultExport function for compatibility with non-harmony modules
  16454. /******/ __webpack_require__.n = (module) => {
  16455. /******/ var getter = module && module.__esModule ?
  16456. /******/ () => (module['default']) :
  16457. /******/ () => (module);
  16458. /******/ __webpack_require__.d(getter, { a: getter });
  16459. /******/ return getter;
  16460. /******/ };
  16461. /******/ })();
  16462. /******/
  16463. /******/ /* webpack/runtime/define property getters */
  16464. /******/ (() => {
  16465. /******/ // define getter functions for harmony exports
  16466. /******/ __webpack_require__.d = (exports, definition) => {
  16467. /******/ for(var key in definition) {
  16468. /******/ if(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {
  16469. /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
  16470. /******/ }
  16471. /******/ }
  16472. /******/ };
  16473. /******/ })();
  16474. /******/
  16475. /******/ /* webpack/runtime/hasOwnProperty shorthand */
  16476. /******/ (() => {
  16477. /******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
  16478. /******/ })();
  16479. /******/
  16480. /************************************************************************/
  16481. var __webpack_exports__ = {};
16482. // This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
  16483. (() => {
  16484. "use strict";
  16485. // EXPORTS
  16486. __webpack_require__.d(__webpack_exports__, {
  16487. "default": () => (/* binding */ AR)
  16488. });
  16489. // EXTERNAL MODULE: ./node_modules/speedy-vision/dist/speedy-vision.js
  16490. var speedy_vision = __webpack_require__(774);
  16491. var speedy_vision_default = /*#__PURE__*/__webpack_require__.n(speedy_vision);
  16492. ;// CONCATENATED MODULE: ./src/utils/errors.ts
  16493. /*
  16494. * encantar.js
  16495. * GPU-accelerated Augmented Reality for the web
  16496. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16497. *
  16498. * This program is free software: you can redistribute it and/or modify
  16499. * it under the terms of the GNU Lesser General Public License as published
  16500. * by the Free Software Foundation, either version 3 of the License, or
  16501. * (at your option) any later version.
  16502. *
  16503. * This program is distributed in the hope that it will be useful,
  16504. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16505. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16506. * GNU Lesser General Public License for more details.
  16507. *
  16508. * You should have received a copy of the GNU Lesser General Public License
  16509. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16510. *
  16511. * errors.ts
  16512. * Error classes
  16513. */
  16514. /**
  16515. * Base error class
  16516. */
  16517. class BaseError extends Error {
  16518. /**
  16519. * Constructor
  16520. * @param message error message
  16521. * @param cause cause of the error
  16522. */
  16523. constructor(message = '', cause = null) {
  16524. super(message);
  16525. this.cause = cause;
  16526. }
16527. /* a generic name getter was removed here:
16528. // incorrect when minified
16529. //return this.constructor.name;
16530. */
  16531. /**
  16532. * Convert to string
  16533. */
  16534. toString() {
  16535. const extendedMessage = this.cause ? '\n-> ' + this.cause.toString() : '';
  16536. if (this.message != '')
  16537. return this.name + ': ' + this.message + extendedMessage;
  16538. else
  16539. return this.name + extendedMessage;
  16540. }
  16541. }
  16542. /**
  16543. * A method has received one or more illegal arguments
  16544. */
  16545. class IllegalArgumentError extends BaseError {
  16546. get name() {
  16547. return 'IllegalArgumentError';
  16548. }
  16549. }
  16550. /**
  16551. * The method arguments are valid, but the method can't be called due to the
  16552. * current state of the object
  16553. */
  16554. class IllegalOperationError extends BaseError {
  16555. get name() {
  16556. return 'IllegalOperationError';
  16557. }
  16558. }
  16559. /**
  16560. * The requested operation is not supported
  16561. */
  16562. class NotSupportedError extends BaseError {
  16563. get name() {
  16564. return 'NotSupportedError';
  16565. }
  16566. }
  16567. /**
  16568. * Access denied
  16569. */
  16570. class AccessDeniedError extends BaseError {
  16571. get name() {
  16572. return 'AccessDeniedError';
  16573. }
  16574. }
  16575. /**
  16576. * Timeout
  16577. */
  16578. class TimeoutError extends BaseError {
  16579. get name() {
  16580. return 'TimeoutError';
  16581. }
  16582. }
  16583. /**
  16584. * Assertion error
  16585. */
  16586. class AssertionError extends BaseError {
  16587. get name() {
  16588. return 'AssertionError';
  16589. }
  16590. }
  16591. /**
  16592. * Tracking error
  16593. */
  16594. class TrackingError extends BaseError {
  16595. get name() {
  16596. return 'TrackingError';
  16597. }
  16598. }
  16599. /**
  16600. * Detection error
  16601. */
  16602. class DetectionError extends BaseError {
  16603. get name() {
  16604. return 'DetectionError';
  16605. }
  16606. }
  16607. /**
  16608. * Training error
  16609. */
  16610. class TrainingError extends BaseError {
  16611. get name() {
  16612. return 'TrainingError';
  16613. }
  16614. }
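// Usage sketch (illustrative only, not executed): chaining error causes. toString()
// walks the cause chain, so nested failures produce a readable trace.
//
//     try {
//         try { throw new TimeoutError('no response'); }
//         catch (cause) { throw new TrackingError('tracking failed', cause); }
//     }
//     catch (e) {
//         console.log(e.toString());
//         // TrackingError: tracking failed
//         // -> TimeoutError: no response
//     }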
  16615. ;// CONCATENATED MODULE: ./src/utils/resolution.ts
  16616. /*
  16617. * encantar.js
  16618. * GPU-accelerated Augmented Reality for the web
  16619. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16620. *
  16621. * This program is free software: you can redistribute it and/or modify
  16622. * it under the terms of the GNU Lesser General Public License as published
  16623. * by the Free Software Foundation, either version 3 of the License, or
  16624. * (at your option) any later version.
  16625. *
  16626. * This program is distributed in the hope that it will be useful,
  16627. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16628. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16629. * GNU Lesser General Public License for more details.
  16630. *
  16631. * You should have received a copy of the GNU Lesser General Public License
  16632. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16633. *
  16634. * resolution.ts
  16635. * Resolution utilities
  16636. */
  16637. /** Reference heights when in landscape mode, measured in pixels */
  16638. const REFERENCE_HEIGHT = {
  16639. 'xs': 120,
  16640. 'xs+': 160,
  16641. 'sm': 200,
  16642. 'sm+': 240,
  16643. 'md': 320,
  16644. 'md+': 360,
  16645. 'lg': 480,
  16646. 'lg+': 600,
  16647. };
  16648. /**
  16649. * Convert a resolution type to a (width, height) pair
  16650. * @param resolution resolution type
  16651. * @param aspectRatio desired width / height ratio
  16652. * @returns size in pixels
  16653. */
  16654. function computeResolution(resolution, aspectRatio) {
  16655. const referenceHeight = REFERENCE_HEIGHT[resolution];
  16656. let width = 0, height = 0;
  16657. if (referenceHeight === undefined)
  16658. throw new IllegalArgumentError('Invalid resolution: ' + resolution);
  16659. else if (aspectRatio <= 0)
  16660. throw new IllegalArgumentError('Invalid aspect ratio: ' + aspectRatio);
  16661. if (aspectRatio >= 1) {
  16662. // landscape
  16663. height = referenceHeight;
  16664. width = Math.round(height * aspectRatio);
  16665. width -= width % 2;
  16666. }
  16667. else {
  16668. // portrait
  16669. width = referenceHeight;
  16670. height = Math.round(width / aspectRatio);
  16671. height -= height % 2;
  16672. }
  16673. return speedy_vision_default().Size(width, height);
  16674. }
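// Worked example (illustrative only, not executed): resolution 'md' has a reference
// height of 320 pixels. With a 16:9 aspect ratio, width = round(320 * 16/9) = 569,
// rounded down to the nearest even number, giving a 568x320 size. In portrait mode
// (aspect ratio 9/16), the reference height becomes the width instead: 320x568.
//
//     computeResolution('md', 16/9);   // ~ Size(568, 320)
//     computeResolution('md', 9/16);   // ~ Size(320, 568)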
  16675. ;// CONCATENATED MODULE: ./src/utils/utils.ts
  16676. /*
  16677. * encantar.js
  16678. * GPU-accelerated Augmented Reality for the web
  16679. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16680. *
  16681. * This program is free software: you can redistribute it and/or modify
  16682. * it under the terms of the GNU Lesser General Public License as published
  16683. * by the Free Software Foundation, either version 3 of the License, or
  16684. * (at your option) any later version.
  16685. *
  16686. * This program is distributed in the hope that it will be useful,
  16687. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16688. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16689. * GNU Lesser General Public License for more details.
  16690. *
  16691. * You should have received a copy of the GNU Lesser General Public License
  16692. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16693. *
  16694. * utils.ts
  16695. * Generic utilities
  16696. */
  16697. /**
  16698. * Generic utilities
  16699. */
  16700. class Utils {
  16701. /**
  16702. * Log a message
  16703. * @param message
  16704. * @param args optional additional messages
  16705. */
  16706. static log(message, ...args) {
  16707. console.log('[encantar-js]', message, ...args);
  16708. }
  16709. /**
  16710. * Display a warning
  16711. * @param message
  16712. * @param args optional additional messages
  16713. */
  16714. static warning(message, ...args) {
  16715. console.warn('[encantar-js]', message, ...args);
  16716. }
  16717. /**
  16718. * Display an error message
  16719. * @param message
  16720. * @param args optional additional messages
  16721. */
  16722. static error(message, ...args) {
  16723. console.error('[encantar-js]', message, ...args);
  16724. }
  16725. /**
  16726. * Assertion
  16727. * @param expr expression
  16728. * @param errorMessage optional error message
  16729. * @throws {AssertionError}
  16730. */
  16731. static assert(expr, errorMessage = '') {
  16732. if (!expr)
  16733. throw new AssertionError(errorMessage);
  16734. }
  16735. /**
  16736. * Returns a range [0, 1, ..., n-1]
  16737. * @param n non-negative integer
  16738. * @returns range from 0 to n-1, inclusive
  16739. */
  16740. static range(n) {
  16741. if ((n |= 0) < 0)
  16742. throw new IllegalArgumentError();
  16743. return Array.from({ length: n }, (_, i) => i);
  16744. }
  16745. /**
  16746. * Convert a resolution type to a resolution measured in pixels
  16747. * @param resolution resolution type
  16748. * @param aspectRatio width / height ratio
  16749. * @returns resolution measured in pixels
  16750. */
  16751. static resolution(resolution, aspectRatio) {
  16752. return computeResolution(resolution, aspectRatio);
  16753. }
  16754. /**
  16755. * Returns a string containing platform brand information
  16756. * @returns platform brand information
  16757. */
  16758. static platformString() {
  16759. return ((navigator) => typeof navigator.userAgentData === 'object' ? // prefer the NavigatorUAData interface
  16760. navigator.userAgentData.platform : // use only low entropy data
  16761. navigator.platform // navigator.platform is deprecated
  16762. )(navigator);
  16763. }
  16764. /**
  16765. * Checks if we're on iOS
  16766. * @returns true if we're on iOS
  16767. */
  16768. static isIOS() {
  16769. // at the time of this writing, navigator.userAgentData is not yet
  16770. // compatible with Safari. navigator.platform is deprecated, but
  16771. // predictable.
  16772. if (/(iOS|iPhone|iPad|iPod)/i.test(navigator.platform))
  16773. return true;
  16774. if (/Mac/i.test(navigator.platform) && navigator.maxTouchPoints !== undefined) // iPad OS 13+
  16775. return navigator.maxTouchPoints > 2;
  16776. return false;
  16777. }
  16778. /**
  16779. * Checks if we're on a WebKit-based browser
  16780. * @returns true if we're on a WebKit-based browser
  16781. */
  16782. static isWebKit() {
  16783. // note: navigator.vendor is deprecated
  16784. if (/Apple/.test(navigator.vendor))
  16785. return true;
  16786. // Can a non WebKit-based browser pass this test?
  16787. // Test masked GL_RENDERER == "Apple GPU" (valid since Feb 2020)
  16788. // https://bugs.webkit.org/show_bug.cgi?id=207608
  16789. /*
  16790. if(Speedy.Platform.renderer == 'Apple GPU' && Speedy.Platform.vendor == 'Apple Inc.')
  16791. return true;
  16792. */
  16793. // Desktop and Mobile Safari, Epiphany on Linux
  16794. if (/AppleWebKit\/.* Version\//.test(navigator.userAgent))
  16795. return true;
  16796. // Chrome, Firefox, Edge on iOS
  16797. if (/(CriOS\/|FxiOS\/|EdgiOS\/)/.test(navigator.userAgent))
  16798. return true;
  16799. // not WebKit
  16800. return false;
  16801. }
  16802. /**
  16803. * Device-specific information for debugging purposes
  16804. */
  16805. static deviceInfo() {
  16806. return 'Device info: ' + JSON.stringify({
  16807. isIOS: Utils.isIOS(),
  16808. isWebKit: Utils.isWebKit(),
  16809. renderer: (speedy_vision_default()).Platform.renderer,
  16810. vendor: (speedy_vision_default()).Platform.vendor,
  16811. screen: [screen.width, screen.height].join('x'),
  16812. platform: [navigator.platform, navigator.vendor].join('; '),
  16813. userAgent: navigator.userAgent,
  16814. userAgentData: navigator.userAgentData || null,
  16815. }, null, 2);
  16816. }
  16817. }
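// Usage sketch (illustrative only, not executed):
//
//     Utils.range(3);                          // [0, 1, 2]
//     Utils.assert(1 + 1 == 2, 'math broke');  // throws an AssertionError if the expression is falsy
//     Utils.resolution('md', 16/9);            // SpeedySize, e.g. 568x320
//     Utils.log('hello');                      // [encantar-js] hello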
  16818. ;// CONCATENATED MODULE: ./src/utils/ar-events.ts
  16819. /*
  16820. * encantar.js
  16821. * GPU-accelerated Augmented Reality for the web
  16822. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16823. *
  16824. * This program is free software: you can redistribute it and/or modify
  16825. * it under the terms of the GNU Lesser General Public License as published
  16826. * by the Free Software Foundation, either version 3 of the License, or
  16827. * (at your option) any later version.
  16828. *
  16829. * This program is distributed in the hope that it will be useful,
  16830. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16831. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16832. * GNU Lesser General Public License for more details.
  16833. *
  16834. * You should have received a copy of the GNU Lesser General Public License
  16835. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16836. *
  16837. * ar-events.ts
  16838. * AR-related Events
  16839. */
  16840. /**
  16841. * AR Event
  16842. */
  16843. class AREvent extends Event {
  16844. /**
  16845. * Constructor
  16846. * @param type event type
  16847. */
  16848. constructor(type) {
  16849. super(type);
  16850. }
  16851. /**
  16852. * Event type
  16853. */
  16854. get type() {
  16855. return super.type;
  16856. }
  16857. }
  16858. /**
  16859. * AR Event Target
  16860. */
  16861. class AREventTarget {
  16862. /**
  16863. * Constructor
  16864. */
  16865. constructor() {
  16866. this._delegate = new EventTarget();
  16867. }
  16868. /**
  16869. * Add event listener
  16870. * @param type event type
  16871. * @param callback
  16872. */
  16873. addEventListener(type, callback) {
  16874. this._delegate.addEventListener(type, callback);
  16875. }
  16876. /**
  16877. * Remove event listener
  16878. * @param type event type
  16879. * @param callback
  16880. */
  16881. removeEventListener(type, callback) {
  16882. this._delegate.removeEventListener(type, callback);
  16883. }
  16884. /**
  16885. * Synchronously trigger an event
  16886. * @param event
16887. * @returns the same value a standard EventTarget would return
  16888. * @internal
  16889. */
  16890. dispatchEvent(event) {
  16891. return this._delegate.dispatchEvent(event);
  16892. }
  16893. }
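// Usage sketch (illustrative only, not executed): dispatching a custom AR event.
// `MyTarget` and the 'ready' event type are hypothetical names for this example.
//
//     class MyTarget extends AREventTarget { }
//     const target = new MyTarget();
//     target.addEventListener('ready', event => console.log('got', event.type));
//     target.dispatchEvent(new AREvent('ready'));   // dispatchEvent is marked @internal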
  16894. ;// CONCATENATED MODULE: ./src/core/stats.ts
  16895. /*
  16896. * encantar.js
  16897. * GPU-accelerated Augmented Reality for the web
  16898. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16899. *
  16900. * This program is free software: you can redistribute it and/or modify
  16901. * it under the terms of the GNU Lesser General Public License as published
  16902. * by the Free Software Foundation, either version 3 of the License, or
  16903. * (at your option) any later version.
  16904. *
  16905. * This program is distributed in the hope that it will be useful,
  16906. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16907. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16908. * GNU Lesser General Public License for more details.
  16909. *
  16910. * You should have received a copy of the GNU Lesser General Public License
  16911. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16912. *
  16913. * stats.ts
  16914. * Stats for performance measurements
  16915. */
  16916. /** update interval, given in seconds */
  16917. const UPDATE_INTERVAL = 0.5;
  16918. /**
  16919. * Stats for performance measurements
  16920. */
  16921. class Stats {
  16922. /**
  16923. * Constructor
  16924. */
  16925. constructor() {
  16926. this._timeOfLastUpdate = this._now();
  16927. this._partialCycleCount = 0;
  16928. this._cyclesPerSecond = 0;
  16929. }
  16930. /**
  16931. * Update stats - call every frame
  16932. */
  16933. update() {
  16934. const now = this._now();
  16935. ++this._partialCycleCount;
  16936. if (now >= this._timeOfLastUpdate + 1000 * UPDATE_INTERVAL) {
  16937. this._cyclesPerSecond = this._partialCycleCount / UPDATE_INTERVAL;
  16938. this._partialCycleCount = 0;
  16939. this._timeOfLastUpdate = now;
  16940. }
  16941. }
  16942. /**
  16943. * Reset stats
  16944. */
  16945. reset() {
  16946. this._timeOfLastUpdate = this._now();
  16947. this._partialCycleCount = 0;
  16948. this._cyclesPerSecond = 0;
  16949. }
  16950. /**
  16951. * Number of cycles per second
  16952. */
  16953. get cyclesPerSecond() {
  16954. return this._cyclesPerSecond;
  16955. }
  16956. /**
  16957. * A measurement of time, in milliseconds
  16958. * @returns time in ms
  16959. */
  16960. _now() {
  16961. return performance.now();
  16962. }
  16963. }
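// Usage sketch (illustrative only, not executed): call update() once per cycle and read
// the frequency, which is refreshed every UPDATE_INTERVAL seconds.
//
//     const stats = new Stats();
//     function loop() {
//         stats.update();
//         // ... do the work of one cycle ...
//         requestAnimationFrame(loop);
//     }
//     requestAnimationFrame(loop);
//     // stats.cyclesPerSecond then approximates the number of loop iterations per second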
  16964. ;// CONCATENATED MODULE: ./src/core/stats-panel.ts
  16965. /*
  16966. * encantar.js
  16967. * GPU-accelerated Augmented Reality for the web
  16968. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  16969. *
  16970. * This program is free software: you can redistribute it and/or modify
  16971. * it under the terms of the GNU Lesser General Public License as published
  16972. * by the Free Software Foundation, either version 3 of the License, or
  16973. * (at your option) any later version.
  16974. *
  16975. * This program is distributed in the hope that it will be useful,
  16976. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16977. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16978. * GNU Lesser General Public License for more details.
  16979. *
  16980. * You should have received a copy of the GNU Lesser General Public License
  16981. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  16982. *
  16983. * stats-panel.ts
  16984. * Stats panel used for development purposes
  16985. */
  16986. /** Update interval, in ms */
  16987. const stats_panel_UPDATE_INTERVAL = 500;
  16988. /** Icons for different power profiles */
  16989. const POWER_ICON = Object.freeze({
  16990. 'default': '',
16991. 'low-power': '&#x1F50B;',
16992. 'high-performance': '&#x26A1;'
  16993. });
  16994. /**
  16995. * Stats panel used for development purposes
  16996. */
  16997. class StatsPanel {
  16998. /**
  16999. * Constructor
17000. * @param viewport the viewport whose HUD will host the panel
  17001. */
  17002. constructor(viewport) {
  17003. this._viewport = viewport;
  17004. this._lastUpdate = 0;
  17005. this._container = this._createContainer();
  17006. viewport.hud.container.appendChild(this._container);
  17007. }
  17008. /**
  17009. * Release the panel
  17010. */
  17011. release() {
  17012. this._container.remove();
  17013. }
  17014. /**
  17015. * A method to be called in the update loop
  17016. * @param time current time in ms
  17017. * @param trackers the trackers attached to the session
  17018. * @param sources the sources of media linked to the session
  17019. * @param gpu GPU cycles per second
  17020. * @param fps frames per second
  17021. */
  17022. update(time, trackers, sources, gpu, fps) {
  17023. if (time >= this._lastUpdate + stats_panel_UPDATE_INTERVAL) {
  17024. this._lastUpdate = time;
  17025. this._update(trackers, sources, fps, gpu);
  17026. }
  17027. }
  17028. /**
  17029. * Visibility of the panel
  17030. */
  17031. get visible() {
  17032. return !this._container.hidden;
  17033. }
  17034. /**
  17035. * Visibility of the panel
  17036. */
  17037. set visible(visible) {
  17038. this._container.hidden = !visible;
  17039. }
  17040. /**
  17041. * Update the contents of the panel
  17042. * @param trackers the trackers attached to the session
  17043. * @param sources the sources of media linked to the session
  17044. * @param fps frames per second
  17045. * @param gpu GPU cycles per second
  17046. */
  17047. _update(trackers, sources, fps, gpu) {
  17048. // all sanitized
  17049. const lfps = this._label('_ar_fps');
  17050. if (lfps !== null) {
  17051. lfps.style.color = this._color(fps);
  17052. lfps.innerText = String(fps);
  17053. }
  17054. const lgpu = this._label('_ar_gpu');
  17055. if (lgpu !== null) {
  17056. lgpu.style.color = this._color(gpu);
  17057. lgpu.innerText = String(gpu);
  17058. }
  17059. const lpower = this._label('_ar_power');
  17060. if (lpower !== null)
  17061. lpower.innerHTML = POWER_ICON[Settings.powerPreference];
  17062. const lin = this._label('_ar_in');
  17063. if (lin !== null) {
  17064. const sourceStats = sources.map(source => source._stats).join(', ');
  17065. lin.innerText = sourceStats;
  17066. }
  17067. const lout = this._label('_ar_out');
  17068. if (lout !== null) {
  17069. const trackerStats = trackers.map(tracker => tracker._stats).join(', ');
  17070. lout.innerText = trackerStats;
  17071. }
  17072. }
  17073. /**
  17074. * Get a label of the panel
  17075. * @param className
  17076. * @returns the HTML element, or null if it doesn't exist
  17077. */
  17078. _label(className) {
  17079. return this._container.getElementsByClassName(className).item(0);
  17080. }
  17081. /**
  17082. * Associate a color to a frequency number
  17083. * @param f frequency given in cycles per second
  17084. * @returns colorized number (HTML)
  17085. */
  17086. _color(f) {
  17087. const GREEN = '#0f0', YELLOW = '#ff0', RED = '#f33';
  17088. const color3 = f >= 50 ? GREEN : (f >= 30 ? YELLOW : RED);
  17089. const color2 = f >= 30 ? GREEN : RED;
  17090. const color = Settings.powerPreference != 'low-power' ? color3 : color2;
  17091. return color;
  17092. }
  17093. /**
  17094. * Create the container for the panel
  17095. * @returns a container
  17096. */
  17097. _createContainer() {
  17098. const container = document.createElement('div');
  17099. container.style.position = 'absolute';
  17100. container.style.left = container.style.top = '0px';
  17101. container.style.zIndex = '1000000';
  17102. container.style.padding = '0px';
  17103. container.appendChild(this._createTitle());
  17104. container.appendChild(this._createContent());
  17105. return container;
  17106. }
  17107. /**
  17108. * Create a title
  17109. * @returns a title
  17110. */
  17111. _createTitle() {
  17112. const title = document.createElement('div');
  17113. title.style.backgroundColor = '#7e56c2';
  17114. title.style.color = 'white';
  17115. title.style.fontFamily = 'monospace';
  17116. title.style.fontSize = '14px';
  17117. title.style.fontWeight = 'bold';
  17118. title.style.padding = '2px';
  17119. title.innerHTML = '&#x2728;';
  17120. title.innerText += 'encantar.js ' + AR.version;
  17121. return title;
  17122. }
  17123. /**
  17124. * Create a content container
  17125. * @returns a content container
  17126. */
  17127. _createContent() {
  17128. const content = document.createElement('div');
  17129. const print = (html) => content.insertAdjacentHTML('beforeend', html);
  17130. content.style.backgroundColor = 'rgba(0,0,0,0.5)';
  17131. content.style.color = 'white';
  17132. content.style.fontFamily = 'monospace';
  17133. content.style.fontSize = '14px';
  17134. content.style.padding = '2px';
  17135. content.style.whiteSpace = 'pre-line';
  17136. // all sanitized
  17137. print('FPS: <span class="_ar_fps"></span> | ');
  17138. print('GPU: <span class="_ar_gpu"></span> ');
  17139. print('<span class="_ar_power"></span>');
  17140. print('<br>');
  17141. print('IN: <span class="_ar_in"></span>');
  17142. print('<br>');
  17143. print('OUT: <span class="_ar_out"></span>');
  17144. if (this._viewport.fullscreenAvailable) {
  17145. print('<br>');
  17146. content.appendChild(this._createFullscreenToggle());
  17147. }
  17148. return content;
  17149. }
  17150. /**
  17151. * Create a fullscreen toggle
  17152. * @returns a fullscreen toggle
  17153. */
  17154. _createFullscreenToggle() {
  17155. const toggle = document.createElement('a');
  17156. Utils.assert(this._viewport != null);
  17157. toggle.href = 'javascript:void(0)';
  17158. toggle.innerText = 'Toggle fullscreen';
  17159. toggle.style.color = 'white';
  17160. toggle.setAttribute('role', 'button');
  17161. toggle.addEventListener('click', () => {
  17162. if (!this._viewport.fullscreen) {
  17163. this._viewport.requestFullscreen().catch(err => {
  17164. alert(`Can't enable fullscreen mode. ` + err.toString());
  17165. });
  17166. }
  17167. else {
  17168. this._viewport.exitFullscreen();
  17169. }
  17170. });
  17171. return toggle;
  17172. }
  17173. }
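// Note (illustrative only, not executed): user code is not expected to create this panel
// directly. The Session further below instantiates it and toggles it via the `stats`
// option, which boils down to:
//
//     const panel = new StatsPanel(viewport);
//     panel.visible = stats;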
  17174. ;// CONCATENATED MODULE: ./src/core/frame.ts
  17175. /*
  17176. * encantar.js
  17177. * GPU-accelerated Augmented Reality for the web
  17178. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17179. *
  17180. * This program is free software: you can redistribute it and/or modify
  17181. * it under the terms of the GNU Lesser General Public License as published
  17182. * by the Free Software Foundation, either version 3 of the License, or
  17183. * (at your option) any later version.
  17184. *
  17185. * This program is distributed in the hope that it will be useful,
  17186. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17187. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17188. * GNU Lesser General Public License for more details.
  17189. *
  17190. * You should have received a copy of the GNU Lesser General Public License
  17191. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17192. *
  17193. * frame.ts
  17194. * A Frame holds information used to render a single animation frame of a Session
  17195. */
  17196. /**
  17197. * Iterable frame results (helper class)
  17198. */
  17199. class IterableTrackerResults {
  17200. constructor(_results) {
  17201. this._results = _results;
  17202. this._index = 0;
  17203. }
  17204. next() {
  17205. const i = this._index++;
  17206. return i < this._results.length ?
  17207. { done: false, value: this._results[i] } :
  17208. { done: true, value: undefined };
  17209. }
  17210. [Symbol.iterator]() {
  17211. return this;
  17212. }
  17213. }
  17214. /**
  17215. * A Frame holds information used to render a single animation frame of a Session
  17216. */
  17217. class Frame {
  17218. /**
  17219. * Constructor
  17220. * @param session
  17221. * @param results
  17222. */
  17223. constructor(session, results) {
  17224. this._session = session;
  17225. this._results = new IterableTrackerResults(results);
  17226. }
  17227. /**
  17228. * The session of which this frame holds data
  17229. */
  17230. get session() {
  17231. return this._session;
  17232. }
  17233. /**
  17234. * The results of all trackers in this frame
  17235. */
  17236. get results() {
  17237. return this._results;
  17238. }
  17239. }
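// Usage sketch (illustrative only, not executed): iterating over the tracker results of a
// frame inside an animation-frame callback. How the frame is obtained (e.g. from a Session
// callback) is assumed here and not shown in this excerpt.
//
//     function onFrame(frame) {
//         for (const result of frame.results)
//             console.log('result from a frame of session', frame.session, result);
//     }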
  17240. ;// CONCATENATED MODULE: ./src/core/time.ts
  17241. /*
  17242. * encantar.js
  17243. * GPU-accelerated Augmented Reality for the web
  17244. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17245. *
  17246. * This program is free software: you can redistribute it and/or modify
  17247. * it under the terms of the GNU Lesser General Public License as published
  17248. * by the Free Software Foundation, either version 3 of the License, or
  17249. * (at your option) any later version.
  17250. *
  17251. * This program is distributed in the hope that it will be useful,
  17252. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17253. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17254. * GNU Lesser General Public License for more details.
  17255. *
  17256. * You should have received a copy of the GNU Lesser General Public License
  17257. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17258. *
  17259. * time.ts
  17260. * Time utilities
  17261. */
  17262. /**
  17263. * Time Manager
  17264. */
  17265. class Time {
  17266. constructor() {
  17267. /** time scale */
  17268. this._scale = 1;
  17269. /** time since the start of the session, in milliseconds */
  17270. this._time = 0;
  17271. /** unscaled time since the start of the session, in milliseconds */
  17272. this._unscaledTime = 0;
  17273. /** elapsed time between the current and the previous frame, in milliseconds */
  17274. this._delta = 0;
  17275. /** time of the first update call, in milliseconds */
  17276. this._firstUpdate = 0;
  17277. /** time of the last update call, in milliseconds */
  17278. this._lastUpdate = Number.POSITIVE_INFINITY;
  17279. }
  17280. /**
  17281. * Update the Time Manager
  17282. * @param timestamp in milliseconds
  17283. * @internal
  17284. */
  17285. _update(timestamp) {
  17286. if (timestamp < this._lastUpdate) {
  17287. this._firstUpdate = this._lastUpdate = timestamp;
  17288. return;
  17289. }
  17290. this._delta = (timestamp - this._lastUpdate) * this._scale;
  17291. this._time += this._delta;
  17292. this._unscaledTime = timestamp - this._firstUpdate;
  17293. this._lastUpdate = timestamp;
  17294. }
  17295. /**
  17296. * Elapsed time since the start of the session, measured at the
  17297. * beginning of the current animation frame and given in seconds
  17298. */
  17299. get elapsed() {
  17300. return this._time * 0.001;
  17301. }
  17302. /**
  17303. * Elapsed time between the current and the previous animation
  17304. * frame, given in seconds
  17305. */
  17306. get delta() {
  17307. return this._delta * 0.001;
  17308. }
  17309. /**
  17310. * Time scale (defaults to 1)
  17311. */
  17312. get scale() {
  17313. return this._scale;
  17314. }
  17315. /**
  17316. * Time scale (defaults to 1)
  17317. */
  17318. set scale(scale) {
  17319. this._scale = Math.max(0, +scale);
  17320. }
  17321. /**
  17322. * Time scale independent elapsed time since the start of the session,
  17323. * measured at the beginning of the current animation frame and given
  17324. * in seconds
  17325. */
  17326. get unscaled() {
  17327. return this._unscaledTime * 0.001;
  17328. }
  17329. }
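// Usage sketch (illustrative only, not executed): frame-rate independent animation.
// Access to the Time instance (e.g. through the session) is an assumption of this example.
//
//     // time._update(timestamp) is invoked internally once per animation frame
//     const dt = time.delta;        // seconds since the previous frame
//     x += speed * dt;              // move at `speed` units per second
//     time.scale = 0.5;             // slow motion: elapsed time advances at half speed
//     console.log(time.elapsed, time.unscaled);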
  17330. ;// CONCATENATED MODULE: ./src/core/gizmos.ts
  17331. /*
  17332. * encantar.js
  17333. * GPU-accelerated Augmented Reality for the web
  17334. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17335. *
  17336. * This program is free software: you can redistribute it and/or modify
  17337. * it under the terms of the GNU Lesser General Public License as published
  17338. * by the Free Software Foundation, either version 3 of the License, or
  17339. * (at your option) any later version.
  17340. *
  17341. * This program is distributed in the hope that it will be useful,
  17342. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17343. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17344. * GNU Lesser General Public License for more details.
  17345. *
  17346. * You should have received a copy of the GNU Lesser General Public License
  17347. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17348. *
  17349. * gizmos.ts
  17350. * Visual cues for testing & debugging
  17351. */
  17352. /** The maximum match distance ratio we'll consider to be "good" */
  17353. const GOOD_MATCH_THRESHOLD = 0.7;
  17354. /**
  17355. * Visual cues for testing & debugging
  17356. */
  17357. class Gizmos {
  17358. /**
  17359. * Constructor
  17360. */
  17361. constructor() {
  17362. this._visible = false;
  17363. }
  17364. /**
  17365. * Whether or not the gizmos will be rendered
  17366. */
  17367. get visible() {
  17368. return this._visible;
  17369. }
  17370. /**
  17371. * Whether or not the gizmos will be rendered
  17372. */
  17373. set visible(visible) {
  17374. this._visible = visible;
  17375. }
  17376. /**
  17377. * Render gizmos
  17378. * @param viewport
  17379. * @param trackers
  17380. * @internal
  17381. */
  17382. _render(viewport, trackers) {
  17383. // no need to render?
  17384. if (!this._visible)
  17385. return;
  17386. // viewport
  17387. const viewportSize = viewport._realSize;
  17388. const canvas = viewport._backgroundCanvas;
  17389. const ctx = canvas.getContext('2d', { alpha: false });
  17390. if (!ctx)
  17391. throw new IllegalOperationError();
  17392. // debug
  17393. //ctx.fillStyle = '#000';
  17394. //ctx.fillRect(0, 0, canvas.width, canvas.height);
  17395. //ctx.clearRect(0, 0, canvas.width, canvas.height);
  17396. // render keypoints
  17397. for (let i = 0; i < trackers.length; i++) {
  17398. if (trackers[i].type != 'image-tracker')
  17399. continue;
  17400. const output = trackers[i]._output;
  17401. const keypoints = output.keypoints;
  17402. const screenSize = output.screenSize;
  17403. if (keypoints !== undefined && screenSize !== undefined)
  17404. this._splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize);
  17405. }
  17406. // render polylines
  17407. for (let i = 0; i < trackers.length; i++) {
  17408. if (trackers[i].type != 'image-tracker')
  17409. continue;
  17410. const output = trackers[i]._output;
  17411. const polyline = output.polyline;
  17412. const screenSize = output.screenSize;
  17413. if (polyline !== undefined && screenSize !== undefined)
  17414. this._renderPolyline(ctx, polyline, screenSize, viewportSize);
  17415. }
  17416. // render the axes of the 3D coordinate system
  17417. for (let i = 0; i < trackers.length; i++) {
  17418. if (trackers[i].type != 'image-tracker')
  17419. continue;
  17420. const output = trackers[i]._output;
  17421. const cameraMatrix = output.cameraMatrix;
  17422. const screenSize = output.screenSize;
  17423. if (cameraMatrix !== undefined && screenSize !== undefined)
  17424. this._renderAxes(ctx, cameraMatrix, screenSize, viewportSize);
  17425. }
  17426. }
  17427. /**
  17428. * Split keypoints in matched/unmatched categories and
  17429. * render them for testing & development purposes
  17430. * @param ctx canvas 2D context
  17431. * @param keypoints keypoints to render
  17432. * @param screenSize AR screen size
  17433. * @param viewportSize viewport size
  17434. * @param size base keypoint rendering size
  17435. */
  17436. _splitAndRenderKeypoints(ctx, keypoints, screenSize, viewportSize, size = 1) {
  17437. if (keypoints.length == 0)
  17438. return;
  17439. if (!Object.prototype.hasOwnProperty.call(keypoints[0], '_matches')) { // hack...
  17440. this._renderKeypoints(ctx, keypoints, screenSize, viewportSize, '#f00', size);
  17441. return;
  17442. }
  17443. const isGoodMatch = (keypoint) => (keypoint.matches.length == 1 && keypoint.matches[0].index >= 0) ||
  17444. (keypoint.matches.length > 1 &&
  17445. keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0 &&
  17446. keypoint.matches[0].distance <= GOOD_MATCH_THRESHOLD * keypoint.matches[1].distance);
  17447. const matchedKeypoints = keypoints;
  17448. const goodMatches = matchedKeypoints.filter(keypoint => isGoodMatch(keypoint));
  17449. const badMatches = matchedKeypoints.filter(keypoint => !isGoodMatch(keypoint));
  17450. this._renderKeypoints(ctx, badMatches, screenSize, viewportSize, '#f00', size);
  17451. this._renderKeypoints(ctx, goodMatches, screenSize, viewportSize, '#0f0', size);
  17452. }
  17453. /**
  17454. * Render keypoints for testing & development purposes
  17455. * @param ctx canvas 2D context
  17456. * @param keypoints keypoints to render
  17457. * @param screenSize AR screen size
  17458. * @param viewportSize viewport size
  17459. * @param color color of the rendered keypoints
  17460. * @param size base keypoint rendering size
  17461. */
  17462. _renderKeypoints(ctx, keypoints, screenSize, viewportSize, color = 'red', size = 1) {
  17463. const sx = viewportSize.width / screenSize.width;
  17464. const sy = viewportSize.height / screenSize.height;
  17465. ctx.beginPath();
  17466. for (let i = keypoints.length - 1; i >= 0; i--) {
  17467. const keypoint = keypoints[i];
  17468. const x = (keypoint.x * sx + 0.5) | 0;
  17469. const y = (keypoint.y * sy + 0.5) | 0;
  17470. const r = (size * keypoint.scale + 0.5) | 0;
  17471. ctx.rect(x - r, y - r, 2 * r, 2 * r);
  17472. }
  17473. ctx.strokeStyle = color;
  17474. ctx.lineWidth = 1;
  17475. ctx.stroke();
  17476. }
  17477. /**
  17478. * Render polyline for testing & development purposes
  17479. * @param ctx canvas 2D context
  17480. * @param polyline vertices
  17481. * @param screenSize AR screen size
  17482. * @param viewportSize viewport size
  17483. * @param color color of the rendered polyline
  17484. * @param lineWidth
  17485. */
  17486. _renderPolyline(ctx, polyline, screenSize, viewportSize, color = '#0f0', lineWidth = 2) {
  17487. if (polyline.length == 0)
  17488. return;
  17489. const n = polyline.length;
  17490. const sx = viewportSize.width / screenSize.width;
  17491. const sy = viewportSize.height / screenSize.height;
  17492. // render polyline
  17493. ctx.beginPath();
  17494. ctx.moveTo(polyline[n - 1].x * sx, polyline[n - 1].y * sy);
  17495. for (let j = 0; j < n; j++)
  17496. ctx.lineTo(polyline[j].x * sx, polyline[j].y * sy);
  17497. ctx.strokeStyle = color;
  17498. ctx.lineWidth = lineWidth;
  17499. ctx.stroke();
  17500. }
  17501. /**
  17502. * Render the axes of a 3D coordinate system
  17503. * @param ctx canvas 2D context
  17504. * @param cameraMatrix 3x4 camera matrix that maps normalized coordinates [-1,1]^3 to AR screen space
  17505. * @param screenSize AR screen size
  17506. * @param viewportSize viewport size
  17507. * @param lineWidth
  17508. */
  17509. _renderAxes(ctx, cameraMatrix, screenSize, viewportSize, lineWidth = 4) {
  17510. const RED = '#f00', GREEN = '#0f0', BLUE = '#00f';
  17511. const color = [RED, GREEN, BLUE]; // color of each axis: (X,Y,Z)
  17512. const length = 1; // length of each axis-corresponding line, given in normalized space units
  17513. const sx = viewportSize.width / screenSize.width;
  17514. const sy = viewportSize.height / screenSize.height;
  17515. /*
  17516. Multiply the 3x4 camera matrix P by:
  17517. [ 0 L 0 0 ]
  17518. [ 0 0 L 0 ] , where L = length in normalized space of the lines
  17519. [ 0 0 0 L ] corresponding to the 3 axes (typically 1)
  17520. [ 1 1 1 1 ]
  17521. Each column of the resulting matrix will give us the pixel coordinates
  17522. we're looking for: origin and the axes.
  17523. Note: we're working with homogeneous coordinates
  17524. */
  17525. const p = cameraMatrix.read();
  17526. const l = length;
  17527. const o = [p[9], p[10], p[11]]; // origin of the coordinate system
  17528. const x = [l * p[0] + p[9], l * p[1] + p[10], l * p[2] + p[11]]; // x-axis
  17529. const y = [l * p[3] + p[9], l * p[4] + p[10], l * p[5] + p[11]]; // y-axis
  17530. const z = [l * p[6] + p[9], l * p[7] + p[10], l * p[8] + p[11]]; // z-axis
  17531. const axis = [x, y, z];
  17532. // draw each axis
  17533. const ox = o[0] / o[2], oy = o[1] / o[2];
  17534. for (let i = 0; i < 3; i++) {
  17535. const q = axis[i];
  17536. const x = q[0] / q[2], y = q[1] / q[2];
  17537. ctx.beginPath();
  17538. ctx.moveTo(ox * sx, oy * sy);
  17539. ctx.lineTo(x * sx, y * sy);
  17540. ctx.strokeStyle = color[i];
  17541. ctx.lineWidth = lineWidth;
  17542. ctx.stroke();
  17543. }
  17544. //console.log("Origin",ox,oy);
  17545. }
  17546. }
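// Usage sketch (illustrative only, not executed): gizmos are drawn only when visible.
// Access to the Gizmos instance (the Session below keeps one internally) is an assumption
// of this example.
//
//     gizmos.visible = true;    // draw keypoints, polylines and the axes of the 3D coordinate system
//     // gizmos._render(viewport, trackers) is then called internally every frame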
  17547. ;// CONCATENATED MODULE: ./src/utils/asap.ts
  17548. /*
  17549. * encantar.js
  17550. * GPU-accelerated Augmented Reality for the web
  17551. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17552. *
  17553. * This program is free software: you can redistribute it and/or modify
  17554. * it under the terms of the GNU Lesser General Public License as published
  17555. * by the Free Software Foundation, either version 3 of the License, or
  17556. * (at your option) any later version.
  17557. *
  17558. * This program is distributed in the hope that it will be useful,
  17559. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17560. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17561. * GNU Lesser General Public License for more details.
  17562. *
  17563. * You should have received a copy of the GNU Lesser General Public License
  17564. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17565. *
  17566. * asap.ts
  17567. * Schedule a function to run "as soon as possible"
  17568. */
  17569. /** callbacks */
  17570. const callbacks = [];
  17571. /** arguments to be passed to the callbacks */
  17572. const args = [];
  17573. /** asap key */
17574. const ASAP_KEY = 'asap' + Math.random().toString(36).slice(1);
  17575. // Register an event listener
  17576. window.addEventListener('message', event => {
  17577. if (event.source !== window || event.data !== ASAP_KEY)
  17578. return;
  17579. event.stopPropagation();
  17580. if (callbacks.length == 0)
  17581. return;
  17582. const fn = callbacks.pop();
  17583. const argArray = args.pop();
  17584. fn.apply(undefined, argArray);
  17585. }, true);
  17586. /**
  17587. * Schedule a function to run "as soon as possible"
  17588. * @param fn callback
  17589. * @param params optional parameters
  17590. */
  17591. function asap(fn, ...params) {
  17592. callbacks.unshift(fn);
  17593. args.unshift(params);
  17594. window.postMessage(ASAP_KEY, '*');
  17595. }
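// Usage sketch (illustrative only, not executed): asap() schedules the callback via
// postMessage, which typically fires before a setTimeout(fn, 0) callback because it is
// not subject to timer clamping.
//
//     asap(() => console.log('runs as soon as possible'));
//     asap((a, b) => console.log(a + b), 2, 3);   // extra parameters are forwarded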
  17596. ;// CONCATENATED MODULE: ./src/core/session.ts
  17597. /*
  17598. * encantar.js
  17599. * GPU-accelerated Augmented Reality for the web
  17600. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  17601. *
  17602. * This program is free software: you can redistribute it and/or modify
  17603. * it under the terms of the GNU Lesser General Public License as published
  17604. * by the Free Software Foundation, either version 3 of the License, or
  17605. * (at your option) any later version.
  17606. *
  17607. * This program is distributed in the hope that it will be useful,
  17608. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17609. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17610. * GNU Lesser General Public License for more details.
  17611. *
  17612. * You should have received a copy of the GNU Lesser General Public License
  17613. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  17614. *
  17615. * session.ts
  17616. * WebAR Session
  17617. */
  17618. /** An event emitted by a Session */
  17619. class SessionEvent extends AREvent {
  17620. }
  17621. /** Default options when starting a session */
  17622. const DEFAULT_OPTIONS = {
  17623. mode: 'immersive',
  17624. trackers: [],
  17625. sources: [],
  17626. viewport: null,
  17627. stats: false,
  17628. gizmos: false,
  17629. };
  17630. /**
  17631. * A Session represents an intent to display AR content
  17632. * and encapsulates the main loop (update-render cycle)
  17633. */
  17634. class Session extends AREventTarget {
  17635. /**
  17636. * Constructor
  17637. * @param sources previously initialized sources of data
  17638. * @param mode session mode
  17639. * @param viewport viewport
  17640. * @param stats render stats panel?
  17641. * @param gizmos render gizmos?
  17642. */
  17643. constructor(sources, mode, viewport, stats, gizmos) {
  17644. super();
  17645. this._mode = mode;
  17646. this._trackers = [];
  17647. this._sources = sources;
  17648. this._updateStats = new Stats();
  17649. this._renderStats = new Stats();
  17650. this._active = true;
  17651. this._frameReady = true; // no trackers at the moment
  17652. this._rafQueue = [];
  17653. this._time = new Time();
  17654. this._gizmos = new Gizmos();
  17655. this._gizmos.visible = gizmos;
  17656. // validate the mode
  17657. if (mode == 'immersive') {
  17658. if (viewport.style != 'best-fit' && viewport.style != 'stretch') {
  17659. Utils.warning(`Invalid viewport style \"${viewport.style}\" for the \"${mode}\" mode`);
  17660. viewport.style = 'best-fit';
  17661. }
  17662. }
  17663. else if (mode == 'inline') {
  17664. if (viewport.style != 'inline') {
  17665. Utils.warning(`Invalid viewport style \"${viewport.style}\" for the \"${mode}\" mode`);
  17666. viewport.style = 'inline';
  17667. }
  17668. }
  17669. else
  17670. throw new IllegalArgumentError(`Invalid session mode "${mode}"`);
  17671. // get media
  17672. const media = this.media;
  17673. const getMediaSize = () => media.size;
  17674. // setup the viewport
  17675. this._viewport = viewport;
  17676. this._viewport._init(getMediaSize);
  17677. // setup the main loop
  17678. this._setupUpdateLoop();
  17679. this._setupRenderLoop();
  17680. // setup the stats panel
  17681. this._statsPanel = new StatsPanel(this._viewport);
  17682. this._statsPanel.visible = stats;
  17683. // done!
  17684. Session._count++;
  17685. Utils.log(`The ${mode} session is now active!`);
  17686. }
  17687. /**
  17688. * Checks if the engine can be run in the browser the client is using
  17689. * @returns true if the engine is compatible with the browser
  17690. */
  17691. static isSupported() {
  17692. //alert(Utils.deviceInfo()); // debug
  17693. // If Safari / iOS, require version 15.2 or later
  17694. if (/(Mac|iOS|iPhone|iPad|iPod)/i.test(Utils.platformString())) {
  17695. /*
  17696. iOS compatibility
  17697. -----------------
  17698. The engine is known to work on iPhone 8 or later, with iOS 15.2 or
  17699. later. Tested on many devices, including iPads, on the cloud.
  17700. The engine crashes on an iPhone 13 Pro Max with iOS 15.1 and on an
  17701. iPhone 12 Pro with iOS 15.0.2. A (valid) shader from speedy-vision
  17702. version 0.9.1 (bf-knn) fails to compile: "WebGL error. Program has
  17703. not been successfully linked".
  17704. The engine freezes on an older iPhone 6S (2015) with iOS 15.8.2.
  17705. The exact cause is unknown, but it happens when training an image
  17706. tracker, at ImageTrackerTrainingState._gpuUpdate() (a WebGL error?
  17707. a hardware limitation?)
  17708. Successfully tested down to iPhone 8 so far.
  17709. Successfully tested down to iOS 15.2.
  17710. >> WebGL2 support was introduced in Safari 15 <<
  17711. Note: the webp image format used in the demos is supported on
  17712. Safari for iOS 14+. Desktop Safari 14-15.6 supports webp, but
  17713. requires macOS 11 Big Sur or later. https://caniuse.com/webp
  17714. */
  17715. const ios = /(iPhone|iPad|iPod).* (CPU[\s\w]* OS|CPU iPhone|iOS) ([\d\._]+)/.exec(navigator.userAgent); // Chrome, Firefox, Edge, Safari on iOS
  17716. const safari = /(AppleWebKit)\/.* (Version)\/([\d\.]+)/.exec(navigator.userAgent); // Desktop and Mobile Safari, Epiphany on Linux
  17717. const matches = safari || ios; // match safari first (min version)
  17718. if (matches !== null) {
  17719. const version = matches[3] || '0.0';
  17720. const [x, y] = version.split(/[\._]/).map(v => parseInt(v) | 0);
  17721. if ((x < 15) || (x == 15 && y < 2)) {
  17722. Utils.error(`${matches === safari ? 'Safari' : 'iOS'} version ${version} is not supported! User agent: ${navigator.userAgent}`);
  17723. return false;
  17724. }
  17725. // XXX reject older iPhone models? Which ones?
  17726. /*if(navigator.userAgent.includes('iPhone')) {
  17727. // detect screen size?
  17728. }*/
  17729. }
  17730. else
  17731. Utils.warning(`Unrecognized user agent: ${navigator.userAgent}`);
  17732. }
  17733. // Android: reject very old / weak devices?
  17734. // XXX establish criteria?
  17735. /*if(Utils.isAndroid()) {
  17736. }*/
  17737. // Check if WebGL2 and WebAssembly are supported
  17738. return speedy_vision_default().isSupported();
  17739. }
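/*
 * Usage sketch (illustrative; not part of the bundle). Session.isSupported()
 * is synchronous and returns a boolean, so it can gate the AR experience
 * before instantiate() is called. The fallback below is a hypothetical
 * example; the public API may expose this class under a different name.
 *
 *     if (!Session.isSupported()) {
 *         // e.g., display a fallback message instead of the AR content
 *         document.body.textContent = 'This browser/device lacks WebGL2 or WebAssembly support';
 *     }
 */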
  17740. /**
  17741. * Instantiate a session
  17742. * @param options options
  17743. * @returns a promise that resolves to a new session
  17744. */
  17745. static instantiate(options = DEFAULT_OPTIONS) {
  17746. const { mode = DEFAULT_OPTIONS.mode, sources = DEFAULT_OPTIONS.sources, trackers = DEFAULT_OPTIONS.trackers, viewport = DEFAULT_OPTIONS.viewport, stats = DEFAULT_OPTIONS.stats, gizmos = DEFAULT_OPTIONS.gizmos, } = options;
  17747. Utils.log(`Starting a new ${mode} session...`);
  17748. return speedy_vision_default().Promise.resolve().then(() => {
  17749. // is the engine supported?
  17750. if (!Session.isSupported())
  17751. throw new NotSupportedError('You need a browser/device compatible with WebGL2 and WebAssembly in order to experience Augmented Reality with encantar.js');
  17752. // block multiple immersive sessions
  17753. if (mode !== 'inline' && Session.count > 0)
  17754. throw new IllegalOperationError(`Can't start more than one immersive session`);
  17755. // initialize matrix routines
  17756. return speedy_vision_default().Matrix.ready();
  17757. }).then(() => {
  17758. // validate sources of data
  17759. const videoSources = sources.filter(source => source._type == 'video');
  17760. if (videoSources.length != 1)
17761. throw new IllegalArgumentError(`Exactly one video source of data must be provided`);
  17762. for (let i = sources.length - 1; i >= 0; i--) {
  17763. if (sources.indexOf(sources[i]) < i)
  17764. throw new IllegalArgumentError(`Found repeated sources of data`);
  17765. }
  17766. // initialize sources of data
  17767. return speedy_vision_default().Promise.all(sources.map(source => source._init()));
  17768. }).then(() => {
  17769. // get the viewport
  17770. if (!viewport)
  17771. throw new IllegalArgumentError(`Can't create a session without a viewport`);
  17772. // instantiate session
  17773. return new Session(sources, mode, viewport, stats, gizmos);
  17774. }).then(session => {
  17775. // validate trackers
  17776. if (trackers.length == 0)
  17777. Utils.warning(`No trackers have been attached to the session!`);
  17778. for (let i = trackers.length - 1; i >= 0; i--) {
  17779. if (trackers.indexOf(trackers[i]) < i)
  17780. throw new IllegalArgumentError(`Found repeated trackers`);
  17781. }
  17782. // attach trackers and return the session
  17783. return speedy_vision_default().Promise.all(trackers.map(tracker => session._attachTracker(tracker))).then(() => session);
  17784. }).catch(err => {
  17785. // log errors, if any
  17786. Utils.error(`Can't start session: ${err.message}`);
  17787. throw err;
  17788. });
  17789. }
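/*
 * Usage sketch (illustrative; not part of the bundle): starting an immersive
 * session with the options accepted by instantiate(). The videoSource, tracker
 * and viewport objects are assumed to be created elsewhere with the public
 * factory functions of encantar.js; their construction is not shown in this
 * excerpt. Exactly one video source is required, and only one immersive
 * session may be active at a time.
 *
 *     Session.instantiate({
 *         mode: 'immersive',        // or 'inline'
 *         sources: [ videoSource ], // exactly one video source
 *         trackers: [ tracker ],    // a warning is logged if empty
 *         viewport: viewport,       // required
 *         stats: false,             // show the stats panel?
 *         gizmos: false             // render debugging gizmos?
 *     }).then(session => {
 *         console.log('Started a ' + session.mode + ' session');
 *     }).catch(err => {
 *         console.error("Can't start session: " + err.message);
 *     });
 */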
  17790. /**
  17791. * Number of active sessions
  17792. */
  17793. static get count() {
  17794. return this._count;
  17795. }
  17796. /**
  17797. * End the session
  17798. * @returns promise that resolves after the session is shut down
  17799. */
  17800. end() {
  17801. // is the session inactive?
  17802. if (!this._active)
  17803. return speedy_vision_default().Promise.resolve();
  17804. // deactivate the session
  17805. Utils.log('Shutting down the session...');
  17806. this._active = false; // set before wait()
  17807. // wait a few ms, so that the GPU is no longer sending any data
  17808. const wait = (ms) => new (speedy_vision_default()).Promise(resolve => {
  17809. setTimeout(resolve, ms);
  17810. });
  17811. // release resources
  17812. return wait(100).then(() => speedy_vision_default().Promise.all(
  17813. // release trackers
  17814. this._trackers.map(tracker => tracker._release()))).then(() => speedy_vision_default().Promise.all(
  17815. // release input sources
  17816. this._sources.map(source => source._release()))).then(() => {
  17817. this._sources.length = 0;
  17818. this._trackers.length = 0;
  17819. // release internal components
  17820. this._updateStats.reset();
  17821. this._renderStats.reset();
  17822. this._statsPanel.release();
  17823. this._viewport._release();
  17824. // end the session
  17825. Session._count--;
  17826. // dispatch event
  17827. const event = new SessionEvent('end');
  17828. this.dispatchEvent(event);
  17829. // done!
  17830. Utils.log('Session ended.');
  17831. });
  17832. }
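/*
 * Usage sketch (illustrative; not part of the bundle): shutting down a
 * session. end() resolves after the trackers and the input sources are
 * released, and an 'end' event is dispatched. The addEventListener() call
 * below assumes the event-target interface of the session's base class,
 * which is not shown in this excerpt.
 *
 *     session.addEventListener('end', () => console.log('session has ended'));
 *     session.end().then(() => {
 *         // resources are released; a new session may be started
 *     });
 */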
  17833. /**
  17834. * Analogous to window.requestAnimationFrame()
  17835. * @param callback
  17836. * @returns a handle
  17837. */
  17838. requestAnimationFrame(callback) {
  17839. const handle = Symbol('raf-handle');
  17840. if (this._active)
  17841. this._rafQueue.push([handle, callback]);
  17842. else
  17843. throw new IllegalOperationError(`Can't requestAnimationFrame(): session ended.`);
  17844. return handle;
  17845. }
  17846. /**
  17847. * Analogous to window.cancelAnimationFrame()
  17848. * @param handle a handle returned by this.requestAnimationFrame()
  17849. */
  17850. cancelAnimationFrame(handle) {
  17851. for (let i = this._rafQueue.length - 1; i >= 0; i--) {
  17852. if (this._rafQueue[i][0] === handle) {
  17853. this._rafQueue.splice(i, 1);
  17854. break;
  17855. }
  17856. }
  17857. }
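/*
 * Usage sketch (illustrative; not part of the bundle): a typical animation
 * loop. Unlike window.requestAnimationFrame(), the callback receives the
 * current time and a Frame with the results of the attached trackers, and
 * it must be re-registered on every frame (the queue is cleared after each
 * render).
 *
 *     function animate(time, frame) {
 *         // consume the tracking results of this frame here...
 *         session.requestAnimationFrame(animate); // keep the loop going
 *     }
 *     const handle = session.requestAnimationFrame(animate);
 *     // session.cancelAnimationFrame(handle); // cancels a pending callback
 */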
  17858. /**
  17859. * The underlying media (generally a camera stream)
  17860. * @internal
  17861. */
  17862. get media() {
  17863. for (let i = this._sources.length - 1; i >= 0; i--) {
  17864. if (this._sources[i]._type == 'video')
  17865. return this._sources[i]._data;
  17866. }
  17867. // this shouldn't happen
  17868. throw new IllegalOperationError(`Invalid input source`);
  17869. }
  17870. /**
  17871. * Session mode
  17872. */
  17873. get mode() {
  17874. return this._mode;
  17875. }
  17876. /**
  17877. * Rendering viewport
  17878. */
  17879. get viewport() {
  17880. return this._viewport;
  17881. }
  17882. /**
  17883. * Time utilities
  17884. */
  17885. get time() {
  17886. return this._time;
  17887. }
  17888. /**
  17889. * Visual cues for testing & debugging
  17890. */
  17891. get gizmos() {
  17892. return this._gizmos;
  17893. }
  17894. /**
  17895. * Attach a tracker to the session
  17896. * @param tracker
  17897. */
  17898. _attachTracker(tracker) {
  17899. if (this._trackers.indexOf(tracker) >= 0)
  17900. throw new IllegalArgumentError(`Duplicate tracker attached to the session`);
  17901. else if (!this._active)
  17902. throw new IllegalOperationError(`Inactive session`);
  17903. this._trackers.push(tracker);
  17904. return tracker._init(this);
  17905. }
  17906. /**
  17907. * Render the user media to the background canvas
  17908. */
  17909. _renderUserMedia() {
  17910. const canvas = this._viewport._backgroundCanvas;
  17911. const ctx = canvas.getContext('2d', { alpha: false });
  17912. if (ctx && this.media.type != 'data') {
  17913. ctx.imageSmoothingEnabled = false;
  17914. // draw user media
  17915. const image = this.media.source;
  17916. ctx.drawImage(image, 0, 0, canvas.width, canvas.height);
  17917. // render output image(s)
  17918. for (let i = 0; i < this._trackers.length; i++) {
  17919. const media = this._trackers[i]._output.image;
  17920. if (media !== undefined) {
  17921. const image = media.source;
  17922. ctx.drawImage(image, 0, 0, canvas.width, canvas.height);
  17923. //ctx.drawImage(image, canvas.width - media.width, canvas.height - media.height, media.width, media.height);
  17924. }
  17925. }
  17926. // render gizmos
  17927. this._gizmos._render(this._viewport, this._trackers);
  17928. }
  17929. }
  17930. /**
  17931. * Setup the update loop
  17932. */
  17933. _setupUpdateLoop() {
  17934. const scheduleNextFrame = () => {
  17935. if (this._active) {
  17936. if (Settings.powerPreference == 'high-performance')
  17937. asap(repeat);
  17938. else
  17939. window.requestAnimationFrame(repeat);
  17940. }
  17941. };
  17942. const update = () => {
  17943. this._update().then(scheduleNextFrame).turbocharge();
  17944. };
  17945. function repeat() {
  17946. if (Settings.powerPreference == 'low-power') // 30 fps
  17947. window.requestAnimationFrame(update);
  17948. else
  17949. update();
  17950. }
  17951. window.requestAnimationFrame(update);
  17952. }
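/*
 * Note on the scheduling above: the update cadence follows
 * Settings.powerPreference. With 'high-performance', the next update is
 * scheduled via asap(), i.e., as soon as possible and decoupled from the
 * display refresh. With 'default', one requestAnimationFrame() call per
 * cycle follows the display refresh rate (typically ~60 fps). With
 * 'low-power', two chained requestAnimationFrame() calls halve that
 * rate (~30 fps).
 */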
  17953. /**
  17954. * The core of the update loop
  17955. */
  17956. _update() {
  17957. // active session?
  17958. if (this._active) {
  17959. return speedy_vision_default().Promise.all(
  17960. // update trackers
  17961. this._trackers.map(tracker => tracker._update().turbocharge())).then(() => {
  17962. // update internals
  17963. this._updateStats.update();
  17964. this._frameReady = true;
  17965. }).catch(err => {
  17966. // log error
  17967. Utils.error('Tracking error: ' + err.toString(), err);
  17968. // handle WebGL errors
  17969. const cause = err.cause;
  17970. if (err.name == 'GLError') {
  17971. alert(err.message); // fatal error?
  17972. alert(Utils.deviceInfo()); // display useful info
  17973. throw err;
  17974. }
17975. else if (typeof cause == 'object' && cause !== null && cause.name == 'GLError') {
  17976. alert(err.message);
  17977. alert(cause.message);
  17978. alert(Utils.deviceInfo());
  17979. throw err;
  17980. }
  17981. });
  17982. }
  17983. else {
  17984. // inactive session
  17985. this._updateStats.reset();
  17986. return speedy_vision_default().Promise.resolve();
  17987. }
  17988. }
  17989. /**
  17990. * Setup the render loop
  17991. */
  17992. _setupRenderLoop() {
  17993. let skip = false, toggle = false;
  17994. const render = (timestamp) => {
  17995. const enableFrameSkipping = (Settings.powerPreference == 'low-power');
  17996. const highPerformance = (Settings.powerPreference == 'high-performance');
  17997. // advance time
  17998. this._time._update(timestamp);
  17999. // skip frames
  18000. if (!enableFrameSkipping || !(skip = !skip))
  18001. this._render(timestamp, false);
  18002. //this._render(timestamp, !enableFrameSkipping && !highPerformance && (toggle = !toggle));
  18003. // repeat
  18004. if (this._active)
  18005. window.requestAnimationFrame(render);
  18006. };
  18007. window.requestAnimationFrame(render);
  18008. }
  18009. /**
  18010. * Render a frame (RAF callback)
  18011. * @param time current time, in ms
  18012. * @param skipUserMedia skip copying the pixels of the user media to the background canvas in order to reduce the processing load (video stream is probably at 30fps?)
  18013. */
  18014. _render(time, skipUserMedia) {
  18015. // is the session active?
  18016. if (this._active) {
  18017. // are we ready to render a frame?
  18018. if (this._frameReady) {
  18019. // create a frame
  18020. const results = this._trackers.map(tracker => tracker._output.exports || ({
  18021. tracker: tracker,
  18022. trackables: [],
  18023. }));
  18024. const frame = new Frame(this, results);
  18025. // clone & clear the RAF queue
  18026. const rafQueue = this._rafQueue.slice(0);
  18027. this._rafQueue.length = 0;
  18028. // render user media
  18029. if (!skipUserMedia)
  18030. this._renderUserMedia();
  18031. // render frame
  18032. for (let i = 0; i < rafQueue.length; i++)
  18033. rafQueue[i][1].call(undefined, time, frame);
  18034. // update internals
  18035. this._renderStats.update();
  18036. this._statsPanel.update(time, this._trackers, this._sources, this._updateStats.cyclesPerSecond, this._renderStats.cyclesPerSecond);
  18037. this._frameReady = false;
  18038. }
  18039. else {
  18040. // skip frame
  18041. ;
18042. // we update the renderStats even if we skip the frame;
18043. // otherwise renderStats would (approximately) mirror updateStats.
  18044. // This is a window.requestAnimationFrame() call, so the
  18045. // browser is rendering content even if we're not.
  18046. this._renderStats.update();
  18047. }
  18048. }
  18049. else {
  18050. // inactive session
  18051. this._renderStats.reset();
  18052. }
  18053. }
  18054. }
  18055. /** Number of active sessions */
  18056. Session._count = 0;
  18057. ;// CONCATENATED MODULE: ./src/core/settings.ts
  18058. /*
  18059. * encantar.js
  18060. * GPU-accelerated Augmented Reality for the web
  18061. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18062. *
  18063. * This program is free software: you can redistribute it and/or modify
  18064. * it under the terms of the GNU Lesser General Public License as published
  18065. * by the Free Software Foundation, either version 3 of the License, or
  18066. * (at your option) any later version.
  18067. *
  18068. * This program is distributed in the hope that it will be useful,
  18069. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18070. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18071. * GNU Lesser General Public License for more details.
  18072. *
  18073. * You should have received a copy of the GNU Lesser General Public License
  18074. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18075. *
  18076. * settings.ts
  18077. * Global Settings
  18078. */
  18079. /**
  18080. * Global Settings
  18081. */
  18082. class Settings {
  18083. /**
18084. * Power preference (a trade-off between performance and battery life)
  18085. */
  18086. static get powerPreference() {
  18087. return this._powerPreference;
  18088. }
  18089. /**
18090. * Power preference (a trade-off between performance and battery life)
  18091. * Note: this setting should be the very first thing you set
  18092. * (before the WebGL context is created by Speedy)
  18093. */
  18094. static set powerPreference(value) {
  18095. // validate
  18096. if (Session.count > 0)
  18097. throw new IllegalOperationError(`Can't change the powerPreference while there are active sessions going on`);
  18098. else if (!('low-power' == value || 'default' == value || 'high-performance' == value))
  18099. throw new IllegalArgumentError(`Invalid powerPreference: "${value}"`);
  18100. /*
  18101. // we won't use 'high-performance' for Speedy's GPU computations
  18102. // see the WebGL 1.0 spec sec 5.2.1 for battery life considerations
  18103. // also, it seems like low-power mode may break WebGL2 in some drivers?!
  18104. if(value == 'high-performance')
  18105. Speedy.Settings.powerPreference = 'default';
  18106. else
  18107. Speedy.Settings.powerPreference = value;
  18108. */
  18109. // change the GPU polling mode
  18110. if (value == 'high-performance')
  18111. (speedy_vision_default()).Settings.gpuPollingMode = 'asap';
  18112. else
  18113. (speedy_vision_default()).Settings.gpuPollingMode = 'raf';
  18114. // update the power preference
  18115. this._powerPreference = value;
  18116. // log
  18117. Utils.log(`Changed the powerPreference to "${this._powerPreference}"`);
  18118. }
  18119. }
  18120. Settings._powerPreference = 'default';
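/*
 * Usage sketch (illustrative; not part of the bundle): the power preference
 * must be set while no sessions are active (i.e., before the WebGL context
 * is created), otherwise an IllegalOperationError is thrown. Settings here
 * refers to the class defined above; the public API may expose it under a
 * namespace.
 *
 *     Settings.powerPreference = 'low-power'; // 'default' | 'low-power' | 'high-performance'
 *     // ...then instantiate the session (see Session.instantiate above)
 */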
  18121. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/reference-image-database.ts
  18122. /*
  18123. * encantar.js
  18124. * GPU-accelerated Augmented Reality for the web
  18125. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18126. *
  18127. * This program is free software: you can redistribute it and/or modify
  18128. * it under the terms of the GNU Lesser General Public License as published
  18129. * by the Free Software Foundation, either version 3 of the License, or
  18130. * (at your option) any later version.
  18131. *
  18132. * This program is distributed in the hope that it will be useful,
  18133. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18134. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18135. * GNU Lesser General Public License for more details.
  18136. *
  18137. * You should have received a copy of the GNU Lesser General Public License
  18138. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18139. *
  18140. * reference-image-database.ts
  18141. * A collection of Reference Images
  18142. */
  18143. /** Default capacity of a Reference Image Database */
  18144. const DEFAULT_CAPACITY = 100; // this number should exceed normal usage
  18145. // XXX this number may be changed (is 100 too conservative?)
  18146. // further testing is needed to verify the appropriateness of this number;
  18147. // it depends on the images, on the keypoint descriptors, and even on the target devices
  18148. /** Generate a unique name for a reference image */
  18149. const generateUniqueName = () => 'target-' + Math.random().toString(16).substr(2);
  18150. /**
  18151. * A collection of Reference Images
  18152. */
  18153. class ReferenceImageDatabase {
  18154. /**
  18155. * Constructor
  18156. */
  18157. constructor() {
  18158. this._capacity = DEFAULT_CAPACITY;
  18159. this._database = [];
  18160. this._locked = false;
  18161. }
  18162. /**
  18163. * The number of reference images stored in this database
  18164. */
  18165. get count() {
  18166. return this._database.length;
  18167. }
  18168. /**
  18169. * Maximum number of elements
  18170. */
  18171. get capacity() {
  18172. return this._capacity;
  18173. }
  18174. /**
  18175. * Maximum number of elements
  18176. * Increasing the capacity is considered experimental
  18177. */
  18178. set capacity(value) {
  18179. const capacity = Math.max(0, value | 0);
  18180. if (this.count > capacity)
  18181. throw new IllegalArgumentError(`Can't set the capacity of the database to ${capacity}: it currently stores ${this.count} entries`);
  18182. this._capacity = capacity;
  18183. }
  18184. /**
  18185. * Iterates over the collection
  18186. */
  18187. *[Symbol.iterator]() {
  18188. const ref = this._database.map(entry => entry.referenceImage);
  18189. yield* ref;
  18190. }
  18191. /**
  18192. * Add reference images to this database
  18193. * Add only the images you actually need to track!
18194. * (each image takes up storage space)
  18195. * @param referenceImages one or more reference images with unique names (a unique name will
  18196. * be generated automatically if you don't specify one)
  18197. * @returns a promise that resolves as soon as the images are loaded and added to this database
  18198. */
  18199. add(referenceImages) {
  18200. // handle no input
  18201. if (referenceImages.length == 0)
  18202. return speedy_vision_default().Promise.resolve();
  18203. // handle multiple images as input
  18204. if (referenceImages.length > 1) {
  18205. const promises = referenceImages.map(image => this.add([image]));
  18206. return speedy_vision_default().Promise.all(promises).then(() => void (0));
  18207. }
  18208. // handle a single image as input
  18209. const referenceImage = referenceImages[0];
  18210. // locked database?
  18211. if (this._locked)
  18212. throw new IllegalOperationError(`Can't add reference image to the database: it's locked`);
  18213. // reached full capacity?
  18214. if (this.count >= this.capacity)
18215. throw new IllegalOperationError(`Can't add reference image to the database: the capacity of ${this.capacity} images has been reached.`);
  18216. // check for duplicate names
  18217. if (this._database.find(entry => entry.referenceImage.name === referenceImage.name) !== undefined)
  18218. throw new IllegalArgumentError(`Can't add reference image to the database: found duplicated name "${referenceImage.name}"`);
  18219. // load the media and add the reference image to the database
  18220. return speedy_vision_default().load(referenceImage.image).then(media => {
  18221. this._database.push({
  18222. referenceImage: Object.freeze(Object.assign(Object.assign({}, referenceImage), { name: referenceImage.name || generateUniqueName() })),
  18223. media: media
  18224. });
  18225. });
  18226. }
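/*
 * Usage sketch (illustrative; not part of the bundle): populating a reference
 * image database. Each entry carries an image (e.g., an HTMLImageElement) and,
 * optionally, a unique name; a name is generated automatically when omitted.
 * Obtaining the database via imageTracker.database mirrors its internal use in
 * ImageTrackerTrainingState below, but the public accessor is an assumption of
 * this sketch.
 *
 *     const database = imageTracker.database;
 *     database.add([
 *         { name: 'poster', image: document.getElementById('poster') },
 *         { image: document.getElementById('flyer') } // auto-generated name
 *     ]).then(() => {
 *         console.log(`stored ${database.count} of ${database.capacity} images`);
 *     });
 */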
  18227. /**
  18228. * Lock the database, so that new reference images can no longer be added to it
  18229. * @internal
  18230. */
  18231. _lock() {
  18232. this._locked = true;
  18233. }
  18234. /**
  18235. * Get the media object associated to a reference image
  18236. * @param name reference image name
  18237. * @returns media
  18238. * @internal
  18239. */
  18240. _findMedia(name) {
  18241. for (let i = 0; i < this._database.length; i++) {
  18242. if (this._database[i].referenceImage.name === name)
  18243. return this._database[i].media;
  18244. }
  18245. throw new IllegalArgumentError(`Can't find reference image "${name}"`);
  18246. }
  18247. }
  18248. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/settings.ts
  18249. /*
  18250. * encantar.js
  18251. * GPU-accelerated Augmented Reality for the web
  18252. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18253. *
  18254. * This program is free software: you can redistribute it and/or modify
  18255. * it under the terms of the GNU Lesser General Public License as published
  18256. * by the Free Software Foundation, either version 3 of the License, or
  18257. * (at your option) any later version.
  18258. *
  18259. * This program is distributed in the hope that it will be useful,
  18260. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18261. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18262. * GNU Lesser General Public License for more details.
  18263. *
  18264. * You should have received a copy of the GNU Lesser General Public License
  18265. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18266. *
  18267. * settings.ts
  18268. * Settings of the Image Tracker
  18269. */
  18270. /** Default tracking resolution */
  18271. const DEFAULT_TRACKING_RESOLUTION = 'sm+';
  18272. /** Maximum number of keypoints to be stored for each reference image when in the training state */
  18273. const TRAIN_MAX_KEYPOINTS = 1024; //512;
18274. /** Scale factor (relative to the screen size) adjusted to the aspect ratio of the reference image */
  18275. const TRAIN_IMAGE_SCALE = 0.8; // ORB is not scale invariant
  18276. /** Normalized width & height of an image target, in pixels */
  18277. const TRAIN_TARGET_NORMALIZED_SIZE = 1024; // keypoint positions are stored as fixed point
18278. /** Used to identify the best matches */
  18279. const SCAN_MATCH_RATIO = 0.7; // usually a value in [0.6, 0.8]
  18280. /** Maximum number of keypoints to be analyzed when in the scanning state */
  18281. const SCAN_MAX_KEYPOINTS = 512;
  18282. /** Number of pyramid levels to be scanned by the corner detector when in the scanning & training states */
  18283. const SCAN_PYRAMID_LEVELS = 4; //7;
  18284. /** Scale factor between pyramid levels to be scanned by the corner detector when in the scanning & training states */
  18285. const SCAN_PYRAMID_SCALEFACTOR = 1.19; // 2 ^ 0.25
  18286. /** Threshold of the FAST corner detector used in the scanning/training states */
  18287. const SCAN_FAST_THRESHOLD = 60;
  18288. /** Minimum number of accepted matches for us to move out from the scanning state */
  18289. const SCAN_MIN_MATCHES = 20; //30;
  18290. /** When in the scanning state, we require the image to be matched during a few consecutive frames before accepting it */
  18291. const SCAN_CONSECUTIVE_FRAMES = 30; //15;//45;
  18292. /** Reprojection error, in pixels, used when estimating a motion model (scanning state) */
  18293. const SCAN_RANSAC_REPROJECTIONERROR = 5;
  18294. /** Number of tables used in the LSH-based keypoint matching */
  18295. const SCAN_LSH_TABLES = 8; // up to 32
  18296. /** Hash size, in bits, used in the LSH-based keypoint matching */
  18297. const SCAN_LSH_HASHSIZE = 15; // up to 16
  18298. /** Use the Nightvision filter when in the scanning/training state? */
  18299. const SCAN_WITH_NIGHTVISION = true;
  18300. /** Nightvision filter: gain */
  18301. const NIGHTVISION_GAIN = 0.3; // 0.2;
  18302. /** Nightvision filter: offset */
  18303. const NIGHTVISION_OFFSET = 0.5;
  18304. /** Nightvision filter: decay */
  18305. const NIGHTVISION_DECAY = 0.0;
  18306. /** Nightvision filter: quality level */
  18307. const NIGHTVISION_QUALITY = 'low';
  18308. /** Kernel size (square) of the Gaussian filter applied before computing the ORB descriptors */
  18309. const ORB_GAUSSIAN_KSIZE = 9;
  18310. /** Sigma of the Gaussian filter applied before computing the ORB descriptors */
  18311. const ORB_GAUSSIAN_SIGMA = 2.0;
  18312. /** Kernel size (square) of the Gaussian filter applied before subpixel refinement for noise reduction */
  18313. const SUBPIXEL_GAUSSIAN_KSIZE = 5;
  18314. /** Sigma of the Gaussian filter applied before subpixel refinement for noise reduction */
  18315. const SUBPIXEL_GAUSSIAN_SIGMA = 1.0;
  18316. /** Subpixel refinement method */
  18317. const SUBPIXEL_METHOD = 'bilinear-upsample'; // 'quadratic1d';
  18318. /** Minimum acceptable number of matched keypoints when in the tracking state */
  18319. const TRACK_MIN_MATCHES = 4; //10; //20;
  18320. /** Maximum number of keypoints to be analyzed in the tracking state */
  18321. const TRACK_MAX_KEYPOINTS = 200; //400; // <-- impacts performance!
18322. /** Capacity of the keypoint detector used in the tracking state */
  18323. const TRACK_DETECTOR_CAPACITY = 2048; //4096;
  18324. /** Quality of the Harris/Shi-Tomasi corner detector */
  18325. const TRACK_HARRIS_QUALITY = 0.005; // get a lot of keypoints
  18326. /** Use the Nightvision filter when in the tracking state? */
  18327. const TRACK_WITH_NIGHTVISION = false; // produces shaking?
  18328. /** Relative size (%) of the (top, right, bottom, left) borders of the rectified image */
  18329. const TRACK_RECTIFIED_BORDER = 0.15; //0.20;
  18330. /** Relative size (%) used to clip keypoints from the borders of the rectified image */
  18331. const TRACK_CLIPPING_BORDER = TRACK_RECTIFIED_BORDER * 1.20; //1.25; //1.15;
  18332. /** Number of iterations used to refine the target image before tracking */
  18333. const TRACK_REFINEMENT_ITERATIONS = 3;
  18334. /** Reprojection error, in pixels, used when estimating a motion model (tracking state) */
  18335. const TRACK_RANSAC_REPROJECTIONERROR = 3; //2.5;
  18336. /** We use a N x N grid to spatially distribute the keypoints in order to compute a better homography */
  18337. const TRACK_GRID_GRANULARITY = 10; //20; // the value of N
18338. /** Used to identify the best matches */
  18339. const TRACK_MATCH_RATIO = 0.75; // usually a value in [0.6, 0.8] - low values => strict tracking
  18340. /** Number of consecutive frames in which we tolerate a "target lost" situation */
  18341. const TRACK_LOST_TOLERANCE = 10;
  18342. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/state.ts
  18343. /*
  18344. * encantar.js
  18345. * GPU-accelerated Augmented Reality for the web
  18346. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18347. *
  18348. * This program is free software: you can redistribute it and/or modify
  18349. * it under the terms of the GNU Lesser General Public License as published
  18350. * by the Free Software Foundation, either version 3 of the License, or
  18351. * (at your option) any later version.
  18352. *
  18353. * This program is distributed in the hope that it will be useful,
  18354. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18355. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18356. * GNU Lesser General Public License for more details.
  18357. *
  18358. * You should have received a copy of the GNU Lesser General Public License
  18359. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18360. *
  18361. * state.ts
  18362. * Abstract state of the Image Tracker
  18363. */
  18364. /**
  18365. * Abstract state of the Image Tracker
  18366. */
  18367. class ImageTrackerState {
  18368. /**
  18369. * Constructor
  18370. * @param name
  18371. * @param imageTracker
  18372. */
  18373. constructor(name, imageTracker) {
  18374. this._name = name;
  18375. this._imageTracker = imageTracker;
  18376. this._pipeline = this._createPipeline();
  18377. }
  18378. /**
  18379. * State name
  18380. */
  18381. get name() {
  18382. return this._name;
  18383. }
  18384. /**
  18385. * AR screen size
  18386. */
  18387. get screenSize() {
  18388. const screen = this._pipeline.node('screen');
  18389. if (!screen)
  18390. throw new IllegalOperationError();
  18391. // this is available once this state has run at least once
  18392. return screen.size;
  18393. }
  18394. /**
  18395. * Initialize the state
  18396. */
  18397. init() {
  18398. }
  18399. /**
  18400. * Release resources
  18401. */
  18402. release() {
  18403. return this._pipeline.release();
  18404. }
  18405. /**
  18406. * Update the state
  18407. * @param media user media
  18408. * @param screenSize AR screen size for image processing
  18410. * @returns promise
  18411. */
  18412. update(media, screenSize) {
  18413. const source = this._pipeline.node('source');
  18414. const screen = this._pipeline.node('screen');
  18415. // validate the pipeline
  18416. if (!source || !screen)
  18417. throw new IllegalOperationError();
  18418. // prepare the pipeline
  18419. source.media = media;
  18420. screen.size = screenSize;
  18421. // run the pipeline
  18422. return this._beforeUpdate().then(() => this._gpuUpdate()).then(result => this._afterUpdate(result));
  18423. }
  18424. /**
  18425. * Called as soon as this becomes the active state, just before update() runs for the first time
  18426. * @param settings
  18427. */
  18428. onEnterState(settings) {
  18429. }
  18430. /**
  18431. * Called when leaving the state, after update()
  18432. */
  18433. onLeaveState() {
  18434. }
  18435. /**
  18436. * Called just before the GPU processing
  18437. * @returns promise
  18438. */
  18439. _beforeUpdate() {
  18440. return speedy_vision_default().Promise.resolve();
  18441. }
  18442. /**
  18443. * GPU processing
  18444. * @returns promise with the pipeline results
  18445. */
  18446. _gpuUpdate() {
  18447. return this._pipeline.run();
  18448. }
  18449. //
  18450. // Some utility methods common to various states
  18451. //
  18452. /**
  18453. * Find the coordinates of a polyline surrounding the target image
  18454. * @param homography maps the target image to the AR screen
  18455. * @param targetSize size of the target space
  18456. * @returns promise that resolves to 4 points in AR screen space
  18457. */
  18458. _findPolylineCoordinates(homography, targetSize) {
  18459. const w = targetSize.width, h = targetSize.height;
  18460. const referenceImageCoordinates = speedy_vision_default().Matrix(2, 4, [
  18461. 0, 0,
  18462. w, 0,
  18463. w, h,
  18464. 0, h,
  18465. ]);
  18466. const polylineCoordinates = speedy_vision_default().Matrix.Zeros(2, 4);
  18467. return speedy_vision_default().Matrix.applyPerspectiveTransform(polylineCoordinates, referenceImageCoordinates, homography);
  18468. }
  18469. /**
  18470. * Find a polyline surrounding the target image
  18471. * @param homography maps the target image to the AR screen
  18472. * @param targetSize size of the target space
  18473. * @returns promise that resolves to 4 points in AR screen space
  18474. */
  18475. _findPolyline(homography, targetSize) {
  18476. return this._findPolylineCoordinates(homography, targetSize).then(polylineCoordinates => {
  18477. const polydata = polylineCoordinates.read();
  18478. const polyline = Array.from({ length: 4 }, (_, i) => speedy_vision_default().Point2(polydata[2 * i], polydata[2 * i + 1]));
  18479. return polyline;
  18480. });
  18481. }
  18482. /**
  18483. * Whether or not to rotate the warped image in order to best fit the AR screen
  18484. * @param media media associated with the reference image
  18485. * @param screenSize AR screen
  18486. * @returns boolean
  18487. */
  18488. _mustRotateWarpedImage(media, screenSize) {
  18489. const screenAspectRatio = screenSize.width / screenSize.height;
  18490. const mediaAspectRatio = media.width / media.height;
  18491. const eps = 0.1;
  18492. return (mediaAspectRatio >= 1 + eps && screenAspectRatio < 1 - eps) || (mediaAspectRatio < 1 - eps && screenAspectRatio >= 1 + eps);
  18493. }
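/*
 * Worked example (illustrative): a landscape reference image of 1024x768
 * (aspect ratio ~1.33 >= 1.1) shown on a portrait AR screen of 480x800
 * (aspect ratio 0.6 < 0.9) satisfies the first clause above, so the warped
 * image is rotated to better fit the screen. Two landscape shapes, or two
 * portrait shapes, never trigger a rotation.
 */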
  18494. /**
  18495. * Find a rectification matrix to be applied to an image fitting the entire AR screen
  18496. * @param media media associated with the reference image
  18497. * @param screenSize AR screen
  18498. * @returns promise that resolves to a rectification matrix
  18499. */
  18500. _findRectificationMatrixOfFullscreenImage(media, screenSize) {
  18501. const b = TRACK_RECTIFIED_BORDER;
  18502. const sw = screenSize.width, sh = screenSize.height;
  18503. const mediaAspectRatio = media.width / media.height;
  18504. const mustRotate = this._mustRotateWarpedImage(media, screenSize);
  18505. // compute the vertices of the target in screen space
  18506. // we suppose portrait or landscape mode for both screen & media
  18507. const c = mustRotate ? 1 / mediaAspectRatio : mediaAspectRatio;
  18508. const top = sw >= sh ? b * sh : (sh - sw * (1 - 2 * b) / c) / 2;
  18509. const left = sw >= sh ? (sw - sh * (1 - 2 * b) * c) / 2 : b * sw;
  18510. const right = sw - left;
  18511. const bottom = sh - top;
  18512. const targetVertices = speedy_vision_default().Matrix(2, 4, [
  18513. left, top,
  18514. right, top,
  18515. right, bottom,
  18516. left, bottom,
  18517. ]);
  18518. const screenVertices = speedy_vision_default().Matrix(2, 4, [
  18519. 0, 0,
  18520. sw, 0,
  18521. sw, sh,
  18522. 0, sh
  18523. ]);
  18524. const preRectificationMatrix = speedy_vision_default().Matrix.Eye(3);
  18525. const alignmentMatrix = speedy_vision_default().Matrix.Zeros(3);
  18526. const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
  18527. return (mustRotate ? speedy_vision_default().Matrix.perspective(
18528. // pre-rectification: rotate by 90 degrees counterclockwise and scale to screenSize
  18529. preRectificationMatrix, screenVertices, speedy_vision_default().Matrix(2, 4, [0, sh, 0, 0, sw, 0, sw, sh])) : speedy_vision_default().Promise.resolve(preRectificationMatrix)).then(_ =>
  18530. // alignment: align the target to the center of the screen
  18531. speedy_vision_default().Matrix.perspective(alignmentMatrix, screenVertices, targetVertices)).then(_ =>
  18532. // pre-rectify and then align
  18533. rectificationMatrix.setTo(alignmentMatrix.times(preRectificationMatrix)));
  18534. }
  18535. /**
  18536. * Find a rectification matrix to be applied to the target image
  18537. * @param homography maps a reference image to the AR screen
  18538. * @param targetSize size of the target space
  18539. * @param media media associated with the reference image
  18540. * @param screenSize AR screen
  18541. * @returns promise that resolves to a rectification matrix
  18542. */
  18543. _findRectificationMatrixOfCameraImage(homography, targetSize, media, screenSize) {
  18544. const sw = screenSize.width, sh = screenSize.height;
  18545. const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
  18546. const rectificationMatrix = speedy_vision_default().Matrix.Zeros(3);
  18547. return this._findPolylineCoordinates(homography, targetSize).then(polyline =>
  18548. // from target space to (full)screen
  18549. speedy_vision_default().Matrix.perspective(rectificationMatrix, polyline, screen)).then(_ =>
  18550. // from (full)screen to rectified coordinates
  18551. this._findRectificationMatrixOfFullscreenImage(media, screenSize)).then(mat =>
  18552. // function composition
  18553. rectificationMatrix.setTo(mat.times(rectificationMatrix)));
  18554. }
  18555. }
  18556. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/initial.ts
  18557. /*
  18558. * encantar.js
  18559. * GPU-accelerated Augmented Reality for the web
  18560. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18561. *
  18562. * This program is free software: you can redistribute it and/or modify
  18563. * it under the terms of the GNU Lesser General Public License as published
  18564. * by the Free Software Foundation, either version 3 of the License, or
  18565. * (at your option) any later version.
  18566. *
  18567. * This program is distributed in the hope that it will be useful,
  18568. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18569. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18570. * GNU Lesser General Public License for more details.
  18571. *
  18572. * You should have received a copy of the GNU Lesser General Public License
  18573. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18574. *
  18575. * initial.ts
  18576. * Initial state of the Image Tracker
  18577. */
  18578. /**
  18579. * The purpose of the initial state of the Image Tracker
  18580. * is to initialize the training state using the state machine
  18581. */
  18582. class ImageTrackerInitialState extends ImageTrackerState {
  18583. /**
  18584. * Constructor
  18585. * @param imageTracker
  18586. */
  18587. constructor(imageTracker) {
  18588. super('initial', imageTracker);
  18589. }
  18590. /**
  18591. * Called just before the GPU processing
  18592. * @returns promise
  18593. */
  18594. _beforeUpdate() {
  18595. const source = this._pipeline.node('source');
  18596. const media = source.media;
  18597. const mediaSize = media.size;
  18598. if (mediaSize.area() < this.screenSize.area())
  18599. Utils.warning('The resolution of the tracker is larger than the resolution of the video. This is inefficient.');
  18600. return speedy_vision_default().Promise.resolve();
  18601. }
  18602. /**
  18603. * Post processing that takes place just after the GPU processing
  18604. * @param result pipeline results
  18605. * @returns state output
  18606. */
  18607. _afterUpdate(result) {
  18608. return speedy_vision_default().Promise.resolve({
  18609. nextState: 'training',
  18610. trackerOutput: {},
  18611. });
  18612. }
  18613. /**
  18614. * Create & setup the pipeline
  18615. * @returns pipeline
  18616. */
  18617. _createPipeline() {
  18618. // this pipeline does nothing useful,
  18619. // but it does preload some shaders...
  18620. const pipeline = speedy_vision_default().Pipeline();
  18621. const source = speedy_vision_default().Image.Source('source');
  18622. const screen = speedy_vision_default().Transform.Resize('screen');
  18623. const greyscale = speedy_vision_default().Filter.Greyscale();
  18624. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp();
  18625. const nightvision = speedy_vision_default().Filter.Nightvision();
  18626. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  18627. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  18628. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  18629. const blur = speedy_vision_default().Filter.GaussianBlur();
  18630. const clipper = speedy_vision_default().Keypoint.Clipper();
  18631. const borderClipper = speedy_vision_default().Keypoint.BorderClipper();
  18632. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  18633. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  18634. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  18635. const keypointRectifier = speedy_vision_default().Keypoint.Transformer();
  18636. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink();
  18637. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source();
  18638. const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
  18639. const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer();
  18640. const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer();
  18641. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints();
  18642. source.media = null;
  18643. screen.size = speedy_vision_default().Size(0, 0);
  18644. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  18645. nightvision.quality = NIGHTVISION_QUALITY;
  18646. subpixel.method = SUBPIXEL_METHOD;
  18647. //borderClipper.imageSize = screen.size;
  18648. borderClipper.imageSize = speedy_vision_default().Size(100, 100);
  18649. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  18650. matcher.k = 1; //2;
  18651. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  18652. keypointPortalSource.source = keypointPortalSink;
  18653. muxOfReferenceKeypoints.port = 0;
  18654. muxOfBufferOfReferenceKeypoints.port = 0;
  18655. bufferOfReferenceKeypoints.frozen = false;
  18656. keypointSink.turbo = false;
  18657. // prepare input
  18658. source.output().connectTo(screen.input());
  18659. screen.output().connectTo(greyscale.input());
  18660. // preprocess images
  18661. greyscale.output().connectTo(imageRectifier.input());
  18662. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  18663. imageRectifier.output().connectTo(nightvision.input());
  18664. nightvision.output().connectTo(nightvisionMux.input('in1'));
  18665. nightvisionMux.output().connectTo(blur.input());
  18666. // keypoint detection & clipping
  18667. nightvisionMux.output().connectTo(detector.input());
  18668. detector.output().connectTo(borderClipper.input());
  18669. borderClipper.output().connectTo(clipper.input());
  18670. // keypoint refinement
  18671. imageRectifier.output().connectTo(denoiser.input());
  18672. denoiser.output().connectTo(subpixel.input('image'));
  18673. clipper.output().connectTo(subpixel.input('keypoints'));
  18674. // keypoint description
  18675. blur.output().connectTo(descriptor.input('image'));
  18676. subpixel.output().connectTo(descriptor.input('keypoints'));
  18677. // keypoint matching
  18678. descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
  18679. muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
  18680. muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
  18681. descriptor.output().connectTo(matcher.input('keypoints'));
  18682. // store reference keypoints
  18683. keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
  18684. bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
  18685. keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
  18686. // portals
  18687. descriptor.output().connectTo(keypointPortalSink.input());
  18688. // prepare output
  18689. descriptor.output().connectTo(keypointRectifier.input());
  18690. keypointRectifier.output().connectTo(keypointSink.input());
  18691. matcher.output().connectTo(keypointSink.input('matches'));
  18692. // done!
  18693. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
  18694. /*
  18695. const run = pipeline.run.bind(pipeline);
  18696. pipeline.run = function() {
  18697. console.time("TIME");
  18698. return run().then(x => {
  18699. console.timeEnd("TIME");
  18700. return x;
  18701. });
  18702. };
  18703. */
  18704. return pipeline;
  18705. }
  18706. }
  18707. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/training.ts
  18708. /*
  18709. * encantar.js
  18710. * GPU-accelerated Augmented Reality for the web
  18711. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18712. *
  18713. * This program is free software: you can redistribute it and/or modify
  18714. * it under the terms of the GNU Lesser General Public License as published
  18715. * by the Free Software Foundation, either version 3 of the License, or
  18716. * (at your option) any later version.
  18717. *
  18718. * This program is distributed in the hope that it will be useful,
  18719. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18720. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18721. * GNU Lesser General Public License for more details.
  18722. *
  18723. * You should have received a copy of the GNU Lesser General Public License
  18724. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18725. *
  18726. * training.ts
  18727. * Training state of the Image Tracker
  18728. */
  18729. /**
  18730. * Training state of the Image Tracker
  18731. */
  18732. class ImageTrackerTrainingState extends ImageTrackerState {
  18733. /**
  18734. * Constructor
  18735. * @param imageTracker
  18736. */
  18737. constructor(imageTracker) {
  18738. super('training', imageTracker);
  18739. this._currentImageIndex = 0;
  18740. this._image = [];
  18741. // initialize the training map
  18742. this._trainingMap = {
  18743. referenceImageIndex: [],
  18744. referenceImage: [],
  18745. keypoints: []
  18746. };
  18747. }
  18748. /**
  18749. * Called as soon as this becomes the active state, just before update() runs for the first time
  18750. * @param settings
  18751. */
  18752. onEnterState(settings) {
  18753. const database = this._imageTracker.database;
  18754. // validate
  18755. if (database.count == 0)
  18756. throw new TrainingError(`Can't train the Image Tracker: the Reference Image Database is empty`);
  18757. // prepare to train...
  18758. this._currentImageIndex = 0;
  18759. this._image.length = 0;
  18760. this._trainingMap.referenceImageIndex.length = 0;
  18761. this._trainingMap.referenceImage.length = 0;
  18762. this._trainingMap.keypoints.length = 0;
  18763. // lock the database
  18764. Utils.log(`Image Tracker: training using ${database.count} reference image${database.count != 1 ? 's' : ''}`);
  18765. database._lock();
  18766. // collect all images
  18767. for (const referenceImage of database)
  18768. this._image.push(referenceImage);
  18769. }
  18770. /**
  18771. * Called just before the GPU processing
  18772. * @returns promise
  18773. */
  18774. _beforeUpdate() {
  18775. const arScreenSize = this.screenSize;
  18776. const source = this._pipeline.node('source');
  18777. const screen = this._pipeline.node('screen');
  18778. const keypointScaler = this._pipeline.node('keypointScaler');
  18779. // this shouldn't happen
  18780. if (this._currentImageIndex >= this._image.length)
  18781. return speedy_vision_default().Promise.reject(new IllegalOperationError());
  18782. // set the appropriate training media
  18783. const database = this._imageTracker.database;
  18784. const referenceImage = this._image[this._currentImageIndex];
  18785. const media = database._findMedia(referenceImage.name);
  18786. source.media = media;
  18787. // compute the appropriate size of the training image space
  18788. const resolution = this._imageTracker.resolution;
  18789. const scale = TRAIN_IMAGE_SCALE; // ORB is not scale-invariant
  18790. const aspectRatioOfTrainingImage = media.width / media.height;
  18791. /*
  18792. let sin = 0, cos = 1;
  18793. if((aspectRatioOfSourceVideo - 1) * (aspectRatioOfTrainingImage - 1) >= 0) {
  18794. // training image and source video: both in landscape mode or both in portrait mode
  18795. screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
  18796. screen.size.width = Math.round(screen.size.width * scale);
  18797. screen.size.height = Math.round(screen.size.height * scale);
  18798. }
  18799. else if(aspectRatioOfTrainingImage > aspectRatioOfSourceVideo) {
  18800. // training image: portrait mode; source video: landscape mode
  18801. screen.size = Utils.resolution(resolution, 1 / aspectRatioOfTrainingImage);
  18802. screen.size.width = Math.round(screen.size.width * scale);
  18803. screen.size.height = Math.round(screen.size.height * scale);
  18804. sin = 1; cos = 0; // rotate 90deg
  18805. }
  18806. else {
  18807. // training image: landscape mode; source video: portrait mode
  18808. }
  18809. */
  18810. screen.size = Utils.resolution(resolution, aspectRatioOfTrainingImage);
  18811. screen.size.width = Math.round(screen.size.width * scale);
  18812. screen.size.height = Math.round(screen.size.height * scale);
  18813. // convert keypoints from the training image space to AR screen space
  18814. // let's pretend that trained keypoints belong to the AR screen space,
  18815. // regardless of the size of the target image. This will make things
  18816. // easier when computing the homography.
  18817. /*
  18818. const sw = arScreenSize.width / screen.size.width;
  18819. const sh = arScreenSize.height / screen.size.height;
  18820. */
  18821. const sw = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.width;
  18822. const sh = TRAIN_TARGET_NORMALIZED_SIZE / screen.size.height;
  18823. keypointScaler.transform = speedy_vision_default().Matrix(3, 3, [
  18824. sw, 0, 0,
  18825. 0, sh, 0,
  18826. 0, 0, 1,
  18827. ]);
  18828. // log
  18829. Utils.log(`Image Tracker: training using reference image "${referenceImage.name}" at ${screen.size.width}x${screen.size.height}...`);
  18830. // done!
  18831. return speedy_vision_default().Promise.resolve();
  18832. }
  18833. /**
  18834. * Post processing that takes place just after the GPU processing
  18835. * @param result pipeline results
  18836. * @returns state output
  18837. */
  18838. _afterUpdate(result) {
  18839. const referenceImage = this._image[this._currentImageIndex];
  18840. const keypoints = result.keypoints;
  18841. const image = result.image;
  18842. // log
  18843. Utils.log(`Image Tracker: found ${keypoints.length} keypoints in reference image "${referenceImage.name}"`);
  18844. // set the training map, so that we can map all keypoints of the current image to the current image
  18845. this._trainingMap.referenceImage.push(referenceImage);
  18846. for (let i = 0; i < keypoints.length; i++) {
  18847. this._trainingMap.keypoints.push(keypoints[i]);
  18848. this._trainingMap.referenceImageIndex.push(this._currentImageIndex);
  18849. }
  18850. // the current image has been processed!
  18851. ++this._currentImageIndex;
  18852. // set output
  18853. if (this._currentImageIndex >= this._image.length) {
  18854. // finished training!
  18855. return speedy_vision_default().Promise.resolve({
  18856. //nextState: 'training',
  18857. nextState: 'scanning',
  18858. nextStateSettings: {
  18859. keypoints: this._trainingMap.keypoints,
  18860. },
  18861. trackerOutput: {},
  18862. //trackerOutput: { image, keypoints, screenSize: this.screenSize },
  18863. });
  18864. }
  18865. else {
  18866. // we're not done yet
  18867. return speedy_vision_default().Promise.resolve({
  18868. nextState: 'training',
  18869. trackerOutput: {},
  18870. //trackerOutput: { image, keypoints, screenSize: this.screenSize },
  18871. });
  18872. }
  18873. }
  18874. /**
  18875. * Create & setup the pipeline
  18876. * @returns pipeline
  18877. */
  18878. _createPipeline() {
  18879. const pipeline = speedy_vision_default().Pipeline();
  18880. const source = speedy_vision_default().Image.Source('source');
  18881. const screen = speedy_vision_default().Transform.Resize('screen');
  18882. const greyscale = speedy_vision_default().Filter.Greyscale();
  18883. const blur = speedy_vision_default().Filter.GaussianBlur();
  18884. const nightvision = speedy_vision_default().Filter.Nightvision();
  18885. const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
  18886. const pyramid = speedy_vision_default().Image.Pyramid();
  18887. const detector = speedy_vision_default().Keypoint.Detector.FAST('fast');
  18888. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  18889. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  18890. const blurredPyramid = speedy_vision_default().Image.Pyramid();
  18891. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  18892. const clipper = speedy_vision_default().Keypoint.Clipper();
  18893. const keypointScaler = speedy_vision_default().Keypoint.Transformer('keypointScaler');
  18894. const keypointSink = speedy_vision_default().Keypoint.Sink('keypoints');
  18895. const imageSink = speedy_vision_default().Image.Sink('image');
  18896. source.media = null;
  18897. screen.size = speedy_vision_default().Size(0, 0);
  18898. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  18899. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  18900. nightvision.gain = NIGHTVISION_GAIN;
  18901. nightvision.offset = NIGHTVISION_OFFSET;
  18902. nightvision.decay = NIGHTVISION_DECAY;
  18903. nightvision.quality = NIGHTVISION_QUALITY;
  18904. nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  18905. detector.levels = SCAN_PYRAMID_LEVELS;
  18906. detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
  18907. detector.threshold = SCAN_FAST_THRESHOLD;
  18908. detector.capacity = 8192;
  18909. subpixel.method = SUBPIXEL_METHOD;
  18910. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  18911. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  18912. clipper.size = TRAIN_MAX_KEYPOINTS;
  18913. keypointScaler.transform = speedy_vision_default().Matrix.Eye(3);
  18914. keypointSink.turbo = false;
  18915. // prepare input
  18916. source.output().connectTo(screen.input());
  18917. screen.output().connectTo(greyscale.input());
  18918. // preprocess image
  18919. greyscale.output().connectTo(nightvisionMux.input('in0'));
  18920. greyscale.output().connectTo(nightvision.input());
  18921. nightvision.output().connectTo(nightvisionMux.input('in1'));
  18922. nightvisionMux.output().connectTo(pyramid.input());
  18923. // keypoint detection
  18924. pyramid.output().connectTo(detector.input());
  18925. detector.output().connectTo(clipper.input());
  18926. // keypoint refinement
  18927. greyscale.output().connectTo(denoiser.input()); // reduce noise
  18928. denoiser.output().connectTo(blurredPyramid.input());
  18929. clipper.output().connectTo(subpixel.input('keypoints'));
  18930. blurredPyramid.output().connectTo(subpixel.input('image'));
  18931. // keypoint description
  18932. greyscale.output().connectTo(blur.input());
  18933. blur.output().connectTo(descriptor.input('image'));
  18934. clipper.output().connectTo(descriptor.input('keypoints'));
  18935. // prepare output
  18936. descriptor.output().connectTo(keypointScaler.input());
  18937. keypointScaler.output().connectTo(keypointSink.input());
  18938. nightvisionMux.output().connectTo(imageSink.input());
  18939. // done!
  18940. pipeline.init(source, screen, greyscale, nightvision, nightvisionMux, pyramid, detector, blur, descriptor, clipper, denoiser, blurredPyramid, subpixel, keypointScaler, keypointSink, imageSink);
  18941. return pipeline;
  18942. }
  18943. /**
  18944. * Get reference image
18945. * @param keypointIndex index of a keypoint of the trained set
18946. * @returns the corresponding reference image, or null if not found
  18947. */
  18948. referenceImageOfKeypoint(keypointIndex) {
  18949. const imageIndex = this.referenceImageIndexOfKeypoint(keypointIndex);
  18950. if (imageIndex < 0)
  18951. return null;
  18952. return this._trainingMap.referenceImage[imageIndex];
  18953. }
  18954. /**
  18955. * Get reference image index
18956. * @param keypointIndex index of a keypoint of the trained set
  18957. * @returns reference image index, or -1 if not found
  18958. */
  18959. referenceImageIndexOfKeypoint(keypointIndex) {
  18960. const n = this._trainingMap.referenceImageIndex.length;
  18961. if (keypointIndex < 0 || keypointIndex >= n)
  18962. return -1;
  18963. const imageIndex = this._trainingMap.referenceImageIndex[keypointIndex];
  18964. if (imageIndex < 0 || imageIndex >= this._trainingMap.referenceImage.length)
  18965. return -1;
  18966. return imageIndex;
  18967. }
  18968. /**
  18969. * Get keypoint of the trained set
18970. * @param keypointIndex index of a keypoint of the trained set
18971. * @returns the keypoint, or null if not found
  18972. */
  18973. referenceKeypoint(keypointIndex) {
  18974. if (keypointIndex < 0 || keypointIndex >= this._trainingMap.keypoints.length)
  18975. return null;
  18976. return this._trainingMap.keypoints[keypointIndex];
  18977. }
  18978. }
  18979. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/scanning.ts
  18980. /*
  18981. * encantar.js
  18982. * GPU-accelerated Augmented Reality for the web
  18983. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  18984. *
  18985. * This program is free software: you can redistribute it and/or modify
  18986. * it under the terms of the GNU Lesser General Public License as published
  18987. * by the Free Software Foundation, either version 3 of the License, or
  18988. * (at your option) any later version.
  18989. *
  18990. * This program is distributed in the hope that it will be useful,
  18991. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18992. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18993. * GNU Lesser General Public License for more details.
  18994. *
  18995. * You should have received a copy of the GNU Lesser General Public License
  18996. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  18997. *
  18998. * scanning.ts
  18999. * Scanning state of the Image Tracker
  19000. */
  19001. /** Default target space size (used when training) */
  19002. const DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
  19003. /** Port of the portal multiplexer: get new data from the camera */
  19004. const PORT_CAMERA = 0;
  19005. /** Port of the portal multiplexer: get previously memorized data */
  19006. const PORT_MEMORY = 1;
  19007. /**
  19008. * Scanning state of the Image Tracker
  19009. */
  19010. class ImageTrackerScanningState extends ImageTrackerState {
  19011. /**
  19012. * Constructor
  19013. * @param imageTracker
  19014. */
  19015. constructor(imageTracker) {
  19016. super('scanning', imageTracker);
  19017. this._counter = 0;
  19018. this._bestScore = 0;
  19019. this._bestHomography = speedy_vision_default().Matrix.Eye(3);
  19020. }
  19021. /**
  19022. * Called as soon as this becomes the active state, just before update() runs for the first time
  19023. * @param settings
  19024. */
  19025. onEnterState(settings) {
  19026. const imagePortalMux = this._pipeline.node('imagePortalMux');
  19027. const lshTables = this._pipeline.node('lshTables');
  19028. const keypoints = settings.keypoints;
  19029. // set attributes
  19030. this._counter = 0;
  19031. this._bestScore = 0;
  19032. // reset the image memorization circuit
  19033. imagePortalMux.port = PORT_CAMERA;
  19034. // prepare the keypoint matcher
  19035. if (keypoints !== undefined)
  19036. lshTables.keypoints = keypoints;
  19037. }
  19038. /**
  19039. * Post processing that takes place just after the GPU processing
  19040. * @param result pipeline results
  19041. * @returns state output
  19042. */
  19043. _afterUpdate(result) {
  19044. const imagePortalMux = this._pipeline.node('imagePortalMux');
  19045. const keypoints = result.keypoints;
  19046. const matchedKeypoints = this._goodMatches(keypoints);
  19047. // tracker output
  19048. const trackerOutput = {
  19049. keypoints: keypoints,
  19050. screenSize: this.screenSize
  19051. };
  19052. // keep the last memorized image
  19053. imagePortalMux.port = PORT_MEMORY;
  19054. // have we found enough matches...?
  19055. if (matchedKeypoints.length >= SCAN_MIN_MATCHES) {
  19056. return this._findHomography(matchedKeypoints).then(([homography, score]) => {
  19057. // have we found the best homography so far?
  19058. if (score >= this._bestScore) {
  19059. // store it only if we'll be running the pipeline again
  19060. if (this._counter < SCAN_CONSECUTIVE_FRAMES - 1) {
  19061. this._bestScore = score;
  19062. this._bestHomography = homography;
  19063. // memorize the last image, corresponding to the best homography(*)
  19064. imagePortalMux.port = PORT_CAMERA;
  19065. /*
  19066. (*) technically speaking, this is not exactly the case. Since we're
  19067. using turbo to download the keypoints, there's a slight difference
  19068. between the data used to compute the homography and the last image.
  19069. Still, assuming continuity of the video stream, this logic is
  19070. good enough.
  19071. */
  19072. }
  19073. }
  19074. // find a polyline surrounding the target
  19075. return this._findPolyline(homography, DEFAULT_TARGET_SPACE_SIZE);
  19076. }).then(polyline => {
  19077. // continue a little longer in the scanning state
  19078. if (++this._counter < SCAN_CONSECUTIVE_FRAMES) {
  19079. return {
  19080. nextState: this.name,
  19081. trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
  19082. };
  19083. }
  19084. // this image should correspond to the best homography
  19085. const snapshot = this._pipeline.node('imagePortalSink');
  19086. // the reference image that we'll track
  19087. const referenceImage = this._imageTracker._referenceImageOfKeypoint(matchedKeypoints[0].matches[0].index);
  19088. // let's track the target!
  19089. return {
  19090. nextState: 'pre-tracking',
  19091. nextStateSettings: {
  19092. homography: this._bestHomography,
  19093. snapshot: snapshot,
  19094. referenceImage: referenceImage,
  19095. },
  19096. trackerOutput: Object.assign({ polyline: polyline }, trackerOutput),
  19097. };
  19098. }).catch(() => {
  19099. // continue in the scanning state
  19100. return {
  19101. nextState: this.name,
  19102. trackerOutput: trackerOutput,
  19103. };
  19104. });
  19105. }
  19106. else {
  19107. // not enough matches...!
  19108. this._counter = 0;
  19109. this._bestScore = 0;
  19110. }
  19111. // we'll continue to scan the scene
  19112. return speedy_vision_default().Promise.resolve({
  19113. nextState: this.name,
  19114. trackerOutput: trackerOutput,
  19115. });
  19116. }
  19117. /**
  19118. * Find "high quality" matches of a single reference image
  19119. * @param keypoints
  19120. * @returns high quality matches
  19121. */
  19122. _goodMatches(keypoints) {
  19123. const matchedKeypointsPerImageIndex = Object.create(null);
  19124. // filter "good matches"
  19125. for (let j = keypoints.length - 1; j >= 0; j--) {
  19126. const keypoint = keypoints[j];
  19127. if (keypoint.matches[0].index >= 0 && keypoint.matches[1].index >= 0) {
  19128. const d1 = keypoint.matches[0].distance, d2 = keypoint.matches[1].distance;
  19129. // the best match should be "much better" than the second best match,
  19130. // which means that they are "distinct enough"
  19131. if (d1 <= SCAN_MATCH_RATIO * d2) {
  19132. const idx1 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[0].index);
  19133. //const idx2 = this._imageTracker._referenceImageIndexOfKeypoint(keypoint.matches[1].index);
  19134. //if(idx1 == idx2 && idx1 >= 0) {
  19135. if (idx1 >= 0) {
  19136. if (!Object.prototype.hasOwnProperty.call(matchedKeypointsPerImageIndex, idx1))
  19137. matchedKeypointsPerImageIndex[idx1] = [];
  19138. matchedKeypointsPerImageIndex[idx1].push(keypoint);
  19139. }
  19140. }
  19141. }
  19142. }
  19143. // find the image with the most matches
  19144. let matchedKeypoints = [];
  19145. for (const imageIndex in matchedKeypointsPerImageIndex) {
  19146. if (matchedKeypointsPerImageIndex[imageIndex].length > matchedKeypoints.length)
  19147. matchedKeypoints = matchedKeypointsPerImageIndex[imageIndex];
  19148. }
  19149. // done!
  19150. return matchedKeypoints;
  19151. }
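/*
Illustrative sketch, not part of the build: the filter above is a Lowe-style
ratio test. Assuming, for the sake of the example, SCAN_MATCH_RATIO = 0.7
(the actual value is defined elsewhere in this bundle), a keypoint whose best
match has distance d1 = 30 and second-best d2 = 60 passes (30 <= 0.7 * 60),
whereas d1 = 50, d2 = 60 does not (50 > 42). The test in isolation:

function passesRatioTest(keypoint, ratio = 0.7)
{
    const [m1, m2] = keypoint.matches;
    return m1.index >= 0 && m2.index >= 0 && m1.distance <= ratio * m2.distance;
}
*/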
  19152. /**
  19153. * Find a homography matrix using matched keypoints
  19154. * @param matchedKeypoints "good" matches only
  19155. * @returns homography from reference image space to AR screen space & homography "quality" score
  19156. */
  19157. _findHomography(matchedKeypoints) {
  19158. const srcCoords = [];
  19159. const dstCoords = [];
  19160. // find matching coordinates of the keypoints
  19161. for (let i = matchedKeypoints.length - 1; i >= 0; i--) {
  19162. const matchedKeypoint = matchedKeypoints[i];
  19163. const referenceKeypoint = this._imageTracker._referenceKeypoint(matchedKeypoint.matches[0].index);
  19164. if (referenceKeypoint != null) {
  19165. srcCoords.push(referenceKeypoint.x);
  19166. srcCoords.push(referenceKeypoint.y);
  19167. dstCoords.push(matchedKeypoint.x);
  19168. dstCoords.push(matchedKeypoint.y);
  19169. }
  19170. else {
  19171. // this shouldn't happen
  19172. return speedy_vision_default().Promise.reject(new DetectionError(`Invalid keypoint match index: ${matchedKeypoint.matches[0].index} from ${matchedKeypoint.toString()}`));
  19173. }
  19174. }
  19175. // too few points?
  19176. const n = srcCoords.length / 2;
  19177. if (n < 4) {
  19178. return speedy_vision_default().Promise.reject(new DetectionError(`Too few points to compute a homography`));
  19179. }
  19180. // compute a homography
  19181. const src = speedy_vision_default().Matrix(2, n, srcCoords);
  19182. const dst = speedy_vision_default().Matrix(2, n, dstCoords);
  19183. const mask = speedy_vision_default().Matrix.Zeros(1, n);
  19184. const homography = speedy_vision_default().Matrix.Zeros(3);
  19185. return speedy_vision_default().Matrix.findHomography(homography, src, dst, {
  19186. method: 'pransac',
  19187. reprojectionError: SCAN_RANSAC_REPROJECTIONERROR,
  19188. numberOfHypotheses: 512,
  19189. bundleSize: 128,
  19190. mask: mask,
  19191. }).then(homography => {
  19192. // check if this is a valid homography
  19193. const a00 = homography.at(0, 0);
  19194. if (Number.isNaN(a00))
  19195. throw new DetectionError(`Can't compute homography`);
  19196. // count the number of inliers
  19197. const inliers = mask.read();
  19198. let inlierCount = 0;
  19199. for (let i = inliers.length - 1; i >= 0; i--)
  19200. inlierCount += inliers[i];
  19201. const score = inlierCount / inliers.length;
  19202. // done!
  19203. return [homography, score];
  19204. });
  19205. }
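/*
Illustrative sketch, not part of the build: the score returned above is the
fraction of PRANSAC inliers, e.g., 60 inliers out of 80 matched pairs give a
score of 0.75. A standalone call looks like the snippet below, where src and
dst are 2 x n matrices of corresponding points and Speedy is assumed to be the
module returned by speedy_vision_default():

const mask = Speedy.Matrix.Zeros(1, n);
const hom = Speedy.Matrix.Zeros(3);
Speedy.Matrix.findHomography(hom, src, dst, {
    method: 'pransac',
    reprojectionError: 3, // in pixels; this tracker uses SCAN_RANSAC_REPROJECTIONERROR
    mask: mask,
}).then(() => {
    const inliers = mask.read();
    const score = inliers.reduce((sum, x) => sum + x, 0) / inliers.length;
});
*/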
  19206. /**
  19207. * Create & setup the pipeline
  19208. * @returns pipeline
  19209. */
  19210. _createPipeline() {
  19211. const pipeline = speedy_vision_default().Pipeline();
  19212. const source = speedy_vision_default().Image.Source('source');
  19213. const screen = speedy_vision_default().Transform.Resize('screen');
  19214. const greyscale = speedy_vision_default().Filter.Greyscale();
  19215. const blur = speedy_vision_default().Filter.GaussianBlur();
  19216. const nightvision = speedy_vision_default().Filter.Nightvision();
  19217. const nightvisionMux = speedy_vision_default().Image.Multiplexer('nightvisionMux');
  19218. const pyramid = speedy_vision_default().Image.Pyramid();
  19219. const detector = speedy_vision_default().Keypoint.Detector.FAST();
  19220. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  19221. const clipper = speedy_vision_default().Keypoint.Clipper();
  19222. const lshTables = speedy_vision_default().Keypoint.Matcher.StaticLSHTables('lshTables');
  19223. const knn = speedy_vision_default().Keypoint.Matcher.LSHKNN();
  19224. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  19225. const imagePortalSink = speedy_vision_default().Image.Portal.Sink('imagePortalSink');
  19226. const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
  19227. const imagePortalMux = speedy_vision_default().Image.Multiplexer('imagePortalMux');
  19228. const imagePortalBuffer = speedy_vision_default().Image.Buffer();
  19229. const imagePortalCopy = speedy_vision_default().Transform.Resize();
  19230. //const imageSink = Speedy.Image.Sink('image');
  19231. source.media = null;
  19232. screen.size = speedy_vision_default().Size(0, 0);
  19233. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  19234. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  19235. nightvision.gain = NIGHTVISION_GAIN;
  19236. nightvision.offset = NIGHTVISION_OFFSET;
  19237. nightvision.decay = NIGHTVISION_DECAY;
  19238. nightvision.quality = NIGHTVISION_QUALITY;
  19239. nightvisionMux.port = SCAN_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  19240. detector.levels = SCAN_PYRAMID_LEVELS;
  19241. detector.scaleFactor = SCAN_PYRAMID_SCALEFACTOR;
  19242. detector.threshold = SCAN_FAST_THRESHOLD;
  19243. detector.capacity = 2048;
  19244. clipper.size = SCAN_MAX_KEYPOINTS;
  19245. lshTables.keypoints = [];
  19246. lshTables.numberOfTables = SCAN_LSH_TABLES;
  19247. lshTables.hashSize = SCAN_LSH_HASHSIZE;
  19248. knn.k = 2;
  19249. knn.quality = 'default';
  19250. //knn.quality = 'fastest';
  19251. imagePortalSource.source = imagePortalSink;
  19252. imagePortalMux.port = PORT_CAMERA; // 0 = camera stream; 1 = lock image
  19253. imagePortalCopy.size = speedy_vision_default().Size(0, 0);
  19254. imagePortalCopy.scale = speedy_vision_default().Vector2(1, 1);
  19255. keypointSink.turbo = true;
  19256. // prepare input
  19257. source.output().connectTo(screen.input());
  19258. screen.output().connectTo(greyscale.input());
  19259. // preprocess image
  19260. greyscale.output().connectTo(blur.input());
  19261. greyscale.output().connectTo(nightvisionMux.input('in0'));
  19262. greyscale.output().connectTo(nightvision.input());
  19263. nightvision.output().connectTo(nightvisionMux.input('in1'));
  19264. nightvisionMux.output().connectTo(pyramid.input());
  19265. // keypoint detection
  19266. pyramid.output().connectTo(detector.input());
  19267. detector.output().connectTo(clipper.input());
  19268. // keypoint description
  19269. blur.output().connectTo(descriptor.input('image'));
  19270. clipper.output().connectTo(descriptor.input('keypoints'));
  19271. // keypoint matching
  19272. descriptor.output().connectTo(knn.input('keypoints'));
  19273. lshTables.output().connectTo(knn.input('lsh'));
  19274. // prepare output
  19275. clipper.output().connectTo(keypointSink.input());
  19276. knn.output().connectTo(keypointSink.input('matches'));
  19277. //pyramid.output().connectTo(imageSink.input());
  19278. // memorize image
  19279. source.output().connectTo(imagePortalBuffer.input());
  19280. imagePortalBuffer.output().connectTo(imagePortalMux.input('in0'));
  19281. imagePortalSource.output().connectTo(imagePortalCopy.input());
  19282. imagePortalCopy.output().connectTo(imagePortalMux.input('in1'));
  19283. imagePortalMux.output().connectTo(imagePortalSink.input());
  19284. // done!
  19285. pipeline.init(source, screen, greyscale, blur, nightvision, nightvisionMux, pyramid, detector, descriptor, clipper, lshTables, knn, keypointSink, imagePortalSink, imagePortalSource, imagePortalMux, imagePortalBuffer, imagePortalCopy);
  19286. return pipeline;
  19287. }
  19288. }
  19289. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/pre-tracking.ts
  19290. /*
  19291. * encantar.js
  19292. * GPU-accelerated Augmented Reality for the web
  19293. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19294. *
  19295. * This program is free software: you can redistribute it and/or modify
  19296. * it under the terms of the GNU Lesser General Public License as published
  19297. * by the Free Software Foundation, either version 3 of the License, or
  19298. * (at your option) any later version.
  19299. *
  19300. * This program is distributed in the hope that it will be useful,
  19301. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19302. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19303. * GNU Lesser General Public License for more details.
  19304. *
  19305. * You should have received a copy of the GNU Lesser General Public License
  19306. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19307. *
  19308. * pre-tracking.ts
  19309. * Pre-tracking state of the Image Tracker
  19310. */
  19311. /** Default target space size (used when training) */
  19312. const pre_tracking_DEFAULT_TARGET_SPACE_SIZE = speedy_vision_default().Size(TRAIN_TARGET_NORMALIZED_SIZE, TRAIN_TARGET_NORMALIZED_SIZE);
  19313. /** Use the camera stream as the input of the pipeline */
  19314. const PORT_CAMERA_IMAGE = 1;
  19315. /** Use the reference image as the input of the pipeline */
  19316. const PORT_REFERENCE_IMAGE = 0;
  19317. /**
19318. * The pre-tracking state of the Image Tracker is an additional training
19319. * phase for the specific target that is about to be tracked
  19320. */
  19321. class ImageTrackerPreTrackingState extends ImageTrackerState {
  19322. /**
  19323. * Constructor
  19324. * @param imageTracker
  19325. */
  19326. constructor(imageTracker) {
  19327. super('pre-tracking', imageTracker);
  19328. this._homography = speedy_vision_default().Matrix.Eye(3);
  19329. this._referenceImage = null;
  19330. this._step = 'read-reference-image';
  19331. this._referenceKeypoints = [];
  19332. this._iterations = 0;
  19333. }
  19334. /**
  19335. * Called as soon as this becomes the active state, just before update() runs for the first time
  19336. * @param settings
  19337. */
  19338. onEnterState(settings) {
  19339. const imagePortalSource = this._pipeline.node('imagePortalSource');
  19340. const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
  19341. const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
  19342. const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
  19343. const homography = settings.homography;
  19344. const referenceImage = settings.referenceImage;
  19345. const snapshot = settings.snapshot;
  19346. // this shouldn't happen
  19347. if (!referenceImage)
  19348. throw new TrackingError(`Can't track a null reference image`);
  19349. // set attributes
  19350. this._homography = homography;
  19351. this._referenceImage = referenceImage;
  19352. this._step = 'read-reference-image';
  19353. this._referenceKeypoints = [];
  19354. this._iterations = 0;
  19355. // setup the pipeline
  19356. imagePortalSource.source = snapshot;
  19357. muxOfReferenceKeypoints.port = 0;
  19358. muxOfBufferOfReferenceKeypoints.port = 0;
  19359. bufferOfReferenceKeypoints.frozen = false;
  19360. }
  19361. /**
  19362. * Called just before the GPU processing
  19363. * @returns promise
  19364. */
  19365. _beforeUpdate() {
  19366. const referenceImage = this._referenceImage;
  19367. const source = this._pipeline.node('source');
  19368. const sourceMux = this._pipeline.node('sourceMux');
  19369. const imageRectifier = this._pipeline.node('imageRectifier');
  19370. const keypointRectifier = this._pipeline.node('keypointRectifier');
  19371. const borderClipper = this._pipeline.node('borderClipper');
  19372. const screenSize = this.screenSize;
  19373. // set the source media to the reference image we're going to track
  19374. const targetMedia = this._imageTracker.database._findMedia(referenceImage.name);
  19375. source.media = targetMedia;
  19376. // setup the source multiplexer
  19377. if (this._step == 'read-reference-image')
  19378. sourceMux.port = PORT_REFERENCE_IMAGE;
  19379. else
  19380. sourceMux.port = PORT_CAMERA_IMAGE;
  19381. // clip keypoints from the borders of the target image
  19382. borderClipper.imageSize = screenSize;
  19383. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  19384. // rectify the image
  19385. const rectify = (this._step == 'read-reference-image') ?
  19386. this._findRectificationMatrixOfFullscreenImage(targetMedia, screenSize) :
  19387. this._findRectificationMatrixOfCameraImage(this._homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE, targetMedia, screenSize);
  19388. return rectify.then(rectificationMatrix => {
  19389. imageRectifier.transform = rectificationMatrix;
  19390. });
  19391. }
  19392. /**
  19393. * Post processing that takes place just after the GPU processing
  19394. * @param result pipeline results
  19395. * @returns state output
  19396. */
  19397. _afterUpdate(result) {
  19398. const referenceImage = this._referenceImage;
  19399. const imagePortalSink = this._pipeline.node('imagePortal');
  19400. const keypointPortalSink = this._pipeline.node('keypointPortalSink');
  19401. const muxOfReferenceKeypoints = this._pipeline.node('muxOfReferenceKeypoints');
  19402. const muxOfBufferOfReferenceKeypoints = this._pipeline.node('muxOfBufferOfReferenceKeypoints');
  19403. const bufferOfReferenceKeypoints = this._pipeline.node('bufferOfReferenceKeypoints');
  19404. const keypoints = result.keypoints;
  19405. const image = result.image;
  19406. // tracker output
  19407. const trackerOutput = {
  19408. keypoints: image !== undefined ? keypoints : undefined,
  19409. image: image,
  19410. screenSize: this.screenSize,
  19411. };
  19412. // decide what to do next
  19413. switch (this._step) {
  19414. case 'read-reference-image': {
  19415. // enable matching
  19416. muxOfReferenceKeypoints.port = 1;
  19417. // store reference keypoints
  19418. this._referenceKeypoints = keypoints;
  19419. // next step
  19420. this._step = 'warp-camera-image';
  19421. return speedy_vision_default().Promise.resolve({
  19422. nextState: 'pre-tracking',
  19423. trackerOutput: trackerOutput,
  19424. });
  19425. }
  19426. case 'warp-camera-image': {
  19427. // freeze reference keypoints
  19428. bufferOfReferenceKeypoints.frozen = true;
  19429. muxOfBufferOfReferenceKeypoints.port = 1;
  19430. // refine warp?
  19431. if (++this._iterations < TRACK_REFINEMENT_ITERATIONS)
  19432. this._step = 'warp-camera-image';
  19433. else
  19434. this._step = 'train-camera-image';
  19435. // warp image & go to next step
  19436. return this._findWarp(keypoints, this._referenceKeypoints).then(warp => this._homography.setTo(this._homography.times(warp))).then(_ => ({
  19437. nextState: 'pre-tracking',
  19438. trackerOutput: trackerOutput,
  19439. })).catch(err => {
  19440. Utils.warning(`Can't pre-track target image "${referenceImage.name}". ${err.toString()}`);
  19441. return {
  19442. nextState: 'scanning',
  19443. trackerOutput: trackerOutput,
  19444. };
  19445. });
  19446. }
  19447. case 'train-camera-image': {
  19448. // log
  19449. Utils.log(`Took a snapshot of target image "${referenceImage.name}". Found ${keypoints.length} keypoints.`);
  19450. // change the coordinates
  19451. return this._changeSpace(this._homography, this.screenSize).then(homography => {
  19452. // we're ready to track the target!
  19453. return speedy_vision_default().Promise.resolve({
  19454. //nextState: 'pre-tracking',
  19455. nextState: 'tracking',
  19456. trackerOutput: trackerOutput,
  19457. nextStateSettings: {
  19458. homography: homography,
  19459. referenceImage: referenceImage,
  19460. templateKeypoints: keypoints,
  19461. keypointPortalSink: keypointPortalSink,
  19462. imagePortalSink: imagePortalSink,
  19463. screenSize: this.screenSize,
  19464. },
  19465. });
  19466. });
  19467. }
  19468. }
  19469. }
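/*
Summary of the pre-tracking steps handled above (descriptive note only):
'read-reference-image' feeds the rectified reference image through the
pipeline and stores its keypoints; 'warp-camera-image' matches the camera
snapshot against those keypoints and refines _homography, repeating up to
TRACK_REFINEMENT_ITERATIONS times; 'train-camera-image' re-expresses the
homography in AR screen space and hands control over to the 'tracking' state.
A failure while computing the warp sends the tracker back to 'scanning'.
*/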
  19470. /**
  19471. * Find an adjustment warp between the camera image and the reference image
  19472. * @param dstKeypoints destination
  19473. * @param srcKeypoints source
  19474. * @returns a promise that resolves to a 3x3 homography
  19475. */
  19476. _findWarp(dstKeypoints, srcKeypoints) {
  19477. //return Speedy.Promise.resolve(Speedy.Matrix.Eye(3));
  19478. const srcCoords = [];
  19479. const dstCoords = [];
  19480. // find matching coordinates of the keypoints
  19481. for (let i = 0; i < dstKeypoints.length; i++) {
  19482. const dstKeypoint = dstKeypoints[i];
  19483. if (dstKeypoint.matches[0].index >= 0 && dstKeypoint.matches[1].index >= 0) {
  19484. const d1 = dstKeypoint.matches[0].distance, d2 = dstKeypoint.matches[1].distance;
  19485. // the best match should be "much better" than the second best match,
  19486. // which means that they are "distinct enough"
  19487. if (d1 <= TRACK_MATCH_RATIO * d2) {
  19488. const srcKeypoint = srcKeypoints[dstKeypoint.matches[0].index];
  19489. srcCoords.push(srcKeypoint.x);
  19490. srcCoords.push(srcKeypoint.y);
  19491. dstCoords.push(dstKeypoint.x);
  19492. dstCoords.push(dstKeypoint.y);
  19493. }
  19494. }
  19495. }
  19496. // too few points?
  19497. const n = srcCoords.length / 2;
  19498. if (n < 4) {
  19499. return speedy_vision_default().Promise.reject(new TrackingError('Too few points to compute a warp'));
  19500. }
  19501. // compute warp
  19502. const model = speedy_vision_default().Matrix.Eye(3);
  19503. return this._findKeypointWarp().then(transform =>
  19504. // rectify keypoints
  19505. speedy_vision_default().Matrix.applyAffineTransform(speedy_vision_default().Matrix.Zeros(2, 2 * n), speedy_vision_default().Matrix(2, 2 * n, srcCoords.concat(dstCoords)), transform.block(0, 1, 0, 2))).then(points =>
  19506. // find warp
  19507. speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), points.block(0, 1, 0, n - 1), points.block(0, 1, n, 2 * n - 1), {
  19508. method: 'pransac',
  19509. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  19510. numberOfHypotheses: 512 * 4,
  19511. bundleSize: 128,
  19512. })).then(_ => {
  19513. // validate the model
  19514. const a00 = model.at(0, 0);
  19515. if (Number.isNaN(a00))
  19516. throw new TrackingError(`Can't compute warp: bad keypoints`);
  19517. // done!
  19518. return model;
  19519. });
  19520. }
  19521. /**
  19522. * Find a warp to be applied to the keypoints
  19523. * @returns affine transform
  19524. */
  19525. _findKeypointWarp() {
  19526. const referenceImage = this._referenceImage;
  19527. const media = this._imageTracker.database._findMedia(referenceImage.name);
  19528. const screenSize = this.screenSize;
  19529. // no rotation is needed
  19530. if (!this._mustRotateWarpedImage(media, screenSize))
  19531. return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix.Eye(3));
  19532. // rotate by 90 degrees clockwise around the pivot
  19533. const px = screenSize.width / 2, py = screenSize.height / 2; // pivot
  19534. return speedy_vision_default().Promise.resolve(speedy_vision_default().Matrix(3, 3, [
  19535. 0, 1, 0,
  19536. -1, 0, 0,
  19537. py + px, py - px, 1,
  19538. ]));
  19539. }
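/*
Derivation of the matrix above (descriptive note only): writing the warp as
T(+p) * R90 * T(-p), with pivot p = (px, py) and the 90-degree clockwise
rotation R90 = [ 0 -1 ; 1 0 ] in y-down image coordinates, the translation
column becomes R90 * (-px, -py)' + (px, py)' = (px + py, py - px)'. In
row-major form the full warp is [ 0 -1 px+py ; 1 0 py-px ; 0 0 1 ], which is
exactly the column-major array passed to Speedy.Matrix(3, 3, [...]) above:
columns (0, 1, 0), (-1, 0, 0) and (py + px, py - px, 1).
*/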
  19540. /**
  19541. * Change the space of the homography in order to improve tracking quality
  19542. * @param homography mapping coordinates from normalized target space to AR screen space
  19543. * @param screenSize AR screen size
  19544. * @returns homography mapping coordinates from AR screen space to AR screen space
  19545. */
  19546. _changeSpace(homography, screenSize) {
  19547. const sw = screenSize.width, sh = screenSize.height;
  19548. const screen = speedy_vision_default().Matrix(2, 4, [0, 0, sw, 0, sw, sh, 0, sh]);
  19549. const mat = speedy_vision_default().Matrix.Zeros(3);
  19550. return this._findPolylineCoordinates(homography, pre_tracking_DEFAULT_TARGET_SPACE_SIZE).then(polyline => speedy_vision_default().Matrix.perspective(mat, screen, polyline));
  19551. }
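/*
Illustrative note: _findPolylineCoordinates() projects the four corners of the
normalized target space through the given homography, and
Speedy.Matrix.perspective() then finds the transform sending the four AR
screen corners (0,0), (sw,0), (sw,sh), (0,sh) to those projected corners
(assuming matching corner order). The resulting homography therefore maps AR
screen space to AR screen space, which is the parametrization handed over to
the tracking state.
*/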
  19552. /**
  19553. * Create & setup the pipeline
  19554. * @returns pipeline
  19555. */
  19556. _createPipeline() {
  19557. const pipeline = speedy_vision_default().Pipeline();
  19558. const source = speedy_vision_default().Image.Source('source');
  19559. const imagePortalSource = speedy_vision_default().Image.Portal.Source('imagePortalSource');
  19560. const sourceMux = speedy_vision_default().Image.Multiplexer('sourceMux');
  19561. const screen = speedy_vision_default().Transform.Resize('screen');
  19562. const greyscale = speedy_vision_default().Filter.Greyscale();
  19563. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  19564. const nightvision = speedy_vision_default().Filter.Nightvision();
  19565. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  19566. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  19567. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  19568. const blur = speedy_vision_default().Filter.GaussianBlur();
  19569. const clipper = speedy_vision_default().Keypoint.Clipper();
  19570. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  19571. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  19572. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  19573. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  19574. const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
  19575. const keypointPortalSink = speedy_vision_default().Keypoint.Portal.Sink('keypointPortalSink');
  19576. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
  19577. const muxOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfReferenceKeypoints');
  19578. const bufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Buffer('bufferOfReferenceKeypoints');
  19579. const muxOfBufferOfReferenceKeypoints = speedy_vision_default().Keypoint.Multiplexer('muxOfBufferOfReferenceKeypoints');
  19580. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  19581. const imageSink = speedy_vision_default().Image.Sink('image');
  19582. source.media = null;
  19583. screen.size = speedy_vision_default().Size(0, 0);
  19584. imagePortalSource.source = null;
  19585. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  19586. sourceMux.port = PORT_REFERENCE_IMAGE;
  19587. nightvision.gain = NIGHTVISION_GAIN;
  19588. nightvision.offset = NIGHTVISION_OFFSET;
  19589. nightvision.decay = NIGHTVISION_DECAY;
  19590. nightvision.quality = NIGHTVISION_QUALITY;
  19591. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  19592. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  19593. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  19594. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  19595. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  19596. detector.quality = TRACK_HARRIS_QUALITY;
  19597. detector.capacity = TRACK_DETECTOR_CAPACITY;
  19598. subpixel.method = SUBPIXEL_METHOD;
  19599. clipper.size = TRACK_MAX_KEYPOINTS;
  19600. borderClipper.imageSize = screen.size;
  19601. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  19602. matcher.k = 2;
  19603. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  19604. keypointPortalSource.source = keypointPortalSink;
  19605. muxOfReferenceKeypoints.port = 0;
  19606. muxOfBufferOfReferenceKeypoints.port = 0;
  19607. bufferOfReferenceKeypoints.frozen = false;
  19608. keypointSink.turbo = false;
  19609. // prepare input
  19610. source.output().connectTo(sourceMux.input('in0')); // port 0: reference image
  19611. imagePortalSource.output().connectTo(sourceMux.input('in1')); // port 1: camera image (via portal)
  19612. sourceMux.output().connectTo(screen.input());
  19613. screen.output().connectTo(greyscale.input());
  19614. // preprocess images
  19615. greyscale.output().connectTo(imageRectifier.input());
  19616. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  19617. imageRectifier.output().connectTo(nightvision.input());
  19618. nightvision.output().connectTo(nightvisionMux.input('in1'));
  19619. nightvisionMux.output().connectTo(blur.input());
  19620. // keypoint detection & clipping
  19621. nightvisionMux.output().connectTo(detector.input());
  19622. detector.output().connectTo(borderClipper.input());
  19623. borderClipper.output().connectTo(clipper.input());
  19624. // keypoint refinement
  19625. imageRectifier.output().connectTo(denoiser.input());
  19626. denoiser.output().connectTo(subpixel.input('image'));
  19627. clipper.output().connectTo(subpixel.input('keypoints'));
  19628. // keypoint description
  19629. blur.output().connectTo(descriptor.input('image'));
  19630. subpixel.output().connectTo(descriptor.input('keypoints'));
  19631. // keypoint matching
  19632. descriptor.output().connectTo(muxOfReferenceKeypoints.input('in0'));
  19633. muxOfBufferOfReferenceKeypoints.output().connectTo(muxOfReferenceKeypoints.input('in1'));
  19634. muxOfReferenceKeypoints.output().connectTo(matcher.input('database'));
  19635. descriptor.output().connectTo(matcher.input('keypoints'));
  19636. // store reference keypoints
  19637. keypointPortalSource.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in0'));
  19638. bufferOfReferenceKeypoints.output().connectTo(muxOfBufferOfReferenceKeypoints.input('in1'));
  19639. keypointPortalSource.output().connectTo(bufferOfReferenceKeypoints.input());
  19640. // portals
  19641. descriptor.output().connectTo(keypointPortalSink.input());
  19642. // prepare output
  19643. descriptor.output().connectTo(keypointRectifier.input());
  19644. keypointRectifier.output().connectTo(keypointSink.input());
  19645. matcher.output().connectTo(keypointSink.input('matches'));
  19646. //imageRectifier.output().connectTo(imageSink.input());
  19647. // done!
  19648. pipeline.init(source, imagePortalSource, sourceMux, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, clipper, borderClipper, denoiser, descriptor, keypointPortalSource, muxOfReferenceKeypoints, matcher, bufferOfReferenceKeypoints, muxOfBufferOfReferenceKeypoints, keypointRectifier, keypointSink, keypointPortalSink);
  19649. return pipeline;
  19650. }
  19651. }
  19652. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker-event.ts
  19653. /*
  19654. * encantar.js
  19655. * GPU-accelerated Augmented Reality for the web
  19656. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19657. *
  19658. * This program is free software: you can redistribute it and/or modify
  19659. * it under the terms of the GNU Lesser General Public License as published
  19660. * by the Free Software Foundation, either version 3 of the License, or
  19661. * (at your option) any later version.
  19662. *
  19663. * This program is distributed in the hope that it will be useful,
  19664. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19665. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19666. * GNU Lesser General Public License for more details.
  19667. *
  19668. * You should have received a copy of the GNU Lesser General Public License
  19669. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19670. *
  19671. * image-tracker-event.ts
  19672. * Events emitted by an Image Tracker
  19673. */
  19674. /**
  19675. * An event emitted by an Image Tracker
  19676. */
  19677. class ImageTrackerEvent extends AREvent {
  19678. /**
  19679. * Constructor
  19680. * @param type event type
  19681. * @param referenceImage optional reference image
  19682. */
  19683. constructor(type, referenceImage) {
  19684. super(type);
  19685. this._referenceImage = referenceImage;
  19686. }
  19687. /**
  19688. * Reference image
  19689. */
  19690. get referenceImage() {
  19691. return this._referenceImage;
  19692. }
  19693. }
  19694. ;// CONCATENATED MODULE: ./src/geometry/camera-model.ts
  19695. /*
  19696. * encantar.js
  19697. * GPU-accelerated Augmented Reality for the web
  19698. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  19699. *
  19700. * This program is free software: you can redistribute it and/or modify
  19701. * it under the terms of the GNU Lesser General Public License as published
  19702. * by the Free Software Foundation, either version 3 of the License, or
  19703. * (at your option) any later version.
  19704. *
  19705. * This program is distributed in the hope that it will be useful,
  19706. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  19707. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19708. * GNU Lesser General Public License for more details.
  19709. *
  19710. * You should have received a copy of the GNU Lesser General Public License
  19711. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  19712. *
  19713. * camera-model.ts
  19714. * Camera model
  19715. */
  19716. /** A guess of the horizontal field-of-view of a typical camera, in degrees */
  19717. const HFOV_GUESS = 60; // https://developer.apple.com/library/archive/documentation/DeviceInformation/Reference/iOSDeviceCompatibility/Cameras/Cameras.html
  19718. /** Number of iterations used to refine the estimated pose */
  19719. const POSE_ITERATIONS = 30;
  19720. /** Number of samples used in the rotation filter */
  19721. const ROTATION_FILTER_SAMPLES = 10;
  19722. /** Number of samples used in the translation filter */
  19723. const TRANSLATION_FILTER_SAMPLES = 5;
  19724. /** Convert degrees to radians */
  19725. const DEG2RAD = 0.017453292519943295; // pi / 180
  19726. /** Convert radians to degrees */
  19727. const RAD2DEG = 57.29577951308232; // 180 / pi
  19728. /** Numerical tolerance */
  19729. const EPSILON = 1e-6;
  19730. /** Index of the horizontal focal length in the camera intrinsics matrix (column-major format) */
  19731. const FX = 0;
  19732. /** Index of the vertical focal length in the camera intrinsics matrix */
  19733. const FY = 4;
  19734. /** Index of the horizontal position of the principal point in the camera intrinsics matrix */
  19735. const U0 = 6;
  19736. /** Index of the vertical position of the principal point in the camera intrinsics matrix */
  19737. const V0 = 7;
  19738. /**
  19739. * Camera model
  19740. */
  19741. class CameraModel {
  19742. /**
  19743. * Constructor
  19744. */
  19745. constructor() {
  19746. this._screenSize = speedy_vision_default().Size(0, 0);
  19747. this._matrix = speedy_vision_default().Matrix.Eye(3, 4);
  19748. this._intrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1]; // 3x3 identity matrix
  19749. this._extrinsics = [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0]; // 3x4 matrix [ R | t ] = [ I | 0 ] no rotation & no translation
  19750. this._partialRotationBuffer = [];
  19751. this._translationBuffer = [];
  19752. }
  19753. /**
  19754. * Initialize the model
  19755. * @param screenSize
  19756. */
  19757. init(screenSize) {
  19758. // validate
  19759. if (screenSize.area() == 0)
  19760. throw new IllegalArgumentError(`Can't initialize the camera model with screenSize = ${screenSize.toString()}`);
  19761. // set the screen size
  19762. this._screenSize.width = screenSize.width;
  19763. this._screenSize.height = screenSize.height;
  19764. // reset the model
  19765. this.reset();
  19766. // log
  19767. Utils.log(`Initializing the camera model...`);
  19768. }
  19769. /**
  19770. * Release the model
  19771. */
  19772. release() {
  19773. this.reset();
  19774. return null;
  19775. }
  19776. /**
  19777. * Update the camera model
  19778. * @param homography 3x3 perspective transform
  19779. * @param screenSize may change over time (e.g., when going from portrait to landscape or vice-versa)
  19780. * @returns promise that resolves to a camera matrix
  19781. */
  19782. update(homography, screenSize) {
  19783. // validate the shape of the homography
  19784. if (homography.rows != 3 || homography.columns != 3)
  19785. throw new IllegalArgumentError(`Camera model: provide a homography matrix`);
  19786. // validate screenSize
  19787. if (screenSize.area() == 0)
  19788. throw new IllegalArgumentError(`Camera model: invalid screenSize = ${screenSize.toString()}`);
  19789. // changed screen size?
  19790. if (!this._screenSize.equals(screenSize)) {
  19791. Utils.log(`Camera model: detected a change in screen size...`);
  19792. // update the screen size
  19793. this._screenSize.width = screenSize.width;
  19794. this._screenSize.height = screenSize.height;
  19795. // reset camera
  19796. this.reset();
  19797. }
  19798. // read the entries of the homography
  19799. const h = homography.read();
  19800. const h11 = h[0], h12 = h[3], h13 = h[6], h21 = h[1], h22 = h[4], h23 = h[7], h31 = h[2], h32 = h[5], h33 = h[8];
  19801. // validate the homography (homography matrices aren't singular)
  19802. const det = h13 * (h21 * h32 - h22 * h31) - h23 * (h11 * h32 - h12 * h31) + h33 * (h11 * h22 - h12 * h21);
  19803. if (Math.abs(det) < EPSILON) {
  19804. Utils.warning(`Can't update the camera model using an invalid homography matrix`);
  19805. return speedy_vision_default().Promise.resolve(this._matrix);
  19806. }
  19807. // estimate the pose
  19808. const pose = this._estimatePose(homography);
  19809. this._extrinsics = pose.read();
  19810. // compute the camera matrix
  19811. const C = this.denormalizer();
  19812. const K = speedy_vision_default().Matrix(3, 3, this._intrinsics);
  19813. const E = pose; //Speedy.Matrix(3, 4, this._extrinsics);
  19814. this._matrix.setToSync(K.times(E).times(C));
  19815. //console.log("intrinsics -----------", K.toString());
  19816. //console.log("matrix ----------------",this._matrix.toString());
  19817. return speedy_vision_default().Promise.resolve(this._matrix);
  19818. }
  19819. /**
  19820. * Reset camera model
  19821. */
  19822. reset() {
  19823. this._resetIntrinsics();
  19824. this._resetExtrinsics();
  19825. }
  19826. /**
  19827. * The camera matrix that maps the 3D normalized space [-1,1]^3 to the
  19828. * 2D AR screen space (measured in pixels)
  19829. * @returns 3x4 camera matrix
  19830. */
  19831. get matrix() {
  19832. return this._matrix;
  19833. }
  19834. /**
  19835. * Camera intrinsics matrix
  19836. * @returns 3x3 intrinsics matrix in column-major format
  19837. */
  19838. get intrinsics() {
  19839. return this._intrinsics;
  19840. }
  19841. /**
  19842. * Camera extrinsics matrix
  19843. * @returns 3x4 extrinsics matrix [ R | t ] in column-major format
  19844. */
  19845. get extrinsics() {
  19846. return this._extrinsics;
  19847. }
  19848. /**
  19849. * Convert coordinates from normalized space [-1,1]^3 to a
  19850. * "3D pixel space" based on the dimensions of the AR screen.
  19851. *
  19852. * We perform a 180-degrees rotation around the x-axis so that
  19853. * it looks nicer (the y-axis grows downwards in image space).
  19854. *
  19855. * The final camera matrix is P = K * [ R | t ] * C, where
  19856. * C is this conversion matrix. The intent behind this is to
  19857. * make tracking independent of target and screen sizes.
  19858. *
  19859. * Reminder: we use a right-handed coordinate system in 3D!
  19860. * In 2D image space the coordinate system is left-handed.
  19861. *
  19862. * @returns 4x4 conversion matrix C
  19863. */
  19864. denormalizer() {
  19865. const w = this._screenSize.width / 2; // half width, in pixels
  19866. const h = this._screenSize.height / 2; // half height, in pixels
  19867. const d = Math.min(w, h); // virtual unit length, in pixels
  19868. /*
  19869. return Speedy.Matrix(4, 4, [
  19870. 1, 0, 0, 0,
  19871. 0,-1, 0, 0,
  19872. 0, 0,-1, 0,
  19873. w/d, h/d, 0, 1/d
  19874. ]);
  19875. */
  19876. return speedy_vision_default().Matrix(4, 4, [
  19877. d, 0, 0, 0,
  19878. 0, -d, 0, 0,
  19879. 0, 0, -d, 0,
  19880. w, h, 0, 1,
  19881. ]);
  19882. }
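/*
Worked example (descriptive note only): with the column-major array above, C
maps a normalized point (x, y, z, 1) to (d*x + w, -d*y + h, -d*z, 1). Hence
the origin of the normalized space lands at the center of the AR screen,
(w, h, 0); one unit along +x lands d pixels to the right of the center; and
one unit along +y lands d pixels above it, the sign flip accounting for the
y-axis growing downwards in image space.
*/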
  19883. /**
  19884. * Size of the AR screen space, in pixels
  19885. * @returns size in pixels
  19886. */
  19887. get screenSize() {
  19888. return this._screenSize;
  19889. }
  19890. /**
  19891. * Focal length in pixel units (projection distance in the pinhole camera model)
  19892. * same as (focal length in mm) * (number of pixels per world unit in pixels/mm)
  19893. * @returns focal length
  19894. */
  19895. get focalLength() {
  19896. return this._intrinsics[FY]; // fx == fy
  19897. }
  19898. /**
  19899. * Horizontal field-of-view, given in radians
19900. * @returns horizontal field-of-view
  19901. */
  19902. get fovx() {
  19903. return 2 * Math.atan(this._intrinsics[U0] / this._intrinsics[FX]);
  19904. }
  19905. /**
  19906. * Vertical field-of-view, given in radians
  19907. * @returns vertical field-of-view
  19908. */
  19909. get fovy() {
  19910. return 2 * Math.atan(this._intrinsics[V0] / this._intrinsics[FY]);
  19911. }
  19912. /**
  19913. * Principal point
  19914. * @returns principal point, in pixel coordinates
  19915. */
  19916. principalPoint() {
  19917. return speedy_vision_default().Point2(this._intrinsics[U0], this._intrinsics[V0]);
  19918. }
  19919. /**
  19920. * Reset camera extrinsics
  19921. */
  19922. _resetExtrinsics() {
  19923. // set the rotation matrix to the identity
  19924. this._extrinsics.fill(0);
  19925. this._extrinsics[0] = this._extrinsics[4] = this._extrinsics[8] = 1;
  19926. // reset filters
  19927. this._partialRotationBuffer.length = 0;
  19928. this._translationBuffer.length = 0;
  19929. }
  19930. /**
  19931. * Reset camera intrinsics
  19932. */
  19933. _resetIntrinsics() {
  19934. const cameraWidth = Math.max(this._screenSize.width, this._screenSize.height); // portrait?
  19935. const u0 = this._screenSize.width / 2;
  19936. const v0 = this._screenSize.height / 2;
  19937. const fx = (cameraWidth / 2) / Math.tan(DEG2RAD * HFOV_GUESS / 2);
  19938. const fy = fx;
  19939. this._intrinsics[FX] = fx;
  19940. this._intrinsics[FY] = fy;
  19941. this._intrinsics[U0] = u0;
  19942. this._intrinsics[V0] = v0;
  19943. }
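/*
Sanity check (descriptive note only): in landscape mode cameraWidth equals
screenSize.width, so u0 = cameraWidth / 2 and the fovx getter evaluates to
2 * atan(u0 / fx) = 2 * atan(tan(DEG2RAD * HFOV_GUESS / 2)) = HFOV_GUESS,
i.e., 60 degrees. In portrait mode cameraWidth is the screen height, so the
same focal length yields a horizontal field-of-view narrower than HFOV_GUESS.
*/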
  19944. /**
  19945. * Compute a normalized homography H^ = K^(-1) * H for an
  19946. * ideal pinhole with f = 1 and principal point = (0,0)
  19947. * @param homography homography H to be normalized
  19948. * @returns normalized homography H^
  19949. */
  19950. _normalizeHomography(homography) {
  19951. const h = homography.read();
  19952. const u0 = this._intrinsics[U0];
  19953. const v0 = this._intrinsics[V0];
  19954. const fx = this._intrinsics[FX];
  19955. const fy = this._intrinsics[FY];
  19956. const u0fx = u0 / fx;
  19957. const v0fy = v0 / fy;
  19958. const h11 = h[0] / fx - u0fx * h[2], h12 = h[3] / fx - u0fx * h[5], h13 = h[6] / fx - u0fx * h[8];
  19959. const h21 = h[1] / fy - v0fy * h[2], h22 = h[4] / fy - v0fy * h[5], h23 = h[7] / fy - v0fy * h[8];
  19960. const h31 = h[2], h32 = h[5], h33 = h[8];
  19961. /*console.log([
  19962. h11, h21, h31,
  19963. h12, h22, h32,
  19964. h13, h23, h33,
  19965. ]);*/
  19966. return speedy_vision_default().Matrix(3, 3, [
  19967. h11, h21, h31,
  19968. h12, h22, h32,
  19969. h13, h23, h33,
  19970. ]);
  19971. }
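/*
Where the entries above come from (descriptive note only): with intrinsics
K = [ fx 0 u0 ; 0 fy v0 ; 0 0 1 ], the inverse is
K^(-1) = [ 1/fx 0 -u0/fx ; 0 1/fy -v0/fy ; 0 0 1 ]. Hence each entry of
H^ = K^(-1) * H is H^(1,j) = H(1,j)/fx - (u0/fx) H(3,j),
H^(2,j) = H(2,j)/fy - (v0/fy) H(3,j) and H^(3,j) = H(3,j), which is exactly
what the h11..h33 assignments compute from the column-major array h.
*/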
  19972. /**
  19973. * Estimate [ r1 | r2 | t ], where r1, r2 are orthonormal and t is a translation vector
  19974. * @param normalizedHomography based on the ideal pinhole (where calibration K = I)
  19975. * @returns a 3x3 matrix
  19976. */
  19977. _estimatePartialPose(normalizedHomography) {
  19978. const h = normalizedHomography.read();
  19979. const h11 = h[0], h12 = h[3], h13 = h[6];
  19980. const h21 = h[1], h22 = h[4], h23 = h[7];
  19981. const h31 = h[2], h32 = h[5], h33 = h[8];
  19982. const h1norm2 = h11 * h11 + h21 * h21 + h31 * h31;
  19983. const h2norm2 = h12 * h12 + h22 * h22 + h32 * h32;
  19984. const h1norm = Math.sqrt(h1norm2);
  19985. const h2norm = Math.sqrt(h2norm2);
  19986. //const hnorm = (h1norm + h2norm) / 2;
  19987. //const hnorm = Math.sqrt(h1norm * h2norm);
  19988. const hnorm = Math.max(h1norm, h2norm); // this seems to work. why?
  19989. // we expect h1norm to be approximately h2norm, but sometimes there is a lot of noise
  19990. // if h1norm is not approximately h2norm, it means that the first two columns of
  19991. // the normalized homography are not really encoding a rotation (up to a scale)
  19992. //console.log("h1,h2",h1norm,h2norm);
  19993. //console.log(normalizedHomography.toString());
  19994. // compute a rough estimate for the scale factor
  19995. // select the sign so that t3 = tz > 0
  19996. const sign = h33 >= 0 ? 1 : -1;
  19997. let scale = sign / hnorm;
  19998. // sanity check
  19999. if (Number.isNaN(scale))
  20000. return speedy_vision_default().Matrix(3, 3, (new Array(9)).fill(Number.NaN));
  20001. // recover the rotation
  20002. let r = new Array(6);
  20003. r[0] = scale * h11;
  20004. r[1] = scale * h21;
  20005. r[2] = scale * h31;
  20006. r[3] = scale * h12;
  20007. r[4] = scale * h22;
  20008. r[5] = scale * h32;
  20009. // refine the rotation
  20010. r = this._refineRotation(r); // r is initially noisy
  20011. /*
  20012. After refining the rotation vectors, let's adjust the scale factor as
  20013. follows:
  20014. We know that [ r1 | r2 | t ] is equal to the normalized homography H up
  20015. to a non-zero scale factor s, i.e., [ r1 | r2 | t ] = s H. Let's call M
  20016. the first two columns of H, i.e., M = [ h1 | h2 ], and R = [ r1 | r2 ].
  20017. It follows that R = s M, meaning that M'R = s M'M. The trace of 2x2 M'R
  20018. is such that tr(M'R) = tr(s M'M) = s tr(M'M), which means:
  20019. s = tr(M'R) / tr(M'M) = (r1'h1 + r2'h2) / (h1'h1 + h2'h2)
  20020. (also: s^2 = det(M'R) / det(M'M))
  20021. */
  20022. // adjust the scale factor
  20023. scale = r[0] * h11 + r[1] * h21 + r[2] * h31;
  20024. scale += r[3] * h12 + r[4] * h22 + r[5] * h32;
  20025. scale /= h1norm2 + h2norm2;
  20026. // recover the translation
  20027. let t = new Array(3);
  20028. t[0] = scale * h13;
  20029. t[1] = scale * h23;
  20030. t[2] = scale * h33;
  20031. // done!
  20032. return speedy_vision_default().Matrix(3, 3, r.concat(t));
  20033. }
  20034. /**
  20035. * Make two non-zero and non-parallel input vectors, r1 and r2, orthonormal
  20036. * @param rot rotation vectors [ r1 | r2 ] in column-major format
  20037. * @returns a 3x2 matrix R such that R'R = I (column-major format)
  20038. */
  20039. _refineRotation(rot) {
  20040. const [r11, r21, r31, r12, r22, r32] = rot;
  20041. /*
  20042. A little technique I figured out to correct the rotation vectors
  20043. ----------------------------------------------------------------
  20044. We are given two 3x1 column-vectors r1 and r2 as input in a 3x2 matrix
  20045. R = [ r1 | r2 ]. We would like that R'R = I, but that won't be the case
  20046. because vectors r1 and r2 are not perfectly orthonormal due to noise.
  20047. Let's first notice that R'R is symmetric. You can easily check that its
  20048. two eigenvalues are both real and positive (as long as r1, r2 != 0 and
  20049. r1 is not parallel to r2, but we never take such vectors as input).
20050. R'R = [ r1'r1  r1'r2 ; r1'r2  r2'r2 ] is of rank 2, positive-definite
  20052. We proceed by computing an eigendecomposition Q D Q' of R'R, where Q is
  20053. chosen to be orthogonal and D is a diagonal matrix whose entries are
  20054. the eigenvalues of R'R.
  20055. Let LL' be the Cholesky decomposition of D. Such decomposition exists
  20056. and is trivially computed: just take the square roots of the entries of
  20057. D. Since L is diagonal, we have L = L'. Its inverse is also trivially
  20058. computed - call it Linv.
  20059. Now, define a 2x2 correction matrix C as follows:
  20060. C = Q * Linv * Q'
  20061. This matrix rotates the input vector, scales it by some amount, and
  20062. then rotates it back to where it was (i.e., Q'Q = Q Q' = I).
  20063. We compute RC in order to correct the rotation vectors. We take its
  20064. two columns as the corrected vectors.
  20065. In order to show that the two columns of RC are orthonormal, we can
  20066. show that (RC)'(RC) = I. Indeed, noticing that C is symmetric, let's
  20067. expand the expression:
  20068. (RC)'(RC) = C'R'R C = C R'R C = (Q Linv Q') (Q D Q') (Q Linv Q') =
  20069. Q Linv (Q'Q) D (Q'Q) Linv Q' = Q Linv D Linv Q' =
  20070. Q Linv (L L) Linv Q' = Q (Linv L) (L Linv) Q' = Q Q' = I
  20071. I have provided below a closed formula to correct the rotation vectors.
  20072. What C does to R is very interesting: it makes the singular values
  20073. become 1. If U S V' is a SVD of R, then R'R = V S^2 V'. The singular
  20074. values of R are the square roots of the eigenvalues of R'R. Letting
  20075. S = L and V = Q, it follows that RC = U S V' V Linv V' = U V'. This
  20076. means that RC is equivalent to the correction "trick" using the SVD
  20077. found in the computer vision literature (i.e., compute the SVD and
  20078. return U V'). That "trick" is known to return the rotation matrix that
  20079. minimizes the Frobenius norm of the difference between the input and
  20080. the output. Consequently, the technique I have just presented is also
  20081. optimal in that sense!
  20082. By the way, the input matrix R does not need to be 3x2.
  20083. */
  20084. // compute the entries of R'R
  20085. const r1tr1 = r11 * r11 + r21 * r21 + r31 * r31;
  20086. const r2tr2 = r12 * r12 + r22 * r22 + r32 * r32;
  20087. const r1tr2 = r11 * r12 + r21 * r22 + r31 * r32;
  20088. // compute the two real eigenvalues of R'R
  20089. const delta = (r1tr1 - r2tr2) * (r1tr1 - r2tr2) + 4 * r1tr2 * r1tr2;
  20090. const sqrt = Math.sqrt(delta); // delta >= 0 always
  20091. const eigval1 = (r1tr1 + r2tr2 + sqrt) / 2;
  20092. const eigval2 = (r1tr1 + r2tr2 - sqrt) / 2;
  20093. // compute two unit eigenvectors qi = (xi,yi) of R'R
  20094. const alpha1 = (r2tr2 - eigval1) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval1);
  20095. const x1 = Math.sqrt((alpha1 * alpha1) / (1 + alpha1 * alpha1));
  20096. const y1 = x1 / alpha1;
  20097. const alpha2 = (r2tr2 - eigval2) - r1tr2 * (1 + r1tr2) / (r1tr1 - eigval2);
  20098. const x2 = Math.sqrt((alpha2 * alpha2) / (1 + alpha2 * alpha2));
  20099. const y2 = x2 / alpha2;
  20100. // compute the Cholesky decomposition LL' of the diagonal matrix D
  20101. // whose entries are the two eigenvalues of R'R and then invert L
  20102. const s1 = Math.sqrt(eigval1), s2 = Math.sqrt(eigval2); // singular values of R (pick s1 >= s2)
  20103. const Linv = speedy_vision_default().Matrix(2, 2, [1 / s1, 0, 0, 1 / s2]); // L inverse
  20104. // compute the correction matrix C = Q * Linv * Q', where Q = [q1|q2]
  20105. // is orthogonal and Linv is computed as above
  20106. const Q = speedy_vision_default().Matrix(2, 2, [x1, y1, x2, y2]);
  20107. const Qt = speedy_vision_default().Matrix(2, 2, [x1, x2, y1, y2]);
  20108. const C = Q.times(Linv).times(Qt);
  20109. // correct the rotation vectors r1 and r2 using C
  20110. const R = speedy_vision_default().Matrix(3, 2, [r11, r21, r31, r12, r22, r32]);
  20111. return speedy_vision_default().Matrix(R.times(C)).read();
  20112. }
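/*
Verification sketch (illustrative only, not part of the build): after the
correction the two columns should be orthonormal, i.e., r1'r1 ~ r2'r2 ~ 1 and
r1'r2 ~ 0. Calling the private helper directly, purely for illustration
(cameraModel stands for any CameraModel instance):

const rot = [1, 0.02, 0, 0.03, 0.98, 0.01];        // noisy r1, r2 (column-major)
const [a, b, c, d, e, f] = cameraModel._refineRotation(rot);
const r1r1 = a*a + b*b + c*c;                      // ~ 1
const r2r2 = d*d + e*e + f*f;                      // ~ 1
const r1r2 = a*d + b*e + c*f;                      // ~ 0
*/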
  20113. /**
  20114. * Compute a refined translation vector
  20115. * @param normalizedHomography ideal pinhole K = I
  20116. * @param rot rotation vectors [ r1 | r2 ] in column-major format
  20117. * @param t0 initial estimate for the translation vector
  20118. * @returns 3x1 translation vector in column-major format
  20119. */
  20120. _refineTranslation(normalizedHomography, rot, t0) {
  20121. /*
  20122. Given a normalized homography H, the rotation vectors r1, r2, and a
  20123. translation vector t, we know that [ r1 | r2 | t ] = s H for a non-zero
  20124. scale factor s.
  20125. If we take a homogeneous vector u = [ x y w ]' (i.e., w = 1), then
  20126. [ r1 | r2 | t ] u is parallel to H u, which means that their cross
  20127. product is zero:
  20128. [ r1 | r2 | t ] u x H u = ( x r1 + y r2 + w t ) x H u = 0
  20129. The following code finds an optimal translation vector t based on the
  20130. above observation. H, r1, r2 are known.
  20131. */
  20132. const h = normalizedHomography.read();
  20133. const h11 = h[0], h12 = h[3], h13 = h[6];
  20134. const h21 = h[1], h22 = h[4], h23 = h[7];
  20135. const h31 = h[2], h32 = h[5], h33 = h[8];
  20136. const r11 = rot[0], r12 = rot[3];
  20137. const r21 = rot[1], r22 = rot[4];
  20138. const r31 = rot[2], r32 = rot[5];
  20139. // sample points [ xi yi ]' in AR screen space
  20140. //const x = [ 0.5, 0.0, 1.0, 1.0, 0.0, 0.5, 1.0, 0.5, 0.0 ];
  20141. //const y = [ 0.5, 0.0, 0.0, 1.0, 1.0, 0.0, 0.5, 1.0, 0.5 ];
  20142. const x = [0.5, 0.0, 1.0, 1.0, 0.0];
  20143. const y = [0.5, 0.0, 0.0, 1.0, 1.0];
  20144. const n = x.length;
  20145. const n3 = 3 * n;
  20146. const width = this._screenSize.width;
  20147. const height = this._screenSize.height;
  20148. for (let i = 0; i < n; i++) {
  20149. x[i] *= width;
  20150. y[i] *= height;
  20151. }
  20152. // set auxiliary values: ai = H [ xi yi 1 ]'
  20153. const a1 = new Array(n);
  20154. const a2 = new Array(n);
  20155. const a3 = new Array(n);
  20156. for (let i = 0; i < n; i++) {
  20157. a1[i] = x[i] * h11 + y[i] * h12 + h13;
  20158. a2[i] = x[i] * h21 + y[i] * h22 + h23;
  20159. a3[i] = x[i] * h31 + y[i] * h32 + h33;
  20160. }
  20161. // we'll solve M t = v for t with linear least squares
  20162. // M: 3n x 3, v: 3n x 1, t: 3 x 1
  20163. const m = new Array(3 * n * 3);
  20164. const v = new Array(3 * n);
  20165. for (let i = 0, k = 0; k < n; i += 3, k++) {
  20166. m[i] = m[i + n3 + 1] = m[i + n3 + n3 + 2] = 0;
  20167. m[i + n3] = -(m[i + 1] = a3[k]);
  20168. m[i + 2] = -(m[i + n3 + n3] = a2[k]);
  20169. m[i + n3 + n3 + 1] = -(m[i + n3 + 2] = a1[k]);
  20170. v[i] = a3[k] * (x[k] * r21 + y[k] * r22) - a2[k] * (x[k] * r31 + y[k] * r32);
  20171. v[i + 1] = -a3[k] * (x[k] * r11 + y[k] * r12) + a1[k] * (x[k] * r31 + y[k] * r32);
  20172. v[i + 2] = a2[k] * (x[k] * r11 + y[k] * r12) - a1[k] * (x[k] * r21 + y[k] * r22);
  20173. }
  20174. /*
20175. // this works, but I want something more lightweight
  20176. const M = Speedy.Matrix(n3, 3, m);
  20177. const v_ = Speedy.Matrix(n3, 1, v);
  20178. return Speedy.Matrix(M.ldiv(v_)).read();
  20179. */
  20180. /*
  20181. Gradient descent with optimal step size / learning rate
  20182. -------------------------------------------------------
  20183. Let's find the column-vector x that minimizes the error function
  20184. E(x) = r'r, where r = Ax - b, using gradient descent. This is linear
  20185. least squares. We want to find x easily, QUICKLY and iteratively.
  20186. The update rule of gradient descent is set to:
  20187. x := x - w * grad(E)
  20188. where w is the learning rate and grad(E) is the gradient of E(x):
  20189. grad(E) = 2 A'r = 2 A'(Ax - b) = 2 A'A x - 2 A'b
  20190. Let's adjust w to make x "converge quickly". Define function S(w) as:
  20191. S(w) = x - w * grad(E) (step)
  20192. and another function F(w) as:
  20193. F(w) = E(S(w))
  20194. which is the error of the step. We minimize F by setting its derivative
  20195. to zero:
20196. 0 = dF/dw = (dF/dS) (dS/dw)
  20198. What follows is a fair amount of algebra. Do the math and you'll find
  20199. the following optimal update rule:
20201. x := x - [ (c'c) / ((Ac)'(Ac)) ] c
  20203. where c = A'r = A'(Ax - b)
  20204. */
  20205. // gradient descent: super lightweight implementation
  20206. const r = new Array(3 * n);
  20207. const c = new Array(3);
  20208. const Mc = new Array(3 * n);
  20209. // initial guess
  20210. const t = new Array(3);
  20211. t[0] = t0[0];
  20212. t[1] = t0[1];
  20213. t[2] = t0[2];
  20214. // iterate
  20215. const MAX_ITERATIONS = 15;
  20216. const TOLERANCE = 1;
  20217. for (let it = 0; it < MAX_ITERATIONS; it++) {
  20218. //console.log("it",it+1);
  20219. // compute residual r = Mt - v
  20220. for (let i = 0; i < n3; i++) {
  20221. r[i] = 0;
  20222. for (let j = 0; j < 3; j++)
  20223. r[i] += m[j * n3 + i] * t[j];
  20224. r[i] -= v[i];
  20225. }
  20226. // compute c = M'r
  20227. for (let i = 0; i < 3; i++) {
  20228. c[i] = 0;
  20229. for (let j = 0; j < n3; j++)
  20230. c[i] += m[i * n3 + j] * r[j];
  20231. }
  20232. // compute Mc
  20233. for (let i = 0; i < n3; i++) {
  20234. Mc[i] = 0;
  20235. for (let j = 0; j < 3; j++)
  20236. Mc[i] += m[j * n3 + i] * c[j];
  20237. }
  20238. // compute c'c
  20239. let num = 0;
  20240. for (let i = 0; i < 3; i++)
  20241. num += c[i] * c[i];
  20242. //console.log("c'c=",num);
  20243. if (num < TOLERANCE)
  20244. break;
  20245. // compute (Mc)'(Mc)
  20246. let den = 0;
  20247. for (let i = 0; i < n3; i++)
  20248. den += Mc[i] * Mc[i];
  20249. // compute frc = c'c / (Mc)'(Mc)
  20250. const frc = num / den;
  20251. if (Number.isNaN(frc)) // this shouldn't happen
  20252. break;
  20253. // iterate: t = t - frc * c
  20254. for (let i = 0; i < 3; i++)
  20255. t[i] -= frc * c[i];
  20256. }
  20257. //console.log("OLD t:\n\n",t0.join('\n'));
  20258. //console.log("new t:\n\n",t.join('\n'));
  20259. // done!
  20260. return t;
  20261. }
  20262. /**
  20263. * Apply a smoothing filter to the partial pose
  20264. * @param partialPose 3x3 [ r1 | r2 | t ]
  20265. * @returns filtered partial pose
  20266. */
  20267. _filterPartialPose(partialPose) {
  20268. const avg = new Array(9).fill(0);
  20269. const entries = partialPose.read();
  20270. const rotationBlock = entries.slice(0, 6);
  20271. const translationBlock = entries.slice(6, 9);
  20272. // how many samples should we store, at most?
  20273. const div = (Settings.powerPreference == 'low-power') ? 1.5 : 1; // low-power ~ half the fps
  20274. const N = Math.ceil(ROTATION_FILTER_SAMPLES / div);
  20275. const M = Math.ceil(TRANSLATION_FILTER_SAMPLES / div);
  20276. // is it a valid partial pose?
  20277. if (!Number.isNaN(entries[0])) {
  20278. // store samples
  20279. this._partialRotationBuffer.unshift(rotationBlock);
  20280. if (this._partialRotationBuffer.length > N)
  20281. this._partialRotationBuffer.length = N;
  20282. this._translationBuffer.unshift(translationBlock);
  20283. if (this._translationBuffer.length > M)
  20284. this._translationBuffer.length = M;
  20285. }
  20286. else if (this._partialRotationBuffer.length == 0) {
  20287. // invalid pose, no samples
  20288. return speedy_vision_default().Matrix.Eye(3);
  20289. }
  20290. // average *nearby* rotations
  20291. const n = this._partialRotationBuffer.length;
  20292. for (let i = 0; i < n; i++) {
  20293. const r = this._partialRotationBuffer[i];
  20294. for (let j = 0; j < 6; j++)
  20295. avg[j] += r[j] / n;
  20296. }
  20297. const r = this._refineRotation(avg);
  20298. // average translations
  20299. const m = this._translationBuffer.length;
  20300. for (let i = 0; i < m; i++) {
  20301. const t = this._translationBuffer[i];
  20302. for (let j = 0; j < 3; j++)
  20303. avg[6 + j] += (m - i) * t[j] / ((m * m + m) / 2);
  20304. //avg[6 + j] += t[j] / m;
  20305. }
  20306. const t = [avg[6], avg[7], avg[8]];
  20307. // done!
  20308. return speedy_vision_default().Matrix(3, 3, r.concat(t));
  20309. }
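/*
 * e.g., the translation average above uses triangular weights: the most recent
 * of m samples gets weight m / (1 + 2 + ... + m) and the oldest gets weight
 * 1 / (1 + 2 + ... + m). With m = 4 the weights are 0.4, 0.3, 0.2, 0.1,
 * since (m*m + m) / 2 = 10.
 */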
  20310. /**
  20311. * Estimate extrinsics [ R | t ] given a partial pose [ r1 | r2 | t ]
  20312. * @param partialPose
  20313. * @returns 3x4 matrix
  20314. */
  20315. _estimateFullPose(partialPose) {
  20316. const p = partialPose.read();
  20317. const r11 = p[0], r12 = p[3], t1 = p[6];
  20318. const r21 = p[1], r22 = p[4], t2 = p[7];
  20319. const r31 = p[2], r32 = p[5], t3 = p[8];
  20320. // r3 = +- ( r1 x r2 )
  20321. let r13 = r21 * r32 - r31 * r22;
  20322. let r23 = r31 * r12 - r11 * r32;
  20323. let r33 = r11 * r22 - r21 * r12;
  20324. // let's make sure that det R = +1 (keep the orientation)
  20325. const det = r11 * (r22 * r33 - r23 * r32) - r21 * (r12 * r33 - r13 * r32) + r31 * (r12 * r23 - r13 * r22);
  20326. if (det < 0) {
  20327. r13 = -r13;
  20328. r23 = -r23;
  20329. r33 = -r33;
  20330. }
  20331. // done!
  20332. return speedy_vision_default().Matrix(3, 4, [
  20333. r11, r21, r31,
  20334. r12, r22, r32,
  20335. r13, r23, r33,
  20336. t1, t2, t3,
  20337. ]);
  20338. }
  20339. /**
  20340. * Estimate the pose [ R | t ] given a homography in AR screen space
  20341. * @param homography must be valid
  20342. * @returns 3x4 matrix
  20343. */
  20344. _estimatePose(homography) {
  20345. const normalizedHomography = this._normalizeHomography(homography);
  20346. const partialPose = speedy_vision_default().Matrix.Eye(3);
  20347. // we want the estimated partial pose [ r1 | r2 | t ] to be as close
  20348. // as possible to the normalized homography, up to a scale factor;
  20349. // i.e., H * [ r1 | r2 | t ]^(-1) = s * I for a non-zero scalar s
  20350. // it won't be a perfect equality due to noise in the homography.
  20351. // remark: composition of homographies
  20352. const residual = speedy_vision_default().Matrix(normalizedHomography);
  20353. for (let k = 0; k < POSE_ITERATIONS; k++) {
  20354. // incrementally improve the partial pose
  20355. const rt = this._estimatePartialPose(residual); // rt should converge to the identity matrix
  20356. partialPose.setToSync(rt.times(partialPose));
  20357. residual.setToSync(residual.times(rt.inverse()));
  20358. //console.log("rt",rt.toString());
  20359. //console.log("residual",residual.toString());
  20360. }
  20361. //console.log('-----------');
  20362. // refine the translation vector
  20363. const mat = partialPose.read();
  20364. const r = mat.slice(0, 6);
  20365. const t0 = mat.slice(6, 9);
  20366. const t = this._refineTranslation(normalizedHomography, r, t0);
  20367. const refinedPartialPose = speedy_vision_default().Matrix(3, 3, r.concat(t));
  20368. // filter the partial pose
  20369. const filteredPartialPose = this._filterPartialPose(refinedPartialPose);
  20370. // estimate the full pose
  20371. //const finalPartialPose = partialPose;
  20372. const finalPartialPose = filteredPartialPose;
  20373. return this._estimateFullPose(finalPartialPose);
  20374. }
  20375. }
  20376. ;// CONCATENATED MODULE: ./src/geometry/pose.ts
  20377. /*
  20378. * encantar.js
  20379. * GPU-accelerated Augmented Reality for the web
  20380. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20381. *
  20382. * This program is free software: you can redistribute it and/or modify
  20383. * it under the terms of the GNU Lesser General Public License as published
  20384. * by the Free Software Foundation, either version 3 of the License, or
  20385. * (at your option) any later version.
  20386. *
  20387. * This program is distributed in the hope that it will be useful,
  20388. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20389. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20390. * GNU Lesser General Public License for more details.
  20391. *
  20392. * You should have received a copy of the GNU Lesser General Public License
  20393. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20394. *
  20395. * pose.ts
  20396. * A pose represents a position and an orientation in a 3D space
  20397. */
  20398. /**
  20399. * A pose represents a position and an orientation in a 3D space
  20400. * (and sometimes a scale, too...)
  20401. */
  20402. class Pose {
  20403. /**
  20404. * Constructor
  20405. * @param transform usually a rigid transform in a 3D space (e.g., world space, viewer space or other)
  20406. */
  20407. constructor(transform) {
  20408. this._transform = transform;
  20409. }
  20410. /**
  20411. * A transform describing the position and the orientation
  20412. * of the pose relative to the 3D space to which it belongs
  20413. */
  20414. get transform() {
  20415. return this._transform;
  20416. }
  20417. }
  20418. ;// CONCATENATED MODULE: ./src/geometry/transform.ts
  20419. /*
  20420. * encantar.js
  20421. * GPU-accelerated Augmented Reality for the web
  20422. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20423. *
  20424. * This program is free software: you can redistribute it and/or modify
  20425. * it under the terms of the GNU Lesser General Public License as published
  20426. * by the Free Software Foundation, either version 3 of the License, or
  20427. * (at your option) any later version.
  20428. *
  20429. * This program is distributed in the hope that it will be useful,
  20430. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20431. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20432. * GNU Lesser General Public License for more details.
  20433. *
  20434. * You should have received a copy of the GNU Lesser General Public License
  20435. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20436. *
  20437. * transform.ts
  20438. * 3D geometrical transforms
  20439. */
  20440. /**
  20441. * A 3D transformation
  20442. */
  20443. class BaseTransform {
  20444. /**
  20445. * Constructor
  20446. * @param matrix a 4x4 matrix
  20447. */
  20448. constructor(matrix) {
  20449. if (matrix.rows != 4 || matrix.columns != 4)
  20450. throw new IllegalArgumentError('A 3D transform expects a 4x4 matrix');
  20451. this._matrix = matrix;
  20452. }
  20453. /**
  20454. * The 4x4 transformation matrix (read-only)
  20455. */
  20456. get matrix() {
  20457. return this._matrix;
  20458. }
  20459. }
  20460. /**
  20461. * An invertible 3D transformation
  20462. */
  20463. class InvertibleTransform extends BaseTransform {
  20464. /**
  20465. * Constructor
  20466. * @param matrix a 4x4 matrix
  20467. */
  20468. constructor(matrix) {
  20469. // WARNING: we do not check if the matrix actually encodes an invertible transform!
  20470. super(matrix);
  20471. }
  20472. /**
  20473. * The inverse of the transform
  20474. */
  20475. get inverse() {
  20476. const inverseMatrix = speedy_vision_default().Matrix(this._matrix.inverse());
  20477. return new InvertibleTransform(inverseMatrix);
  20478. }
  20479. }
  20480. /**
  20481. * A 3D transformation described by translation, rotation and scale
  20482. */
  20483. class StandardTransform extends InvertibleTransform {
  20484. // TODO: position, rotation and scale attributes
  20485. /**
  20486. * Constructor
  20487. * @param matrix a 4x4 matrix
  20488. */
  20489. constructor(matrix) {
  20490. // WARNING: we do not check if the matrix actually encodes a standard transform!
  20491. super(matrix);
  20492. }
  20493. /**
  20494. * The inverse of the transform
  20495. */
  20496. get inverse() {
  20497. /*
  20498. The inverse of a 4x4 standard transform T * R * S...
20499. [ RS t ; 0' 1 ]   is   [ ZR' -ZR't ; 0' 1 ]
  20501. where S is 3x3, R is 3x3, t is 3x1, 0' is 1x3 and Z is the inverse of S
  20502. */
  20503. return super.inverse;
  20504. }
  20505. }
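/*
A closed-form alternative to the generic inverse above (illustrative sketch,
assuming a 4x4 column-major matrix [ RS t ; 0' 1 ] with m[15] == 1 and S
diagonal with positive entries): since (RS)^(-1) = Z R' with Z = S^(-1), the
entries of the inverse can be read off directly from the columns of RS.

function invertStandardTransform(m)
{
    // squared scale factors = squared norms of the first three columns
    const sx2 = m[0]*m[0] + m[1]*m[1] + m[2]*m[2];
    const sy2 = m[4]*m[4] + m[5]*m[5] + m[6]*m[6];
    const sz2 = m[8]*m[8] + m[9]*m[9] + m[10]*m[10];

    // B = (RS)^(-1) = Z R'
    const b11 = m[0]/sx2, b12 = m[1]/sx2, b13 = m[2]/sx2;
    const b21 = m[4]/sy2, b22 = m[5]/sy2, b23 = m[6]/sy2;
    const b31 = m[8]/sz2, b32 = m[9]/sz2, b33 = m[10]/sz2;

    // -B t
    const t1 = -(b11*m[12] + b12*m[13] + b13*m[14]);
    const t2 = -(b21*m[12] + b22*m[13] + b23*m[14]);
    const t3 = -(b31*m[12] + b32*m[13] + b33*m[14]);

    // 4x4 result in column-major format
    return [
        b11, b21, b31, 0,
        b12, b22, b32, 0,
        b13, b23, b33, 0,
        t1,  t2,  t3,  1
    ];
}
*/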
  20506. /**
  20507. * A 3D transformation described by position and orientation
  20508. */
  20509. class RigidTransform extends StandardTransform {
  20510. // TODO: position and rotation attributes (need to decompose the matrix)
  20511. /**
  20512. * Constructor
  20513. * @param matrix a 4x4 matrix
  20514. */
  20515. constructor(matrix) {
  20516. // WARNING: we do not check if the matrix actually encodes a rigid transform!
  20517. super(matrix);
  20518. }
  20519. /**
  20520. * The inverse of the transform
  20521. */
  20522. get inverse() {
  20523. /*
  20524. The inverse of a 4x4 rigid transform
20525. [ R t ; 0' 1 ]   is   [ R' -R't ; 0' 1 ]
  20527. where R is 3x3, t is 3x1 and 0' is 1x3
  20528. */
  20529. const m = this._matrix.read();
  20530. if (m[15] == 0) // error? abs()??
  20531. throw new IllegalOperationError('Not a rigid transform');
  20532. const s = 1 / m[15]; // should be 1 (normalize homogeneous coordinates)
  20533. const r11 = m[0] * s, r12 = m[4] * s, r13 = m[8] * s;
  20534. const r21 = m[1] * s, r22 = m[5] * s, r23 = m[9] * s;
  20535. const r31 = m[2] * s, r32 = m[6] * s, r33 = m[10] * s;
  20536. const t1 = m[12] * s, t2 = m[13] * s, t3 = m[14] * s;
  20537. const rt1 = r11 * t1 + r21 * t2 + r31 * t3;
  20538. const rt2 = r12 * t1 + r22 * t2 + r32 * t3;
  20539. const rt3 = r13 * t1 + r23 * t2 + r33 * t3;
  20540. const inverseMatrix = speedy_vision_default().Matrix(4, 4, [
  20541. r11, r12, r13, 0,
  20542. r21, r22, r23, 0,
  20543. r31, r32, r33, 0,
  20544. -rt1, -rt2, -rt3, 1
  20545. ]);
  20546. return new RigidTransform(inverseMatrix);
  20547. }
  20548. }
  20549. ;// CONCATENATED MODULE: ./src/geometry/viewer-pose.ts
  20550. /*
  20551. * encantar.js
  20552. * GPU-accelerated Augmented Reality for the web
  20553. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20554. *
  20555. * This program is free software: you can redistribute it and/or modify
  20556. * it under the terms of the GNU Lesser General Public License as published
  20557. * by the Free Software Foundation, either version 3 of the License, or
  20558. * (at your option) any later version.
  20559. *
  20560. * This program is distributed in the hope that it will be useful,
  20561. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20562. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20563. * GNU Lesser General Public License for more details.
  20564. *
  20565. * You should have received a copy of the GNU Lesser General Public License
  20566. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20567. *
  20568. * viewer-pose.ts
  20569. * The pose of a virtual camera in 3D world space at a moment in time
  20570. */
  20571. /**
  20572. * The pose of a virtual camera in 3D world space at a moment in time
  20573. */
  20574. class ViewerPose extends Pose {
  20575. /**
  20576. * Constructor
  20577. * @param camera camera model
  20578. */
  20579. constructor(camera) {
  20580. // compute the view matrix and its inverse in AR screen space
  20581. const viewMatrix = ViewerPose._computeViewMatrix(camera);
  20582. const inverseTransform = new RigidTransform(viewMatrix);
  20583. super(inverseTransform.inverse);
  20584. this._viewMatrix = viewMatrix;
  20585. }
  20586. /**
  20587. * This 4x4 matrix moves 3D points from world space to viewer space. We
  20588. * assume that the camera is looking in the direction of the negative
  20589. * z-axis (WebGL-friendly)
  20590. */
  20591. get viewMatrix() {
  20592. return this._viewMatrix;
  20593. }
  20594. /**
  20595. * Compute the view matrix in AR screen space, measured in pixels
  20596. * @param camera
  20597. * @returns a 4x4 matrix describing a rotation and a translation
  20598. */
  20599. static _computeViewMatrix(camera) {
  20600. /*
  20601. // this is the view matrix in AR screen space, measured in pixels
  20602. // we augment the extrinsics matrix, making it 4x4 by adding a
  20603. // [ 0 0 0 1 ] row. Below, E is a 3x4 extrinsics matrix
  20604. const V = Speedy.Matrix(4, 4, [
  20605. E[0], E[1], E[2], 0,
  20606. E[3], E[4], E[5], 0,
  20607. E[6], E[7], E[8], 0,
  20608. E[9], E[10], E[11], 1
  20609. ]);
  20610. // we premultiply V by F, which performs a rotation around the
  20611. // x-axis by 180 degrees, so that we get the 3D objects in front
  20612. // of the camera pointing in the direction of the negative z-axis
  20613. const F = Speedy.Matrix(4, 4, [
  20614. 1, 0, 0, 0,
  20615. 0,-1, 0, 0,
  20616. 0, 0,-1, 0,
  20617. 0, 0, 0, 1
  20618. ]);
  20619. Matrix F * V is matrix V with the second and third rows negated
  20620. */
  20621. const E = camera.extrinsics;
  20622. return speedy_vision_default().Matrix(4, 4, [
  20623. E[0], -E[1], -E[2], 0,
  20624. E[3], -E[4], -E[5], 0,
  20625. E[6], -E[7], -E[8], 0,
  20626. E[9], -E[10], -E[11], 1
  20627. ]);
  20628. }
  20629. }
  20630. ;// CONCATENATED MODULE: ./src/geometry/view.ts
  20631. /*
  20632. * encantar.js
  20633. * GPU-accelerated Augmented Reality for the web
  20634. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20635. *
  20636. * This program is free software: you can redistribute it and/or modify
  20637. * it under the terms of the GNU Lesser General Public License as published
  20638. * by the Free Software Foundation, either version 3 of the License, or
  20639. * (at your option) any later version.
  20640. *
  20641. * This program is distributed in the hope that it will be useful,
  20642. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20643. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20644. * GNU Lesser General Public License for more details.
  20645. *
  20646. * You should have received a copy of the GNU Lesser General Public License
  20647. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20648. *
  20649. * view.ts
  20650. * A view of the 3D world at a moment in time,
  20651. * featuring the means to project points into clip space
  20652. */
  20653. /** Default distance in pixels of the near plane to the optical center of the camera */
  20654. const DEFAULT_NEAR = 1;
  20655. /** Default distance in pixels of the far plane to the optical center of the camera */
  20656. const DEFAULT_FAR = 20000;
  20657. /**
  20658. * A PerspectiveView is a View defining a symmetric frustum around the z-axis
  20659. * (perspective projection)
  20660. */
  20661. class PerspectiveView {
  20662. /**
  20663. * Constructor
  20664. * @param camera camera model
  20665. * @param near distance of the near plane
  20666. * @param far distance of the far plane
  20667. */
  20668. constructor(camera, near = DEFAULT_NEAR, far = DEFAULT_FAR) {
  20669. const intrinsics = camera.intrinsics;
  20670. const screenSize = camera.screenSize;
  20671. this._near = Math.max(0, +near);
  20672. this._far = Math.max(0, +far);
  20673. if (this._near >= this._far)
  20674. throw new IllegalArgumentError(`View expects near < far (found near = ${this._near} and far = ${this._far})`);
  20675. this._aspect = screenSize.width / screenSize.height;
  20676. this._tanOfHalfFovy = intrinsics[V0] / intrinsics[FY];
  20677. this._projectionMatrix = PerspectiveView._computeProjectionMatrix(intrinsics, this._near, this._far);
  20678. }
  20679. /**
  20680. * A 4x4 projection matrix for WebGL
  20681. */
  20682. get projectionMatrix() {
  20683. return this._projectionMatrix;
  20684. }
  20685. /**
  20686. * Aspect ratio of the frustum
  20687. */
  20688. get aspect() {
  20689. return this._aspect;
  20690. }
  20691. /**
  20692. * Vertical field-of-view of the frustum, measured in radians
  20693. */
  20694. get fovy() {
  20695. return 2 * Math.atan(this._tanOfHalfFovy);
  20696. }
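// e.g., with a 1280x720 AR screen, fy = 1000 pixels and the principal point
// assumed at the center of the image (v0 = 360), the value above is
// fovy = 2 * atan(360 / 1000), about 39.6 degrees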
  20697. /**
  20698. * Distance of the near plane
  20699. */
  20700. get near() {
  20701. return this._near;
  20702. }
  20703. /**
  20704. * Distance of the far plane
  20705. */
  20706. get far() {
  20707. return this._far;
  20708. }
  20709. /**
  20710. * Compute a perspective projection matrix for WebGL
  20711. * @param K camera intrinsics
  20712. * @param near distance of the near plane
  20713. * @param far distance of the far plane
  20714. */
  20715. static _computeProjectionMatrix(K, near, far) {
  20716. // we assume that the principal point is at the center of the image
  20717. const top = near * (K[V0] / K[FY]);
  20718. const right = near * (K[U0] / K[FX]);
  20719. const bottom = -top, left = -right; // symmetric frustum
  20720. // a derivation of this projection matrix can be found at
  20721. // https://www.songho.ca/opengl/gl_projectionmatrix.html
  20722. // http://learnwebgl.brown37.net/08_projections/projections_perspective.html
  20723. return speedy_vision_default().Matrix(4, 4, [
  20724. 2 * near / (right - left), 0, 0, 0,
  20725. 0, 2 * near / (top - bottom), 0, 0,
  20726. (right + left) / (right - left), (top + bottom) / (top - bottom), -(far + near) / (far - near), -1,
  20727. 0, 0, -2 * far * near / (far - near), 0
  20728. ]);
  20729. }
  20730. }
  20731. ;// CONCATENATED MODULE: ./src/geometry/viewer.ts
  20732. /*
  20733. * encantar.js
  20734. * GPU-accelerated Augmented Reality for the web
  20735. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20736. *
  20737. * This program is free software: you can redistribute it and/or modify
  20738. * it under the terms of the GNU Lesser General Public License as published
  20739. * by the Free Software Foundation, either version 3 of the License, or
  20740. * (at your option) any later version.
  20741. *
  20742. * This program is distributed in the hope that it will be useful,
  20743. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20744. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20745. * GNU Lesser General Public License for more details.
  20746. *
  20747. * You should have received a copy of the GNU Lesser General Public License
  20748. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20749. *
  20750. * view.ts
  20751. * A viewer represents a virtual camera in 3D world space
  20752. */
  20753. /**
  20754. * A viewer represents a virtual camera in 3D world space
  20755. */
  20756. class Viewer {
  20757. /**
  20758. * Constructor
  20759. * @param camera camera model
  20760. */
  20761. constructor(camera) {
  20762. this._pose = new ViewerPose(camera);
  20763. this._views = [new PerspectiveView(camera)];
  20764. }
  20765. /**
  20766. * The pose of this viewer
  20767. */
  20768. get pose() {
  20769. return this._pose;
  20770. }
  20771. /**
  20772. * The view of this viewer (only for monoscopic rendering)
  20773. */
  20774. get view() {
  20775. /*
  20776. if(this._views.length > 1)
  20777. throw new IllegalOperationError('Use viewer.views for stereoscopic rendering');
  20778. */
  20779. return this._views[0];
  20780. }
  20781. /**
  20782. * The views of this viewer
  20783. */
  20784. /*
  20785. get views(): View[]
  20786. {
  20787. return this._views.concat([]);
  20788. }
  20789. */
  20790. /**
  20791. * Convert a pose from world space to viewer space
  20792. * @param pose a pose in world space
  20793. * @returns a pose in viewer space
  20794. */
  20795. convertToViewerSpace(pose) {
  20796. const modelMatrix = pose.transform.matrix;
  20797. const viewMatrix = this._pose.viewMatrix;
  20798. const modelViewMatrix = speedy_vision_default().Matrix(viewMatrix.times(modelMatrix));
  20799. const transform = new StandardTransform(modelViewMatrix);
  20800. return new Pose(transform);
  20801. }
  20802. }
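/*
e.g., feeding the matrices of a viewer obtained from the tracker output to a
WebGL program (illustrative sketch; gl and program are assumed to be set up
elsewhere; read() returns the entries in column-major order, which is what
uniformMatrix4fv expects):

const projectionMatrix = new Float32Array(viewer.view.projectionMatrix.read());
const viewMatrix = new Float32Array(viewer.pose.viewMatrix.read());
gl.uniformMatrix4fv(gl.getUniformLocation(program, 'u_projection'), false, projectionMatrix);
gl.uniformMatrix4fv(gl.getUniformLocation(program, 'u_view'), false, viewMatrix);
*/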
  20803. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/states/tracking.ts
  20804. /*
  20805. * encantar.js
  20806. * GPU-accelerated Augmented Reality for the web
  20807. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  20808. *
  20809. * This program is free software: you can redistribute it and/or modify
  20810. * it under the terms of the GNU Lesser General Public License as published
  20811. * by the Free Software Foundation, either version 3 of the License, or
  20812. * (at your option) any later version.
  20813. *
  20814. * This program is distributed in the hope that it will be useful,
  20815. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  20816. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20817. * GNU Lesser General Public License for more details.
  20818. *
  20819. * You should have received a copy of the GNU Lesser General Public License
  20820. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  20821. *
  20822. * tracking.ts
  20823. * Tracking state of the Image Tracker
  20824. */
20825. /** Whether or not we want to accelerate GPU-CPU transfers. Using turbo adds a slight delay to the tracking */
  20826. const USE_TURBO = true;
  20827. /** Number of PBOs; meaningful only when using turbo */
  20828. const NUMBER_OF_PBOS = 2;
  20829. /** Frame skipping; meaningful only when using turbo */
  20830. const TURBO_SKIP = 2;
  20831. /**
  20832. * The tracking state of the Image Tracker tracks
  20833. * keypoints of the image target and updates the
  20834. * rectification matrix
  20835. */
  20836. class ImageTrackerTrackingState extends ImageTrackerState {
  20837. /**
  20838. * Constructor
  20839. * @param imageTracker
  20840. */
  20841. constructor(imageTracker) {
  20842. super('tracking', imageTracker);
  20843. this._referenceImage = null;
  20844. this._warpHomography = speedy_vision_default().Matrix.Eye(3);
  20845. this._poseHomography = speedy_vision_default().Matrix.Eye(3);
  20846. this._initialHomography = speedy_vision_default().Matrix.Eye(3);
  20847. this._initialKeypoints = [];
  20848. this._counter = 0;
  20849. this._camera = new CameraModel();
  20850. this._predictedKeypoints = [];
  20851. this._lastPipelineOutput = { keypoints: [] };
  20852. this._pipelineCounter = 0;
  20853. this._lastOutput = {};
  20854. this._lostCounter = 0;
  20855. // we need at least 4 correspondences of points to compute a homography matrix
  20856. Utils.assert(TRACK_MIN_MATCHES >= 4);
  20857. }
  20858. /**
  20859. * Called as soon as this becomes the active state, just before update() runs for the first time
  20860. * @param settings
  20861. */
  20862. onEnterState(settings) {
  20863. const homography = settings.homography;
  20864. const referenceImage = settings.referenceImage;
  20865. const templateKeypoints = settings.templateKeypoints;
  20866. const keypointPortalSink = settings.keypointPortalSink;
  20867. const screenSize = settings.screenSize; // this.screenSize is not yet set
  20868. const keypointPortalSource = this._pipeline.node('keypointPortalSource');
  20869. // this shouldn't happen
  20870. if (!referenceImage)
  20871. throw new IllegalOperationError(`Can't track a null reference image`);
  20872. // set attributes
  20873. this._referenceImage = referenceImage;
  20874. this._warpHomography = speedy_vision_default().Matrix(homography);
  20875. this._poseHomography = speedy_vision_default().Matrix(homography);
  20876. this._initialHomography = speedy_vision_default().Matrix(homography);
  20877. this._initialKeypoints = templateKeypoints;
  20878. this._counter = 0;
  20879. this._predictedKeypoints = [];
  20880. this._lastPipelineOutput = { keypoints: [] };
  20881. this._pipelineCounter = 0;
  20882. this._lastOutput = {};
  20883. this._lostCounter = 0;
  20884. // setup portals
  20885. keypointPortalSource.source = keypointPortalSink;
  20886. // setup camera
  20887. this._camera.init(screenSize);
  20888. // emit event
  20889. const ev = new ImageTrackerEvent('targetfound', referenceImage);
  20890. this._imageTracker.dispatchEvent(ev);
  20891. // log
  20892. Utils.log(`Tracking image "${referenceImage.name}"...`);
  20893. }
  20894. /**
  20895. * Called when leaving the state
  20896. */
  20897. onLeaveState() {
  20898. const referenceImage = this._referenceImage;
  20899. // release the camera
  20900. this._camera.release();
  20901. // emit event
  20902. const ev = new ImageTrackerEvent('targetlost', referenceImage);
  20903. this._imageTracker.dispatchEvent(ev);
  20904. }
  20905. /**
  20906. * Called just before the GPU processing
  20907. * @returns promise
  20908. */
  20909. _beforeUpdate() {
  20910. const imageRectifier = this._pipeline.node('imageRectifier');
  20911. const borderClipper = this._pipeline.node('borderClipper');
  20912. const keypointRectifier = this._pipeline.node('keypointRectifier');
  20913. const screenSize = this.screenSize;
  20914. /*
  20915. // pause media (test)
  20916. const source = this._pipeline.node('source') as SpeedyPipelineNodeImageSource;
  20917. const media = source.media as SpeedyMedia;
  20918. (media.source as HTMLVideoElement).pause();
  20919. */
  20920. // clip keypoints from the borders of the target image
  20921. borderClipper.imageSize = screenSize;
  20922. borderClipper.borderSize = speedy_vision_default().Vector2(screenSize.width * TRACK_CLIPPING_BORDER, screenSize.height * TRACK_CLIPPING_BORDER);
  20923. // rectify the image
  20924. return this._findImageWarp(this._warpHomography, screenSize).then(warp => {
  20925. imageRectifier.transform = warp;
  20926. });
  20927. }
  20928. /**
  20929. * GPU processing
  20930. * @returns promise with the pipeline results
  20931. */
  20932. _gpuUpdate() {
  20933. //return super._gpuUpdate();
  20934. // No turbo?
  20935. if (!USE_TURBO || Settings.powerPreference == 'low-power')
  20936. return super._gpuUpdate();
  20937. // When using turbo, we reduce the GPU usage by skipping every other frame
  20938. const counter = this._pipelineCounter;
  20939. this._pipelineCounter = (this._pipelineCounter + 1) % TURBO_SKIP;
  20940. // Skip frame
  20941. if (counter != 0) {
  20942. if (this._lastPipelineOutput.keypoints !== undefined) {
  20943. this._predictedKeypoints = this._predictKeypoints(this._lastPipelineOutput.keypoints, this._initialKeypoints);
  20944. }
  20945. else
  20946. this._predictedKeypoints.length = 0;
  20947. this._lastPipelineOutput.keypoints = this._predictedKeypoints;
  20948. return speedy_vision_default().Promise.resolve(this._lastPipelineOutput);
  20949. }
  20950. // Run the pipeline and store the results
  20951. return super._gpuUpdate().then(results => {
  20952. this._lastPipelineOutput = results;
  20953. return results;
  20954. });
  20955. }
  20956. /**
  20957. * Post processing that takes place just after the GPU processing
  20958. * @param result pipeline results
  20959. * @returns state output
  20960. */
  20961. _afterUpdate(result) {
  20962. const imageRectifier = this._pipeline.node('imageRectifier');
  20963. const keypoints = result.keypoints;
  20964. const image = result.image;
  20965. const referenceImage = this._referenceImage;
  20966. // find the best keypoint matches
  20967. return this._preprocessMatches(keypoints, this._initialKeypoints).then(matches => {
  20968. // find motion models
  20969. return speedy_vision_default().Promise.all([
  20970. this._findAffineMotion(matches),
  20971. this._findPerspectiveMotion(matches)
  20972. ]);
  20973. }).then(([affineMotion, perspectiveMotion]) => {
  20974. const lowPower = (Settings.powerPreference == 'low-power');
  20975. const frozen = !(!USE_TURBO || lowPower || this._counter % TURBO_SKIP == 0);
  20976. // update warp homography
  20977. const delay = NUMBER_OF_PBOS * (!lowPower ? TURBO_SKIP : 1);
  20978. const remainder = delay >>> 1; // we want remainder > 0, so it skips the first frame
  20979. if (!USE_TURBO || this._counter % delay == remainder)
  20980. this._warpHomography.setToSync(this._warpHomography.times(affineMotion));
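// e.g., with NUMBER_OF_PBOS = 2 and TURBO_SKIP = 2 (and not in low-power mode),
// delay = 4 and remainder = 2, so the warp homography is updated once every
// 4 frames, on the frames in which this._counter == 2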
  20981. // update pose homography
  20982. if (!frozen)
  20983. this._poseHomography.setToSync(this._warpHomography.times(perspectiveMotion));
  20984. // update counter
  20985. this._counter = (this._counter + 1) % delay;
  20986. // update the camera
  20987. if (!frozen)
  20988. return this._camera.update(this._poseHomography, this.screenSize);
  20989. else
  20990. return this._camera.matrix;
  20991. }).then(_ => {
  20992. // find the inverse of the rectification matrix
  20993. const rectificationMatrix = imageRectifier.transform;
  20994. const inverseRectificationMatrix = speedy_vision_default().Matrix(rectificationMatrix.inverse());
  20995. // move keypoints from rectified space back to image space
  20996. const n = keypoints.length;
  20997. const coords = new Array(2 * n);
  20998. for (let i = 0, j = 0; i < n; i++, j += 2) {
  20999. coords[j] = keypoints[i].position.x;
  21000. coords[j + 1] = keypoints[i].position.y;
  21001. }
  21002. return speedy_vision_default().Matrix.applyPerspectiveTransform(speedy_vision_default().Matrix.Zeros(2, n), speedy_vision_default().Matrix(2, n, coords), inverseRectificationMatrix);
  21003. /*
  21004. // test image center
  21005. const coords2: number[] = new Array(2 * n);
  21006. for(let i = 0, j = 0; i < n; i++, j += 2) {
  21007. coords2[j] = this._imageTracker.screenSize.width / 2;
  21008. coords2[j+1] = this._imageTracker.screenSize.height / 2;
  21009. if(i % 2 == 0) {
  21010. coords2[j] = this._imageTracker.screenSize.width / 4;
  21011. coords2[j+1] = this._imageTracker.screenSize.height / 4;
  21012. }
  21013. }
  21014. return Speedy.Matrix.applyPerspectiveTransform(
  21015. Speedy.Matrix.Zeros(2, n),
  21016. Speedy.Matrix(2, n, coords2),
  21017. this._poseHomography
  21018. //this._warpHomography
  21019. );
  21020. */
  21021. }).then(mat => {
  21022. /*
  21023. const n = keypoints.length;
  21024. const coords = mat.read();
  21025. // ** this will interfere with the calculations when frame skipping is on **
  21026. // get keypoints in image space
  21027. for(let i = 0, j = 0; i < n; i++, j += 2) {
  21028. keypoints[i].position.x = coords[j];
  21029. keypoints[i].position.y = coords[j+1];
  21030. }
  21031. */
  21032. // find a polyline surrounding the target
  21033. return this._findPolyline(this._poseHomography, this.screenSize);
  21034. //return this._findPolyline(this._warpHomography, this.screenSize);
  21035. }).then(polyline => {
  21036. // we let the target object be at the origin of the world space
  21037. // (identity transform). We also perform a change of coordinates,
  21038. // so that we move out from pixel space and into normalized space
  21039. const modelMatrix = this._camera.denormalizer(); // ~ "identity matrix"
  21040. const transform = new StandardTransform(modelMatrix);
  21041. const pose = new Pose(transform);
  21042. // given the current state of the camera model, we get a viewer
  21043. // compatible with the pose of the target
  21044. const viewer = new Viewer(this._camera);
  21045. // the trackable object
  21046. const trackable = {
  21047. pose: pose,
  21048. referenceImage: referenceImage
  21049. };
  21050. // the result generated by the image tracker
  21051. const result = {
  21052. tracker: this._imageTracker,
  21053. trackables: [trackable],
  21054. viewer: viewer
  21055. };
  21056. // build and save the output
  21057. this._lastOutput = {
  21058. exports: result,
  21059. cameraMatrix: this._camera.matrix,
  21060. homography: this._warpHomography,
  21061. //keypoints: keypoints,
  21062. screenSize: this.screenSize,
  21063. image: image,
  21064. polyline: polyline,
  21065. };
  21066. // we have successfully tracked the target in this frame
  21067. this._lostCounter = 0;
  21068. // done!
  21069. return {
  21070. nextState: 'tracking',
  21071. trackerOutput: this._lastOutput
  21072. };
  21073. }).catch(err => {
  21074. // give some tolerance to tracking errors
  21075. if (err instanceof TrackingError) {
  21076. if (++this._lostCounter <= TRACK_LOST_TOLERANCE) {
  21077. //console.log("ABSORB",this._lostCounter,err.toString())
  21078. // absorb the error
  21079. return {
  21080. nextState: 'tracking',
  21081. trackerOutput: this._lastOutput
  21082. };
  21083. }
  21084. }
  21085. // lost tracking
  21086. Utils.warning(`The target has been lost! ${err.toString()}`);
  21087. this._camera.reset();
  21088. // go back to the scanning state
  21089. return {
  21090. nextState: 'scanning',
  21091. trackerOutput: {
  21092. image: image,
  21093. screenSize: this.screenSize,
  21094. },
  21095. };
  21096. });
  21097. }
  21098. /**
  21099. * Find quality matches between two sets of keypoints
  21100. * @param currKeypoints keypoints of the current frame
  21101. * @param prevKeypoints keypoints of the previous frame
  21102. * @returns quality matches
  21103. */
  21104. _findQualityMatches(currKeypoints, prevKeypoints) {
  21105. const result = [[], []];
  21106. const n = currKeypoints.length;
  21107. for (let i = 0; i < n; i++) {
  21108. const currKeypoint = currKeypoints[i];
  21109. if (currKeypoint.matches[0].index >= 0 && currKeypoint.matches[1].index >= 0) {
  21110. const d1 = currKeypoint.matches[0].distance;
  21111. const d2 = currKeypoint.matches[1].distance;
  21112. if (d1 <= TRACK_MATCH_RATIO * d2) {
  21113. const prevKeypoint = prevKeypoints[currKeypoint.matches[0].index];
  21114. result[0].push(currKeypoint);
  21115. result[1].push(prevKeypoint);
  21116. }
  21117. }
  21118. }
  21119. return result;
  21120. }
  21121. /**
  21122. * Find a better spatial distribution of the input matches
  21123. * @param matches quality matches
  21124. * @returns refined quality matches
  21125. */
  21126. _refineQualityMatches(matches) {
  21127. const currKeypoints = matches[0];
  21128. const prevKeypoints = matches[1];
  21129. // find a better spatial distribution of the keypoints
  21130. const indices = this._distributeKeypoints(currKeypoints, TRACK_GRID_GRANULARITY);
  21131. const n = indices.length; // number of refined matches
  21132. // assemble output
  21133. const result = [new Array(n), new Array(n)];
  21134. for (let i = 0; i < n; i++) {
  21135. result[0][i] = currKeypoints[indices[i]];
  21136. result[1][i] = prevKeypoints[indices[i]];
  21137. }
  21138. // done!
  21139. return result;
  21140. }
  21141. /**
  21142. * Spatially distribute keypoints over a grid
  21143. * @param keypoints keypoints to be distributed
  21144. * @param gridCells number of grid elements in each axis
  21145. * @returns a list of indices of keypoints[]
  21146. */
  21147. _distributeKeypoints(keypoints, gridCells) {
  21148. // get the coordinates of the keypoints
  21149. const n = keypoints.length;
  21150. const points = new Array(2 * n);
  21151. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21152. points[j] = keypoints[i].x;
  21153. points[j + 1] = keypoints[i].y;
  21154. }
  21155. // normalize the coordinates to [0,1] x [0,1]
  21156. this._normalizePoints(points);
  21157. // distribute the keypoints over a grid
  21158. const numberOfCells = gridCells * gridCells;
  21159. const grid = (new Array(numberOfCells)).fill(-1);
  21160. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21161. // find the grid location of the i-th point
  21162. const xg = Math.floor(points[j] * gridCells); // 0 <= xg,yg < gridCells
  21163. const yg = Math.floor(points[j + 1] * gridCells);
  21164. // store the index of the i-th point in the grid
  21165. grid[yg * gridCells + xg] = i;
  21166. }
  21167. // retrieve points of the grid
  21168. const indices = [];
  21169. for (let g = 0; g < numberOfCells; g++) {
  21170. if (grid[g] >= 0) {
  21171. const i = grid[g];
  21172. indices.push(i);
  21173. }
  21174. }
  21175. // done!
  21176. return indices;
  21177. }
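/*
 * note: each grid cell keeps at most one keypoint (the last one assigned to
 * it), so with gridCells = TRACK_GRID_GRANULARITY this method returns at most
 * gridCells * gridCells indices, roughly evenly spread over the image
 */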
  21178. /**
  21179. * Normalize points to [0,1)^2
  21180. * @param points 2 x n matrix of points in column-major format
  21181. * @returns points
  21182. */
  21183. _normalizePoints(points) {
  21184. Utils.assert(points.length % 2 == 0);
  21185. const n = points.length / 2;
  21186. if (n == 0)
  21187. return points;
  21188. let xmin = Number.POSITIVE_INFINITY, xmax = Number.NEGATIVE_INFINITY;
  21189. let ymin = Number.POSITIVE_INFINITY, ymax = Number.NEGATIVE_INFINITY;
  21190. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21191. const x = points[j], y = points[j + 1];
  21192. xmin = x < xmin ? x : xmin;
  21193. ymin = y < ymin ? y : ymin;
  21194. xmax = x > xmax ? x : xmax;
  21195. ymax = y > ymax ? y : ymax;
  21196. }
  21197. const xlen = xmax - xmin + 1; // +1 is a correction factor, so that 0 <= x,y < 1
  21198. const ylen = ymax - ymin + 1;
  21199. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21200. points[j] = (points[j] - xmin) / xlen;
  21201. points[j + 1] = (points[j + 1] - ymin) / ylen;
  21202. }
  21203. return points;
  21204. }
  21205. /**
  21206. * Find a matrix with the coordinates of quality matches
  21207. * @param matches n quality matches
  21208. * @returns a 2 x 2n matrix split into two 2 x n blocks [ prevKeypoints | currKeypoints ]
  21209. */
  21210. _findMatrixOfMatches(matches) {
  21211. const n = matches[0].length;
  21212. Utils.assert(n > 0);
  21213. // sets of keypoints
  21214. const currKeypoints = matches[0];
  21215. const prevKeypoints = matches[1];
  21216. // get the coordinates of the keypoints of the set of refined matches
  21217. const src = new Array(2 * n);
  21218. const dst = new Array(2 * n);
  21219. for (let i = 0, j = 0; i < n; i++, j += 2) {
  21220. src[j] = prevKeypoints[i].x;
  21221. src[j + 1] = prevKeypoints[i].y;
  21222. dst[j] = currKeypoints[i].x;
  21223. dst[j + 1] = currKeypoints[i].y;
  21224. }
  21225. // assemble the matrix
  21226. return speedy_vision_default().Matrix(2, 2 * n, src.concat(dst));
  21227. }
  21228. /**
  21229. * Preprocess keypoint matches
  21230. * @param currKeypoints keypoints of the current frame
  21231. * @param prevKeypoints keypoints of the previous frame
  21232. * @returns a promise that is rejected if there are not enough "good" matches, or that is resolved to a
  21233. * 2 x 2n matrix split into two 2 x n blocks [ source x,y coordinates | dest x,y coordinates ]
  21234. */
  21235. _preprocessMatches(currKeypoints, prevKeypoints) {
  21236. // find and refine quality matches
  21237. const qualityMatches = this._findQualityMatches(currKeypoints, prevKeypoints);
  21238. const refinedMatches = this._refineQualityMatches(qualityMatches);
  21239. // not enough matches?
  21240. const n = refinedMatches[0].length;
  21241. if (n < TRACK_MIN_MATCHES)
  21242. return speedy_vision_default().Promise.reject(new TrackingError('Not enough data to compute a motion model'));
  21243. // find matrix of matches
  21244. const matrixOfMatches = this._findMatrixOfMatches(refinedMatches);
  21245. // warp matrix of matches
  21246. const result = speedy_vision_default().Matrix.Zeros(2, 2 * n);
  21247. return this._findKeypointWarp().then(transform => speedy_vision_default().Matrix.applyAffineTransform(result, matrixOfMatches, transform.block(0, 1, 0, 2)));
  21248. }
  21249. /**
  21250. * Find an affine motion model of the target image
  21251. * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
  21252. * @returns a promise that resolves to a 3x3 affine motion model (last row is [ 0 0 1 ])
  21253. */
  21254. _findAffineMotion(preprocessedMatches) {
  21255. const model = speedy_vision_default().Matrix.Eye(3);
  21256. const n = preprocessedMatches.columns / 2; // number of preprocessed matches
  21257. // find motion model
  21258. return speedy_vision_default().Matrix.findAffineTransform(model.block(0, 1, 0, 2), preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
  21259. method: 'pransac',
  21260. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  21261. numberOfHypotheses: 512,
  21262. bundleSize: 128,
  21263. }).then(_ => {
  21264. // validate the model
  21265. const a00 = model.at(0, 0);
  21266. if (Number.isNaN(a00))
  21267. throw new TrackingError(`Can't compute affine motion model: bad keypoints`);
  21268. // done!
  21269. return model;
  21270. });
  21271. }
  21272. /**
  21273. * Find a perspective motion model of the target image
  21274. * @param preprocessedMatches 2 x 2n matrix split into two 2 x n blocks [ src | dest ]
  21275. * @returns a promise that resolves to a 3x3 perspective motion model
  21276. */
  21277. _findPerspectiveMotion(preprocessedMatches) {
  21278. /*
  21279. We can probably get more accurate motion estimates if we
  21280. work in 3D rather than in 2D. We're currently estimating
  21281. an affine transform in image space. What if we projected
  21282. the keypoints into world space, estimated the camera motion
21283. (rotation and translation) that best describes the observed
21284. motion of the keypoints, and then projected things
  21285. back to image space? Need to figure this out; we'll get a
  21286. homography matrix.
  21287. Note: keypoints are in rectified image space.
  21288. Note: work with a 6 DoF perspective transform instead of 8.
  21289. */
  21290. const model = speedy_vision_default().Matrix.Zeros(3);
  21291. const n = preprocessedMatches.columns / 2; // number of preprocessed matches
  21292. // find motion model
  21293. return speedy_vision_default().Matrix.findHomography(model, preprocessedMatches.block(0, 1, 0, n - 1), preprocessedMatches.block(0, 1, n, 2 * n - 1), {
  21294. method: 'pransac',
  21295. reprojectionError: TRACK_RANSAC_REPROJECTIONERROR,
  21296. numberOfHypotheses: 512 * 2,
  21297. bundleSize: 128 * 4, //*4
  21298. }).then(_ => {
  21299. // validate the model
  21300. const a00 = model.at(0, 0);
  21301. if (Number.isNaN(a00))
  21302. throw new TrackingError(`Can't compute perspective motion model: bad keypoints`);
  21303. // done!
  21304. return model;
  21305. });
  21306. }
  21307. /**
  21308. * Find a rectification matrix to be applied to the target image
  21309. * @param homography maps a reference image to the AR screen
  21310. * @param media target
  21311. * @param screenSize AR screen
  21312. * @returns promise that resolves to a rectification matrix
  21313. */
  21314. _findImageWarp(homography, screenSize) {
  21315. const referenceImage = this._referenceImage;
  21316. const media = this._imageTracker.database._findMedia(referenceImage.name);
  21317. const mat = speedy_vision_default().Matrix.Zeros(3);
  21318. return this._findRectificationMatrixOfFullscreenImage(media, screenSize).then(warp => mat.setTo(warp.times(homography.inverse())));
  21319. }
  21320. /**
  21321. * Find a warp to be applied to the keypoints
  21322. * @returns affine transform
  21323. */
  21324. _findKeypointWarp() {
  21325. const referenceImage = this._referenceImage;
  21326. const media = this._imageTracker.database._findMedia(referenceImage.name);
  21327. const screenSize = this.screenSize;
  21328. const sw = screenSize.width, sh = screenSize.height;
  21329. const mat = speedy_vision_default().Matrix.Eye(3, 3);
  21330. // no rotation is needed
  21331. if (!this._mustRotateWarpedImage(media, screenSize))
  21332. return speedy_vision_default().Promise.resolve(mat);
  21333. // rotate by 90 degrees clockwise and scale
  21334. return speedy_vision_default().Matrix.affine(mat.block(0, 1, 0, 2), speedy_vision_default().Matrix(2, 3, [0, sh, 0, 0, sw, 0]), speedy_vision_default().Matrix(2, 3, [0, 0, sw, 0, sw, sh])).then(_ => mat);
  21335. }
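// note: the affine warp above is determined by three point correspondences,
// read off the two 2x3 matrices in column-major order:
// (0, sh) -> (0, 0), (0, 0) -> (sw, 0), (sw, 0) -> (sw, sh),
// i.e., a 90-degree clockwise rotation followed by a scale to the AR screen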
  21336. /**
  21337. * Predict the keypoints without actually looking at the image
  21338. * @param curr keypoints at time t (will modify the contents)
  21339. * @param initial keypoints at time t-1 (not just t = 0)
  21340. * @returns keypoints at time t+1
  21341. */
  21342. _predictKeypoints(curr, initial) {
  21343. // the target image is likely to be moving roughly in
  21344. // the same manner as it was in the previous frame
  21345. const next = [];
  21346. const n = curr.length;
  21347. for (let i = 0; i < n; i++) {
  21348. const cur = curr[i];
  21349. if (cur.matches[0].index < 0 || cur.matches[1].index < 0)
  21350. continue;
  21351. /*
  21352. else if(cur.matches[0].distance > TRACK_MATCH_RATIO * cur.matches[1].distance)
  21353. continue;
  21354. */
  21355. const ini = initial[cur.matches[0].index];
  21356. const dx = cur.position.x - ini.position.x;
  21357. const dy = cur.position.y - ini.position.y;
  21358. // a better mathematical model is needed
  21359. const alpha = 0.8; //0.2;
  21360. cur.position.x = ini.position.x + alpha * dx;
  21361. cur.position.y = ini.position.y + alpha * dy;
  21362. next.push(cur);
  21363. }
  21364. // done!
  21365. return next;
  21366. }
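/*
 * e.g., with alpha = 0.8, a keypoint whose matched reference position ini is
 * (100, 100) and whose current position cur is (110, 104) is predicted at
 * ini + alpha * (cur - ini) = (108, 103.2)
 */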
  21367. /**
  21368. * Create & setup the pipeline
  21369. * @returns pipeline
  21370. */
  21371. _createPipeline() {
  21372. const pipeline = speedy_vision_default().Pipeline();
  21373. const source = speedy_vision_default().Image.Source('source');
  21374. const screen = speedy_vision_default().Transform.Resize('screen');
  21375. const greyscale = speedy_vision_default().Filter.Greyscale();
  21376. const imageRectifier = speedy_vision_default().Transform.PerspectiveWarp('imageRectifier');
  21377. const nightvision = speedy_vision_default().Filter.Nightvision();
  21378. const nightvisionMux = speedy_vision_default().Image.Multiplexer();
  21379. const blur = speedy_vision_default().Filter.GaussianBlur();
  21380. const detector = speedy_vision_default().Keypoint.Detector.Harris();
  21381. const descriptor = speedy_vision_default().Keypoint.Descriptor.ORB();
  21382. const matcher = speedy_vision_default().Keypoint.Matcher.BFKNN();
  21383. const subpixel = speedy_vision_default().Keypoint.SubpixelRefiner();
  21384. const denoiser = speedy_vision_default().Filter.GaussianBlur();
  21385. const borderClipper = speedy_vision_default().Keypoint.BorderClipper('borderClipper');
  21386. const clipper = speedy_vision_default().Keypoint.Clipper();
  21387. const keypointRectifier = speedy_vision_default().Keypoint.Transformer('keypointRectifier');
  21388. const keypointPortalSource = speedy_vision_default().Keypoint.Portal.Source('keypointPortalSource');
  21389. const keypointSink = speedy_vision_default().Keypoint.SinkOfMatchedKeypoints('keypoints');
  21390. const imageSink = speedy_vision_default().Image.Sink('image');
  21391. source.media = null;
  21392. screen.size = speedy_vision_default().Size(0, 0);
  21393. imageRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  21394. nightvision.gain = NIGHTVISION_GAIN;
  21395. nightvision.offset = NIGHTVISION_OFFSET;
  21396. nightvision.decay = NIGHTVISION_DECAY;
  21397. nightvision.quality = NIGHTVISION_QUALITY;
  21398. nightvisionMux.port = TRACK_WITH_NIGHTVISION ? 1 : 0; // 1 = enable nightvision
  21399. blur.kernelSize = speedy_vision_default().Size(ORB_GAUSSIAN_KSIZE, ORB_GAUSSIAN_KSIZE);
  21400. blur.sigma = speedy_vision_default().Vector2(ORB_GAUSSIAN_SIGMA, ORB_GAUSSIAN_SIGMA);
  21401. denoiser.kernelSize = speedy_vision_default().Size(SUBPIXEL_GAUSSIAN_KSIZE, SUBPIXEL_GAUSSIAN_KSIZE);
  21402. denoiser.sigma = speedy_vision_default().Vector2(SUBPIXEL_GAUSSIAN_SIGMA, SUBPIXEL_GAUSSIAN_SIGMA);
  21403. detector.quality = TRACK_HARRIS_QUALITY;
  21404. detector.capacity = TRACK_DETECTOR_CAPACITY;
  21405. subpixel.method = SUBPIXEL_METHOD;
  21406. clipper.size = TRACK_MAX_KEYPOINTS;
  21407. borderClipper.imageSize = screen.size;
  21408. borderClipper.borderSize = speedy_vision_default().Vector2(0, 0);
  21409. keypointRectifier.transform = speedy_vision_default().Matrix.Eye(3);
  21410. matcher.k = 2;
  21411. keypointPortalSource.source = null;
  21412. keypointSink.turbo = USE_TURBO;
  21413. // prepare input
  21414. source.output().connectTo(screen.input());
  21415. screen.output().connectTo(greyscale.input());
  21416. // preprocess images
  21417. greyscale.output().connectTo(imageRectifier.input());
  21418. imageRectifier.output().connectTo(nightvisionMux.input('in0'));
  21419. imageRectifier.output().connectTo(nightvision.input());
  21420. nightvision.output().connectTo(nightvisionMux.input('in1'));
  21421. // keypoint detection & clipping
  21422. nightvisionMux.output().connectTo(detector.input());
  21423. detector.output().connectTo(borderClipper.input());
  21424. borderClipper.output().connectTo(clipper.input());
  21425. // keypoint refinement
  21426. imageRectifier.output().connectTo(denoiser.input());
  21427. denoiser.output().connectTo(subpixel.input('image'));
  21428. clipper.output().connectTo(subpixel.input('keypoints'));
  21429. // keypoint description
  21430. imageRectifier.output().connectTo(blur.input());
  21431. blur.output().connectTo(descriptor.input('image'));
  21432. subpixel.output().connectTo(descriptor.input('keypoints'));
  21433. // keypoint matching
  21434. keypointPortalSource.output().connectTo(matcher.input('database'));
  21435. descriptor.output().connectTo(matcher.input('keypoints'));
  21436. // prepare output
  21437. descriptor.output().connectTo(keypointRectifier.input());
  21438. //preMatcher.output().connectTo(keypointRectifier.input());
  21439. keypointRectifier.output().connectTo(keypointSink.input());
  21440. matcher.output().connectTo(keypointSink.input('matches'));
  21441. //imageRectifier.output().connectTo(imageSink.input());
  21442. // done!
  21443. pipeline.init(source, screen, greyscale, imageRectifier, nightvision, nightvisionMux, blur, detector, subpixel, borderClipper, clipper, denoiser, descriptor, matcher, keypointPortalSource, keypointRectifier, keypointSink);
  21444. return pipeline;
  21445. }
  21446. }
  21447. ;// CONCATENATED MODULE: ./src/trackers/image-tracker/image-tracker.ts
  21448. /*
  21449. * encantar.js
  21450. * GPU-accelerated Augmented Reality for the web
  21451. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21452. *
  21453. * This program is free software: you can redistribute it and/or modify
  21454. * it under the terms of the GNU Lesser General Public License as published
  21455. * by the Free Software Foundation, either version 3 of the License, or
  21456. * (at your option) any later version.
  21457. *
  21458. * This program is distributed in the hope that it will be useful,
  21459. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21460. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21461. * GNU Lesser General Public License for more details.
  21462. *
  21463. * You should have received a copy of the GNU Lesser General Public License
  21464. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21465. *
  21466. * image-tracker.ts
  21467. * Image Tracker
  21468. */
21469. /** A helper that formats a size as "width x height" */
  21470. const formatSize = (size) => `${size.width}x${size.height}`;
  21471. /**
  21472. * The ImageTracker tracks an image (one at a time)
  21473. */
  21474. class ImageTracker extends AREventTarget {
  21475. /**
  21476. * Constructor
  21477. */
  21478. constructor() {
  21479. super();
  21480. // the states
  21481. this._state = {
  21482. 'initial': new ImageTrackerInitialState(this),
  21483. 'training': new ImageTrackerTrainingState(this),
  21484. 'scanning': new ImageTrackerScanningState(this),
  21485. 'pre-tracking': new ImageTrackerPreTrackingState(this),
  21486. 'tracking': new ImageTrackerTrackingState(this),
  21487. };
  21488. // initial setup
  21489. this._session = null;
  21490. this._activeStateName = 'initial';
  21491. this._lastOutput = {};
  21492. this._database = new ReferenceImageDatabase();
  21493. // user settings
  21494. this._resolution = DEFAULT_TRACKING_RESOLUTION;
  21495. }
  21496. /**
  21497. * The type of the tracker
  21498. */
  21499. get type() {
  21500. return 'image-tracker';
  21501. }
  21502. /**
  21503. * Current state name
  21504. */
  21505. get state() {
  21506. return this._activeStateName;
  21507. }
  21508. /**
  21509. * Reference Image Database
  21510. * Must be configured before training the tracker
  21511. */
  21512. get database() {
  21513. return this._database;
  21514. }
  21515. /**
  21516. * Resolution of the AR screen space
  21517. */
  21518. get resolution() {
  21519. return this._resolution;
  21520. }
  21521. /**
  21522. * Resolution of the AR screen space
  21523. */
  21524. set resolution(resolution) {
  21525. this._resolution = resolution;
  21526. }
  21527. /**
  21528. * Size of the AR screen space, in pixels
  21529. * @internal
  21530. */
  21531. get screenSize() {
  21532. return this._state[this._activeStateName].screenSize;
  21533. }
  21534. /**
  21535. * Last emitted output
  21536. * @internal
  21537. */
  21538. get _output() {
  21539. return this._lastOutput;
  21540. }
  21541. /**
  21542. * Stats related to this tracker
  21543. * @internal
  21544. */
  21545. get _stats() {
  21546. return `${formatSize(this.screenSize)} ${this.state}`;
  21547. }
  21548. /**
  21549. * Initialize this tracker
  21550. * @param session
  21551. * @returns promise that resolves after the tracker has been initialized
  21552. * @internal
  21553. */
  21554. _init(session) {
  21555. // store the session
  21556. this._session = session;
  21557. // initialize states
  21558. for (const state of Object.values(this._state))
  21559. state.init();
  21560. // done!
  21561. return speedy_vision_default().Promise.resolve();
  21562. }
  21563. /**
  21564. * Release this tracker
  21565. * @returns promise that resolves after the tracker has been released
  21566. * @internal
  21567. */
  21568. _release() {
  21569. // release states
  21570. for (const state of Object.values(this._state))
  21571. state.release();
  21572. // unlink session
  21573. this._session = null;
  21574. // done!
  21575. return speedy_vision_default().Promise.resolve();
  21576. }
  21577. /**
  21578. * Update the tracker
  21579. * @returns promise
  21580. * @internal
  21581. */
  21582. _update() {
  21583. // validate
  21584. if (this._session == null)
  21585. return speedy_vision_default().Promise.reject(new IllegalOperationError(`Uninitialized tracker`));
  21586. // compute the screen size for image processing purposes
  21587. // note: this may change over time...!
  21588. const media = this._session.media;
  21589. const aspectRatio = media.width / media.height;
  21590. const screenSize = Utils.resolution(this._resolution, aspectRatio);
  21591. // run the active state
  21592. const activeState = this._state[this._activeStateName];
  21593. return activeState.update(media, screenSize).then(({ trackerOutput, nextState, nextStateSettings }) => {
  21594. // update the output of the tracker
  21595. this._lastOutput = trackerOutput;
  21596. // need to change the state?
  21597. if (this._activeStateName != nextState) {
  21598. activeState.onLeaveState();
  21599. this._activeStateName = nextState;
  21600. this._state[nextState].onEnterState(nextStateSettings || {});
  21601. }
  21602. });
  21603. }
  21604. /**
21605. * Get the reference image associated with a keypoint of the trained set
21606. * @param keypointIndex index of the keypoint
21607. * @returns the corresponding reference image
  21608. * @internal
  21609. */
  21610. _referenceImageOfKeypoint(keypointIndex) {
  21611. const training = this._state.training;
  21612. return training.referenceImageOfKeypoint(keypointIndex);
  21613. }
  21614. /**
  21615. * Get reference image index
21616. * @param keypointIndex index of the keypoint
  21617. * @returns reference image index, or -1 if not found
  21618. * @internal
  21619. */
  21620. _referenceImageIndexOfKeypoint(keypointIndex) {
  21621. const training = this._state.training;
  21622. return training.referenceImageIndexOfKeypoint(keypointIndex);
  21623. }
  21624. /**
  21625. * Get a keypoint of the trained set
  21626. * @param keypointIndex
  21627. * @returns a keypoint
  21628. * @internal
  21629. */
  21630. _referenceKeypoint(keypointIndex) {
  21631. const training = this._state.training;
  21632. return training.referenceKeypoint(keypointIndex);
  21633. }
  21634. }
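/*
 * Usage sketch (illustrative only; not part of the bundle's runtime code).
 * It relies on members visible in this file: TrackerFactory.ImageTracker(),
 * the `resolution` setter and the `database` / `state` / `type` getters.
 * The database.add() call and the element id 'reference-image' follow the
 * public documentation and are assumptions with respect to this excerpt.
 *
 *   const tracker = AR.Tracker.ImageTracker();
 *   tracker.resolution = 'md';   // resolution of the AR screen space (optional)
 *   tracker.database.add([{
 *       name: 'my-target',
 *       image: document.getElementById('reference-image')
 *   }]);
 *   console.log(tracker.type);   // "image-tracker"
 *   console.log(tracker.state);  // "initial" (before the session starts)
 */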
  21635. ;// CONCATENATED MODULE: ./src/trackers/tracker-factory.ts
  21636. /*
  21637. * encantar.js
  21638. * GPU-accelerated Augmented Reality for the web
  21639. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21640. *
  21641. * This program is free software: you can redistribute it and/or modify
  21642. * it under the terms of the GNU Lesser General Public License as published
  21643. * by the Free Software Foundation, either version 3 of the License, or
  21644. * (at your option) any later version.
  21645. *
  21646. * This program is distributed in the hope that it will be useful,
  21647. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21648. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21649. * GNU Lesser General Public License for more details.
  21650. *
  21651. * You should have received a copy of the GNU Lesser General Public License
  21652. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21653. *
  21654. * tracker-factory.ts
  21655. * Tracker factory
  21656. */
  21657. /**
  21658. * Tracker factory
  21659. */
  21660. class TrackerFactory {
  21661. /**
  21662. * Create an Image Tracker
  21663. */
  21664. static ImageTracker() {
  21665. return new ImageTracker();
  21666. }
  21667. }
  21668. ;// CONCATENATED MODULE: ./src/sources/video-source.ts
  21669. /*
  21670. * encantar.js
  21671. * GPU-accelerated Augmented Reality for the web
  21672. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21673. *
  21674. * This program is free software: you can redistribute it and/or modify
  21675. * it under the terms of the GNU Lesser General Public License as published
  21676. * by the Free Software Foundation, either version 3 of the License, or
  21677. * (at your option) any later version.
  21678. *
  21679. * This program is distributed in the hope that it will be useful,
  21680. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21681. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21682. * GNU Lesser General Public License for more details.
  21683. *
  21684. * You should have received a copy of the GNU Lesser General Public License
  21685. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21686. *
  21687. * video-source.ts
  21688. * HTMLVideoElement-based source of data
  21689. */
  21690. /** A message to be displayed if a video can't autoplay and user interaction is required */
  21691. const ALERT_MESSAGE = 'Tap on the screen to start';
  21692. /** Whether or not we have displayed the ALERT_MESSAGE */
  21693. let displayedAlertMessage = false;
  21694. /**
  21695. * HTMLVideoElement-based source of data
  21696. */
  21697. class VideoSource {
  21698. /**
  21699. * Constructor
  21700. */
  21701. constructor(video) {
  21702. Utils.assert(video instanceof HTMLVideoElement, 'Expected a video element');
  21703. this._video = video;
  21704. this._media = null;
  21705. }
  21706. /**
  21707. * A type-identifier of the source of data
  21708. * @internal
  21709. */
  21710. get _type() {
  21711. return 'video';
  21712. }
  21713. /**
  21714. * Get media
  21715. * @internal
  21716. */
  21717. get _data() {
  21718. if (this._media == null)
  21719. throw new IllegalOperationError(`The media of the source of data isn't loaded`);
  21720. return this._media;
  21721. }
  21722. /**
  21723. * Stats related to this source of data
  21724. * @internal
  21725. */
  21726. get _stats() {
  21727. const media = this._media;
  21728. if (media != null)
  21729. return `${media.width}x${media.height} video`;
  21730. else
  21731. return 'uninitialized video';
  21732. }
  21733. /**
  21734. * Initialize this source of data
  21735. * @returns a promise that resolves as soon as this source of data is initialized
  21736. * @internal
  21737. */
  21738. _init() {
  21739. Utils.log(`Initializing ${this._type} source...`);
  21740. // prepare the video before loading the SpeedyMedia!
  21741. return this._prepareVideo(this._video).then(video => {
  21742. Utils.log('The video is prepared');
  21743. return speedy_vision_default().load(video).then(media => {
  21744. Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
  21745. this._media = media;
  21746. });
  21747. });
  21748. }
  21749. /**
  21750. * Release this source of data
  21751. * @returns a promise that resolves as soon as this source of data is released
  21752. * @internal
  21753. */
  21754. _release() {
  21755. if (this._media)
  21756. this._media.release();
  21757. this._media = null;
  21758. return speedy_vision_default().Promise.resolve();
  21759. }
  21760. /**
  21761. * Handle browser-specific quirks for <video> elements
  21762. * @param video a video element
  21763. * @returns a promise that resolves to the input video
  21764. */
  21765. _prepareVideo(video) {
  21766. // WebKit <video> policies for iOS:
  21767. // https://webkit.org/blog/6784/new-video-policies-for-ios/
  21768. // required on iOS; nice to have in all browsers
  21769. video.setAttribute('playsinline', '');
  21770. // handle autoplay
  21771. return this._handleAutoPlay(video).then(video => {
  21772. // handle WebKit quirks
  21773. if (Utils.isWebKit()) {
  21774. // on Epiphany 45, a hidden <video> shows up as a black screen when copied to a canvas
21775. // on iOS 15.2-17.3, this workaround doesn't seem necessary, but it causes no harm
  21776. if (video.hidden) {
  21777. video.hidden = false;
  21778. video.style.setProperty('opacity', '0');
  21779. video.style.setProperty('position', 'fixed'); // make sure that it's visible on-screen
  21780. video.style.setProperty('left', '0');
  21781. video.style.setProperty('top', '0');
  21782. //video.style.setProperty('display', 'none'); // doesn't work. Same as video.hidden
  21783. //video.style.setProperty('visibility', 'hidden'); // doesn't work either
  21784. }
  21785. }
  21786. // done
  21787. return video;
  21788. });
  21789. }
  21790. /**
  21791. * Handle browser-specific quirks for videos marked with autoplay
  21792. * @param video a <video> marked with autoplay
  21793. * @returns a promise that resolves to the input video
  21794. */
  21795. _handleAutoPlay(video) {
  21796. // Autoplay guide: https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide
  21797. // Chrome policy: https://developer.chrome.com/blog/autoplay/
  21798. // WebKit policy: https://webkit.org/blog/7734/auto-play-policy-changes-for-macos/
  21799. // nothing to do?
  21800. if (!video.autoplay)
  21801. return speedy_vision_default().Promise.resolve(video);
  21802. // videos marked with autoplay should be muted
  21803. if (!video.muted) {
  21804. Utils.warning('Videos marked with autoplay should be muted', video);
  21805. video.muted = true;
  21806. }
  21807. // the browser may not honor the autoplay attribute if the video is not
  21808. // visible on-screen. So, let's try to play the video in any case.
  21809. return this._waitUntilPlayable(video).then(video => {
  21810. // try to play the video
  21811. const promise = video.play();
  21812. // handle older browsers
  21813. if (promise === undefined)
  21814. return video;
  21815. // resolve if successful
  21816. return new (speedy_vision_default()).Promise((resolve, reject) => {
  21817. promise.then(() => resolve(video), error => {
  21818. // can't play the video
  21819. Utils.error(`Can't autoplay video!`, error, video);
  21820. // autoplay is blocked for some reason
  21821. if (error.name == 'NotAllowedError') {
  21822. Utils.warning('Tip: allow manual playback');
  21823. if (Utils.isIOS())
  21824. Utils.warning('Is low power mode on?');
  21825. // User interaction is required to play the video. We can
  21826. // solve this here (easy and convenient to do) or at the
  21827. // application layer (for a better user experience). If the
  21828. // latter is preferred, just disable autoplay and play the
21829. // video programmatically.
  21830. if (video.hidden || !video.controls || video.parentNode === null) {
  21831. // this is added for convenience
  21832. document.body.addEventListener('pointerdown', () => video.play());
  21833. // ask only once for user interaction
  21834. if (!displayedAlertMessage) {
  21835. alert(ALERT_MESSAGE);
  21836. displayedAlertMessage = true;
  21837. }
  21838. // XXX what if the Session mode is inline? In this
  21839. // case, this convenience code may be undesirable.
  21840. // A workaround is to disable autoplay.
  21841. }
  21842. /*else {
  21843. // play the video after the first interaction with the page
  21844. const polling = setInterval(() => {
  21845. video.play().then(() => clearInterval(polling));
  21846. }, 500);
  21847. }*/
  21848. }
  21849. // unsupported media source
  21850. else if (error.name == 'NotSupportedError') {
  21851. reject(new NotSupportedError('Unsupported video format', error));
  21852. return;
  21853. }
  21854. // done
  21855. resolve(video);
  21856. });
  21857. });
  21858. });
  21859. }
  21860. /**
  21861. * Wait for the input video to be playable
  21862. * @param video
  21863. * @returns a promise that resolves to the input video when it can be played
  21864. */
  21865. _waitUntilPlayable(video) {
  21866. const TIMEOUT = 15000, INTERVAL = 500;
  21867. if (video.readyState >= 3)
  21868. return speedy_vision_default().Promise.resolve(video);
  21869. return new (speedy_vision_default()).Promise((resolve, reject) => {
  21870. let ms = 0, t = setInterval(() => {
  21871. //if(video.readyState >= 4) { // canplaythrough (may timeout on slow connections)
  21872. if (video.readyState >= 3) {
  21873. clearInterval(t);
  21874. resolve(video);
  21875. }
  21876. else if ((ms += INTERVAL) >= TIMEOUT) {
  21877. clearInterval(t);
  21878. reject(new TimeoutError('The video took too long to load'));
  21879. }
  21880. }, INTERVAL);
  21881. });
  21882. }
  21883. }
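/*
 * Usage sketch (illustrative only). A VideoSource wraps an existing <video>
 * element; the quirks handled above (playsinline, muted autoplay) suggest the
 * markup below. The element id 'my-video' and the file path are assumptions.
 *
 *   <video id="my-video" src="assets/scene.webm" muted autoplay playsinline hidden loop></video>
 *
 *   const video = document.getElementById('my-video');
 *   const source = AR.Source.Video(video);   // equivalent to new VideoSource(video)
 */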
  21884. ;// CONCATENATED MODULE: ./src/sources/canvas-source.ts
  21885. /*
  21886. * encantar.js
  21887. * GPU-accelerated Augmented Reality for the web
  21888. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21889. *
  21890. * This program is free software: you can redistribute it and/or modify
  21891. * it under the terms of the GNU Lesser General Public License as published
  21892. * by the Free Software Foundation, either version 3 of the License, or
  21893. * (at your option) any later version.
  21894. *
  21895. * This program is distributed in the hope that it will be useful,
  21896. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21897. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21898. * GNU Lesser General Public License for more details.
  21899. *
  21900. * You should have received a copy of the GNU Lesser General Public License
  21901. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21902. *
  21903. * canvas-source.ts
  21904. * HTMLCanvasElement-based source of data
  21905. */
  21906. /**
  21907. * HTMLCanvasElement-based source of data
  21908. */
  21909. class CanvasSource {
  21910. /**
  21911. * Constructor
  21912. */
  21913. constructor(canvas) {
  21914. Utils.assert(canvas instanceof HTMLCanvasElement, 'Expected a canvas element');
  21915. this._canvas = canvas;
  21916. this._media = null;
  21917. }
  21918. /**
  21919. * A type-identifier of the source of data
  21920. * @internal
  21921. */
  21922. get _type() {
  21923. return 'canvas';
  21924. }
  21925. /**
  21926. * Get media
  21927. * @internal
  21928. */
  21929. get _data() {
  21930. if (this._media == null)
  21931. throw new IllegalOperationError(`The media of the source of data isn't loaded`);
  21932. return this._media;
  21933. }
  21934. /**
  21935. * Stats related to this source of data
  21936. * @internal
  21937. */
  21938. get _stats() {
  21939. const media = this._media;
  21940. if (media != null)
  21941. return `${media.width}x${media.height} canvas`;
  21942. else
  21943. return 'uninitialized canvas';
  21944. }
  21945. /**
  21946. * Initialize this source of data
  21947. * @returns a promise that resolves as soon as this source of data is initialized
  21948. * @internal
  21949. */
  21950. _init() {
  21951. return speedy_vision_default().load(this._canvas).then(media => {
  21952. Utils.log(`Source of data is a ${media.width}x${media.height} ${this._type}`);
  21953. this._media = media;
  21954. });
  21955. }
  21956. /**
  21957. * Release this source of data
  21958. * @returns a promise that resolves as soon as this source of data is released
  21959. * @internal
  21960. */
  21961. _release() {
  21962. if (this._media)
  21963. this._media.release();
  21964. this._media = null;
  21965. return speedy_vision_default().Promise.resolve();
  21966. }
  21967. }
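/*
 * Usage sketch (illustrative only). A CanvasSource wraps an existing <canvas>;
 * whatever the application draws on it becomes the input of the trackers.
 * The element id and the drawing code (someVideoOrImage) are assumptions.
 *
 *   const canvas = document.getElementById('my-canvas');
 *   const source = AR.Source.Canvas(canvas); // equivalent to new CanvasSource(canvas)
 *
 *   // keep drawing to the canvas; the tracker reads its current contents
 *   function animate() {
 *       const ctx = canvas.getContext('2d');
 *       ctx.drawImage(someVideoOrImage, 0, 0, canvas.width, canvas.height);
 *       requestAnimationFrame(animate);
 *   }
 *   animate();
 */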
  21968. ;// CONCATENATED MODULE: ./src/sources/camera-source.ts
  21969. /*
  21970. * encantar.js
  21971. * GPU-accelerated Augmented Reality for the web
  21972. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  21973. *
  21974. * This program is free software: you can redistribute it and/or modify
  21975. * it under the terms of the GNU Lesser General Public License as published
  21976. * by the Free Software Foundation, either version 3 of the License, or
  21977. * (at your option) any later version.
  21978. *
  21979. * This program is distributed in the hope that it will be useful,
  21980. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  21981. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21982. * GNU Lesser General Public License for more details.
  21983. *
  21984. * You should have received a copy of the GNU Lesser General Public License
  21985. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  21986. *
  21987. * camera-source.ts
  21988. * Webcam-based source of data
  21989. */
  21990. /** Default options for camera sources */
  21991. const DEFAULT_CAMERA_OPTIONS = {
  21992. resolution: 'md',
  21993. aspectRatio: 16 / 9,
  21994. constraints: { facingMode: 'environment' },
  21995. };
  21996. /**
  21997. * Webcam-based source of data
  21998. */
  21999. class CameraSource extends VideoSource {
  22000. /**
  22001. * Constructor
  22002. */
  22003. constructor(options) {
  22004. const video = document.createElement('video');
  22005. super(video);
  22006. this._cameraVideo = video;
  22007. this._options = Object.assign({}, DEFAULT_CAMERA_OPTIONS, options);
  22008. }
  22009. /**
  22010. * Camera resolution
  22011. */
  22012. get resolution() {
  22013. return this._options.resolution;
  22014. }
  22015. /**
  22016. * Initialize this source of data
  22017. * @returns a promise that resolves as soon as this source of data is initialized
  22018. * @internal
  22019. */
  22020. _init() {
  22021. Utils.log('Accessing the webcam...');
  22022. // validate
  22023. if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia)
  22024. throw new NotSupportedError('Unsupported browser: no navigator.mediaDevices.getUserMedia()');
  22025. // set up media constraints
  22026. const options = this._options;
  22027. const size = Utils.resolution(options.resolution, options.aspectRatio);
  22028. const constraints = {
  22029. audio: false,
  22030. video: Object.assign({ width: size.width, height: size.height }, options.constraints)
  22031. };
  22032. // load camera stream
  22033. return new (speedy_vision_default()).Promise((resolve, reject) => {
  22034. navigator.mediaDevices.getUserMedia(constraints).then(stream => {
  22035. const video = this._cameraVideo;
  22036. video.onloadedmetadata = () => {
  22037. const promise = video.play();
  22038. const success = 'Access to the webcam has been granted.';
  22039. // handle older browsers
  22040. if (promise === undefined) {
  22041. Utils.log(success);
  22042. resolve(video);
  22043. return;
  22044. }
  22045. // handle promise
  22046. promise.then(() => {
  22047. Utils.log(success);
  22048. resolve(video);
  22049. }).catch(error => {
  22050. reject(new IllegalOperationError('Webcam error!', error));
  22051. });
  22052. };
  22053. video.setAttribute('playsinline', '');
  22054. video.setAttribute('autoplay', '');
  22055. video.setAttribute('muted', '');
  22056. video.srcObject = stream;
  22057. }).catch(error => {
  22058. reject(new AccessDeniedError('Please give access to the webcam and reload the page.', error));
  22059. });
22060. }).then(_ => super._init()); // this will call VideoSource._prepareVideo()
  22061. }
  22062. /**
  22063. * Release this source of data
  22064. * @returns a promise that resolves as soon as this source of data is released
  22065. * @internal
  22066. */
  22067. _release() {
  22068. const stream = this._cameraVideo.srcObject;
  22069. const tracks = stream.getTracks();
  22070. // stop camera feed
  22071. tracks.forEach(track => track.stop());
  22072. this._cameraVideo.onloadedmetadata = null;
  22073. this._cameraVideo.srcObject = null;
  22074. // release the media
  22075. return super._release();
  22076. }
  22077. }
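/*
 * Usage sketch (illustrative only). The options mirror DEFAULT_CAMERA_OPTIONS
 * declared above; every field is optional and the values shown are examples.
 *
 *   const source = AR.Source.Camera({
 *       resolution: 'md',                           // default: 'md'
 *       aspectRatio: 16 / 9,                        // default: 16/9
 *       constraints: { facingMode: 'environment' }  // extra getUserMedia() video constraints
 *   });
 */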
  22078. ;// CONCATENATED MODULE: ./src/sources/source-factory.ts
  22079. /*
  22080. * encantar.js
  22081. * GPU-accelerated Augmented Reality for the web
  22082. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22083. *
  22084. * This program is free software: you can redistribute it and/or modify
  22085. * it under the terms of the GNU Lesser General Public License as published
  22086. * by the Free Software Foundation, either version 3 of the License, or
  22087. * (at your option) any later version.
  22088. *
  22089. * This program is distributed in the hope that it will be useful,
  22090. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22091. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22092. * GNU Lesser General Public License for more details.
  22093. *
  22094. * You should have received a copy of the GNU Lesser General Public License
  22095. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22096. *
  22097. * source-factory.ts
  22098. * Factory of sources of data
  22099. */
  22100. /**
  22101. * Factory of sources of data
  22102. */
  22103. class SourceFactory {
  22104. /**
  22105. * Create a <video>-based source of data
  22106. * @param video video element
  22107. */
  22108. static Video(video) {
  22109. return new VideoSource(video);
  22110. }
  22111. /**
  22112. * Create a <canvas>-based source of data
  22113. * @param canvas canvas element
  22114. */
  22115. static Canvas(canvas) {
  22116. return new CanvasSource(canvas);
  22117. }
  22118. /**
  22119. * Create a Webcam-based source of data
  22120. * @param options optional options object
  22121. */
  22122. static Camera(options = {}) {
  22123. return new CameraSource(options);
  22124. }
  22125. }
  22126. ;// CONCATENATED MODULE: ./src/core/hud.ts
  22127. /*
  22128. * encantar.js
  22129. * GPU-accelerated Augmented Reality for the web
  22130. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22131. *
  22132. * This program is free software: you can redistribute it and/or modify
  22133. * it under the terms of the GNU Lesser General Public License as published
  22134. * by the Free Software Foundation, either version 3 of the License, or
  22135. * (at your option) any later version.
  22136. *
  22137. * This program is distributed in the hope that it will be useful,
  22138. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22139. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22140. * GNU Lesser General Public License for more details.
  22141. *
  22142. * You should have received a copy of the GNU Lesser General Public License
  22143. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22144. *
  22145. * hud.ts
  22146. * Heads Up Display
  22147. */
  22148. /**
  22149. * Heads Up Display: an overlay displayed in front of the augmented scene
  22150. */
  22151. class HUD {
  22152. /**
  22153. * Constructor
  22154. * @param parent parent of the hud container
  22155. * @param hudContainer an existing hud container (optional)
  22156. */
  22157. constructor(parent, hudContainer) {
  22158. this._container = hudContainer || this._createContainer(parent);
  22159. this._isOwnContainer = (hudContainer == null);
  22160. // move the HUD container to the parent node
  22161. if (this._container.parentElement !== parent) {
  22162. this._container.remove();
  22163. parent.insertAdjacentElement('afterbegin', this._container);
  22164. }
  22165. // the HUD should be hidden initially
  22166. if (!this._container.hidden) {
  22167. Utils.warning(`The container of the HUD should have the hidden attribute`);
  22168. this._container.hidden = true;
  22169. }
  22170. }
  22171. /**
  22172. * The container of the HUD
  22173. */
  22174. get container() {
  22175. return this._container;
  22176. }
  22177. /**
  22178. * Whether or not the HUD is visible
  22179. */
  22180. get visible() {
  22181. return !this._container.hidden;
  22182. }
  22183. /**
  22184. * Whether or not the HUD is visible
  22185. */
  22186. set visible(visible) {
  22187. this._container.hidden = !visible;
  22188. }
  22189. /**
  22190. * Initialize the HUD
  22191. * @param zIndex the z-index of the container
  22192. * @internal
  22193. */
  22194. _init(zIndex) {
  22195. const container = this._container;
  22196. container.style.position = 'absolute';
  22197. container.style.left = container.style.top = '0px';
  22198. container.style.right = container.style.bottom = '0px';
  22199. container.style.padding = container.style.margin = '0px';
  22200. container.style.zIndex = String(zIndex);
  22201. container.style.userSelect = 'none';
  22202. this.visible = true;
  22203. }
  22204. /**
  22205. * Release the HUD
  22206. * @internal
  22207. */
  22208. _release() {
  22209. if (this._isOwnContainer) {
  22210. this._isOwnContainer = false;
  22211. this._container.remove();
  22212. }
  22213. }
  22214. /**
  22215. * Create a HUD container as an immediate child of the input node
  22216. * @param parent parent container
  22217. * @returns HUD container
  22218. */
  22219. _createContainer(parent) {
  22220. const node = document.createElement('div');
  22221. node.hidden = true;
  22222. parent.insertAdjacentElement('afterbegin', node);
  22223. return node;
  22224. }
  22225. }
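/*
 * Usage sketch (illustrative only). The HUD is a plain DOM overlay: add
 * elements to hud.container and toggle hud.visible. `viewport` refers to a
 * Viewport instance (see the Viewport class further below); the button is an
 * assumption.
 *
 *   const hud = viewport.hud;
 *   const button = document.createElement('button');
 *   button.textContent = 'Scan again';
 *   hud.container.appendChild(button);
 *   hud.visible = true;
 */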
  22226. ;// CONCATENATED MODULE: ./src/core/viewport.ts
  22227. /*
  22228. * encantar.js
  22229. * GPU-accelerated Augmented Reality for the web
  22230. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22231. *
  22232. * This program is free software: you can redistribute it and/or modify
  22233. * it under the terms of the GNU Lesser General Public License as published
  22234. * by the Free Software Foundation, either version 3 of the License, or
  22235. * (at your option) any later version.
  22236. *
  22237. * This program is distributed in the hope that it will be useful,
  22238. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22239. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22240. * GNU Lesser General Public License for more details.
  22241. *
  22242. * You should have received a copy of the GNU Lesser General Public License
  22243. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22244. *
  22245. * viewport.ts
  22246. * Viewport
  22247. */
  22248. /** An event emitted by a Viewport */
  22249. class ViewportEvent extends AREvent {
  22250. }
  22251. /** Viewport event target */
  22252. class ViewportEventTarget extends AREventTarget {
  22253. }
  22254. /** Default viewport constructor settings */
  22255. const DEFAULT_VIEWPORT_SETTINGS = {
  22256. container: null,
  22257. hudContainer: null,
  22258. resolution: 'lg',
  22259. style: 'best-fit',
  22260. canvas: null,
  22261. };
  22262. /** Base z-index of the children of the viewport container */
  22263. const BASE_ZINDEX = 0;
  22264. /** Z-index of the background canvas */
  22265. const BACKGROUND_ZINDEX = BASE_ZINDEX + 0;
  22266. /** Z-index of the foreground canvas */
  22267. const FOREGROUND_ZINDEX = BASE_ZINDEX + 1;
  22268. /** Z-index of the HUD */
  22269. const HUD_ZINDEX = BASE_ZINDEX + 2;
  22270. /**
  22271. * Helper class to work with the containers of the viewport
  22272. */
  22273. class ViewportContainers {
  22274. /**
  22275. * Constructor
  22276. * @param container viewport container
  22277. */
  22278. constructor(container) {
  22279. // validate
  22280. if (container == null)
  22281. throw new IllegalArgumentError('Unspecified viewport container');
  22282. else if (!(container instanceof HTMLElement))
  22283. throw new IllegalArgumentError('Invalid viewport container');
  22284. // store the viewport container
  22285. this._container = container;
  22286. // create the sub-container
  22287. this._subContainer = document.createElement('div');
  22288. container.appendChild(this._subContainer);
  22289. }
  22290. /**
  22291. * The viewport container
  22292. */
  22293. get container() {
  22294. return this._container;
  22295. }
  22296. /**
  22297. * The sub-container
  22298. */
  22299. get subContainer() {
  22300. return this._subContainer;
  22301. }
  22302. /**
  22303. * Initialize
  22304. */
  22305. init() {
  22306. this._container.style.touchAction = 'none';
  22307. this._container.style.backgroundColor = 'black';
  22308. }
  22309. /**
  22310. * Release
  22311. */
  22312. release() {
  22313. this._container.style.removeProperty('background-color');
  22314. this._container.style.removeProperty('touch-action');
  22315. }
  22316. }
  22317. /**
  22318. * Helper class to work with the canvases of the viewport
  22319. */
  22320. class ViewportCanvases {
  22321. /**
  22322. * Constructor
  22323. * @param parent container for the canvases
  22324. * @param initialSize initial size of the canvases
  22325. * @param fgCanvas optional existing foreground canvas
  22326. */
  22327. constructor(parent, initialSize, fgCanvas = null) {
  22328. if (fgCanvas !== null && !(fgCanvas instanceof HTMLCanvasElement))
  22329. throw new IllegalArgumentError('Not a canvas: ' + fgCanvas);
  22330. this._originalCSSTextOfForegroundCanvas = fgCanvas ? fgCanvas.style.cssText : '';
  22331. this._foregroundCanvas = this._styleCanvas(fgCanvas || this._createCanvas(initialSize), FOREGROUND_ZINDEX);
  22332. this._backgroundCanvas = this._styleCanvas(this._createCanvas(initialSize), BACKGROUND_ZINDEX);
  22333. parent.appendChild(this._backgroundCanvas);
  22334. parent.appendChild(this._foregroundCanvas);
  22335. this._backgroundCanvas.hidden = true;
  22336. this._foregroundCanvas.hidden = true;
  22337. }
  22338. /**
  22339. * The background canvas
  22340. */
  22341. get backgroundCanvas() {
  22342. return this._backgroundCanvas;
  22343. }
  22344. /**
  22345. * The foreground canvas
  22346. */
  22347. get foregroundCanvas() {
  22348. return this._foregroundCanvas;
  22349. }
  22350. /**
  22351. * Initialize
  22352. */
  22353. init() {
  22354. this._backgroundCanvas.hidden = false;
  22355. this._foregroundCanvas.hidden = false;
  22356. }
  22357. /**
  22358. * Release
  22359. */
  22360. release() {
  22361. this._backgroundCanvas.style.cssText = '';
  22362. this._foregroundCanvas.style.cssText = this._originalCSSTextOfForegroundCanvas;
  22363. }
  22364. /**
  22365. * Create a canvas
  22366. * @param size size of the drawing buffer
  22367. * @returns a new canvas
  22368. */
  22369. _createCanvas(size) {
  22370. const canvas = document.createElement('canvas');
  22371. canvas.width = size.width;
  22372. canvas.height = size.height;
  22373. return canvas;
  22374. }
  22375. /**
  22376. * Add suitable CSS rules to a canvas
  22377. * @param canvas
  22378. * @param zIndex
  22379. * @returns canvas
  22380. */
  22381. _styleCanvas(canvas, zIndex) {
  22382. canvas.style.position = 'absolute';
  22383. canvas.style.left = '0px';
  22384. canvas.style.top = '0px';
  22385. canvas.style.width = '100%';
  22386. canvas.style.height = '100%';
  22387. canvas.style.zIndex = String(zIndex);
  22388. return canvas;
  22389. }
  22390. }
  22391. /**
  22392. * Fullscreen utilities
  22393. */
  22394. class ViewportFullscreenHelper {
  22395. /**
  22396. * Constructor
  22397. * @param _container the container which will be put in fullscreen
  22398. */
  22399. constructor(_container) {
  22400. this._container = _container;
  22401. }
  22402. /**
  22403. * Make a request to the user agent so that the viewport container is
  22404. * displayed in fullscreen mode. The container must be a compatible element[1]
  22405. * and the user must interact with the page in order to comply with browser
  22406. * policies[2]. In case of error, the returned promise is rejected.
  22407. * [1] https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen#compatible_elements
  22408. * [2] https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen#security
  22409. * @returns promise
  22410. */
  22411. request() {
  22412. const container = this._container;
  22413. // fallback for older WebKit versions
  22414. if (container.requestFullscreen === undefined) {
  22415. if (container.webkitRequestFullscreen === undefined)
  22416. return speedy_vision_default().Promise.reject(new NotSupportedError());
  22417. else if (!document.webkitFullscreenEnabled)
  22418. return speedy_vision_default().Promise.reject(new AccessDeniedError());
  22419. // webkitRequestFullscreen() does not return a value
  22420. container.webkitRequestFullscreen();
  22421. return new (speedy_vision_default()).Promise((resolve, reject) => {
  22422. setTimeout(() => {
  22423. if (container === document.webkitFullscreenElement) {
  22424. Utils.log('Entering fullscreen mode...');
  22425. resolve();
  22426. }
  22427. else
  22428. reject(new TypeError());
  22429. }, 100);
  22430. });
  22431. }
  22432. // check if the fullscreen mode is available
  22433. if (!document.fullscreenEnabled)
  22434. return speedy_vision_default().Promise.reject(new AccessDeniedError());
  22435. // request fullscreen
  22436. return new (speedy_vision_default()).Promise((resolve, reject) => {
  22437. container.requestFullscreen({
  22438. navigationUI: 'hide'
  22439. }).then(() => {
  22440. Utils.log('Entering fullscreen mode...');
  22441. resolve();
  22442. }, reject);
  22443. });
  22444. }
  22445. /**
  22446. * Exit fullscreen mode
  22447. * @returns promise
  22448. */
  22449. exit() {
  22450. // fallback for older WebKit versions
  22451. if (document.exitFullscreen === undefined) {
  22452. const doc = document;
  22453. if (doc.webkitExitFullscreen === undefined)
  22454. return speedy_vision_default().Promise.reject(new NotSupportedError());
  22455. else if (doc.webkitFullscreenElement === null)
  22456. return speedy_vision_default().Promise.reject(new IllegalOperationError('Not in fullscreen mode'));
  22457. // webkitExitFullscreen() does not return a value
  22458. doc.webkitExitFullscreen();
  22459. return new (speedy_vision_default()).Promise((resolve, reject) => {
  22460. setTimeout(() => {
  22461. if (doc.webkitFullscreenElement === null) {
  22462. Utils.log('Exiting fullscreen mode...');
  22463. resolve();
  22464. }
  22465. else
  22466. reject(new TypeError());
  22467. }, 100);
  22468. });
  22469. }
  22470. // error if not in fullscreen mode
  22471. if (document.fullscreenElement === null)
  22472. return speedy_vision_default().Promise.reject(new IllegalOperationError('Not in fullscreen mode'));
  22473. // exit fullscreen
  22474. return new (speedy_vision_default()).Promise((resolve, reject) => {
  22475. document.exitFullscreen().then(() => {
  22476. Utils.log('Exiting fullscreen mode...');
  22477. resolve();
  22478. }, reject);
  22479. });
  22480. }
  22481. /**
  22482. * Is the fullscreen mode available in this platform?
  22483. * @returns true if the fullscreen mode is available in this platform
  22484. */
  22485. isAvailable() {
  22486. return document.fullscreenEnabled ||
  22487. !!(document.webkitFullscreenEnabled);
  22488. }
  22489. /**
  22490. * Is the container currently being displayed in fullscreen mode?
  22491. * @returns true if the container is currently being displayed in fullscreen mode
  22492. */
  22493. isActivated() {
  22494. if (document.fullscreenElement !== undefined)
  22495. return document.fullscreenElement === this._container;
  22496. else if (document.webkitFullscreenElement !== undefined)
  22497. return document.webkitFullscreenElement === this._container;
  22498. else
  22499. return false;
  22500. }
  22501. }
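/*
 * Usage sketch (illustrative only). Browsers only honor fullscreen requests
 * made from a user gesture, as noted in request() above, so the call is tied
 * to a click handler. `viewport` and `button` are assumed to exist already.
 *
 *   if (viewport.fullscreenAvailable) {
 *       button.addEventListener('click', () => {
 *           viewport.requestFullscreen().catch(err => console.error(err));
 *       });
 *   }
 */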
  22502. /**
  22503. * Helper class to resize the viewport
  22504. */
  22505. class ViewportResizer {
  22506. /**
  22507. * Constructor
  22508. * @param viewport the viewport to be resized
  22509. */
  22510. constructor(viewport) {
  22511. this._viewport = viewport;
  22512. this._timeout = null;
  22513. this._resize = this._onResize.bind(this);
  22514. this._triggerResize = this.triggerResize.bind(this);
  22515. this._resizeStrategy = new InlineResizeStrategy();
  22516. // initial setup
  22517. // (the size is yet unknown)
  22518. this._viewport.addEventListener('resize', this._resize);
  22519. this.triggerResize(0);
  22520. }
  22521. /**
  22522. * Initialize
  22523. */
  22524. init() {
  22525. // Configure the resize listener. We want the viewport to adjust itself
  22526. // if the phone/screen is resized or changes orientation
  22527. window.addEventListener('resize', this._triggerResize); // a delay is welcome
  22528. // handle changes of orientation
  22529. // (is this needed? we already listen to resize events)
  22530. if (screen.orientation !== undefined)
  22531. screen.orientation.addEventListener('change', this._triggerResize);
  22532. else
  22533. window.addEventListener('orientationchange', this._triggerResize); // deprecated
  22534. // trigger a resize to setup the sizes / the CSS
  22535. this.triggerResize(0);
  22536. }
  22537. /**
  22538. * Release
  22539. */
  22540. release() {
  22541. if (screen.orientation !== undefined)
  22542. screen.orientation.removeEventListener('change', this._triggerResize);
  22543. else
  22544. window.removeEventListener('orientationchange', this._triggerResize);
  22545. window.removeEventListener('resize', this._triggerResize);
  22546. this._viewport.removeEventListener('resize', this._resize);
  22547. this._resizeStrategy.clear(this._viewport);
  22548. }
  22549. /**
  22550. * Trigger a resize event after a delay
  22551. * @param delay in milliseconds
  22552. */
  22553. triggerResize(delay = 50) {
  22554. const event = new ViewportEvent('resize');
  22555. if (delay <= 0) {
  22556. this._viewport.dispatchEvent(event);
  22557. return;
  22558. }
  22559. if (this._timeout !== null)
  22560. clearTimeout(this._timeout);
  22561. this._timeout = setTimeout(() => {
  22562. this._timeout = null;
  22563. this._viewport.dispatchEvent(event);
  22564. }, delay);
  22565. }
  22566. /**
  22567. * Change the resize strategy
  22568. * @param strategy new strategy
  22569. */
  22570. setStrategy(strategy) {
  22571. this._resizeStrategy.clear(this._viewport);
  22572. this._resizeStrategy = strategy;
  22573. this.triggerResize(0);
  22574. }
  22575. /**
  22576. * Change the resize strategy
  22577. * @param strategyName name of the new strategy
  22578. */
  22579. setStrategyByName(strategyName) {
  22580. switch (strategyName) {
  22581. case 'best-fit':
  22582. this.setStrategy(new BestFitResizeStrategy());
  22583. break;
  22584. case 'stretch':
  22585. this.setStrategy(new StretchResizeStrategy());
  22586. break;
  22587. case 'inline':
  22588. this.setStrategy(new InlineResizeStrategy());
  22589. break;
  22590. default:
  22591. throw new IllegalArgumentError('Invalid viewport style: ' + strategyName);
  22592. }
  22593. }
  22594. /**
  22595. * Resize callback
  22596. */
  22597. _onResize() {
  22598. const viewport = this._viewport;
  22599. // Resize the drawing buffer of the foreground canvas, so that it
  22600. // matches the desired resolution, as well as the aspect ratio of the
  22601. // background canvas
  22602. const foregroundCanvas = viewport.canvas;
  22603. const virtualSize = viewport.virtualSize;
  22604. foregroundCanvas.width = virtualSize.width;
  22605. foregroundCanvas.height = virtualSize.height;
  22606. // Resize the drawing buffer of the background canvas
  22607. const backgroundCanvas = viewport._backgroundCanvas;
  22608. const realSize = viewport._realSize;
  22609. backgroundCanvas.width = realSize.width;
  22610. backgroundCanvas.height = realSize.height;
  22611. // Call strategy
  22612. this._resizeStrategy.resize(viewport);
  22613. }
  22614. }
  22615. /**
  22616. * Resize strategies
  22617. */
  22618. class ViewportResizeStrategy {
  22619. /**
  22620. * Clear CSS rules
  22621. * @param viewport
  22622. */
  22623. clear(viewport) {
  22624. viewport.container.style.cssText = '';
  22625. viewport._subContainer.style.cssText = '';
  22626. }
  22627. }
  22628. /**
  22629. * Inline viewport: it follows the typical flow of a web page
  22630. */
  22631. class InlineResizeStrategy extends ViewportResizeStrategy {
  22632. /**
  22633. * Resize the viewport
  22634. * @param viewport
  22635. */
  22636. resize(viewport) {
  22637. const container = viewport.container;
  22638. const subContainer = viewport._subContainer;
  22639. const virtualSize = viewport.virtualSize;
  22640. container.style.position = 'relative';
  22641. container.style.left = '0px';
  22642. container.style.top = '0px';
  22643. container.style.width = virtualSize.width + 'px';
  22644. container.style.height = virtualSize.height + 'px';
  22645. subContainer.style.position = 'absolute';
  22646. subContainer.style.left = '0px';
  22647. subContainer.style.top = '0px';
  22648. subContainer.style.width = '100%';
  22649. subContainer.style.height = '100%';
  22650. }
  22651. }
  22652. /**
  22653. * Immersive viewport: it occupies the entire page
  22654. */
  22655. class ImmersiveResizeStrategy extends ViewportResizeStrategy {
  22656. /**
  22657. * Resize the viewport
  22658. * @param viewport
  22659. */
  22660. resize(viewport) {
  22661. const CONTAINER_ZINDEX = 1000000000;
  22662. const container = viewport.container;
  22663. container.style.position = 'fixed';
  22664. container.style.left = '0px';
  22665. container.style.top = '0px';
  22666. container.style.width = '100vw';
  22667. container.style.height = '100vh';
  22668. container.style.zIndex = String(CONTAINER_ZINDEX);
  22669. }
  22670. }
  22671. /**
  22672. * Immersive viewport with best-fit style: it occupies the entire page and
  22673. * preserves the aspect ratio of the media
  22674. */
  22675. class BestFitResizeStrategy extends ImmersiveResizeStrategy {
  22676. /**
  22677. * Resize the viewport
  22678. * @param viewport
  22679. */
  22680. resize(viewport) {
  22681. const subContainer = viewport._subContainer;
  22682. const windowAspectRatio = window.innerWidth / window.innerHeight;
  22683. const viewportAspectRatio = viewport._realSize.width / viewport._realSize.height;
  22684. let width = 1, height = 1;
  22685. if (viewportAspectRatio <= windowAspectRatio) {
  22686. height = window.innerHeight;
  22687. width = Math.round(height * viewportAspectRatio);
  22688. width -= width % 2;
  22689. }
  22690. else {
  22691. width = window.innerWidth;
  22692. height = Math.round(width / viewportAspectRatio);
  22693. height -= height % 2;
  22694. }
  22695. subContainer.style.position = 'absolute';
  22696. subContainer.style.left = `calc(50% - ${width >>> 1}px)`;
  22697. subContainer.style.top = `calc(50% - ${height >>> 1}px)`;
  22698. subContainer.style.width = width + 'px';
  22699. subContainer.style.height = height + 'px';
  22700. super.resize(viewport);
  22701. }
  22702. }
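/*
 * Worked example of the best-fit math above (numbers are illustrative):
 *   window = 1280x800  =>  windowAspectRatio = 1.6
 *   media  = 640x480   =>  viewportAspectRatio ~= 1.333
 * Since 1.333 <= 1.6, the height is pinned to the window:
 *   height = 800
 *   width  = round(800 * 1.333...) = 1067  ->  1066 after rounding down to an even number
 * The sub-container is then centered with left = calc(50% - 533px) and
 * top = calc(50% - 400px).
 */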
  22703. /**
  22704. * Immersive viewport with stretch style: it occupies the entire page and
  22705. * fully stretches the media
  22706. */
  22707. class StretchResizeStrategy extends ImmersiveResizeStrategy {
  22708. /**
  22709. * Resize the viewport
  22710. * @param viewport
  22711. */
  22712. resize(viewport) {
  22713. const subContainer = viewport._subContainer;
  22714. subContainer.style.position = 'absolute';
  22715. subContainer.style.left = '0px';
  22716. subContainer.style.top = '0px';
  22717. subContainer.style.width = window.innerWidth + 'px';
  22718. subContainer.style.height = window.innerHeight + 'px';
  22719. super.resize(viewport);
  22720. }
  22721. }
  22722. /**
  22723. * Viewport
  22724. */
  22725. class Viewport extends ViewportEventTarget {
  22726. /**
  22727. * Constructor
  22728. * @param viewportSettings
  22729. */
  22730. constructor(viewportSettings) {
  22731. const settings = Object.assign({}, DEFAULT_VIEWPORT_SETTINGS, viewportSettings);
  22732. super();
  22733. const guessedAspectRatio = window.innerWidth / window.innerHeight;
  22734. const initialSize = Utils.resolution(settings.resolution, guessedAspectRatio);
  22735. this._mediaSize = () => initialSize;
  22736. this._resolution = settings.resolution;
  22737. this._style = settings.style;
  22738. this._containers = new ViewportContainers(settings.container);
  22739. this._hud = new HUD(this._subContainer, settings.hudContainer);
  22740. this._canvases = new ViewportCanvases(this._subContainer, initialSize, settings.canvas);
  22741. this._fullscreen = new ViewportFullscreenHelper(this.container);
  22742. this._resizer = new ViewportResizer(this);
  22743. this._resizer.setStrategyByName(this._style);
  22744. }
  22745. /**
  22746. * Viewport container
  22747. */
  22748. get container() {
  22749. return this._containers.container;
  22750. }
  22751. /**
  22752. * Viewport style
  22753. */
  22754. get style() {
  22755. return this._style;
  22756. }
  22757. /**
  22758. * Set viewport style
  22759. */
  22760. set style(value) {
  22761. // note: the viewport style is independent of the session mode!
  22762. if (value !== this._style) {
  22763. this._resizer.setStrategyByName(value);
  22764. this._style = value;
  22765. }
  22766. }
  22767. /**
  22768. * HUD
  22769. */
  22770. get hud() {
  22771. return this._hud;
  22772. }
  22773. /**
  22774. * Resolution of the virtual scene
  22775. */
  22776. get resolution() {
  22777. return this._resolution;
  22778. }
  22779. /**
  22780. * Size in pixels of the drawing buffer of the canvas
  22781. * on which the virtual scene will be drawn
  22782. */
  22783. get virtualSize() {
  22784. const size = this._realSize;
  22785. const aspectRatio = size.width / size.height;
  22786. return Utils.resolution(this._resolution, aspectRatio);
  22787. }
  22788. /**
  22789. * Is the viewport currently being displayed in fullscreen mode?
  22790. */
  22791. get fullscreen() {
  22792. return this._fullscreen.isActivated();
  22793. }
  22794. /**
  22795. * Is the fullscreen mode available in this platform?
  22796. */
  22797. get fullscreenAvailable() {
  22798. return this._fullscreen.isAvailable();
  22799. }
  22800. /**
  22801. * The canvas on which the virtual scene will be drawn
  22802. */
  22803. get canvas() {
  22804. return this._canvases.foregroundCanvas;
  22805. }
  22806. /**
  22807. * The canvas on which the physical scene will be drawn
  22808. * @internal
  22809. */
  22810. get _backgroundCanvas() {
  22811. return this._canvases.backgroundCanvas;
  22812. }
  22813. /**
  22814. * Size of the drawing buffer of the background canvas, in pixels
  22815. * @internal
  22816. */
  22817. get _realSize() {
  22818. return this._mediaSize();
  22819. }
  22820. /**
  22821. * Sub-container of the viewport container
  22822. * @internal
  22823. */
  22824. get _subContainer() {
  22825. return this._containers.subContainer;
  22826. }
  22827. /**
  22828. * Request fullscreen mode
  22829. * @returns promise
  22830. */
  22831. requestFullscreen() {
  22832. return this._fullscreen.request();
  22833. }
  22834. /**
  22835. * Exit fullscreen mode
  22836. * @returns promise
  22837. */
  22838. exitFullscreen() {
  22839. return this._fullscreen.exit();
  22840. }
  22841. /**
  22842. * Initialize the viewport (when the session starts)
  22843. * @internal
  22844. */
  22845. _init(getMediaSize) {
  22846. this._mediaSize = getMediaSize;
  22847. this._containers.init();
  22848. this._hud._init(HUD_ZINDEX);
  22849. this._canvases.init();
  22850. this._resizer.init();
  22851. }
  22852. /**
  22853. * Release the viewport (when the session ends)
  22854. * @internal
  22855. */
  22856. _release() {
  22857. this._resizer.release();
  22858. this._canvases.release();
  22859. this._hud._release();
  22860. this._containers.release();
  22861. }
  22862. }
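/*
 * Usage sketch (illustrative only). The settings mirror
 * DEFAULT_VIEWPORT_SETTINGS declared above; only `container` is required.
 * The element ids are assumptions.
 *
 *   const viewport = AR.Viewport({
 *       container: document.getElementById('ar-viewport'),
 *       hudContainer: document.getElementById('ar-hud'), // optional; should have the hidden attribute (see HUD above)
 *       resolution: 'lg',                                 // default: 'lg'
 *       style: 'best-fit'                                 // 'best-fit' | 'stretch' | 'inline'
 *   });
 */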
  22863. ;// CONCATENATED MODULE: ./src/main.ts
  22864. /*
  22865. * encantar.js
  22866. * GPU-accelerated Augmented Reality for the web
  22867. * Copyright (C) 2022-2024 Alexandre Martins <alemartf(at)gmail.com>
  22868. *
  22869. * This program is free software: you can redistribute it and/or modify
  22870. * it under the terms of the GNU Lesser General Public License as published
  22871. * by the Free Software Foundation, either version 3 of the License, or
  22872. * (at your option) any later version.
  22873. *
  22874. * This program is distributed in the hope that it will be useful,
  22875. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  22876. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  22877. * GNU Lesser General Public License for more details.
  22878. *
  22879. * You should have received a copy of the GNU Lesser General Public License
  22880. * along with this program. If not, see <https://www.gnu.org/licenses/>.
  22881. *
  22882. * main.ts
  22883. * Entry point
  22884. */
  22885. /**
  22886. * GPU-accelerated Augmented Reality for the web
  22887. */
  22888. class AR {
  22889. /**
  22890. * Start a new session
  22891. * @param options
  22892. * @returns a promise that resolves to a new session
  22893. */
  22894. static startSession(options) {
  22895. return Session.instantiate(options);
  22896. }
  22897. /**
  22898. * Trackers
  22899. */
  22900. static get Tracker() {
  22901. return TrackerFactory;
  22902. }
  22903. /**
  22904. * Sources of data
  22905. */
  22906. static get Source() {
  22907. return SourceFactory;
  22908. }
  22909. /**
  22910. * Create a viewport
  22911. * @param settings
  22912. * @returns a new viewport with the specified settings
  22913. */
  22914. static Viewport(settings) {
  22915. return new Viewport(settings);
  22916. }
  22917. /**
  22918. * Global Settings
  22919. */
  22920. static get Settings() {
  22921. return Settings;
  22922. }
  22923. /**
  22924. * Engine version
  22925. */
  22926. static get version() {
22927. return "0.3.0";
  22931. }
  22932. /**
  22933. * Speedy Vision
  22934. */
  22935. static get Speedy() {
  22936. return (speedy_vision_default());
  22937. }
  22938. /**
  22939. * Checks if the engine can be run in the browser the client is using
  22940. * @returns true if the engine is compatible with the browser
  22941. */
  22942. static isSupported() {
  22943. return Session.isSupported();
  22944. }
  22945. }
  22946. // Freeze the namespace
  22947. Object.freeze(AR);
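/*
 * Putting it together (illustrative sketch). The factories are the ones
 * exposed by the AR namespace above; the option names passed to
 * AR.startSession() (sources, trackers, viewport) follow the public
 * documentation and are not visible in this excerpt, so treat them as
 * assumptions.
 *
 *   async function main() {
 *       if (!AR.isSupported())
 *           throw new Error('This browser cannot run the AR engine');
 *
 *       const tracker = AR.Tracker.ImageTracker();
 *       const source = AR.Source.Camera();
 *       const viewport = AR.Viewport({ container: document.getElementById('ar-viewport') });
 *
 *       const session = await AR.startSession({
 *           sources: [ source ],
 *           trackers: [ tracker ],
 *           viewport: viewport
 *       });
 *
 *       console.log('encantar.js', AR.version);
 *       return session;
 *   }
 */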
  22948. // Add Speedy Vision to global scope
  22949. ((window) => window.Speedy = window.Speedy || (speedy_vision_default()))(window);
  22950. // Display a notice
  22951. Utils.log(`encantAR.js version ${AR.version}. ` +
  22952. `GPU-accelerated Augmented Reality for the web by Alexandre Martins. ` +
  22953. "https://github.com/alemart/encantar-js");
  22954. })();
  22955. __webpack_exports__ = __webpack_exports__["default"];
  22956. /******/ return __webpack_exports__;
  22957. /******/ })()
  22958. ;
  22959. });